/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

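/*
 * "dbar 0" is LoongArch's full memory-barrier instruction; the
 * "memory" clobber also keeps the compiler from reordering accesses
 * across it.
 */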
#define __sync()	__asm__ __volatile__("dbar 0" : : : "memory")

#define fast_wmb()	__sync()
#define fast_rmb()	__sync()
#define fast_mb()	__sync()
#define fast_iob()	__sync()
#define wbflush()	__sync()

#define wmb()		fast_wmb()
#define rmb()		fast_rmb()
#define mb()		fast_mb()
#define iob()		fast_iob()

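/*
 * The SMP barriers are full barriers as well: this version of the
 * port does not use the weaker "dbar" hints, so no distinction is
 * made between read, write and full ordering.
 */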
#define __smp_mb()	__asm__ __volatile__("dbar 0" : : : "memory")
#define __smp_rmb()	__asm__ __volatile__("dbar 0" : : : "memory")
#define __smp_wmb()	__asm__ __volatile__("dbar 0" : : : "memory")

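/*
 * __WEAK_LLSC_MB is spliced into inline-asm LL/SC sequences that need
 * extra ordering on SMP; on UP kernels it expands to a no-op.
 */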
#ifdef CONFIG_SMP
#define __WEAK_LLSC_MB		"	dbar 0  \n"
#else
#define __WEAK_LLSC_MB		"		\n"
#endif

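/*
 * The LoongArch atomics are implemented with the am*_db.* instruction
 * forms, which already carry barrier semantics, so only a compiler
 * barrier is needed on either side of an atomic operation.
 */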
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()

/**
 * array_index_mask_nospec() - generate a ~0 mask when index < size, 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *     0 - (@index < @size)
 */
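/*
 * Typical use, as a sketch: the mask is applied to a user-controlled
 * index before a speculative array access, usually via the generic
 * array_index_nospec() helper from <linux/nospec.h>:
 *
 *	idx = array_index_nospec(idx, ARRAY_SIZE(table));
 *	val = table[idx];
 */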
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long index,
						    unsigned long size)
{
	unsigned long mask;

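	/*
	 * sltu sets mask to 1 when index < size and to 0 otherwise;
	 * subtracting it from $zero then yields ~0UL or 0UL without a
	 * conditional branch the CPU could speculate past.
	 */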
	__asm__ __volatile__(
		"sltu	%0, %1, %2\n\t"
#if (__SIZEOF_LONG__ == 4)
		"sub.w	%0, $zero, %0\n\t"
#elif (__SIZEOF_LONG__ == 8)
		"sub.d	%0, $zero, %0\n\t"
#endif
		: "=r" (mask)
		: "r" (index), "r" (size)
		:);

	return mask;
}

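/*
 * Acquire loads: byte and halfword accesses use a plain volatile load
 * followed by a full barrier, while word and doubleword accesses load
 * through "amor_db" (an atomic OR with zero), whose _db form supplies
 * the ordering directly.
 */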
#define __smp_load_acquire(p)							\
({										\
	union { typeof(*p) __val; char __c[1]; } __u;				\
	unsigned long __tmp = 0;						\
	compiletime_assert_atomic_type(*p);					\
	switch (sizeof(*p)) {							\
	case 1:									\
		*(__u8 *)__u.__c = *(volatile __u8 *)p;				\
		__smp_mb();							\
		break;								\
	case 2:									\
		*(__u16 *)__u.__c = *(volatile __u16 *)p;			\
		__smp_mb();							\
		break;								\
	case 4:									\
		__asm__ __volatile__(						\
		"amor_db.w %[val], %[tmp], %[mem]	\n"			\
		: [val] "=&r" (*(__u32 *)__u.__c)				\
		: [mem] "ZB" (*(u32 *) p), [tmp] "r" (__tmp)			\
		: "memory");							\
		break;								\
	case 8:									\
		__asm__ __volatile__(						\
		"amor_db.d %[val], %[tmp], %[mem]	\n"			\
		: [val] "=&r" (*(__u64 *)__u.__c)				\
		: [mem] "ZB" (*(u64 *) p), [tmp] "r" (__tmp)			\
		: "memory");							\
		break;								\
	}									\
	(typeof(*p))__u.__val;							\
})

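/*
 * Release stores mirror the acquire loads: a full barrier ahead of a
 * plain volatile store for byte and halfword sizes, and an
 * "amswap_db" (atomic exchange, with the old value discarded into
 * __tmp) for word and doubleword sizes.
 */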
#define __smp_store_release(p, v)						\
do {										\
	union { typeof(*p) __val; char __c[1]; } __u =				\
		{ .__val = (__force typeof(*p)) (v) };				\
	unsigned long __tmp;							\
	compiletime_assert_atomic_type(*p);					\
	switch (sizeof(*p)) {							\
	case 1:									\
		__smp_mb();							\
		*(volatile __u8 *)p = *(__u8 *)__u.__c;				\
		break;								\
	case 2:									\
		__smp_mb();							\
		*(volatile __u16 *)p = *(__u16 *)__u.__c;			\
		break;								\
	case 4:									\
		__asm__ __volatile__(						\
		"amswap_db.w %[tmp], %[val], %[mem]	\n"			\
		: [mem] "+ZB" (*(u32 *)p), [tmp] "=&r" (__tmp)			\
		: [val] "r" (*(__u32 *)__u.__c)					\
		: );								\
		break;								\
	case 8:									\
		__asm__ __volatile__(						\
		"amswap_db.d %[tmp], %[val], %[mem]	\n"			\
		: [mem] "+ZB" (*(u64 *)p), [tmp] "=&r" (__tmp)			\
		: [val] "r" (*(__u64 *)__u.__c)					\
		: );								\
		break;								\
	}									\
} while (0)

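/*
 * __smp_store_mb() takes the lvalue itself rather than a pointer to
 * it: store the new value, then order it against all later accesses.
 */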
#define __smp_store_mb(p, v)							\
do {										\
	union { typeof(p) __val; char __c[1]; } __u =				\
		{ .__val = (__force typeof(p)) (v) };				\
	unsigned long __tmp;							\
	switch (sizeof(p)) {							\
	case 1:									\
		*(volatile __u8 *)&p = *(__u8 *)__u.__c;			\
		__smp_mb();							\
		break;								\
	case 2:									\
		*(volatile __u16 *)&p = *(__u16 *)__u.__c;			\
		__smp_mb();							\
		break;								\
	case 4:									\
		__asm__ __volatile__(						\
		"amswap_db.w %[tmp], %[val], %[mem]	\n"			\
		: [mem] "+ZB" (*(u32 *)&p), [tmp] "=&r" (__tmp)			\
		: [val] "r" (*(__u32 *)__u.__c)					\
		: );								\
		break;								\
	case 8:									\
		__asm__ __volatile__(						\
		"amswap_db.d %[tmp], %[val], %[mem]	\n"			\
		: [mem] "+ZB" (*(u64 *)&p), [tmp] "=&r" (__tmp)			\
		: [val] "r" (*(__u64 *)__u.__c)					\
		: );								\
		break;								\
	}									\
} while (0)

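/*
 * Everything not defined above (smp_mb() and friends, the acquire/
 * release wrappers, etc.) comes from the generic fallbacks.
 */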
#include <asm-generic/barrier.h>

#endif /* __ASM_BARRIER_H */