/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/cmpxchg.h>
#include <asm/loongarch.h>

/*
 * The "address" (in fact, offset from $r21) of a per-CPU variable is close to
 * the loading address of main kernel image, but far from where the modules are
 * loaded. Tell the compiler this fact when using explicit relocs.
 */
#if defined(MODULE) && defined(CONFIG_AS_HAS_EXPLICIT_RELOCS)
# if __has_attribute(model)
#  define PER_CPU_ATTRIBUTES	__attribute__((model("extreme")))
# else
#  error compiler support for the model attribute is necessary when a recent assembler is used
# endif
#endif
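
/*
 * Illustrative note (a sketch, not part of the interface): PER_CPU_ATTRIBUTES
 * is picked up by the generic per-CPU definition macros, so a module doing
 * e.g.
 *
 *	static DEFINE_PER_CPU(int, foo);
 *
 * gets "foo" marked with model("extreme"). The compiler then materializes
 * its address with a full 64-bit instruction sequence instead of assuming
 * the symbol is reachable with the default PC-relative code model, which
 * would not hold for an offset that points back near the main kernel image.
 */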

/* Use r21 for fast access */
register unsigned long __my_cpu_offset __asm__("$r21");

static inline void set_my_cpu_offset(unsigned long off)
{
	__my_cpu_offset = off;
	csr_write64(off, PERCPU_BASE_KS);
}

#define __my_cpu_offset						\
({								\
	__asm__ __volatile__("":"+r"(__my_cpu_offset));		\
	__my_cpu_offset;					\
})
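
/*
 * Rough usage sketch (call sites are illustrative): each CPU runs
 * something like
 *
 *	set_my_cpu_offset(per_cpu_offset(cpu));
 *
 * early in its bringup. The offset is kept both in $r21 for fast access
 * and in the PERCPU_BASE_KS CSR, so low-level entry code can reload $r21
 * after returning from contexts (e.g. user space) that may have
 * clobbered it.
 */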

#define PERCPU_OP(op, asm_op, c_op)					\
static __always_inline unsigned long __percpu_##op(void *ptr,		\
			unsigned long val, int size)			\
{									\
	unsigned long ret;						\
									\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__(					\
		"am"#asm_op".w"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u32 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"am"#asm_op".d"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u64 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret c_op val;						\
}

PERCPU_OP(add, add, +)
PERCPU_OP(and, and, &)
PERCPU_OP(or, or, |)
#undef PERCPU_OP
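
/*
 * Worked example: the AM* instructions return the old memory value in
 * %[ret], and "ret c_op val" then yields the updated value. So if a u32
 * per-CPU counter currently holds 5, __percpu_add(ptr, 3, 4) atomically
 * stores 8 and returns 8 - which is why _percpu_add_return() below can
 * simply reuse _percpu_add().
 */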

static __always_inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		__asm__ __volatile__ ("ldx.b %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__ ("ldx.h %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__ ("ldx.w %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__ ("ldx.d %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	default:
		ret = 0;
		BUILD_BUG();
	}

	return ret;
}

static __always_inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("stx.b %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__("stx.h %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__("stx.w %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__("stx.d %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	default:
		BUILD_BUG();
	}
}
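
/*
 * Note: __percpu_read() and __percpu_write() take the per-CPU "address"
 * (the offset described at the top of this file) and let ldx/stx add $r21
 * themselves, so _percpu_read()/_percpu_write() below pass &(pcp) directly.
 * The other helpers are handed a real pointer via raw_cpu_ptr(), which has
 * already added the offset.
 */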

static __always_inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						   int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small((volatile void *)ptr, val, size);

	case 4:
		return __xchg_asm("amswap.w", (volatile u32 *)ptr, (u32)val);

	case 8:
		return __xchg_asm("amswap.d", (volatile u64 *)ptr, (u64)val);

	default:
		BUILD_BUG();
	}

	return 0;
}
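
/*
 * __xchg_small() and __xchg_asm() come from <asm/cmpxchg.h> (included
 * above); they are the same helpers that back the generic xchg() on
 * LoongArch, so 1- and 2-byte exchanges are emulated on the containing
 * 32-bit word while 4- and 8-byte exchanges map straight to
 * amswap.w/amswap.d.
 */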

/* this_cpu_cmpxchg */
#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable_notrace();				\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable_notrace();				\
	__ret;							\
})

#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	__retval = (typeof(pcp))__percpu_read(&(pcp), sizeof(pcp));	\
	__retval;							\
})

#define _percpu_write(pcp, val)						\
do {									\
	__percpu_write(&(pcp), (unsigned long)(val), sizeof(pcp));	\
} while (0)

#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})
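
/*
 * Both _pcp_protect() and _protect_cmpxchg_local() above pin the task to
 * the current CPU for the duration of the operation, so raw_cpu_ptr() and
 * the atomic access are guaranteed to hit the same CPU's copy. The
 * _notrace variants keep these macros usable from instrumentation paths,
 * which themselves rely on this_cpu ops.
 */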

#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) ((typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val)))

#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)

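/*
 * Usage sketch (illustrative, assuming a per-CPU variable defined
 * elsewhere): the size-suffixed defines above are the hooks that
 * <asm-generic/percpu.h> and <linux/percpu-defs.h> dispatch to, so
 * ordinary code such as
 *
 *	static DEFINE_PER_CPU(unsigned long, hits);
 *
 *	this_cpu_write(hits, 0);
 *	this_cpu_add(hits, 1);
 *	unsigned long n = this_cpu_read(hits);
 *
 * ends up in _percpu_write()/_percpu_add()/_percpu_read() with the
 * matching sizeof(). Operations without an explicit hook here fall back
 * to the generic implementations pulled in below.
 */
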
#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */