xref: /openbmc/linux/include/asm-generic/percpu.h (revision 9dbbc3b9)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_GENERIC_PERCPU_H_
3 #define _ASM_GENERIC_PERCPU_H_
4 
5 #include <linux/compiler.h>
6 #include <linux/threads.h>
7 #include <linux/percpu-defs.h>
8 
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/*
 * With CONFIG_DEBUG_PREEMPT, go through smp_processor_id() so that use
 * in a preemptible context is flagged; otherwise use the unchecked
 * __my_cpu_offset.
 */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch hook: set up the per-cpu memory areas during early boot. */
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
52 
/*
 * Linker section holding percpu data.  Under SMP each CPU gets its own
 * copy of this section; on UP the variables simply live in .data.
 * An arch may override the section name before including this header.
 */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

/* Optional extra attributes an arch may attach to percpu variables. */
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif
64 
/*
 * Generic fallback for raw_cpu_read_N(): read this CPU's instance of
 * @pcp.  No protection against preemption or interrupts; the caller
 * must ensure the task cannot migrate while the value is used.
 */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})
69 
/*
 * Generic fallback for raw_cpu_{write,add,and,or}_N(): apply
 * "<percpu lvalue> op val" (op is a compound-assignment token such as
 * "+=") to this CPU's instance of @pcp.  Not atomic and not protected
 * against preemption.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
74 
/*
 * Generic fallback for raw_cpu_add_return_N(): add @val to this CPU's
 * instance of @pcp and yield the updated value.  Offers no protection
 * against preemption or interrupts.
 */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) *__ptr = raw_cpu_ptr(&(pcp));			\
									\
	*__ptr += val;							\
	*__ptr;								\
})
82 
/*
 * Generic fallback for raw_cpu_xchg_N(): store @nval into this CPU's
 * instance of @pcp and yield the previous value.  Not atomic; the
 * caller must exclude preemption (and interrupts, if relevant).
 */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) *__ptr = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __old = *__ptr;					\
									\
	*__ptr = nval;							\
	__old;								\
})
91 
/*
 * Generic fallback for raw_cpu_cmpxchg_N(): if this CPU's instance of
 * @pcp equals @oval, store @nval.  Always yields the previous value.
 * Not atomic; the caller must exclude preemption (and interrupts, if
 * relevant).
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) *__ptr = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __old = *__ptr;					\
									\
	if (__old == (oval))						\
		*__ptr = nval;						\
	__old;								\
})
101 
/*
 * Generic fallback for raw_cpu_cmpxchg_double_N(): if BOTH percpu words
 * match their expected old values, store both new values.  Unlike the
 * single-word cmpxchg, this returns 1 on success and 0 on failure, not
 * the old value.  Not atomic; caller must exclude preemption (and
 * interrupts, if relevant).
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(pcp1) *__p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(pcp2) *__p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2  == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})
114 
/*
 * Read helper for this_cpu_generic_read() when @pcp is a native word
 * size: a single load is not torn by an interrupt, so disabling
 * preemption (to pin the CPU) plus READ_ONCE suffices.  The triple
 * underscore in ___ret avoids shadowing __ret in the caller macro.
 */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})
123 
/*
 * Read helper for this_cpu_generic_read() when @pcp is not a native
 * word size: the load may take multiple accesses, so interrupts must
 * be disabled around it to keep the value consistent.
 */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})
133 
/*
 * Generic fallback for this_cpu_read_N(): pick the cheapest safe read.
 * __native_word() is a compile-time check, so only one branch survives
 * in the generated code.
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
143 
/*
 * Generic fallback for this_cpu_{write,add,and,or}_N(): like
 * raw_cpu_generic_to_op() but with interrupts disabled, which also
 * prevents migration, making the update safe in any context.
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)
151 
152 
/*
 * Generic fallback for this_cpu_add_return_N(): the raw add-return done
 * under raw_local_irq_save()/restore(), making it safe against both
 * preemption and interrupts on this CPU.
 */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	unsigned long __fl;						\
	typeof(pcp) __val;						\
	raw_local_irq_save(__fl);					\
	__val = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__fl);					\
	__val;								\
})
162 
/*
 * Generic fallback for this_cpu_xchg_N(): the raw exchange done under
 * raw_local_irq_save()/restore(), making it safe against both
 * preemption and interrupts on this CPU.
 */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	unsigned long __fl;						\
	typeof(pcp) __val;						\
	raw_local_irq_save(__fl);					\
	__val = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__fl);					\
	__val;								\
})
172 
/*
 * Generic fallback for this_cpu_cmpxchg_N(): the raw compare-exchange
 * done under raw_local_irq_save()/restore(), making it safe against
 * both preemption and interrupts on this CPU.
 */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	unsigned long __fl;						\
	typeof(pcp) __val;						\
	raw_local_irq_save(__fl);					\
	__val = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__fl);					\
	__val;								\
})
182 
/*
 * Generic fallback for this_cpu_cmpxchg_double_N(): the raw double
 * compare-exchange done with interrupts disabled.  Returns 1 if both
 * words were updated, 0 otherwise.
 */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
193 
/*
 * Default size-specific (1/2/4/8 byte) raw_cpu_*() operations.  An
 * architecture overrides any of these by defining the corresponding
 * macro before this header is included; whatever is left undefined
 * falls back to the generic C versions above.  The "raw" variants
 * provide no protection against preemption or interrupts.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
318 
/*
 * Default size-specific (1/2/4/8 byte) this_cpu_*() operations,
 * overridable per-architecture in the same way as the raw_cpu_*()
 * defaults above.  The this_cpu variants are safe in any context:
 * the generic versions disable interrupts (or preemption, for native
 * word reads) around the access.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
443 
444 #endif /* _ASM_GENERIC_PERCPU_H_ */
445