xref: /openbmc/linux/include/asm-generic/percpu.h (revision 5d7800d9)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_GENERIC_PERCPU_H_
3 #define _ASM_GENERIC_PERCPU_H_
4 
5 #include <linux/compiler.h>
6 #include <linux/threads.h>
7 #include <linux/percpu-defs.h>
8 
9 #ifdef CONFIG_SMP
10 
11 /*
12  * per_cpu_offset() is the offset that has to be added to a
13  * percpu variable to get to the instance for a certain processor.
14  *
15  * Most arches use the __per_cpu_offset array for those offsets but
16  * some arches have their own ways of determining the offset (x86_64, s390).
17  */
18 #ifndef __per_cpu_offset
19 extern unsigned long __per_cpu_offset[NR_CPUS];
20 
21 #define per_cpu_offset(x) (__per_cpu_offset[x])
22 #endif
23 
24 /*
25  * Determine the offset for the currently active processor.
26  * An arch may define __my_cpu_offset to provide a more effective
27  * means of obtaining the offset to the per cpu variables of the
28  * current processor.
29  */
30 #ifndef __my_cpu_offset
31 #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
32 #endif
/*
 * my_cpu_offset: like __my_cpu_offset, but under CONFIG_DEBUG_PREEMPT it
 * goes through smp_processor_id() so that the preemption-debug checks in
 * that helper can flag callers running in a preemptible context.
 */
33 #ifdef CONFIG_DEBUG_PREEMPT
34 #define my_cpu_offset per_cpu_offset(smp_processor_id())
35 #else
36 #define my_cpu_offset __my_cpu_offset
37 #endif
38 
39 /*
40  * Arch may define arch_raw_cpu_ptr() to provide more efficient address
41  * translations for raw_cpu_ptr().
42  */
43 #ifndef arch_raw_cpu_ptr
44 #define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
45 #endif
46 
/* Arch hook to set up the per-cpu memory areas during early boot. */
47 #ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
48 extern void setup_per_cpu_areas(void);
49 #endif
50 
51 #endif	/* SMP */
52 
/*
 * Linker section that holds per-cpu variables: a dedicated
 * ".data..percpu" section on SMP (so each CPU gets its own copy of the
 * area), plain ".data" on UP where only one instance exists.
 */
53 #ifndef PER_CPU_BASE_SECTION
54 #ifdef CONFIG_SMP
55 #define PER_CPU_BASE_SECTION ".data..percpu"
56 #else
57 #define PER_CPU_BASE_SECTION ".data"
58 #endif
59 #endif
60 
/* Extra attributes an arch may want on per-cpu definitions; empty by default. */
61 #ifndef PER_CPU_ATTRIBUTES
62 #define PER_CPU_ATTRIBUTES
63 #endif
64 
/*
 * Plain load of this CPU's instance of @pcp. No protection against
 * preemption/interrupts; the raw_* caller is responsible for that.
 */
65 #define raw_cpu_generic_read(pcp)					\
66 ({									\
67 	*raw_cpu_ptr(&(pcp));						\
68 })
69 
/*
 * Apply a read-modify-write @op (=, +=, &=, |=) with @val to this CPU's
 * instance of @pcp. Non-atomic; no preemption/irq protection here.
 * Note @op is pasted between the lvalue and @val, so only assignment-style
 * operators make sense.
 */
70 #define raw_cpu_generic_to_op(pcp, val, op)				\
71 do {									\
72 	*raw_cpu_ptr(&(pcp)) op val;					\
73 } while (0)
74 
/*
 * Add @val to this CPU's instance of @pcp and yield the new value.
 * The __p temporary ensures raw_cpu_ptr() is evaluated only once.
 */
75 #define raw_cpu_generic_add_return(pcp, val)				\
76 ({									\
77 	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
78 									\
79 	*__p += val;							\
80 	*__p;								\
81 })
82 
/*
 * Store @nval into this CPU's instance of @pcp and yield the previous
 * value. Not atomic with respect to interrupts or other CPUs.
 */
83 #define raw_cpu_generic_xchg(pcp, nval)					\
84 ({									\
85 	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
86 	typeof(pcp) __ret;						\
87 	__ret = *__p;							\
88 	*__p = nval;							\
89 	__ret;								\
90 })
91 
/*
 * Build a try_cmpxchg from an existing cmpxchg primitive (@_cmpxchg).
 * Yields true when the exchange happened; on failure the value actually
 * observed is written back through @ovalp, per the try_cmpxchg contract.
 */
92 #define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
93 ({									\
94 	typeof(pcp) __val, __old = *(ovalp);				\
95 	__val = _cmpxchg(pcp, __old, nval);				\
96 	if (__val != __old)						\
97 		*(ovalp) = __val;					\
98 	__val == __old;							\
99 })
100 
/*
 * Generic non-atomic try_cmpxchg on this CPU's instance of @pcp:
 * if it equals *@ovalp, store @nval and yield true; otherwise copy the
 * current value back through @ovalp and yield false.
 *
 * ___old uses three underscores so it does not shadow the __old declared
 * by raw_cpu_generic_cmpxchg(), which expands this macro inside its own
 * statement expression.
 */
101 #define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
102 ({									\
103 	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
104 	typeof(pcp) __val = *__p, ___old = *(ovalp);			\
105 	bool __ret;							\
106 	if (__val == ___old) {						\
107 		*__p = nval;						\
108 		__ret = true;						\
109 	} else {							\
110 		*(ovalp) = __val;					\
111 		__ret = false;						\
112 	}								\
113 	__ret;								\
114 })
115 
/*
 * Classic cmpxchg interface layered on try_cmpxchg: always yields the
 * value that was in @pcp before the (attempted) exchange — __old is
 * updated in place by the try_cmpxchg on failure.
 */
116 #define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
117 ({									\
118 	typeof(pcp) __old = (oval);					\
119 	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);			\
120 	__old;								\
121 })
122 
/*
 * Read @pcp with only preemption disabled. Used for native-word-sized
 * types where READ_ONCE() gives a single, tear-free load, so blocking
 * interrupts is unnecessary. The _notrace variants avoid recursion when
 * this is used from tracing code.
 */
123 #define __this_cpu_generic_read_nopreempt(pcp)				\
124 ({									\
125 	typeof(pcp) ___ret;						\
126 	preempt_disable_notrace();					\
127 	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
128 	preempt_enable_notrace();					\
129 	___ret;								\
130 })
131 
/*
 * Read @pcp with local interrupts disabled. Used for types wider than a
 * native word, where the load cannot be done tear-free in one access and
 * an interrupt handler updating the variable mid-read must be kept out.
 */
132 #define __this_cpu_generic_read_noirq(pcp)				\
133 ({									\
134 	typeof(pcp) ___ret;						\
135 	unsigned long ___flags;						\
136 	raw_local_irq_save(___flags);					\
137 	___ret = raw_cpu_generic_read(pcp);				\
138 	raw_local_irq_restore(___flags);				\
139 	___ret;								\
140 })
141 
/*
 * Generic this_cpu read: pick the cheap preempt-only path when the type
 * fits in a native word (compile-time check, so the unused branch is
 * discarded), and the irq-disabling path otherwise.
 */
142 #define this_cpu_generic_read(pcp)					\
143 ({									\
144 	typeof(pcp) __ret;						\
145 	if (__native_word(pcp))						\
146 		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
147 	else								\
148 		__ret = __this_cpu_generic_read_noirq(pcp);		\
149 	__ret;								\
150 })
151 
/*
 * Irq-safe variant of raw_cpu_generic_to_op(): disabling local interrupts
 * both pins the task to this CPU and makes the read-modify-write safe
 * against interrupt handlers on the same CPU.
 */
152 #define this_cpu_generic_to_op(pcp, val, op)				\
153 do {									\
154 	unsigned long __flags;						\
155 	raw_local_irq_save(__flags);					\
156 	raw_cpu_generic_to_op(pcp, val, op);				\
157 	raw_local_irq_restore(__flags);					\
158 } while (0)
159 
160 
/* Irq-safe add-and-return: raw_cpu_generic_add_return() under irq-save. */
161 #define this_cpu_generic_add_return(pcp, val)				\
162 ({									\
163 	typeof(pcp) __ret;						\
164 	unsigned long __flags;						\
165 	raw_local_irq_save(__flags);					\
166 	__ret = raw_cpu_generic_add_return(pcp, val);			\
167 	raw_local_irq_restore(__flags);					\
168 	__ret;								\
169 })
170 
/* Irq-safe exchange: raw_cpu_generic_xchg() under irq-save. */
171 #define this_cpu_generic_xchg(pcp, nval)				\
172 ({									\
173 	typeof(pcp) __ret;						\
174 	unsigned long __flags;						\
175 	raw_local_irq_save(__flags);					\
176 	__ret = raw_cpu_generic_xchg(pcp, nval);			\
177 	raw_local_irq_restore(__flags);					\
178 	__ret;								\
179 })
180 
/* Irq-safe try_cmpxchg: raw_cpu_generic_try_cmpxchg() under irq-save. */
181 #define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
182 ({									\
183 	bool __ret;							\
184 	unsigned long __flags;						\
185 	raw_local_irq_save(__flags);					\
186 	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);		\
187 	raw_local_irq_restore(__flags);					\
188 	__ret;								\
189 })
190 
/* Irq-safe cmpxchg: raw_cpu_generic_cmpxchg() under irq-save. */
191 #define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
192 ({									\
193 	typeof(pcp) __ret;						\
194 	unsigned long __flags;						\
195 	raw_local_irq_save(__flags);					\
196 	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
197 	raw_local_irq_restore(__flags);					\
198 	__ret;								\
199 })
200 
/*
 * Default size-specific (1/2/4/8-byte) raw_cpu_* operations. Each entry
 * is only defined when the arch has not already supplied an optimized
 * version under the same name.
 */
201 #ifndef raw_cpu_read_1
202 #define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
203 #endif
204 #ifndef raw_cpu_read_2
205 #define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
206 #endif
207 #ifndef raw_cpu_read_4
208 #define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
209 #endif
210 #ifndef raw_cpu_read_8
211 #define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
212 #endif
213 
214 #ifndef raw_cpu_write_1
215 #define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
216 #endif
217 #ifndef raw_cpu_write_2
218 #define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
219 #endif
220 #ifndef raw_cpu_write_4
221 #define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
222 #endif
223 #ifndef raw_cpu_write_8
224 #define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
225 #endif
226 
227 #ifndef raw_cpu_add_1
228 #define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
229 #endif
230 #ifndef raw_cpu_add_2
231 #define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
232 #endif
233 #ifndef raw_cpu_add_4
234 #define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
235 #endif
236 #ifndef raw_cpu_add_8
237 #define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
238 #endif
239 
240 #ifndef raw_cpu_and_1
241 #define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
242 #endif
243 #ifndef raw_cpu_and_2
244 #define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
245 #endif
246 #ifndef raw_cpu_and_4
247 #define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
248 #endif
249 #ifndef raw_cpu_and_8
250 #define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
251 #endif
252 
253 #ifndef raw_cpu_or_1
254 #define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
255 #endif
256 #ifndef raw_cpu_or_2
257 #define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
258 #endif
259 #ifndef raw_cpu_or_4
260 #define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
261 #endif
262 #ifndef raw_cpu_or_8
263 #define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
264 #endif
265 
266 #ifndef raw_cpu_add_return_1
267 #define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
268 #endif
269 #ifndef raw_cpu_add_return_2
270 #define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
271 #endif
272 #ifndef raw_cpu_add_return_4
273 #define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
274 #endif
275 #ifndef raw_cpu_add_return_8
276 #define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
277 #endif
278 
279 #ifndef raw_cpu_xchg_1
280 #define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
281 #endif
282 #ifndef raw_cpu_xchg_2
283 #define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
284 #endif
285 #ifndef raw_cpu_xchg_4
286 #define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
287 #endif
288 #ifndef raw_cpu_xchg_8
289 #define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
290 #endif
291 
/*
 * Default raw_cpu_try_cmpxchg variants (1/2/4/8 bytes plus 64/128-bit).
 * If the arch provides a cmpxchg of the matching size, build try_cmpxchg
 * on top of it via __cpu_fallback_try_cmpxchg(); otherwise use the fully
 * generic implementation.
 */
292 #ifndef raw_cpu_try_cmpxchg_1
293 #ifdef raw_cpu_cmpxchg_1
294 #define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
295 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
296 #else
297 #define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
298 	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
299 #endif
300 #endif
301 #ifndef raw_cpu_try_cmpxchg_2
302 #ifdef raw_cpu_cmpxchg_2
303 #define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
304 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
305 #else
306 #define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
307 	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
308 #endif
309 #endif
310 #ifndef raw_cpu_try_cmpxchg_4
311 #ifdef raw_cpu_cmpxchg_4
312 #define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
313 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
314 #else
315 #define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
316 	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
317 #endif
318 #endif
319 #ifndef raw_cpu_try_cmpxchg_8
320 #ifdef raw_cpu_cmpxchg_8
321 #define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
322 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
323 #else
324 #define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
325 	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
326 #endif
327 #endif
328 
329 #ifndef raw_cpu_try_cmpxchg64
330 #ifdef raw_cpu_cmpxchg64
331 #define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
332 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
333 #else
334 #define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
335 	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
336 #endif
337 #endif
338 #ifndef raw_cpu_try_cmpxchg128
339 #ifdef raw_cpu_cmpxchg128
340 #define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
341 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
342 #else
343 #define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
344 	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
345 #endif
346 #endif
347 
/*
 * Default raw_cpu_cmpxchg variants (1/2/4/8 bytes plus 64/128-bit),
 * used when the arch supplies none of its own.
 */
348 #ifndef raw_cpu_cmpxchg_1
349 #define raw_cpu_cmpxchg_1(pcp, oval, nval) \
350 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
351 #endif
352 #ifndef raw_cpu_cmpxchg_2
353 #define raw_cpu_cmpxchg_2(pcp, oval, nval) \
354 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
355 #endif
356 #ifndef raw_cpu_cmpxchg_4
357 #define raw_cpu_cmpxchg_4(pcp, oval, nval) \
358 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
359 #endif
360 #ifndef raw_cpu_cmpxchg_8
361 #define raw_cpu_cmpxchg_8(pcp, oval, nval) \
362 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
363 #endif
364 
365 #ifndef raw_cpu_cmpxchg64
366 #define raw_cpu_cmpxchg64(pcp, oval, nval) \
367 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
368 #endif
369 #ifndef raw_cpu_cmpxchg128
370 #define raw_cpu_cmpxchg128(pcp, oval, nval) \
371 	raw_cpu_generic_cmpxchg(pcp, oval, nval)
372 #endif
373 
/*
 * Default size-specific (1/2/4/8-byte) this_cpu_* operations — the
 * preemption/irq-safe counterparts of the raw_cpu_* table above. Each is
 * defined only if the arch did not provide an optimized version.
 */
374 #ifndef this_cpu_read_1
375 #define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
376 #endif
377 #ifndef this_cpu_read_2
378 #define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
379 #endif
380 #ifndef this_cpu_read_4
381 #define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
382 #endif
383 #ifndef this_cpu_read_8
384 #define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
385 #endif
386 
387 #ifndef this_cpu_write_1
388 #define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
389 #endif
390 #ifndef this_cpu_write_2
391 #define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
392 #endif
393 #ifndef this_cpu_write_4
394 #define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
395 #endif
396 #ifndef this_cpu_write_8
397 #define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
398 #endif
399 
400 #ifndef this_cpu_add_1
401 #define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
402 #endif
403 #ifndef this_cpu_add_2
404 #define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
405 #endif
406 #ifndef this_cpu_add_4
407 #define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
408 #endif
409 #ifndef this_cpu_add_8
410 #define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
411 #endif
412 
413 #ifndef this_cpu_and_1
414 #define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
415 #endif
416 #ifndef this_cpu_and_2
417 #define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
418 #endif
419 #ifndef this_cpu_and_4
420 #define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
421 #endif
422 #ifndef this_cpu_and_8
423 #define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
424 #endif
425 
426 #ifndef this_cpu_or_1
427 #define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
428 #endif
429 #ifndef this_cpu_or_2
430 #define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
431 #endif
432 #ifndef this_cpu_or_4
433 #define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
434 #endif
435 #ifndef this_cpu_or_8
436 #define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
437 #endif
438 
439 #ifndef this_cpu_add_return_1
440 #define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
441 #endif
442 #ifndef this_cpu_add_return_2
443 #define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
444 #endif
445 #ifndef this_cpu_add_return_4
446 #define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
447 #endif
448 #ifndef this_cpu_add_return_8
449 #define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
450 #endif
451 
452 #ifndef this_cpu_xchg_1
453 #define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
454 #endif
455 #ifndef this_cpu_xchg_2
456 #define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
457 #endif
458 #ifndef this_cpu_xchg_4
459 #define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
460 #endif
461 #ifndef this_cpu_xchg_8
462 #define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
463 #endif
464 
/*
 * Default this_cpu_try_cmpxchg variants (1/2/4/8 bytes plus 64/128-bit):
 * built on an arch-provided this_cpu_cmpxchg of matching size when one
 * exists, otherwise on the irq-safe generic implementation.
 */
465 #ifndef this_cpu_try_cmpxchg_1
466 #ifdef this_cpu_cmpxchg_1
467 #define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
468 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
469 #else
470 #define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
471 	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
472 #endif
473 #endif
474 #ifndef this_cpu_try_cmpxchg_2
475 #ifdef this_cpu_cmpxchg_2
476 #define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
477 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
478 #else
479 #define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
480 	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
481 #endif
482 #endif
483 #ifndef this_cpu_try_cmpxchg_4
484 #ifdef this_cpu_cmpxchg_4
485 #define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
486 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
487 #else
488 #define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
489 	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
490 #endif
491 #endif
492 #ifndef this_cpu_try_cmpxchg_8
493 #ifdef this_cpu_cmpxchg_8
494 #define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
495 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
496 #else
497 #define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
498 	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
499 #endif
500 #endif
501 
502 #ifndef this_cpu_try_cmpxchg64
503 #ifdef this_cpu_cmpxchg64
504 #define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
505 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
506 #else
507 #define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
508 	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
509 #endif
510 #endif
511 #ifndef this_cpu_try_cmpxchg128
512 #ifdef this_cpu_cmpxchg128
513 #define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
514 	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
515 #else
516 #define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
517 	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
518 #endif
519 #endif
520 
/*
 * Default this_cpu_cmpxchg variants (1/2/4/8 bytes plus 64/128-bit),
 * used when the arch supplies none of its own.
 */
521 #ifndef this_cpu_cmpxchg_1
522 #define this_cpu_cmpxchg_1(pcp, oval, nval) \
523 	this_cpu_generic_cmpxchg(pcp, oval, nval)
524 #endif
525 #ifndef this_cpu_cmpxchg_2
526 #define this_cpu_cmpxchg_2(pcp, oval, nval) \
527 	this_cpu_generic_cmpxchg(pcp, oval, nval)
528 #endif
529 #ifndef this_cpu_cmpxchg_4
530 #define this_cpu_cmpxchg_4(pcp, oval, nval) \
531 	this_cpu_generic_cmpxchg(pcp, oval, nval)
532 #endif
533 #ifndef this_cpu_cmpxchg_8
534 #define this_cpu_cmpxchg_8(pcp, oval, nval) \
535 	this_cpu_generic_cmpxchg(pcp, oval, nval)
536 #endif
537 
538 #ifndef this_cpu_cmpxchg64
539 #define this_cpu_cmpxchg64(pcp, oval, nval) \
540 	this_cpu_generic_cmpxchg(pcp, oval, nval)
541 #endif
542 #ifndef this_cpu_cmpxchg128
543 #define this_cpu_cmpxchg128(pcp, oval, nval) \
544 	this_cpu_generic_cmpxchg(pcp, oval, nval)
545 #endif
546 
547 #endif /* _ASM_GENERIC_PERCPU_H_ */
548