/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/preempt.h>
#include <asm/cmpxchg.h>

/*
 * s390 uses its own implementation for per-cpu data: the offset of
 * the CPU-local data area is cached in the CPU's lowcore memory.
 */
#define __my_cpu_offset S390_lowcore.percpu_offset

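/*
 * Illustrative sketch (not part of this header): the generic accessors
 * in asm-generic/percpu.h consume __my_cpu_offset to turn a per-cpu
 * symbol address into the address of this CPU's instance, conceptually
 * along the lines of
 *
 *	#define this_cpu_ptr(ptr)					\
 *		((typeof(ptr))((unsigned long)(ptr) + __my_cpu_offset))
 *
 * so on s390 every this_cpu_*() access is relative to the offset
 * cached in lowcore.
 */
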
/*
 * For 64-bit module code the module may be loaded more than 4G away
 * from the per-cpu area, so use weak definitions to force the compiler
 * to generate external references.
 */
#if defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif

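/*
 * Hedged sketch of the effect (paraphrased, not a quote of
 * include/linux/percpu-defs.h): with ARCH_NEEDS_WEAK_PER_CPU defined,
 * a module's DEFINE_PER_CPU(int, foo) emits roughly
 *
 *	extern int foo;			(declaration in the per-cpu section)
 *	int foo __attribute__((weak));	(weak definition)
 *
 * so accesses to "foo" stay external references that can be relocated
 * even when the per-cpu area is more than 4G away from the module.
 */
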
/*
 * We use a compare-and-swap loop since that uses fewer CPU cycles than
 * disabling and enabling interrupts like the generic variant would do.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = READ_ONCE(*ptr__);					\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable_notrace();					\
	new__;								\
})

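/*
 * Usage sketch (hypothetical counter name, assuming the generic
 * this_cpu_*() wrappers): a byte-sized per-cpu counter, e.g.
 *
 *	DEFINE_PER_CPU(u8, pkt_class);
 *	...
 *	this_cpu_inc(pkt_class);
 *
 * dispatches by size to this_cpu_add_1() below and ends up in the
 * READ_ONCE()/cmpxchg() retry loop above, with preemption disabled
 * only around the access itself.
 */
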
#define this_cpu_add_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#ifndef CONFIG_HAVE_MARCH_Z196_FEATURES

#define this_cpu_add_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 "   %[ptr__],%[val__]\n"			\
			: [ptr__] "+Q" (*ptr__)			\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		asm volatile(						\
			op1 "   %[old__],%[val__],%[ptr__]\n"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable_notrace();					\
}

#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)

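/*
 * Illustrative expansion (hedged; register choice and displacements are
 * up to the compiler): a compile-time constant that fits in a signed
 * byte goes through the add-immediate-to-storage form (asi/agsi), any
 * other addend through the interlocked load-and-add form (laa/laag),
 * roughly
 *
 *	this_cpu_add_4(cnt, 1);		->	asi	0(%r1),1
 *	this_cpu_add_4(cnt, delta);	->	laa	%r0,%r2,0(%r1)
 *
 * Either way the read-modify-write is a single instruction, so it
 * cannot be torn by an interrupt; preemption is disabled only to keep
 * the access on the intended CPU.
 */
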
#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
	old__ + val__;							\
})

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")

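/*
 * Usage sketch (hypothetical sequence counter): laa/laag fetch the old
 * value into old__, so the macro returns old__ + val__, i.e. the new
 * value, matching the generic this_cpu_add_return() semantics:
 *
 *	DEFINE_PER_CPU(int, seq);
 *	...
 *	int cur = this_cpu_add_return(seq, 1);
 */
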
#define arch_this_cpu_to_op(pcp, val, op)				\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "    %[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
}

#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op(pcp, val, "laog")

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable_notrace();					\
	ret__;								\
})

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

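/*
 * Usage sketch (hypothetical per-cpu state word and state values): the
 * return value is the previous contents, so success is checked against
 * the expected old value:
 *
 *	DEFINE_PER_CPU(unsigned long, state);
 *	...
 *	if (this_cpu_cmpxchg(state, STATE_IDLE, STATE_BUSY) == STATE_IDLE)
 *		claimed = true;
 */
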
#define this_cpu_cmpxchg64(pcp, o, n)	this_cpu_cmpxchg_8(pcp, o, n)

#define this_cpu_cmpxchg128(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	u128 old__, new__, ret__;					\
	pcp_op_T__ *ptr__;						\
	old__ = oval;							\
	new__ = nval;							\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg128((void *)ptr__, old__, new__);		\
	preempt_enable_notrace();					\
	ret__;								\
})

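/*
 * Hedged note with a usage sketch (hypothetical variable names): s390's
 * cmpxchg128() is built on the CDSG instruction, whose 16-byte storage
 * operand must be quadword aligned, so the per-cpu variable has to
 * provide at least 16-byte alignment:
 *
 *	static DEFINE_PER_CPU_ALIGNED(u128, slot);
 *	...
 *	u128 prev = this_cpu_cmpxchg128(slot, expected, desired);
 */
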
#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable_notrace();					\
	ret__;								\
})

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)

#include <asm-generic/percpu.h>

#endif /* __ARCH_S390_PERCPU__ */