/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

#include <asm/alternative-macros.h>

#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfet(val)	asm volatile("msr s0_3_c1_c0_0, %0"	\
				     : : "r" (val) : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")
#define wfit(val)	asm volatile("msr s0_3_c1_c0_1, %0"	\
				     : : "r" (val) : "memory")

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

#define psb_csync()	asm volatile("hint #17" : : : "memory")
#define __tsb_csync()	asm volatile("hint #18" : : : "memory")
#define csdb()		asm volatile("hint #20" : : : "memory")

/*
 * Data Gathering Hint:
 * This instruction prevents merging memory accesses with Normal-NC or
 * Device-GRE attributes before the hint instruction with any memory accesses
 * appearing after the hint instruction.
 */
#define dgh()		asm volatile("hint #6" : : : "memory")

#define spec_bar()	asm volatile(ALTERNATIVE("dsb nsh\nisb\n",		\
						 SB_BARRIER_INSN"nop\n",	\
						 ARM64_HAS_SB))

#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()						\
	do {							\
		asm volatile(					\
		ALTERNATIVE_CB("dsb sy",			\
			       ARM64_HAS_GIC_PRIO_RELAXED_SYNC,	\
			       alt_cb_patch_nops)		\
		);						\
	} while (0)
#else
#define pmr_sync()	do {} while (0)
#endif

#define __mb()		dsb(sy)
#define __rmb()		dsb(ld)
#define __wmb()		dsb(st)

#define __dma_mb()	dmb(osh)
#define __dma_rmb()	dmb(oshld)
#define __dma_wmb()	dmb(oshst)

#define io_stop_wc()	dgh()
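
/*
 * Illustrative sketch (hypothetical driver code, not part of this header):
 * after batching writes to a write-combining (Normal-NC) mapping, a driver
 * can use io_stop_wc() so the batch is not merged with unrelated later
 * writes. The push_batch/wc_buf/desc names are invented for the example.
 *
 *	static void push_batch(u64 __iomem *wc_buf, const u64 *desc, int n)
 *	{
 *		int i;
 *
 *		for (i = 0; i < n; i++)
 *			__raw_writeq(desc[i], wc_buf + i);
 *		io_stop_wc();	// fence off WC merging at the batch boundary
 *	}
 */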

#define tsb_csync()								\
	do {									\
		/*								\
		 * CPUs affected by Arm Erratum 2054223 or 2067961 need		\
		 * another TSB to ensure the trace is flushed. The barriers	\
		 * don't have to be strictly back to back, as long as the	\
		 * CPU is in a trace prohibited state.				\
		 */								\
		if (cpus_have_final_cap(ARM64_WORKAROUND_TSB_FLUSH_FAILURE))	\
			__tsb_csync();						\
		__tsb_csync();							\
	} while (0)

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
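
/*
 * Usage sketch: the generic array_index_nospec() helper (see
 * include/linux/nospec.h) ANDs an already bounds-checked index with the
 * mask above, so a misspeculated out-of-bounds index is clamped to 0
 * instead of steering a dependent load:
 *
 *	if (idx < ARRAY_SIZE(table)) {
 *		idx = array_index_nospec(idx, ARRAY_SIZE(table));
 *		val = table[idx];	// safe even under speculation
 *	}
 */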

/*
 * Ensure that reads of the counter are treated the same as memory reads
 * for the purposes of ordering by subsequent memory barriers. The asm
 * below turns the counter value into a data dependency (EOR of a value
 * with itself is always zero) feeding the address of a dummy load from
 * the stack, so the counter read is ordered like any other load.
 *
 * This insanity brought to you by speculative system register reads,
 * out-of-order memory accesses, sequence locks and Thomas Gleixner.
 *
 * https://lore.kernel.org/r/alpine.DEB.2.21.1902081950260.1662@nanos.tec.linutronix.de/
 */
#define arch_counter_enforce_ordering(val) do {				\
	u64 tmp, _val = (val);						\
									\
	asm volatile(							\
	"	eor	%0, %1, %1\n"					\
	"	add	%0, sp, %0\n"					\
	"	ldr	xzr, [%0]"					\
	: "=r" (tmp) : "r" (_val));					\
} while (0)
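
/*
 * Usage sketch: this mirrors how the arm64 arch timer code reads the
 * virtual counter (see __arch_counter_get_cntvct() in
 * arch/arm64/include/asm/arch_timer.h); the exact body there may differ:
 *
 *	u64 cnt;
 *
 *	isb();				// don't speculate the read early
 *	cnt = read_sysreg(cntvct_el0);
 *	arch_counter_enforce_ordering(cnt);	// order it like a load
 */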

#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =	\
		{ .__val = (__force __unqual_scalar_typeof(*p)) (v) };	\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_write(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*__p)				\
				: "rZ" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*__p)				\
				: "rZ" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*__p)				\
				: "rZ" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %x1, %0"				\
				: "=Q" (*__p)				\
				: "rZ" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;	\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_read(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	}								\
	(typeof(*p))__u.__val;						\
})
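
/*
 * Pairing sketch (hypothetical ready/data pair): the generic
 * smp_store_release()/smp_load_acquire() wrappers built on the __smp_*
 * definitions above give the classic message-passing guarantee:
 *
 *	// producer
 *	data = 42;
 *	smp_store_release(&ready, 1);	// STLR orders the data write first
 *
 *	// consumer
 *	while (!smp_load_acquire(&ready))	// LDAR orders later reads
 *		cpu_relax();
 *	WARN_ON(data != 42);		// guaranteed to observe the store
 */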

#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})

#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})
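
/*
 * Usage sketch (hypothetical state variable): wait for another CPU to
 * publish a non-zero value. On arm64, __cmpwait_relaxed() lets the CPU
 * sit in WFE between updates rather than spinning flat out:
 *
 *	val = smp_cond_load_acquire(&state, VAL != 0);
 *	// VAL names the currently polled value inside cond_expr
 */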

#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */