xref: /openbmc/linux/arch/arm64/include/asm/barrier.h (revision 565485b8)
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))
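
/*
 * Usage sketch (illustrative, not from this file): __nops()/nops() emit
 * n architectural NOPs, e.g. to keep a patched instruction sequence the
 * same length as the code it replaces:
 *
 *	nops(3);			// three inline NOPs
 *	asm volatile(__nops(2));	// same idea as a string, for use in
 *					// larger asm templates
 */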

/* SEV/WFE/WFI: event signalling and low-power wait instructions. */
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

/*
 * ISB synchronizes the instruction stream; DMB orders and DSB completes
 * memory accesses, with `opt` naming the shareability domain and access
 * type (sy, ish, ishld, ishst, nsh, ld, st, ...).
 */
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")
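
/*
 * Illustrative sketch (assumed example, not part of this header): a
 * page-table update typically pairs a store-side DSB with an ISB so the
 * table walker sees the new entry before execution continues. The names
 * `ptep` and `pte` are hypothetical:
 *
 *	WRITE_ONCE(*ptep, pte);
 *	dsb(ishst);	// complete the store for the inner-shareable domain
 *	isb();		// resynchronize the instruction pipeline
 */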

/* Profiling synchronization barrier: HINT #17 encodes "psb csync". */
#define psb_csync()	asm volatile("hint #17" : : : "memory")
/* Consumption of Speculative Data Barrier: HINT #20 encodes "csdb". */
#define csdb()		asm volatile("hint #20" : : : "memory")

/*
 * Speculation barrier: patched to the SB instruction on CPUs that have
 * it (ARM64_HAS_SB), otherwise DSB NSH followed by ISB.
 */
#define spec_bar()	asm volatile(ALTERNATIVE("dsb nsh\nisb\n",		\
						 SB_BARRIER_INSN"nop\n",	\
						 ARM64_HAS_SB))

/* Mandatory barriers: full-system DSBs. */
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

/* Barriers for ordering against DMA-capable devices (outer-shareable). */
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)
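
/*
 * Usage sketch (illustrative; `desc`, `OWN` and `db_reg` are hypothetical
 * driver state, not defined anywhere in this file): dma_wmb() makes the
 * descriptor contents visible to the device before ownership is handed
 * over:
 *
 *	desc->addr = dma_addr;
 *	desc->len  = len;
 *	dma_wmb();			// descriptor fields visible first ...
 *	WRITE_ONCE(desc->flags, OWN);	// ... then ownership passes to the device
 *	writel(1, db_reg);		// MMIO doorbell; writel() provides its
 *					// own ordering against prior writes
 */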

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
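
/*
 * Consumption sketch (a simplified view of array_index_nospec() from
 * include/linux/nospec.h; not defined in this header): the mask clamps
 * a bounds-checked index to 0 under mis-speculation, so it cannot steer
 * a speculative out-of-bounds load:
 *
 *	if (idx < sz) {
 *		idx &= array_index_mask_nospec(idx, sz);
 *		val = arr[idx];	// idx is forced to 0 if speculated past sz
 *	}
 */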

/* SMP barriers: DMBs limited to the inner-shareable domain. */
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)
#define __smp_store_release(p, v)					\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)
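
/*
 * Illustrative message-passing sketch (assumed example; `data` and
 * `flag` are hypothetical shared variables): the STLR above orders all
 * earlier accesses before the flag store:
 *
 *	// CPU 0 (producer)
 *	WRITE_ONCE(data, 42);
 *	smp_store_release(&flag, 1);
 */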

#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
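
/*
 * The matching consumer for the sketch above: LDAR orders the flag load
 * before all later accesses, so `data` is observed as 42:
 *
 *	// CPU 1 (consumer)
 *	while (!smp_load_acquire(&flag))
 *		cpu_relax();
 *	r = READ_ONCE(data);	// guaranteed to read 42
 */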

/*
 * Spin until cond_expr is true; __cmpwait_relaxed() lets the CPU idle
 * in WFE between iterations instead of busy-polling.
 */
#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})

/* As above, but every load of *ptr has acquire semantics. */
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})
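
/*
 * Usage sketch (illustrative; `flag` is a hypothetical shared variable):
 * inside cond_expr, VAL names the most recently loaded value, and the
 * whole expression evaluates to the value that satisfied the condition:
 *
 *	v = smp_cond_load_acquire(&flag, VAL != 0);	// returns the
 *							// nonzero value seen
 */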

#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */