/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))
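
/*
 * Illustrative sketch (not part of the original header): __nops() yields
 * assembly text that can be pasted into a larger asm block, while nops()
 * emits the instructions directly as a statement.
 */
static inline void example_emit_nops(void)
{
	nops(3);	/* expands to ".rept 3 / nop / .endr" */
}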

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")
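
/*
 * Illustrative sketch (hypothetical flag variable): a waiter parks in WFE
 * until woken by an event such as an SEV from another CPU. WFE may also
 * return spuriously, so the condition is always re-checked.
 */
static inline void example_wait(volatile int *flag)
{
	while (!*flag)
		wfe();		/* low-power wait for an event */
}

static inline void example_wake(volatile int *flag)
{
	*flag = 1;
	dsb(ishst);		/* make the store visible before signalling */
	sev();			/* wake CPUs waiting in wfe() */
}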

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

/* HINT #17 encodes PSB CSYNC, the Statistical Profiling synchronization barrier */
#define psb_csync()	asm volatile("hint #17" : : : "memory")
/* HINT #20 encodes CSDB, the consumption-of-speculative-data barrier */
#define csdb()		asm volatile("hint #20" : : : "memory")

#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)
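
/*
 * Illustrative sketch (hypothetical descriptor layout): dma_wmb() orders
 * the descriptor payload stores before the ownership store, so a DMA
 * master that observes "own" set also observes a complete descriptor.
 */
struct example_desc {
	unsigned long	addr;
	unsigned int	len;
	unsigned int	own;
};

static inline void example_post_desc(struct example_desc *d,
				     unsigned long addr, unsigned int len)
{
	d->addr = addr;
	d->len = len;
	dma_wmb();	/* payload before ownership, as seen by the device */
	d->own = 1;
}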

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	/* idx - sz: borrows (clears C) iff idx < sz */
	"	cmp	%1, %2\n"
	/* 0 - 0 - !C: all ones iff idx < sz, zero otherwise */
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
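
/*
 * Illustrative sketch: how the mask is consumed, mirroring
 * array_index_nospec() from <linux/nospec.h>. Under speculation, an
 * out-of-bounds idx yields a zero mask and the access is clamped to
 * element 0.
 */
static inline unsigned long example_clamp_index(unsigned long idx,
						unsigned long sz)
{
	return idx & array_index_mask_nospec(idx, sz);
}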

#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

/* Store-release: STLR orders all prior accesses before the store */
#define __smp_store_release(p, v)					\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)

/* Load-acquire: LDAR orders the load before all subsequent accesses */
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	}								\
	__u.__val;							\
})

/*
 * Wait for cond_expr to hold on *ptr, re-reading with acquire semantics;
 * __cmpwait_relaxed() lets the CPU wait in WFE instead of busy-polling.
 */
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})

#include <asm-generic/barrier.h>
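
/*
 * Illustrative sketch (hypothetical data/flag variables), placed after
 * the generic header so the smp_* wrappers are defined: a release/acquire
 * message-passing pair. The consumer may only dereference data once the
 * acquire load has observed the release store of the flag.
 */
static inline void example_publish(int *data, int *flag)
{
	*data = 42;			/* payload */
	smp_store_release(flag, 1);	/* publish payload, then flag */
}

static inline int example_consume(int *data, int *flag)
{
	smp_cond_load_acquire(flag, VAL != 0);	/* wait until published */
	return *data;			/* safe: ordered after the flag load */
}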

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */