/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

/* Emit 'n' NOP instructions: __nops() as an asm string, nops() inline. */
#define __nops(n)	".rept	" #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

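/*
 * Illustrative sketch (not part of the original header): nops() inserts
 * fixed-length padding or delay directly, while __nops() provides the
 * string form for embedding in a larger inline asm sequence:
 *
 *	nops(3);	// expands to asm volatile(".rept 3\nnop\n.endr\n")
 */
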
/* Event and low-power wait hints: Send Event, Wait For Event/Interrupt. */
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

/*
 * isb: Instruction Synchronization Barrier, flushes the pipeline so that
 * later instructions see the effects of prior context-changing operations.
 * dmb/dsb: Data Memory/Synchronization Barrier for the shareability domain
 * and access type named by 'opt' (e.g. sy, ld, st, ish, ishld, oshst).
 */
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

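/*
 * Illustrative sketch: a store-only inner-shareable DMB is enough to
 * publish data to other CPUs before a flag (this is what __smp_wmb()
 * below boils down to):
 *
 *	WRITE_ONCE(data, 1);
 *	dmb(ishst);		// 'data' observable before 'flag'
 *	WRITE_ONCE(flag, 1);
 */
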
/*
 * Profiling Synchronization Barrier: PSB CSYNC is encoded as hint #17,
 * which is a NOP on CPUs without the Statistical Profiling Extension.
 */
#define psb_csync()	asm volatile("hint #17" : : : "memory")

/* Mandatory barriers: full-system DSBs, ordering against all observers. */
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

/*
 * DMA barriers: order CPU accesses to coherent DMA memory against the
 * device, using outer-shareable load-only/store-only DMBs.
 */
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

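/*
 * Illustrative sketch, assuming a hypothetical descriptor ring shared
 * with a DMA-capable device: dma_wmb() makes the descriptor payload
 * visible to the device before ownership is handed over.
 *
 *	desc->addr = buf_dma_addr;
 *	desc->len  = buf_len;
 *	dma_wmb();			// publish payload first
 *	desc->status = DESC_OWN_DEVICE;	// hypothetical ownership flag
 */
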
/* SMP barriers: inner-shareable DMBs, ordering only against other CPUs. */
#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

/*
 * Store with release semantics using STLR: all memory accesses before
 * the store are observed before the store itself.  The value is bounced
 * through a union so the macro handles any 1-, 2-, 4- or 8-byte type;
 * other sizes fail the compile-time assertion.
 */
#define __smp_store_release(p, v)					\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)

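/*
 * Illustrative sketch: smp_store_release() (which wraps the macro above)
 * publishes data with release semantics, so all earlier stores become
 * observable no later than the flag:
 *
 *	msg->payload = 42;		// hypothetical message structure
 *	smp_store_release(&msg->ready, 1);
 */
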
/*
 * Load with acquire semantics using LDAR: no memory access after the
 * load can be reordered before it.  The result comes back through the
 * same union trick as __smp_store_release().
 */
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	}								\
	__u.__val;							\
})

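/*
 * Illustrative sketch: the consumer side pairs smp_load_acquire() with
 * the release store above; once 'ready' reads as 1, the payload written
 * before the release is guaranteed to be visible:
 *
 *	while (!smp_load_acquire(&msg->ready))
 *		cpu_relax();
 *	val = msg->payload;
 */
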
/*
 * Spin until cond_expr (which may refer to the most recently loaded
 * value as VAL) becomes true.  Rather than busy-polling, each retry goes
 * through __cmpwait_relaxed(), which waits in WFE until the location is
 * observed to change.
 */
#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})

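/*
 * Illustrative sketch: smp_cond_load_acquire() replaces an open-coded
 * acquire polling loop, with VAL naming the latest value inside the
 * condition:
 *
 *	// wait until a (hypothetical) lock word drops to zero
 *	smp_cond_load_acquire(&lock->val, VAL == 0);
 */
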
#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */