xref: /openbmc/linux/arch/arm64/include/asm/barrier.h (revision e0bf6c5c)
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

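/*
 * Hint instructions: sev() signals an event to the other CPUs, wfe()
 * puts this CPU into a low-power state until an event is signalled,
 * and wfi() does the same for a pending interrupt. The "memory"
 * clobber stops the compiler from reordering memory accesses across
 * them.
 */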
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

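/*
 * Raw barrier instructions. isb() synchronises the instruction
 * stream (subsequent instructions are re-fetched), dmb(opt) orders
 * memory accesses, and dsb(opt) additionally waits for outstanding
 * accesses to complete. The opt argument selects the access type and
 * shareability domain (e.g. sy, ld, st, ish, ishld, ishst, oshld,
 * oshst).
 */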
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

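/*
 * Mandatory barriers: full-system DSBs, strong enough to order
 * accesses to device memory as well as normal memory. rmb() and
 * wmb() restrict the barrier to loads and stores respectively.
 */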
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

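/*
 * Barriers for ordering CPU accesses against DMA: outer-shareable
 * DMBs are sufficient here because coherent DMA masters live in the
 * outer shareable domain.
 */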
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

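/*
 * On a uniprocessor (!CONFIG_SMP) kernel there are no other CPUs to
 * order against, so the smp_*() barriers reduce to compiler-only
 * barriers.
 */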
#ifndef CONFIG_SMP
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()

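/*
 * Likewise, store-release and load-acquire need only a compiler
 * barrier around a single volatile access on UP.
 */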
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)

#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})

#else

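/*
 * On SMP, DMBs limited to the inner shareable domain are sufficient
 * to order accesses between CPUs.
 */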
#define smp_mb()	dmb(ish)
#define smp_rmb()	dmb(ishld)
#define smp_wmb()	dmb(ishst)

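/*
 * smp_store_release() maps onto the AArch64 store-release
 * instructions (stlrb/stlrh/stlr): no earlier memory access by this
 * CPU can be observed after the release store.
 */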
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)

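/*
 * smp_load_acquire() maps onto the load-acquire instructions
 * (ldarb/ldarh/ldar): no later memory access by this CPU can be
 * observed before the acquire load.
 */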
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1;						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (___p1) : "Q" (*p) : "memory");		\
		break;							\
	}								\
	___p1;								\
})
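
/*
 * Typical pairing (an illustrative sketch; data and ready are
 * hypothetical variables, not part of this header):
 *
 *	CPU 0				CPU 1
 *	data = 42;			if (smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);		assert(data == 42);
 *
 * The release orders the store to data before the store to ready;
 * the acquire prevents the load of data from being hoisted above the
 * load of ready.
 */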

#endif

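/*
 * arm64 respects address dependencies in hardware, so (unlike Alpha)
 * the data-dependency barriers are no-ops.
 */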
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)

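/*
 * set_mb() stores a value and then issues a full SMP barrier; nop()
 * emits a single NOP instruction.
 */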
#define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
#define nop()		asm volatile("nop")

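/*
 * Full barriers for use around atomic read-modify-write operations
 * that do not themselves imply ordering.
 */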
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */