/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#include <asm/alternative.h>

#ifndef __ASSEMBLY__

/*
 * The synchronize caches instruction (sync) executes as a nop on
 * systems in which all memory references are performed in order.  On
 * kernels not running SMP, the alternative patching below replaces
 * the sync with a nop, since there is no other CPU to order against.
 */
#define synchronize_caches() asm volatile("sync" \
	ALTERNATIVE(ALT_COND_NO_SMP, INSN_NOP) \
	: : : "memory")

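/*
 * On SMP kernels the full barriers expand to synchronize_caches() to
 * order memory accesses between CPUs; on UP kernels a compiler
 * barrier is considered sufficient.
 */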
#if defined(CONFIG_SMP)
#define mb()		do { synchronize_caches(); } while (0)
#define rmb()		mb()
#define wmb()		mb()
#define dma_rmb()	mb()
#define dma_wmb()	mb()
#else
#define mb()		barrier()
#define rmb()		barrier()
#define wmb()		barrier()
#define dma_rmb()	barrier()
#define dma_wmb()	barrier()
#endif

#define __smp_mb()	mb()
#define __smp_rmb()	mb()
#define __smp_wmb()	mb()

#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("stb,ma %0,0(%1)"				\
				: : "r"(*(__u8 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("sth,ma %0,0(%1)"				\
				: : "r"(*(__u16 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("stw,ma %0,0(%1)"				\
				: : "r"(*(__u32 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("std,ma %0,0(%1)"			\
				: : "r"(*(__u64 *)__u.__c), "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
} while (0)

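/*
 * Acquire loads use the same trick: an ordered load (,ma with a zero
 * displacement) completes before any subsequent memory accesses.
 */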
#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile("ldb,ma 0(%1),%0"				\
				: "=r"(*(__u8 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile("ldh,ma 0(%1),%0"				\
				: "=r"(*(__u16 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile("ldw,ma 0(%1),%0"				\
				: "=r"(*(__u32 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	case 8:								\
		if (IS_ENABLED(CONFIG_64BIT))				\
			asm volatile("ldd,ma 0(%1),%0"			\
				: "=r"(*(__u64 *)__u.__c) : "r"(__p)	\
				: "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
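
/*
 * Minimal usage sketch (flag, data, compute() and use() are
 * hypothetical): a producer publishes data with a release store and
 * a consumer picks it up with an acquire load:
 *
 *	WRITE_ONCE(data, compute());
 *	smp_store_release(&flag, 1);
 *
 *	if (smp_load_acquire(&flag))
 *		use(READ_ONCE(data));
 *
 * asm-generic/barrier.h below builds the smp_store_release() and
 * smp_load_acquire() wrappers from the __smp_* definitions above.
 */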
#include <asm-generic/barrier.h>

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_BARRIER_H */