1b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
29f97da78SDavid Howells #ifndef __ASM_BARRIER_H
39f97da78SDavid Howells #define __ASM_BARRIER_H
49f97da78SDavid Howells
59f97da78SDavid Howells #ifndef __ASSEMBLY__
69f97da78SDavid Howells
/*
 * nop() - architectural no-op ("mov r0, r0").
 * NOTE(review): the trailing ';' is part of the macro expansion, so
 * "nop();" yields an extra empty statement.  Kept as-is — existing
 * callers may rely on the embedded semicolon.
 */
#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
89f97da78SDavid Howells
/*
 * SEV/WFE/WFI instructions exist from ARMv6K onwards.  On older cores
 * only wfe() gets a no-op fallback; sev() and wfi() deliberately have
 * none — presumably their callers are configured out on pre-v6K builds,
 * so a stray use fails at compile time rather than silently doing
 * nothing (NOTE(review): confirm against callers).
 */
#if __LINUX_ARM_ARCH__ >= 7 || \
	(__LINUX_ARM_ARCH__ == 6 && defined(CONFIG_CPU_32v6K))
#define sev() __asm__ __volatile__ ("sev" : : : "memory")
#define wfe() __asm__ __volatile__ ("wfe" : : : "memory")
#define wfi() __asm__ __volatile__ ("wfi" : : : "memory")
#else
#define wfe() do { } while (0)
#endif
179f97da78SDavid Howells
/*
 * Barrier primitives, selected by architecture level:
 *
 *  - ARMv7+: native ISB/DSB/DMB instructions.  The macro argument is
 *    token-pasted into the mnemonic, e.g. dmb(ish) emits "dmb ish".
 *    CSDB (consumption-of-speculative-data barrier) is emitted as a
 *    raw opcode via .inst / .inst.w so that assemblers which predate
 *    the mnemonic still accept it; the encoding differs between ARM
 *    and Thumb-2 kernels.
 *
 *  - XScale3 or ARMv6: the CP15 c7 cache-operation interface provides
 *    equivalent barriers (c5,4 = prefetch flush/ISB; c10,4 = drain
 *    write buffer/DSB; c10,5 = DMB).  The option argument is accepted
 *    for API compatibility but ignored.
 *
 *  - FA526: has the ISB and DSB equivalents but no DMB; dmb() degrades
 *    to a compiler-only barrier.
 *
 *  - Anything older: only the drain-write-buffer (DSB-equivalent) op
 *    exists; isb() and dmb() are compiler-only barriers.
 */
#if __LINUX_ARM_ARCH__ >= 7
#define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
#define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
#define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
#ifdef CONFIG_THUMB2_KERNEL
#define CSDB ".inst.w 0xf3af8014"
#else
#define CSDB ".inst 0xe320f014"
#endif
#define csdb() __asm__ __volatile__(CSDB : : : "memory")
#elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
				    : : "r" (0) : "memory")
#elif defined(CONFIG_CPU_FA526)
#define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
				    : : "r" (0) : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#else
#define isb(x) __asm__ __volatile__ ("" : : : "memory")
#define dsb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
				    : : "r" (0) : "memory")
#define dmb(x) __asm__ __volatile__ ("" : : : "memory")
#endif
479f97da78SDavid Howells
/*
 * Architectures without a speculation barrier (pre-v7 above) get empty
 * CSDB / csdb() definitions so users need no conditionals of their own.
 */
#ifndef CSDB
#define CSDB
#endif
#ifndef csdb
#define csdb()
#endif
54a78d1565SRussell King
/*
 * "Heavy" memory barrier: some platforms need a SoC-specific action
 * (the soc_mb hook, dispatched by arm_heavy_mb()) on top of a plain
 * dsb() to make mb()/wmb() fully ordering.  NOTE(review): soc_mb is
 * assigned by platform code elsewhere — confirm against arch/arm/mm.
 * Without CONFIG_ARM_HEAVY_MB, __arm_heavy_mb() is just dsb().
 */
#ifdef CONFIG_ARM_HEAVY_MB
extern void (*soc_mb)(void);
extern void arm_heavy_mb(void);
#define __arm_heavy_mb(x...) do { dsb(x); arm_heavy_mb(); } while (0)
#else
#define __arm_heavy_mb(x...) dsb(x)
#endif
62f8130906SRussell King
/*
 * Mandatory barriers.  Real hardware barriers are only needed when DMA
 * may target bufferable memory or on SMP; mb()/wmb() additionally use
 * the heavy variant for SoCs that need it, while the DMA barriers only
 * need outer-shareable ordering (osh/oshst).  Otherwise a compiler
 * barrier is sufficient.
 */
#if defined(CONFIG_ARM_DMA_MEM_BUFFERABLE) || defined(CONFIG_SMP)
#define mb() __arm_heavy_mb()
#define rmb() dsb()
#define wmb() __arm_heavy_mb(st)
#define dma_rmb() dmb(osh)
#define dma_wmb() dmb(oshst)
#else
#define mb() barrier()
#define rmb() barrier()
#define wmb() barrier()
#define dma_rmb() barrier()
#define dma_wmb() barrier()
#endif
769f97da78SDavid Howells
/*
 * SMP barriers: ordering between CPUs only needs the inner-shareable
 * domain (ish/ishst); asm-generic/barrier.h builds smp_mb() etc. on
 * top of these.
 */
#define __smp_mb() dmb(ish)
#define __smp_rmb() __smp_mb()
#define __smp_wmb() dmb(ishst)
809f97da78SDavid Howells
#ifdef CONFIG_CPU_SPECTRE
/*
 * array_index_mask_nospec() - branchless bounds mask (Spectre v1).
 * @idx: untrusted array index
 * @sz:  number of valid elements
 *
 * Returns all-ones when idx < sz, zero when idx >= sz, with no
 * conditional branch the CPU could mispredict:
 *
 *   cmp %1, %2        @ idx - sz: carry set (no borrow) iff idx >= sz
 *   sbc %0, %1, %1    @ mask = idx - idx - !C  ->  C ? 0 : ~0UL
 *
 * The trailing CSDB stops speculative execution from consuming the
 * mask before it is architecturally resolved, so "idx & mask" cannot
 * speculatively index out of bounds.  "cc" is clobbered by the cmp.
 */
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
		"cmp %1, %2\n"
	"	sbc %0, %1, %1\n"
	CSDB
		: "=r" (mask)
		: "r" (idx), "Ir" (sz)
		: "cc");

	return mask;
}
#define array_index_mask_nospec array_index_mask_nospec
#endif
991d4238c5SRussell King
100335390d6SMichael S. Tsirkin #include <asm-generic/barrier.h>
101030d0178SPeter Zijlstra
1029f97da78SDavid Howells #endif /* !__ASSEMBLY__ */
1039f97da78SDavid Howells #endif /* __ASM_BARRIER_H */
104