/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_SYNCH_H
#define _ASM_POWERPC_SYNCH_H
#ifdef __KERNEL__

#include <asm/cputable.h>
#include <asm/feature-fixups.h>
#include <asm/ppc-opcode.h>

#ifndef __ASSEMBLY__
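/*
 * Boundaries of the __lwsync_fixup section populated by
 * MAKE_LWSYNC_SECTION_ENTRY() below; do_lwsync_fixups() walks the
 * entries at boot and patches the recorded barrier instructions to
 * lwsync on CPUs that support it.
 */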
extern unsigned int __start___lwsync_fixup, __stop___lwsync_fixup;
extern void do_lwsync_fixups(unsigned long value, void *fixup_start,
			     void *fixup_end);

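/*
 * eieio (Enforce In-order Execution of I/O) orders storage accesses,
 * notably accesses to caching-inhibited device memory. Book E parts
 * lack eieio and use the equivalent mbar instruction instead.
 */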
static inline void eieio(void)
{
	if (IS_ENABLED(CONFIG_BOOKE))
		__asm__ __volatile__ ("mbar" : : : "memory");
	else
		__asm__ __volatile__ ("eieio" : : : "memory");
}

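/*
 * isync is context-synchronizing: it waits for all preceding
 * instructions to complete and discards any prefetched instructions,
 * so later instructions see the effects of earlier context changes.
 */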
static inline void isync(void)
{
	__asm__ __volatile__ ("isync" : : : "memory");
}

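/*
 * Barrier required after a tlbiel (local TLB invalidate): ptesync
 * makes the invalidation visible before subsequent storage accesses.
 */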
static inline void ppc_after_tlbiel_barrier(void)
{
	asm volatile("ptesync": : :"memory");
	/*
	 * POWER9, POWER10 need a cp_abort after tlbiel to ensure the copy is
	 * invalidated correctly. If this is not done, the paste can take data
	 * from the physical address that was translated at copy time.
	 *
	 * POWER9 in practice does not need this, because address spaces with
	 * accelerators mapped will use tlbie (which does invalidate the copy)
	 * to invalidate translations. It's not possible to limit POWER10 this
	 * way due to local copy-paste.
	 */
	asm volatile(ASM_FTR_IFSET(PPC_CP_ABORT, "", %0) : : "i" (CPU_FTR_ARCH_31) : "memory");
}
#endif /* __ASSEMBLY__ */

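/*
 * lwsync orders everything except store-vs-load. All 64-bit CPUs
 * implement it, so it is used directly there. On e500 a full sync is
 * emitted instead, with its location recorded in the __lwsync_fixup
 * section so do_lwsync_fixups() can patch it to lwsync at boot on
 * cores that support it. Other 32-bit parts fall back to a full sync.
 */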
#if defined(__powerpc64__)
#    define LWSYNC	lwsync
#elif defined(CONFIG_PPC_E500)
#    define LWSYNC					\
	START_LWSYNC_SECTION(96);			\
	sync;						\
	MAKE_LWSYNC_SECTION_ENTRY(96, __lwsync_fixup);
#else
#    define LWSYNC	sync
#endif

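/*
 * Acquire/release barriers for locks and atomics. The acquire barrier
 * (after a load, e.g. taking a lock) defaults to isync, recorded in
 * the __lwsync_fixup section so it can be patched to lwsync where the
 * CPU supports it; the release barrier (before a store, e.g. dropping
 * a lock) is LWSYNC. Value-returning atomics use a full sync on entry
 * and exit to provide full-barrier semantics. On non-SMP kernels all
 * of these compile away.
 */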
#ifdef CONFIG_SMP
#define __PPC_ACQUIRE_BARRIER				\
	START_LWSYNC_SECTION(97);			\
	isync;						\
	MAKE_LWSYNC_SECTION_ENTRY(97, __lwsync_fixup);
#define PPC_ACQUIRE_BARRIER	 "\n" stringify_in_c(__PPC_ACQUIRE_BARRIER)
#define PPC_RELEASE_BARRIER	 stringify_in_c(LWSYNC) "\n"
#define PPC_ATOMIC_ENTRY_BARRIER "\n" stringify_in_c(sync) "\n"
#define PPC_ATOMIC_EXIT_BARRIER	 "\n" stringify_in_c(sync) "\n"
#else
#define PPC_ACQUIRE_BARRIER
#define PPC_RELEASE_BARRIER
#define PPC_ATOMIC_ENTRY_BARRIER
#define PPC_ATOMIC_EXIT_BARRIER
#endif

#endif /* __KERNEL__ */
#endif	/* _ASM_POWERPC_SYNCH_H */