/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_SYNCH_H
#define _ASM_POWERPC_SYNCH_H
#ifdef __KERNEL__

#include <asm/cputable.h>
#include <asm/feature-fixups.h>
#include <asm/ppc-opcode.h>

#ifndef __ASSEMBLY__
/*
 * Bounds of the __lwsync_fixup section (entries emitted by
 * MAKE_LWSYNC_SECTION_ENTRY below); do_lwsync_fixups() walks this range
 * at boot to patch the recorded barrier sites.
 */
extern unsigned int __start___lwsync_fixup, __stop___lwsync_fixup;
extern void do_lwsync_fixups(unsigned long value, void *fixup_start,
			     void *fixup_end);

/*
 * I/O ordering barrier: Book E parts provide mbar instead of the
 * classic eieio instruction; both forms clobber "memory" so the
 * compiler cannot reorder accesses around them.
 */
static inline void eieio(void)
{
	if (IS_ENABLED(CONFIG_BOOKE))
		__asm__ __volatile__ ("mbar" : : : "memory");
	else
		__asm__ __volatile__ ("eieio" : : : "memory");
}

/* Instruction-synchronizing barrier ("memory" clobber blocks reordering). */
static inline void isync(void)
{
	__asm__ __volatile__ ("isync" : : : "memory");
}

/* Barrier required after a tlbiel-based local TLB invalidation sequence. */
static inline void ppc_after_tlbiel_barrier(void)
{
	asm volatile("ptesync": : :"memory");
	/*
	 * POWER9, POWER10 need a cp_abort after tlbiel to ensure the copy is
	 * invalidated correctly. If this is not done, the paste can take data
	 * from the physical address that was translated at copy time.
	 *
	 * POWER9 in practice does not need this, because address spaces with
	 * accelerators mapped will use tlbie (which does invalidate the copy)
	 * to invalidate translations. It's not possible to limit POWER10 this
	 * way due to local copy-paste.
	 */
	/* cp_abort is feature-patched in only when CPU_FTR_ARCH_31 is set. */
	asm volatile(ASM_FTR_IFSET(PPC_CP_ABORT, "", %0) : : "i" (CPU_FTR_ARCH_31) : "memory");
}
#endif /* __ASSEMBLY__ */

/*
 * LWSYNC: lightweight sync on 64-bit; on E500 a full sync is emitted and
 * the site is recorded in __lwsync_fixup so boot code can rewrite it
 * (see do_lwsync_fixups above); everywhere else a plain sync.
 */
#if defined(__powerpc64__)
#    define LWSYNC	lwsync
#elif defined(CONFIG_E500)
#    define LWSYNC					\
	START_LWSYNC_SECTION(96);			\
	sync;						\
	MAKE_LWSYNC_SECTION_ENTRY(96, __lwsync_fixup);
#else
#    define LWSYNC	sync
#endif

#ifdef CONFIG_SMP
/*
 * Acquire barrier: isync, with the site recorded in __lwsync_fixup so it
 * can be patched at boot (presumably to lwsync on CPUs where that is the
 * preferred acquire — confirm against do_lwsync_fixups' caller).
 */
#define __PPC_ACQUIRE_BARRIER				\
	START_LWSYNC_SECTION(97);			\
	isync;						\
	MAKE_LWSYNC_SECTION_ENTRY(97, __lwsync_fixup);
#define PPC_ACQUIRE_BARRIER	"\n" stringify_in_c(__PPC_ACQUIRE_BARRIER)
#define PPC_RELEASE_BARRIER	stringify_in_c(LWSYNC) "\n"
#define PPC_ATOMIC_ENTRY_BARRIER "\n" stringify_in_c(sync) "\n"
#define PPC_ATOMIC_EXIT_BARRIER	"\n" stringify_in_c(sync) "\n"
#else
/* UP builds: no cross-CPU ordering needed, barriers compile away. */
#define PPC_ACQUIRE_BARRIER
#define PPC_RELEASE_BARRIER
#define PPC_ATOMIC_ENTRY_BARRIER
#define PPC_ATOMIC_EXIT_BARRIER
#endif

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_SYNCH_H */