/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Kernel Userspace Access Protection (KUAP) for Book E parts.
 *
 * Here user access is "locked" by writing 0 to the PID SPR and "unlocked"
 * by writing the current thread's PID back (presumably so user translations
 * stop matching while the kernel runs with PID 0 — confirm against the
 * Book E MMU documentation). The saved/locked state lives in regs->kuap:
 * 0 means user access was locked, non-zero is the PID to restore.
 */
#ifndef _ASM_POWERPC_KUP_BOOKE_H_
#define _ASM_POWERPC_KUP_BOOKE_H_

#include <asm/bug.h>
#include <asm/mmu.h>

#ifdef CONFIG_PPC_KUAP

#ifdef __ASSEMBLY__

/* No AMR on Book E; provide an empty stub for the common asm callers. */
.macro kuap_check_amr gpr1, gpr2
.endm

#else

#include <linux/sched.h>

#include <asm/reg.h>

/* Lock user access: zero PID; isync makes the mtspr take effect. */
static __always_inline void __kuap_lock(void)
{
	mtspr(SPRN_PID, 0);
	isync();
}
#define __kuap_lock __kuap_lock

/* Save the interrupted context's PID in regs->kuap, then lock user access. */
static __always_inline void __kuap_save_and_lock(struct pt_regs *regs)
{
	regs->kuap = mfspr(SPRN_PID);
	mtspr(SPRN_PID, 0);
	isync();
}
#define __kuap_save_and_lock __kuap_save_and_lock

/*
 * Unlock user access when returning to userspace by restoring the
 * thread's PID. No-op when KUAP is runtime-disabled.
 */
static __always_inline void kuap_user_restore(struct pt_regs *regs)
{
	if (kuap_is_disabled())
		return;

	mtspr(SPRN_PID, current->thread.pid);

	/* Context synchronisation is performed by rfi */
}

/*
 * Returning to an interrupted kernel context: re-open user access only
 * if it was open (saved PID non-zero) when the interrupt was taken.
 * Note the @kuap argument is unused here; the saved state is regs->kuap.
 */
static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap)
{
	if (regs->kuap)
		mtspr(SPRN_PID, current->thread.pid);

	/* Context synchronisation is performed by rfi */
}

#ifdef CONFIG_PPC_KUAP_DEBUG
/* Debug build: warn if user access is not locked (PID != 0) where expected. */
static __always_inline unsigned long __kuap_get_and_assert_locked(void)
{
	WARN_ON_ONCE(mfspr(SPRN_PID));

	return 0;
}
#define __kuap_get_and_assert_locked __kuap_get_and_assert_locked
#endif

/*
 * Open a user-access window by writing @val (the thread's PID) to SPRN_PID.
 * The mtspr/isync pair is patched out by the feature fixup when
 * MMU_FTR_KUAP is not set, making this a no-op on non-KUAP configs.
 */
static __always_inline void uaccess_begin_booke(unsigned long val)
{
	asm(ASM_MMU_FTR_IFSET("mtspr %0, %1; isync", "", %2) : :
	    "i"(SPRN_PID), "r"(val), "i"(MMU_FTR_KUAP) : "memory");
}

/* Close the user-access window (PID = 0); patched out without MMU_FTR_KUAP. */
static __always_inline void uaccess_end_booke(void)
{
	asm(ASM_MMU_FTR_IFSET("mtspr %0, %1; isync", "", %2) : :
	    "i"(SPRN_PID), "r"(0), "i"(MMU_FTR_KUAP) : "memory");
}

/*
 * Access here is all-or-nothing: @to, @from, @size and @dir are ignored,
 * the whole user address space is opened via the thread's PID.
 */
static __always_inline void allow_user_access(void __user *to, const void __user *from,
					      unsigned long size, unsigned long dir)
{
	uaccess_begin_booke(current->thread.pid);
}

/* Close user access; @dir is ignored (see allow_user_access()). */
static __always_inline void prevent_user_access(unsigned long dir)
{
	uaccess_end_booke();
}

/*
 * Close user access and return the previous PID so the caller can hand it
 * to restore_user_access() later.
 */
static __always_inline unsigned long prevent_user_access_return(void)
{
	unsigned long flags = mfspr(SPRN_PID);

	uaccess_end_booke();

	return flags;
}

/* Re-open user access only if @flags says it was open before (PID != 0). */
static __always_inline void restore_user_access(unsigned long flags)
{
	if (flags)
		uaccess_begin_booke(current->thread.pid);
}

/*
 * A user-access fault is a genuine KUAP violation when access was locked
 * (saved PID == 0) at the time of the fault.
 */
static __always_inline bool
__bad_kuap_fault(struct pt_regs *regs, unsigned long address, bool is_write)
{
	return !regs->kuap;
}

#endif /* !__ASSEMBLY__ */

#endif /* CONFIG_PPC_KUAP */

#endif /* _ASM_POWERPC_KUP_BOOKE_H_ */