/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Author: Huacai Chen <chenhuacai@loongson.cn>
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_FPU_H
#define _ASM_FPU_H

#include <linux/sched.h>
#include <linux/sched/task_stack.h>
#include <linux/ptrace.h>
#include <linux/thread_info.h>
#include <linux/bitops.h>

#include <asm/cpu.h>
#include <asm/cpu-features.h>
#include <asm/current.h>
#include <asm/loongarch.h>
#include <asm/processor.h>
#include <asm/ptrace.h>

struct sigcontext;

extern void kernel_fpu_begin(void);
extern void kernel_fpu_end(void);
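
/*
 * Usage sketch (illustrative only): kernel-mode code that needs to execute
 * FP/LSX/LASX instructions brackets them with kernel_fpu_begin() and
 * kernel_fpu_end(), which preserve and later restore the interrupted
 * context's FPU state; the bracketed region must not sleep:
 *
 *	kernel_fpu_begin();
 *	... FP/LSX/LASX instructions ...
 *	kernel_fpu_end();
 */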

extern void _init_fpu(unsigned int);
extern void _save_fp(struct loongarch_fpu *);
extern void _restore_fp(struct loongarch_fpu *);

extern void _save_lsx(struct loongarch_fpu *fpu);
extern void _restore_lsx(struct loongarch_fpu *fpu);
extern void _init_lsx_upper(void);
extern void _restore_lsx_upper(struct loongarch_fpu *fpu);

extern void _save_lasx(struct loongarch_fpu *fpu);
extern void _restore_lasx(struct loongarch_fpu *fpu);
extern void _init_lasx_upper(void);
extern void _restore_lasx_upper(struct loongarch_fpu *fpu);

static inline void enable_lsx(void);
static inline void disable_lsx(void);
static inline void save_lsx(struct task_struct *t);
static inline void restore_lsx(struct task_struct *t);

static inline void enable_lasx(void);
static inline void disable_lasx(void);
static inline void save_lasx(struct task_struct *t);
static inline void restore_lasx(struct task_struct *t);

/*
 * Mask the FCSR Cause bits according to the Enable bits, observing
 * that Unimplemented is always enabled.
 */
static inline unsigned long mask_fcsr_x(unsigned long fcsr)
{
	return fcsr & ((fcsr & FPU_CSR_ALL_E) <<
		(ffs(FPU_CSR_ALL_X) - ffs(FPU_CSR_ALL_E)));
}

static inline int is_fp_enabled(void)
{
	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_FPEN) ?
		1 : 0;
}

static inline int is_lsx_enabled(void)
{
	if (!cpu_has_lsx)
		return 0;

	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LSXEN) ?
		1 : 0;
}

static inline int is_lasx_enabled(void)
{
	if (!cpu_has_lasx)
		return 0;

	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LASXEN) ?
		1 : 0;
}

static inline int is_simd_enabled(void)
{
	return is_lsx_enabled() | is_lasx_enabled();
}

#define enable_fpu()		set_csr_euen(CSR_EUEN_FPEN)

#define disable_fpu()		clear_csr_euen(CSR_EUEN_FPEN)

#define clear_fpu_owner()	clear_thread_flag(TIF_USEDFPU)

static inline int is_fpu_owner(void)
{
	return test_thread_flag(TIF_USEDFPU);
}

static inline void __own_fpu(void)
{
	enable_fpu();
	set_thread_flag(TIF_USEDFPU);
	KSTK_EUEN(current) |= CSR_EUEN_FPEN;
}

static inline void own_fpu_inatomic(int restore)
{
	if (cpu_has_fpu && !is_fpu_owner()) {
		__own_fpu();
		if (restore)
			_restore_fp(&current->thread.fpu);
	}
}

static inline void own_fpu(int restore)
{
	preempt_disable();
	own_fpu_inatomic(restore);
	preempt_enable();
}

static inline void lose_fpu_inatomic(int save, struct task_struct *tsk)
{
	if (is_fpu_owner()) {
		if (!is_simd_enabled()) {
			if (save)
				_save_fp(&tsk->thread.fpu);
			disable_fpu();
		} else {
			if (save) {
				if (!is_lasx_enabled())
					save_lsx(tsk);
				else
					save_lasx(tsk);
			}
			disable_fpu();
			disable_lsx();
			disable_lasx();
			clear_tsk_thread_flag(tsk, TIF_USEDSIMD);
		}
		clear_tsk_thread_flag(tsk, TIF_USEDFPU);
	}
	KSTK_EUEN(tsk) &= ~(CSR_EUEN_FPEN | CSR_EUEN_LSXEN | CSR_EUEN_LASXEN);
}

static inline void lose_fpu(int save)
{
	preempt_disable();
	lose_fpu_inatomic(save, current);
	preempt_enable();
}
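
/*
 * Usage sketch (illustrative only, derived from the helpers above): code
 * that wants to inspect or modify the saved FPU context of the current
 * task first flushes the live registers with lose_fpu(1), then works on
 * current->thread.fpu, and finally takes the FPU back with own_fpu(1),
 * which restores the scalar FP registers from that saved copy:
 *
 *	lose_fpu(1);
 *	... update current->thread.fpu ...
 *	own_fpu(1);
 *
 * Passing 0 instead skips the save (or restore) when the register
 * contents are about to be discarded or re-initialized anyway.
 */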

static inline void init_fpu(void)
{
	unsigned int fcsr = current->thread.fpu.fcsr;

	__own_fpu();
	_init_fpu(fcsr);
	set_used_math();
}

static inline void save_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_save_fp(&tsk->thread.fpu);
}

static inline void restore_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_restore_fp(&tsk->thread.fpu);
}

static inline union fpureg *get_fpu_regs(struct task_struct *tsk)
{
	if (tsk == current) {
		preempt_disable();
		if (is_fpu_owner())
			_save_fp(&current->thread.fpu);
		preempt_enable();
	}

	return tsk->thread.fpu.fpr;
}

static inline int is_simd_owner(void)
{
	return test_thread_flag(TIF_USEDSIMD);
}

#ifdef CONFIG_CPU_HAS_LSX

static inline void enable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(CSR_EUEN_LSXEN, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}

static inline void disable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(0, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}

static inline void save_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_save_lsx(&t->thread.fpu);
}

static inline void restore_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx(&t->thread.fpu);
}

static inline void init_lsx_upper(void)
{
	if (cpu_has_lsx)
		_init_lsx_upper();
}

static inline void restore_lsx_upper(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx_upper(&t->thread.fpu);
}

#else
static inline void enable_lsx(void) {}
static inline void disable_lsx(void) {}
static inline void save_lsx(struct task_struct *t) {}
static inline void restore_lsx(struct task_struct *t) {}
static inline void init_lsx_upper(void) {}
static inline void restore_lsx_upper(struct task_struct *t) {}
#endif

#ifdef CONFIG_CPU_HAS_LASX

static inline void enable_lasx(void)
{
	if (cpu_has_lasx)
		csr_xchg32(CSR_EUEN_LASXEN, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
}

static inline void disable_lasx(void)
{
	if (cpu_has_lasx)
		csr_xchg32(0, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
}

static inline void save_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_save_lasx(&t->thread.fpu);
}

static inline void restore_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx(&t->thread.fpu);
}

static inline void init_lasx_upper(void)
{
	if (cpu_has_lasx)
		_init_lasx_upper();
}

static inline void restore_lasx_upper(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx_upper(&t->thread.fpu);
}

#else
static inline void enable_lasx(void) {}
static inline void disable_lasx(void) {}
static inline void save_lasx(struct task_struct *t) {}
static inline void restore_lasx(struct task_struct *t) {}
static inline void init_lasx_upper(void) {}
static inline void restore_lasx_upper(struct task_struct *t) {}
#endif

static inline int thread_lsx_context_live(void)
{
	if (!cpu_has_lsx)
		return 0;

	return test_thread_flag(TIF_LSX_CTX_LIVE);
}

static inline int thread_lasx_context_live(void)
{
	if (!cpu_has_lasx)
		return 0;

	return test_thread_flag(TIF_LASX_CTX_LIVE);
}

#endif /* _ASM_FPU_H */