/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 *  FPU support code, moved here from head.S so that it can be used
 *  by chips which use other head-whatever.S files.
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *    Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
 *    Copyright (C) 1996 Paul Mackerras.
 *    Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/ptrace.h>
#include <asm/export.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#ifdef CONFIG_VSX
#define __REST_32FPVSRS(n,c,base)					\
BEGIN_FTR_SECTION							\
	b	2f;							\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
	REST_32FPRS(n,base);						\
	b	3f;							\
2:	REST_32VSRS(n,c,base);						\
3:

#define __SAVE_32FPVSRS(n,c,base)					\
BEGIN_FTR_SECTION							\
	b	2f;							\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);					\
	SAVE_32FPRS(n,base);						\
	b	3f;							\
2:	SAVE_32VSRS(n,c,base);						\
3:
#else
#define __REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
#define __SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
#endif
#define REST_32FPVSRS(n,c,base) __REST_32FPVSRS(n,__REG_##c,__REG_##base)
#define SAVE_32FPVSRS(n,c,base) __SAVE_32FPVSRS(n,__REG_##c,__REG_##base)

/*
 * Load state from memory into FP registers including FPSCR.
 * Assumes the caller has enabled FP in the MSR.
 */
_GLOBAL(load_fp_state)
	lfd	fr0,FPSTATE_FPSCR(r3)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, R4, R3)
	blr
EXPORT_SYMBOL(load_fp_state)
_ASM_NOKPROBE_SYMBOL(load_fp_state); /* used by restore_math */

/*
 * Store FP state into memory, including FPSCR.
 * Assumes the caller has enabled FP in the MSR.
 */
_GLOBAL(store_fp_state)
	SAVE_32FPVSRS(0, R4, R3)
	mffs	fr0
	stfd	fr0,FPSTATE_FPSCR(r3)
	blr
EXPORT_SYMBOL(store_fp_state)

/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 * Note that on 32-bit this can only use registers that will be
 * restored by fast_exception_return, i.e. r3 - r6, r10 and r11.
 */
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC
	MTMSRD(r5)			/* enable use of fpu now */
	isync
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG_THREAD	/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4
#else
	ld	r4,PACACURRENT(r13)
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP
	or	r12,r12,r4
	std	r12,_MSR(r1)
#endif
	/* Don't care if r4 overflows, this is desired behaviour */
	lbz	r4,THREAD_LOAD_FP(r5)
	addi	r4,r4,1
	stb	r4,THREAD_LOAD_FP(r5)
	addi	r10,r5,THREAD_FPSTATE
	lfd	fr0,FPSTATE_FPSCR(r10)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, R4, R10)
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr
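
/*
 * Note on the saved-MSR handling in load_up_fpu above (a rough,
 * informational sketch of the surrounding conventions, not an
 * interface definition): on 32-bit the exception prolog leaves the
 * interrupted context's MSR in r9 and fast_exception_return writes it
 * back to SRR1, so setting MSR_FP in r9 is what re-enables FP for the
 * task; on 64-bit the saved MSR arrives in r12 and is written back
 * into the exception frame at _MSR(r1).  The THREAD_LOAD_FP counter
 * bumped here is what the lazy FP restore code (restore_math and
 * friends) consults to decide whether to reload FP state on return
 * to userspace.
 */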

/*
 * save_fpu(tsk)
 * Save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 */
_GLOBAL(save_fpu)
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r6,THREAD_FPSAVEAREA(r3)
	PPC_LL	r5,PT_REGS(r3)
	PPC_LCMPI	0,r6,0
	bne	2f
	addi	r6,r3,THREAD_FPSTATE
2:	SAVE_32FPVSRS(0, R4, R6)
	mffs	fr0
	stfd	fr0,FPSTATE_FPSCR(r6)
	blr

/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 */

_GLOBAL(cvt_fd)
	lfs	0,0(r3)
	stfd	0,0(r4)
	blr

_GLOBAL(cvt_df)
	lfd	0,0(r3)
	stfs	0,0(r4)
	blr
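
/*
 * C-level view of the entry points in this file (a rough sketch only,
 * inferred from the register usage above; the authoritative
 * prototypes live in the powerpc asm headers, e.g. asm/switch_to.h
 * for the state save/restore helpers):
 *
 *	void load_fp_state(struct thread_fp_state *fp);
 *	void store_fp_state(struct thread_fp_state *fp);
 *	void save_fpu(struct task_struct *tsk);
 *	void cvt_fd(float *from, double *to);
 *	void cvt_df(double *from, float *to);
 *
 * load_up_fpu is not called from C; it is reached only from the FP
 * unavailable exception path.
 */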