/*
 * FPU support code, moved here from head.S so that it can be used
 * by chips which use other head-whatever.S files.
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 * Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
 * Copyright (C) 1996 Paul Mackerras.
 * Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/ptrace.h>
#include <asm/export.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#ifdef CONFIG_VSX
/*
 * Restore 32 FP/VSX registers starting at register 'n' from 'base'.
 * The BEGIN_FTR_SECTION/END_FTR_SECTION_IFSET pair is resolved by the
 * feature-fixup machinery: on CPUs with CPU_FTR_VSX the 'b 2f' is live,
 * so the full VSX registers are restored (REST_32VSRS, with 'c' as a
 * scratch register); on other CPUs only the classic FP registers are
 * restored (REST_32FPRS) and the VSX path is skipped via 'b 3f'.
 */
#define __REST_32FPVSRS(n,c,base)	\
BEGIN_FTR_SECTION			\
	b	2f;			\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);	\
	REST_32FPRS(n,base);		\
	b	3f;			\
2:	REST_32VSRS(n,c,base);		\
3:

/* Save counterpart of __REST_32FPVSRS: identical feature-section dispatch. */
#define __SAVE_32FPVSRS(n,c,base)	\
BEGIN_FTR_SECTION			\
	b	2f;			\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);	\
	SAVE_32FPRS(n,base);		\
	b	3f;			\
2:	SAVE_32VSRS(n,c,base);		\
3:
#else
/* Without CONFIG_VSX there is no VSX state; plain FP save/restore only. */
#define __REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
#define __SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
#endif
/* Outer wrappers: translate the R<n> symbolic names to raw register numbers. */
#define REST_32FPVSRS(n,c,base) __REST_32FPVSRS(n,__REG_##c,__REG_##base)
#define SAVE_32FPVSRS(n,c,base) __SAVE_32FPVSRS(n,__REG_##c,__REG_##base)

/*
 * Load state from memory into FP registers including FPSCR.
 * Assumes the caller has enabled FP in the MSR.
 * In: r3 = pointer to the fp_state to load; r4 is clobbered as scratch
 * on the VSX path; fr0 is clobbered.
 */
_GLOBAL(load_fp_state)
	lfd	fr0,FPSTATE_FPSCR(r3)	/* fr0 = saved FPSCR image */
	MTFSF_L(fr0)			/* restore full FPSCR */
	REST_32FPVSRS(0, R4, R3)	/* restore fr0-fr31 (or vsr0-31); r4 scratch */
	blr
EXPORT_SYMBOL(load_fp_state)

/*
 * Store FP state into memory, including FPSCR
 * Assumes the caller has enabled FP in the MSR.
 * In: r3 = pointer to the destination fp_state; r4 is clobbered as
 * scratch on the VSX path; fr0 is clobbered.
 */
_GLOBAL(store_fp_state)
	SAVE_32FPVSRS(0, R4, R3)	/* save fr0-fr31 (or vsr0-31) first */
	mffs	fr0			/* then capture the live FPSCR... */
	stfd	fr0,FPSTATE_FPSCR(r3)	/* ...into the same fp_state */
	blr
EXPORT_SYMBOL(store_fp_state)

/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 * Note that on 32-bit this can only use registers that will be
 * restored by fast_exception_return, i.e. r3 - r6, r10 and r11.
 */
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP		/* enable FP for the kernel right now */
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h		/* ...and VSX too on VSX-capable CPUs */
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC
	MTMSRD(r5)			/* enable use of fpu now */
	isync
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG_THREAD	/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	/*
	 * NOTE(review): r9 is never written in this function before this
	 * point — presumably it carries the MSR image to be restored on
	 * exception exit, set up by the entry/prologue code; confirm
	 * against the 32-bit exception entry path.
	 */
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4		/* merge the thread's FP exception-mode bits */
#else
	ld	r4,PACACURRENT(r13)	/* r4 = current task_struct */
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	/* NOTE(review): r12 likewise appears to be live-in (saved MSR) — confirm. */
	ori	r12,r12,MSR_FP
	or	r12,r12,r4		/* merge FP exception-mode bits */
	std	r12,_MSR(r1)		/* rewrite the MSR saved in the exception frame */
#endif
	/* Don't care if r4 overflows, this is desired behaviour */
	lbz	r4,THREAD_LOAD_FP(r5)	/* bump the thread's FP load counter */
	addi	r4,r4,1
	stb	r4,THREAD_LOAD_FP(r5)
	addi	r10,r5,THREAD_FPSTATE	/* r10 = &thread->fp_state */
	lfd	fr0,FPSTATE_FPSCR(r10)
	MTFSF_L(fr0)			/* restore FPSCR first... */
	REST_32FPVSRS(0, R4, R10)	/* ...then the 32 FP/VSX registers */
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr

/*
 * save_fpu(tsk)
 * Save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 * In: r3 = pointer to the task_struct whose FP state is to be saved.
 * Clobbers r3-r6 and fr0. Assumes FP is already usable in the MSR.
 */
_GLOBAL(save_fpu)
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r6,THREAD_FPSAVEAREA(r3)	/* r6 = thread->fp_save_area (may be 0) */
	PPC_LL	r5,PT_REGS(r3)		/* NOTE(review): r5 is loaded but never used
					 * below — looks like a leftover; confirm
					 * before removing. */
	PPC_LCMPI	0,r6,0
	bne	2f			/* explicit save area set? save there */
	addi	r6,r3,THREAD_FPSTATE	/* otherwise save into thread->fp_state */
2:	SAVE_32FPVSRS(0, R4, R6)	/* save fr0-fr31 (or vsr0-31); r4 scratch */
	mffs	fr0			/* capture the live FPSCR... */
	stfd	fr0,FPSTATE_FPSCR(r6)	/* ...and store it with the registers */
	blr

/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 */

/* cvt_fd: read a single at *r3 and store it widened to a double at *r4. */
_GLOBAL(cvt_fd)
	lfs	0,0(r3)			/* lfs converts single -> double into fr0 */
	stfd	0,0(r4)
	blr

/* cvt_df: read a double at *r3 and store it rounded to a single at *r4. */
_GLOBAL(cvt_df)
	lfd	0,0(r3)
	stfs	0,0(r4)			/* stfs rounds double -> single on store */
	blr