/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * Offset to the current process status flags, the first 32 bytes of the
 * stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *		       struct thread_info *next_ti, s32 fp_save)
 *
 * Switch register context from 'prev' (a0) to 'next' (a1).  a2 holds
 * next's thread_info pointer, a3 (fp_save) selects whether/which FP
 * context of prev must be saved.  Returns prev in v0.
 */
	.align	5
	LEAF(resume)
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)		# save prev's CP0 Status
	cpu_save_nonscratch a0			# save prev's non-scratch
						# (callee-saved) registers
	LONG_S	ra, THREAD_REG31(a0)		# save prev's return address

	/*
	 * Check whether we need to save any FP context. FP context is saved
	 * iff the process has used the context with the scalar FPU or the MSA
	 * ASE in the current time slice, as indicated by _TIF_USEDFPU and
	 * _TIF_USEDMSA respectively. switch_to will have set fp_save
	 * accordingly to an FP_SAVE_ enum value.
	 */
	beqz	a3, 2f				# fp_save == 0: nothing to save

	/*
	 * We do. Clear the saved CU1 bit for prev, such that next time it is
	 * scheduled it will start in userland with the FPU disabled. If the
	 * task uses the FPU then it will be enabled again via the do_cpu trap.
	 * This allows us to lazily restore the FP context.
	 */
	PTR_L	t3, TASK_THREAD_INFO(a0)
	LONG_L	t0, ST_OFF(t3)			# Status word saved on prev's
						# kernel stack
	li	t1, ~ST0_CU1
	and	t0, t0, t1			# clear coprocessor-1 usable bit
	LONG_S	t0, ST_OFF(t3)

	/* Check whether we're saving scalar or vector context. */
	bgtz	a3, 1f				# fp_save > 0: scalar FP only

	/* Save 128b MSA vector context. */
	msa_save_all	a0
	b	2f

1:	/* Save 32b/64b scalar FP context. */
	fpu_save_double a0 t0 t1		# c0_status passed in t0
						# clobbers t1
2:

#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
	/* On UP, publish next's stack canary as the global guard value. */
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order of restoring the registers takes care of the race
	 * updating $28, $29 and kernelsp without disabling ints.
	 */
	move	$28, a2				# $28 = next's thread_info
	cpu_restore_nonscratch a1		# restore next's non-scratch
						# registers, including sp

	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2		# record next's kernel stack top
#ifdef CONFIG_MIPS_MT_SMTC
	/* Read-modify-writes of Status must be atomic on a VPE */
	mfc0	t2, CP0_TCSTATUS
	ori	t1, t2, TCSTATUS_IXMT		# inhibit interrupts for this TC
	mtc0	t1, CP0_TCSTATUS
	andi	t2, t2, TCSTATUS_IXMT		# remember the original IXMT bit
	_ehb
	DMT	8				# dmt t0 - disable other threads;
						# t0 = prior VPEControl
	move	t1, ra
	jal	mips_ihb			# instruction hazard barrier
						# (preserve ra around the call)
	move	ra, t1
#endif /* CONFIG_MIPS_MT_SMTC */
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01			# IM7..IM0 and IE bit mask
	and	t1, a3				# keep current int mask/enable
	LONG_L	a2, THREAD_STATUS(a1)		# take next's saved Status...
	nor	a3, $0, a3
	and	a2, a3				# ...without its IM/IE bits...
	or	a2, t1				# ...merged with the current ones
	mtc0	a2, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
	_ehb
	andi	t0, t0, VPECONTROL_TE		# was multithreading enabled
						# before the DMT above?
	beqz	t0, 1f
	emt					# yes: re-enable it
1:
	mfc0	t1, CP0_TCSTATUS
	xori	t1, t1, TCSTATUS_IXMT		# drop the IXMT we set...
	or	t1, t1, t2			# ...but restore the original bit
	mtc0	t1, CP0_TCSTATUS
	_ehb
#endif /* CONFIG_MIPS_MT_SMTC */
	move	v0, a0				# return prev
	jr	ra
	END(resume)

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
	mfc0	t0, CP0_STATUS			# fpu_save_double takes the
						# Status value in t0
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
	mfc0	t0, CP0_STATUS			# fpu_restore_double takes the
						# Status value in t0
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)

#ifdef CONFIG_CPU_HAS_MSA

/*
 * Save a thread's MSA vector context.
 */
LEAF(_save_msa)
	msa_save_all	a0
	jr	ra
	END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 */
LEAF(_restore_msa)
	msa_restore_all	a0
	jr	ra
	END(_restore_msa)

#endif

/*
 * Load the FPU with signalling NANS. This bit pattern we're using has
 * the property that no matter whether considered as single or as double
 * precision represents signaling NANS.
 *
 * We initialize fcr31 to rounding to nearest, no exceptions.
 */

#define FPU_DEFAULT 0x00000000

LEAF(_init_fpu)
#ifdef CONFIG_MIPS_MT_SMTC
	/* Rather than manipulate per-VPE Status, set per-TC bit in TCStatus */
	mfc0	t0, CP0_TCSTATUS
	/* Bit position is the same for Status, TCStatus */
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_TCSTATUS
#else /* Normal MIPS CU1 enable */
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS
#endif /* CONFIG_MIPS_MT_SMTC */
	enable_fpu_hazard			# wait until CP1 access is legal

	li	t1, FPU_DEFAULT
	ctc1	t1, fcr31			# round to nearest, no exceptions

	li	t1, -1				# SNaN

#ifdef CONFIG_64BIT
	sll	t0, t0, 5			# move Status.FR to the sign bit
	bgez	t0, 1f				# 16 / 32 register mode?

	/* FR=1 (32-register mode): the odd-numbered registers exist too. */
	dmtc1	t1, $f1
	dmtc1	t1, $f3
	dmtc1	t1, $f5
	dmtc1	t1, $f7
	dmtc1	t1, $f9
	dmtc1	t1, $f11
	dmtc1	t1, $f13
	dmtc1	t1, $f15
	dmtc1	t1, $f17
	dmtc1	t1, $f19
	dmtc1	t1, $f21
	dmtc1	t1, $f23
	dmtc1	t1, $f25
	dmtc1	t1, $f27
	dmtc1	t1, $f29
	dmtc1	t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
	/* 32b FPU: write the low word of every FP register. */
	mtc1	t1, $f0
	mtc1	t1, $f1
	mtc1	t1, $f2
	mtc1	t1, $f3
	mtc1	t1, $f4
	mtc1	t1, $f5
	mtc1	t1, $f6
	mtc1	t1, $f7
	mtc1	t1, $f8
	mtc1	t1, $f9
	mtc1	t1, $f10
	mtc1	t1, $f11
	mtc1	t1, $f12
	mtc1	t1, $f13
	mtc1	t1, $f14
	mtc1	t1, $f15
	mtc1	t1, $f16
	mtc1	t1, $f17
	mtc1	t1, $f18
	mtc1	t1, $f19
	mtc1	t1, $f20
	mtc1	t1, $f21
	mtc1	t1, $f22
	mtc1	t1, $f23
	mtc1	t1, $f24
	mtc1	t1, $f25
	mtc1	t1, $f26
	mtc1	t1, $f27
	mtc1	t1, $f28
	mtc1	t1, $f29
	mtc1	t1, $f30
	mtc1	t1, $f31

#ifdef CONFIG_CPU_MIPS32_R2
	.set	push
	.set	mips64r2
	sll	t0, t0, 5			# is Status.FR set?
	bgez	t0, 1f				# no: skip setting upper 32b

	/* FR=1 on MIPS32r2: registers are 64b wide; fill the high words. */
	mthc1	t1, $f0
	mthc1	t1, $f1
	mthc1	t1, $f2
	mthc1	t1, $f3
	mthc1	t1, $f4
	mthc1	t1, $f5
	mthc1	t1, $f6
	mthc1	t1, $f7
	mthc1	t1, $f8
	mthc1	t1, $f9
	mthc1	t1, $f10
	mthc1	t1, $f11
	mthc1	t1, $f12
	mthc1	t1, $f13
	mthc1	t1, $f14
	mthc1	t1, $f15
	mthc1	t1, $f16
	mthc1	t1, $f17
	mthc1	t1, $f18
	mthc1	t1, $f19
	mthc1	t1, $f20
	mthc1	t1, $f21
	mthc1	t1, $f22
	mthc1	t1, $f23
	mthc1	t1, $f24
	mthc1	t1, $f25
	mthc1	t1, $f26
	mthc1	t1, $f27
	mthc1	t1, $f28
	mthc1	t1, $f29
	mthc1	t1, $f30
	mthc1	t1, $f31
1:	.set	pop
#endif /* CONFIG_CPU_MIPS32_R2 */
#else
	/* 64b FPU: 64b writes to the even registers cover both FR modes. */
	.set	arch=r4000
	dmtc1	t1, $f0
	dmtc1	t1, $f2
	dmtc1	t1, $f4
	dmtc1	t1, $f6
	dmtc1	t1, $f8
	dmtc1	t1, $f10
	dmtc1	t1, $f12
	dmtc1	t1, $f14
	dmtc1	t1, $f16
	dmtc1	t1, $f18
	dmtc1	t1, $f20
	dmtc1	t1, $f22
	dmtc1	t1, $f24
	dmtc1	t1, $f26
	dmtc1	t1, $f28
	dmtc1	t1, $f30
#endif
	jr	ra
	END(_init_fpu)