/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/errno.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

	.macro	EX insn, reg, src
	.set	push
	.set	nomacro
.ex\@:	\insn	\reg, \src
	.set	pop
	.section __ex_table,"a"
	PTR	.ex\@, fault
	.previous
	.endm

	.set	noreorder
	.set	arch=r4000

LEAF(_save_fp_context)
	cfc1	t1, fcr31

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
	.set	push
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips64r2
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip storing odd if FR=0
	nop
#endif
	/* Store the 16 odd double precision registers */
	EX	sdc1 $f1, SC_FPREGS+8(a0)
	EX	sdc1 $f3, SC_FPREGS+24(a0)
	EX	sdc1 $f5, SC_FPREGS+40(a0)
	EX	sdc1 $f7, SC_FPREGS+56(a0)
	EX	sdc1 $f9, SC_FPREGS+72(a0)
	EX	sdc1 $f11, SC_FPREGS+88(a0)
	EX	sdc1 $f13, SC_FPREGS+104(a0)
	EX	sdc1 $f15, SC_FPREGS+120(a0)
	EX	sdc1 $f17, SC_FPREGS+136(a0)
	EX	sdc1 $f19, SC_FPREGS+152(a0)
	EX	sdc1 $f21, SC_FPREGS+168(a0)
	EX	sdc1 $f23, SC_FPREGS+184(a0)
	EX	sdc1 $f25, SC_FPREGS+200(a0)
	EX	sdc1 $f27, SC_FPREGS+216(a0)
	EX	sdc1 $f29, SC_FPREGS+232(a0)
	EX	sdc1 $f31, SC_FPREGS+248(a0)
1:	.set	pop
#endif

	/* Store the 16 even double precision registers */
	EX	sdc1 $f0, SC_FPREGS+0(a0)
	EX	sdc1 $f2, SC_FPREGS+16(a0)
	EX	sdc1 $f4, SC_FPREGS+32(a0)
	EX	sdc1 $f6, SC_FPREGS+48(a0)
	EX	sdc1 $f8, SC_FPREGS+64(a0)
	EX	sdc1 $f10, SC_FPREGS+80(a0)
	EX	sdc1 $f12, SC_FPREGS+96(a0)
	EX	sdc1 $f14, SC_FPREGS+112(a0)
	EX	sdc1 $f16, SC_FPREGS+128(a0)
	EX	sdc1 $f18, SC_FPREGS+144(a0)
	EX	sdc1 $f20, SC_FPREGS+160(a0)
	EX	sdc1 $f22, SC_FPREGS+176(a0)
	EX	sdc1 $f24, SC_FPREGS+192(a0)
	EX	sdc1 $f26, SC_FPREGS+208(a0)
	EX	sdc1 $f28, SC_FPREGS+224(a0)
	EX	sdc1 $f30, SC_FPREGS+240(a0)
	EX	sw t1, SC_FPC_CSR(a0)
	jr	ra
	li	v0, 0					# success
	END(_save_fp_context)

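/*
 * Note on the Status.FR test used above and repeated in the variants
 * below: bit 26 of CP0 Status (FR) selects whether the FPU exposes 32
 * full 64-bit registers.  Shifting Status left by 5 moves FR into the
 * sign bit, so the "bgez" is taken when FR=0 and the odd-numbered
 * registers, which hold no separate double precision state in that
 * mode, are skipped.
 */
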
#ifdef CONFIG_MIPS32_COMPAT
	/* Save 32-bit process floating point context */
LEAF(_save_fp_context32)
	cfc1	t1, fcr31

	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip storing odd if FR=0
	nop

	/* Store the 16 odd double precision registers */
	EX	sdc1 $f1, SC32_FPREGS+8(a0)
	EX	sdc1 $f3, SC32_FPREGS+24(a0)
	EX	sdc1 $f5, SC32_FPREGS+40(a0)
	EX	sdc1 $f7, SC32_FPREGS+56(a0)
	EX	sdc1 $f9, SC32_FPREGS+72(a0)
	EX	sdc1 $f11, SC32_FPREGS+88(a0)
	EX	sdc1 $f13, SC32_FPREGS+104(a0)
	EX	sdc1 $f15, SC32_FPREGS+120(a0)
	EX	sdc1 $f17, SC32_FPREGS+136(a0)
	EX	sdc1 $f19, SC32_FPREGS+152(a0)
	EX	sdc1 $f21, SC32_FPREGS+168(a0)
	EX	sdc1 $f23, SC32_FPREGS+184(a0)
	EX	sdc1 $f25, SC32_FPREGS+200(a0)
	EX	sdc1 $f27, SC32_FPREGS+216(a0)
	EX	sdc1 $f29, SC32_FPREGS+232(a0)
	EX	sdc1 $f31, SC32_FPREGS+248(a0)

	/* Store the 16 even double precision registers */
1:	EX	sdc1 $f0, SC32_FPREGS+0(a0)
	EX	sdc1 $f2, SC32_FPREGS+16(a0)
	EX	sdc1 $f4, SC32_FPREGS+32(a0)
	EX	sdc1 $f6, SC32_FPREGS+48(a0)
	EX	sdc1 $f8, SC32_FPREGS+64(a0)
	EX	sdc1 $f10, SC32_FPREGS+80(a0)
	EX	sdc1 $f12, SC32_FPREGS+96(a0)
	EX	sdc1 $f14, SC32_FPREGS+112(a0)
	EX	sdc1 $f16, SC32_FPREGS+128(a0)
	EX	sdc1 $f18, SC32_FPREGS+144(a0)
	EX	sdc1 $f20, SC32_FPREGS+160(a0)
	EX	sdc1 $f22, SC32_FPREGS+176(a0)
	EX	sdc1 $f24, SC32_FPREGS+192(a0)
	EX	sdc1 $f26, SC32_FPREGS+208(a0)
	EX	sdc1 $f28, SC32_FPREGS+224(a0)
	EX	sdc1 $f30, SC32_FPREGS+240(a0)
	EX	sw t1, SC32_FPC_CSR(a0)
	cfc1	t0, $0				# implementation/version
	EX	sw t0, SC32_FPC_EIR(a0)

	jr	ra
	li	v0, 0					# success
	END(_save_fp_context32)
#endif

/*
 * Restore FPU state:
 *  - fp gp registers
 *  - cp1 status/control register
 */
LEAF(_restore_fp_context)
	EX	lw t1, SC_FPC_CSR(a0)

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
	.set	push
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips64r2
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip loading odd if FR=0
	nop
#endif
	EX	ldc1 $f1, SC_FPREGS+8(a0)
	EX	ldc1 $f3, SC_FPREGS+24(a0)
	EX	ldc1 $f5, SC_FPREGS+40(a0)
	EX	ldc1 $f7, SC_FPREGS+56(a0)
	EX	ldc1 $f9, SC_FPREGS+72(a0)
	EX	ldc1 $f11, SC_FPREGS+88(a0)
	EX	ldc1 $f13, SC_FPREGS+104(a0)
	EX	ldc1 $f15, SC_FPREGS+120(a0)
	EX	ldc1 $f17, SC_FPREGS+136(a0)
	EX	ldc1 $f19, SC_FPREGS+152(a0)
	EX	ldc1 $f21, SC_FPREGS+168(a0)
	EX	ldc1 $f23, SC_FPREGS+184(a0)
	EX	ldc1 $f25, SC_FPREGS+200(a0)
	EX	ldc1 $f27, SC_FPREGS+216(a0)
	EX	ldc1 $f29, SC_FPREGS+232(a0)
	EX	ldc1 $f31, SC_FPREGS+248(a0)
1:	.set	pop
#endif
	EX	ldc1 $f0, SC_FPREGS+0(a0)
	EX	ldc1 $f2, SC_FPREGS+16(a0)
	EX	ldc1 $f4, SC_FPREGS+32(a0)
	EX	ldc1 $f6, SC_FPREGS+48(a0)
	EX	ldc1 $f8, SC_FPREGS+64(a0)
	EX	ldc1 $f10, SC_FPREGS+80(a0)
	EX	ldc1 $f12, SC_FPREGS+96(a0)
	EX	ldc1 $f14, SC_FPREGS+112(a0)
	EX	ldc1 $f16, SC_FPREGS+128(a0)
	EX	ldc1 $f18, SC_FPREGS+144(a0)
	EX	ldc1 $f20, SC_FPREGS+160(a0)
	EX	ldc1 $f22, SC_FPREGS+176(a0)
	EX	ldc1 $f24, SC_FPREGS+192(a0)
	EX	ldc1 $f26, SC_FPREGS+208(a0)
	EX	ldc1 $f28, SC_FPREGS+224(a0)
	EX	ldc1 $f30, SC_FPREGS+240(a0)
	ctc1	t1, fcr31
	jr	ra
	li	v0, 0					# success
	END(_restore_fp_context)

#ifdef CONFIG_MIPS32_COMPAT
LEAF(_restore_fp_context32)
	/* Restore an o32 sigcontext.  */
	EX	lw t1, SC32_FPC_CSR(a0)

	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip loading odd if FR=0
	nop

	EX	ldc1 $f1, SC32_FPREGS+8(a0)
	EX	ldc1 $f3, SC32_FPREGS+24(a0)
	EX	ldc1 $f5, SC32_FPREGS+40(a0)
	EX	ldc1 $f7, SC32_FPREGS+56(a0)
	EX	ldc1 $f9, SC32_FPREGS+72(a0)
	EX	ldc1 $f11, SC32_FPREGS+88(a0)
	EX	ldc1 $f13, SC32_FPREGS+104(a0)
	EX	ldc1 $f15, SC32_FPREGS+120(a0)
	EX	ldc1 $f17, SC32_FPREGS+136(a0)
	EX	ldc1 $f19, SC32_FPREGS+152(a0)
	EX	ldc1 $f21, SC32_FPREGS+168(a0)
	EX	ldc1 $f23, SC32_FPREGS+184(a0)
	EX	ldc1 $f25, SC32_FPREGS+200(a0)
	EX	ldc1 $f27, SC32_FPREGS+216(a0)
	EX	ldc1 $f29, SC32_FPREGS+232(a0)
	EX	ldc1 $f31, SC32_FPREGS+248(a0)

1:	EX	ldc1 $f0, SC32_FPREGS+0(a0)
	EX	ldc1 $f2, SC32_FPREGS+16(a0)
	EX	ldc1 $f4, SC32_FPREGS+32(a0)
	EX	ldc1 $f6, SC32_FPREGS+48(a0)
	EX	ldc1 $f8, SC32_FPREGS+64(a0)
	EX	ldc1 $f10, SC32_FPREGS+80(a0)
	EX	ldc1 $f12, SC32_FPREGS+96(a0)
	EX	ldc1 $f14, SC32_FPREGS+112(a0)
	EX	ldc1 $f16, SC32_FPREGS+128(a0)
	EX	ldc1 $f18, SC32_FPREGS+144(a0)
	EX	ldc1 $f20, SC32_FPREGS+160(a0)
	EX	ldc1 $f22, SC32_FPREGS+176(a0)
	EX	ldc1 $f24, SC32_FPREGS+192(a0)
	EX	ldc1 $f26, SC32_FPREGS+208(a0)
	EX	ldc1 $f28, SC32_FPREGS+224(a0)
	EX	ldc1 $f30, SC32_FPREGS+240(a0)
	ctc1	t1, fcr31
	jr	ra
	li	v0, 0					# success
	END(_restore_fp_context32)
#endif

#ifdef CONFIG_CPU_HAS_MSA

	.macro	save_sc_msareg	wr, off, sc, tmp
#ifdef CONFIG_64BIT
	copy_u_d \tmp, \wr, 1
	EX sd	\tmp, (\off+(\wr*8))(\sc)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	copy_u_w \tmp, \wr, 2
	EX sw	\tmp, (\off+(\wr*8)+0)(\sc)
	copy_u_w \tmp, \wr, 3
	EX sw	\tmp, (\off+(\wr*8)+4)(\sc)
#else /* CONFIG_CPU_BIG_ENDIAN */
	copy_u_w \tmp, \wr, 2
	EX sw	\tmp, (\off+(\wr*8)+4)(\sc)
	copy_u_w \tmp, \wr, 3
	EX sw	\tmp, (\off+(\wr*8)+0)(\sc)
#endif
	.endm

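/*
 * save_sc_msareg stores the upper 64 bits of MSA vector register \wr at
 * offset \off + 8*\wr from \sc.  On 64-bit kernels that is a single
 * copy of doubleword element 1; 32-bit kernels copy word elements 2 and
 * 3 separately, with the endian-dependent ordering chosen so the
 * in-memory layout matches what the 64-bit doubleword store produces.
 */
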
/*
 * int _save_msa_context(struct sigcontext *sc)
 *
 * Save the upper 64 bits of each vector register along with the MSA_CSR
 * register into sc.  Returns zero on success, else non-zero.
 */
LEAF(_save_msa_context)
	save_sc_msareg	0, SC_MSAREGS, a0, t0
	save_sc_msareg	1, SC_MSAREGS, a0, t0
	save_sc_msareg	2, SC_MSAREGS, a0, t0
	save_sc_msareg	3, SC_MSAREGS, a0, t0
	save_sc_msareg	4, SC_MSAREGS, a0, t0
	save_sc_msareg	5, SC_MSAREGS, a0, t0
	save_sc_msareg	6, SC_MSAREGS, a0, t0
	save_sc_msareg	7, SC_MSAREGS, a0, t0
	save_sc_msareg	8, SC_MSAREGS, a0, t0
	save_sc_msareg	9, SC_MSAREGS, a0, t0
	save_sc_msareg	10, SC_MSAREGS, a0, t0
	save_sc_msareg	11, SC_MSAREGS, a0, t0
	save_sc_msareg	12, SC_MSAREGS, a0, t0
	save_sc_msareg	13, SC_MSAREGS, a0, t0
	save_sc_msareg	14, SC_MSAREGS, a0, t0
	save_sc_msareg	15, SC_MSAREGS, a0, t0
	save_sc_msareg	16, SC_MSAREGS, a0, t0
	save_sc_msareg	17, SC_MSAREGS, a0, t0
	save_sc_msareg	18, SC_MSAREGS, a0, t0
	save_sc_msareg	19, SC_MSAREGS, a0, t0
	save_sc_msareg	20, SC_MSAREGS, a0, t0
	save_sc_msareg	21, SC_MSAREGS, a0, t0
	save_sc_msareg	22, SC_MSAREGS, a0, t0
	save_sc_msareg	23, SC_MSAREGS, a0, t0
	save_sc_msareg	24, SC_MSAREGS, a0, t0
	save_sc_msareg	25, SC_MSAREGS, a0, t0
	save_sc_msareg	26, SC_MSAREGS, a0, t0
	save_sc_msareg	27, SC_MSAREGS, a0, t0
	save_sc_msareg	28, SC_MSAREGS, a0, t0
	save_sc_msareg	29, SC_MSAREGS, a0, t0
	save_sc_msareg	30, SC_MSAREGS, a0, t0
	save_sc_msareg	31, SC_MSAREGS, a0, t0
	jr	ra
	li	v0, 0
	END(_save_msa_context)

#ifdef CONFIG_MIPS32_COMPAT

/*
 * int _save_msa_context32(struct sigcontext32 *sc)
 *
 * Save the upper 64 bits of each vector register along with the MSA_CSR
 * register into sc.  Returns zero on success, else non-zero.
 */
LEAF(_save_msa_context32)
	save_sc_msareg	0, SC32_MSAREGS, a0, t0
	save_sc_msareg	1, SC32_MSAREGS, a0, t0
	save_sc_msareg	2, SC32_MSAREGS, a0, t0
	save_sc_msareg	3, SC32_MSAREGS, a0, t0
	save_sc_msareg	4, SC32_MSAREGS, a0, t0
	save_sc_msareg	5, SC32_MSAREGS, a0, t0
	save_sc_msareg	6, SC32_MSAREGS, a0, t0
	save_sc_msareg	7, SC32_MSAREGS, a0, t0
	save_sc_msareg	8, SC32_MSAREGS, a0, t0
	save_sc_msareg	9, SC32_MSAREGS, a0, t0
	save_sc_msareg	10, SC32_MSAREGS, a0, t0
	save_sc_msareg	11, SC32_MSAREGS, a0, t0
	save_sc_msareg	12, SC32_MSAREGS, a0, t0
	save_sc_msareg	13, SC32_MSAREGS, a0, t0
	save_sc_msareg	14, SC32_MSAREGS, a0, t0
	save_sc_msareg	15, SC32_MSAREGS, a0, t0
	save_sc_msareg	16, SC32_MSAREGS, a0, t0
	save_sc_msareg	17, SC32_MSAREGS, a0, t0
	save_sc_msareg	18, SC32_MSAREGS, a0, t0
	save_sc_msareg	19, SC32_MSAREGS, a0, t0
	save_sc_msareg	20, SC32_MSAREGS, a0, t0
	save_sc_msareg	21, SC32_MSAREGS, a0, t0
	save_sc_msareg	22, SC32_MSAREGS, a0, t0
	save_sc_msareg	23, SC32_MSAREGS, a0, t0
	save_sc_msareg	24, SC32_MSAREGS, a0, t0
	save_sc_msareg	25, SC32_MSAREGS, a0, t0
	save_sc_msareg	26, SC32_MSAREGS, a0, t0
	save_sc_msareg	27, SC32_MSAREGS, a0, t0
	save_sc_msareg	28, SC32_MSAREGS, a0, t0
	save_sc_msareg	29, SC32_MSAREGS, a0, t0
	save_sc_msareg	30, SC32_MSAREGS, a0, t0
	save_sc_msareg	31, SC32_MSAREGS, a0, t0
	jr	ra
	li	v0, 0
	END(_save_msa_context32)

#endif /* CONFIG_MIPS32_COMPAT */

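/*
 * restore_sc_msareg is the inverse of save_sc_msareg above: it reloads
 * the upper 64 bits of MSA vector register \wr from offset \off + 8*\wr
 * in \sc, using the same endian-dependent word ordering.
 */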
	.macro	restore_sc_msareg	wr, off, sc, tmp
#ifdef CONFIG_64BIT
	EX ld	\tmp, (\off+(\wr*8))(\sc)
	insert_d \wr, 1, \tmp
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	\tmp, (\off+(\wr*8)+0)(\sc)
	insert_w \wr, 2, \tmp
	EX lw	\tmp, (\off+(\wr*8)+4)(\sc)
	insert_w \wr, 3, \tmp
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	\tmp, (\off+(\wr*8)+4)(\sc)
	insert_w \wr, 2, \tmp
	EX lw	\tmp, (\off+(\wr*8)+0)(\sc)
	insert_w \wr, 3, \tmp
#endif
	.endm

/*
 * int _restore_msa_context(struct sigcontext *sc)
 */
LEAF(_restore_msa_context)
	restore_sc_msareg	0, SC_MSAREGS, a0, t0
	restore_sc_msareg	1, SC_MSAREGS, a0, t0
	restore_sc_msareg	2, SC_MSAREGS, a0, t0
	restore_sc_msareg	3, SC_MSAREGS, a0, t0
	restore_sc_msareg	4, SC_MSAREGS, a0, t0
	restore_sc_msareg	5, SC_MSAREGS, a0, t0
	restore_sc_msareg	6, SC_MSAREGS, a0, t0
	restore_sc_msareg	7, SC_MSAREGS, a0, t0
	restore_sc_msareg	8, SC_MSAREGS, a0, t0
	restore_sc_msareg	9, SC_MSAREGS, a0, t0
	restore_sc_msareg	10, SC_MSAREGS, a0, t0
	restore_sc_msareg	11, SC_MSAREGS, a0, t0
	restore_sc_msareg	12, SC_MSAREGS, a0, t0
	restore_sc_msareg	13, SC_MSAREGS, a0, t0
	restore_sc_msareg	14, SC_MSAREGS, a0, t0
	restore_sc_msareg	15, SC_MSAREGS, a0, t0
	restore_sc_msareg	16, SC_MSAREGS, a0, t0
	restore_sc_msareg	17, SC_MSAREGS, a0, t0
	restore_sc_msareg	18, SC_MSAREGS, a0, t0
	restore_sc_msareg	19, SC_MSAREGS, a0, t0
	restore_sc_msareg	20, SC_MSAREGS, a0, t0
	restore_sc_msareg	21, SC_MSAREGS, a0, t0
	restore_sc_msareg	22, SC_MSAREGS, a0, t0
	restore_sc_msareg	23, SC_MSAREGS, a0, t0
	restore_sc_msareg	24, SC_MSAREGS, a0, t0
	restore_sc_msareg	25, SC_MSAREGS, a0, t0
	restore_sc_msareg	26, SC_MSAREGS, a0, t0
	restore_sc_msareg	27, SC_MSAREGS, a0, t0
	restore_sc_msareg	28, SC_MSAREGS, a0, t0
	restore_sc_msareg	29, SC_MSAREGS, a0, t0
	restore_sc_msareg	30, SC_MSAREGS, a0, t0
	restore_sc_msareg	31, SC_MSAREGS, a0, t0
	jr	ra
	li	v0, 0
	END(_restore_msa_context)

#ifdef CONFIG_MIPS32_COMPAT

/*
 * int _restore_msa_context32(struct sigcontext32 *sc)
 */
LEAF(_restore_msa_context32)
	restore_sc_msareg	0, SC32_MSAREGS, a0, t0
	restore_sc_msareg	1, SC32_MSAREGS, a0, t0
	restore_sc_msareg	2, SC32_MSAREGS, a0, t0
	restore_sc_msareg	3, SC32_MSAREGS, a0, t0
	restore_sc_msareg	4, SC32_MSAREGS, a0, t0
	restore_sc_msareg	5, SC32_MSAREGS, a0, t0
	restore_sc_msareg	6, SC32_MSAREGS, a0, t0
	restore_sc_msareg	7, SC32_MSAREGS, a0, t0
	restore_sc_msareg	8, SC32_MSAREGS, a0, t0
	restore_sc_msareg	9, SC32_MSAREGS, a0, t0
	restore_sc_msareg	10, SC32_MSAREGS, a0, t0
	restore_sc_msareg	11, SC32_MSAREGS, a0, t0
	restore_sc_msareg	12, SC32_MSAREGS, a0, t0
	restore_sc_msareg	13, SC32_MSAREGS, a0, t0
	restore_sc_msareg	14, SC32_MSAREGS, a0, t0
	restore_sc_msareg	15, SC32_MSAREGS, a0, t0
	restore_sc_msareg	16, SC32_MSAREGS, a0, t0
	restore_sc_msareg	17, SC32_MSAREGS, a0, t0
	restore_sc_msareg	18, SC32_MSAREGS, a0, t0
	restore_sc_msareg	19, SC32_MSAREGS, a0, t0
	restore_sc_msareg	20, SC32_MSAREGS, a0, t0
	restore_sc_msareg	21, SC32_MSAREGS, a0, t0
	restore_sc_msareg	22, SC32_MSAREGS, a0, t0
	restore_sc_msareg	23, SC32_MSAREGS, a0, t0
	restore_sc_msareg	24, SC32_MSAREGS, a0, t0
	restore_sc_msareg	25, SC32_MSAREGS, a0, t0
	restore_sc_msareg	26, SC32_MSAREGS, a0, t0
	restore_sc_msareg	27, SC32_MSAREGS, a0, t0
	restore_sc_msareg	28, SC32_MSAREGS, a0, t0
	restore_sc_msareg	29, SC32_MSAREGS, a0, t0
	restore_sc_msareg	30, SC32_MSAREGS, a0, t0
	restore_sc_msareg	31, SC32_MSAREGS, a0, t0
	jr	ra
	li	v0, 0
	END(_restore_msa_context32)

#endif /* CONFIG_MIPS32_COMPAT */

#endif /* CONFIG_CPU_HAS_MSA */

	.set	reorder

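/*
 * Common exception fixup target: every EX access above registers an
 * __ex_table entry pointing here, so a fault on a userspace access
 * resumes at this label and the interrupted routine returns -EFAULT
 * to its caller.
 */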
	.type	fault@function
	.ent	fault
fault:	li	v0, -EFAULT				# failure
	jr	ra
	.end	fault
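
/*
 * Rough calling sketch (an assumption about the C side; the actual
 * wrappers live in the signal handling code, not in this file):
 *
 *	err = _save_fp_context(sc);	# 0 on success, -EFAULT on a
 *					# faulting userspace access
 *	if (err)
 *		return err;
 */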