/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2007 Maciej W. Rozycki
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>

#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
#define STATMASK 0x3f
#else
#define STATMASK 0x1f
#endif

		.macro	SAVE_AT
		.set	push
		.set	noat
		LONG_S	$1, PT_R1(sp)
		.set	pop
		.endm

		.macro	SAVE_TEMP
#ifdef CONFIG_CPU_HAS_SMARTMIPS
		mflhxu	v1
		LONG_S	v1, PT_LO(sp)
		mflhxu	v1
		LONG_S	v1, PT_HI(sp)
		mflhxu	v1
		LONG_S	v1, PT_ACX(sp)
#elif !defined(CONFIG_CPU_MIPSR6)
		mfhi	v1
#endif
#ifdef CONFIG_32BIT
		LONG_S	$8, PT_R8(sp)
		LONG_S	$9, PT_R9(sp)
#endif
		LONG_S	$10, PT_R10(sp)
		LONG_S	$11, PT_R11(sp)
		LONG_S	$12, PT_R12(sp)
#if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6)
		LONG_S	v1, PT_HI(sp)
		mflo	v1
#endif
		LONG_S	$13, PT_R13(sp)
		LONG_S	$14, PT_R14(sp)
		LONG_S	$15, PT_R15(sp)
		LONG_S	$24, PT_R24(sp)
#if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6)
		LONG_S	v1, PT_LO(sp)
#endif
#ifdef CONFIG_CPU_CAVIUM_OCTEON
		/*
		 * The Octeon multiplier state is affected by general
		 * multiply instructions. It must be saved here, before
		 * any kernel code can corrupt it.
		 */
		jal	octeon_mult_save
#endif
		.endm

		.macro	SAVE_STATIC
		LONG_S	$16, PT_R16(sp)
		LONG_S	$17, PT_R17(sp)
		LONG_S	$18, PT_R18(sp)
		LONG_S	$19, PT_R19(sp)
		LONG_S	$20, PT_R20(sp)
		LONG_S	$21, PT_R21(sp)
		LONG_S	$22, PT_R22(sp)
		LONG_S	$23, PT_R23(sp)
		LONG_S	$30, PT_R30(sp)
		.endm

#ifdef CONFIG_SMP
		.macro	get_saved_sp	/* SMP variation */
		ASM_CPUID_MFC0	k0, ASM_SMP_CPUID_REG
#if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
		lui	k1, %hi(kernelsp)
#else
		lui	k1, %highest(kernelsp)
		daddiu	k1, %higher(kernelsp)
		dsll	k1, 16
		daddiu	k1, %hi(kernelsp)
		dsll	k1, 16
#endif
		LONG_SRL	k0, SMP_CPUID_PTRSHIFT
		LONG_ADDU	k1, k0
		LONG_L	k1, %lo(kernelsp)(k1)
		.endm

		.macro	set_saved_sp stackp temp temp2
		ASM_CPUID_MFC0	\temp, ASM_SMP_CPUID_REG
		LONG_SRL	\temp, SMP_CPUID_PTRSHIFT
		LONG_S	\stackp, kernelsp(\temp)
		.endm
#else /* !CONFIG_SMP */
		.macro	get_saved_sp	/* Uniprocessor variation */
#ifdef CONFIG_CPU_JUMP_WORKAROUNDS
		/*
		 * Clear the BTB (branch target buffer) and forbid the RAS
		 * (return address stack) to work around the out-of-order
		 * issue in Loongson2F via its diagnostic register.
		 */
		move	k0, ra
		jal	1f
		 nop
1:		jal	1f
		 nop
1:		jal	1f
		 nop
1:		jal	1f
		 nop
1:		move	ra, k0
		li	k0, 3
		mtc0	k0, $22
#endif /* CONFIG_CPU_JUMP_WORKAROUNDS */
#if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
		lui	k1, %hi(kernelsp)
#else
		lui	k1, %highest(kernelsp)
		daddiu	k1, %higher(kernelsp)
		dsll	k1, k1, 16
		daddiu	k1, %hi(kernelsp)
		dsll	k1, k1, 16
#endif
		LONG_L	k1, %lo(kernelsp)(k1)
		.endm

		.macro	set_saved_sp stackp temp temp2
		LONG_S	\stackp, kernelsp
		.endm
#endif

		.macro	SAVE_SOME
		.set	push
		.set	noat
		.set	reorder
		mfc0	k0, CP0_STATUS
		sll	k0, 3		/* extract cu0 bit */
		.set	noreorder
		bltz	k0, 8f
		 move	k1, sp
#ifdef CONFIG_EVA
		/*
		 * Flush interAptiv's Return Prediction Stack (RPS) by writing
		 * EntryHi. Toggling Config7.RPS is slower and less portable.
		 *
		 * The RPS isn't automatically flushed when exceptions are
		 * taken, which can result in kernel mode speculative accesses
		 * to user addresses if the RPS mispredicts. That's harmless
		 * when user and kernel share the same address space, but with
		 * EVA the same user segments may be unmapped to kernel mode,
		 * even containing sensitive MMIO regions or invalid memory.
		 *
		 * This can happen when the kernel sets the return address to
		 * ret_from_* and jr's to the exception handler, which looks
		 * more like a tail call than a function call. If nested calls
		 * don't evict the last user address in the RPS, it will
		 * mispredict the return and fetch from a user controlled
		 * address into the icache.
		 *
		 * More recent EVA-capable cores with MAAR to restrict
		 * speculative accesses aren't affected.
		 */
		MFC0	k0, CP0_ENTRYHI
		MTC0	k0, CP0_ENTRYHI
#endif
		.set	reorder
		/* Called from user mode, new stack. */
		get_saved_sp
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
8:		move	k0, sp
		PTR_SUBU sp, k1, PT_SIZE
#else
		.set	at=k0
8:		PTR_SUBU k1, PT_SIZE
		.set	noat
		move	k0, sp
		move	sp, k1
#endif
		LONG_S	k0, PT_R29(sp)
		LONG_S	$3, PT_R3(sp)
		/*
		 * You might think that you don't need to save $0,
		 * but the FPU emulator and gdb remote debug stub
		 * need it to operate correctly
		 */
		LONG_S	$0, PT_R0(sp)
		mfc0	v1, CP0_STATUS
		LONG_S	$2, PT_R2(sp)
		LONG_S	v1, PT_STATUS(sp)
		LONG_S	$4, PT_R4(sp)
		mfc0	v1, CP0_CAUSE
		LONG_S	$5, PT_R5(sp)
		LONG_S	v1, PT_CAUSE(sp)
		LONG_S	$6, PT_R6(sp)
		MFC0	v1, CP0_EPC
		LONG_S	$7, PT_R7(sp)
#ifdef CONFIG_64BIT
		LONG_S	$8, PT_R8(sp)
		LONG_S	$9, PT_R9(sp)
#endif
		LONG_S	v1, PT_EPC(sp)
		LONG_S	$25, PT_R25(sp)
		LONG_S	$28, PT_R28(sp)
		LONG_S	$31, PT_R31(sp)

		/* Set thread_info if we're coming from user mode */
		mfc0	k0, CP0_STATUS
		sll	k0, 3		/* extract cu0 bit */
		bltz	k0, 9f

		ori	$28, sp, _THREAD_MASK
		xori	$28, _THREAD_MASK
#ifdef CONFIG_CPU_CAVIUM_OCTEON
		.set	mips64
		pref	0, 0($28)	/* Prefetch the current pointer */
#endif
9:
		.set	pop
		.endm

		.macro	SAVE_ALL
		SAVE_SOME
		SAVE_AT
		SAVE_TEMP
		SAVE_STATIC
		.endm

		.macro	RESTORE_AT
		.set	push
		.set	noat
		LONG_L	$1, PT_R1(sp)
		.set	pop
		.endm

		.macro	RESTORE_TEMP
#ifdef CONFIG_CPU_CAVIUM_OCTEON
		/* Restore the Octeon multiplier state */
		jal	octeon_mult_restore
#endif
#ifdef CONFIG_CPU_HAS_SMARTMIPS
		LONG_L	$24, PT_ACX(sp)
		mtlhx	$24
		LONG_L	$24, PT_HI(sp)
		mtlhx	$24
		LONG_L	$24, PT_LO(sp)
		mtlhx	$24
#elif !defined(CONFIG_CPU_MIPSR6)
		LONG_L	$24, PT_LO(sp)
		mtlo	$24
		LONG_L	$24, PT_HI(sp)
		mthi	$24
#endif
#ifdef CONFIG_32BIT
		LONG_L	$8, PT_R8(sp)
		LONG_L	$9, PT_R9(sp)
#endif
		LONG_L	$10, PT_R10(sp)
		LONG_L	$11, PT_R11(sp)
		LONG_L	$12, PT_R12(sp)
		LONG_L	$13, PT_R13(sp)
		LONG_L	$14, PT_R14(sp)
		LONG_L	$15, PT_R15(sp)
		LONG_L	$24, PT_R24(sp)
		.endm

		.macro	RESTORE_STATIC
		LONG_L	$16, PT_R16(sp)
		LONG_L	$17, PT_R17(sp)
		LONG_L	$18, PT_R18(sp)
		LONG_L	$19, PT_R19(sp)
		LONG_L	$20, PT_R20(sp)
		LONG_L	$21, PT_R21(sp)
		LONG_L	$22, PT_R22(sp)
		LONG_L	$23, PT_R23(sp)
		LONG_L	$30, PT_R30(sp)
		.endm

#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

		.macro	RESTORE_SOME
		.set	push
		.set	reorder
		.set	noat
		mfc0	a0, CP0_STATUS
		li	v1, ST0_CU1 | ST0_IM
		ori	a0, STATMASK
		xori	a0, STATMASK
		mtc0	a0, CP0_STATUS
		and	a0, v1
		LONG_L	v0, PT_STATUS(sp)
		nor	v1, $0, v1
		and	v0, v1
		or	v0, a0
		mtc0	v0, CP0_STATUS
		LONG_L	$31, PT_R31(sp)
		LONG_L	$28, PT_R28(sp)
		LONG_L	$25, PT_R25(sp)
		LONG_L	$7, PT_R7(sp)
		LONG_L	$6, PT_R6(sp)
		LONG_L	$5, PT_R5(sp)
		LONG_L	$4, PT_R4(sp)
		LONG_L	$3, PT_R3(sp)
		LONG_L	$2, PT_R2(sp)
		.set	pop
		.endm

		.macro	RESTORE_SP_AND_RET
		.set	push
		.set	noreorder
		LONG_L	k0, PT_EPC(sp)
		LONG_L	sp, PT_R29(sp)
		jr	k0
		 rfe
		.set	pop
		.endm

#else
		.macro	RESTORE_SOME
		.set	push
		.set	reorder
		.set	noat
		mfc0	a0, CP0_STATUS
		ori	a0, STATMASK
		xori	a0, STATMASK
		mtc0	a0, CP0_STATUS
		li	v1, ST0_CU1 | ST0_FR | ST0_IM
		and	a0, v1
		LONG_L	v0, PT_STATUS(sp)
		nor	v1, $0, v1
		and	v0, v1
		or	v0, a0
		mtc0	v0, CP0_STATUS
		LONG_L	v1, PT_EPC(sp)
		MTC0	v1, CP0_EPC
		LONG_L	$31, PT_R31(sp)
		LONG_L	$28, PT_R28(sp)
		LONG_L	$25, PT_R25(sp)
#ifdef CONFIG_64BIT
		LONG_L	$8, PT_R8(sp)
		LONG_L	$9, PT_R9(sp)
#endif
		LONG_L	$7, PT_R7(sp)
		LONG_L	$6, PT_R6(sp)
		LONG_L	$5, PT_R5(sp)
		LONG_L	$4, PT_R4(sp)
		LONG_L	$3, PT_R3(sp)
		LONG_L	$2, PT_R2(sp)
		.set	pop
		.endm

		.macro	RESTORE_SP_AND_RET
		LONG_L	sp, PT_R29(sp)
		.set	arch=r4000
		eret
		.set	mips0
		.endm

#endif

		.macro	RESTORE_SP
		LONG_L	sp, PT_R29(sp)
		.endm

		.macro	RESTORE_ALL
		RESTORE_TEMP
		RESTORE_STATIC
		RESTORE_AT
		RESTORE_SOME
		RESTORE_SP
		.endm

		.macro	RESTORE_ALL_AND_RET
		RESTORE_TEMP
		RESTORE_STATIC
		RESTORE_AT
		RESTORE_SOME
		RESTORE_SP_AND_RET
		.endm

/*
 * Move to kernel mode and disable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
		.macro	CLI
		mfc0	t0, CP0_STATUS
		li	t1, ST0_CU0 | STATMASK
		or	t0, t1
		xori	t0, STATMASK
		mtc0	t0, CP0_STATUS
		irq_disable_hazard
		.endm

/*
 * Move to kernel mode and enable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
		.macro	STI
		mfc0	t0, CP0_STATUS
		li	t1, ST0_CU0 | STATMASK
		or	t0, t1
		xori	t0, STATMASK & ~1
		mtc0	t0, CP0_STATUS
		irq_enable_hazard
		.endm

/*
 * Just move to kernel mode and leave interrupts as they are. Note that
 * for the R3000 this means copying the previous enable from IEp.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
		.macro	KMODE
		mfc0	t0, CP0_STATUS
		li	t1, ST0_CU0 | (STATMASK & ~1)
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
		andi	t2, t0, ST0_IEP
		srl	t2, 2
		or	t0, t2
#endif
		or	t0, t1
		xori	t0, STATMASK & ~1
		mtc0	t0, CP0_STATUS
		irq_disable_hazard
		.endm

#endif /* _ASM_STACKFRAME_H */
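
/*
 * Usage sketch (illustrative only, not part of this header): a minimal
 * exception handler assembled from the macros above, in the style of the
 * handlers in arch/mips/kernel/genex.S. The handler and C function names
 * (handle_example, do_example) are hypothetical placeholders.
 *
 *	NESTED(handle_example, PT_SIZE, sp)
 *	SAVE_ALL			# build a struct pt_regs frame on the kernel stack
 *	CLI				# move to kernel mode, interrupts disabled
 *	move	a0, sp			# pass the saved register frame to the C handler
 *	jal	do_example
 *	j	ret_from_exception	# unwinds the frame and returns to the interrupted context
 *	END(handle_example)
 */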