/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2007 Maciej W. Rozycki
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>

#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
#define STATMASK 0x3f
#else
#define STATMASK 0x1f
#endif

	.macro	SAVE_AT
	.set	push
	.set	noat
	LONG_S	$1, PT_R1(sp)
	.set	pop
	.endm

	.macro	SAVE_TEMP
#ifdef CONFIG_CPU_HAS_SMARTMIPS
	mflhxu	v1
	LONG_S	v1, PT_LO(sp)
	mflhxu	v1
	LONG_S	v1, PT_HI(sp)
	mflhxu	v1
	LONG_S	v1, PT_ACX(sp)
#elif !defined(CONFIG_CPU_MIPSR6)
	mfhi	v1
#endif
#ifdef CONFIG_32BIT
	LONG_S	$8, PT_R8(sp)
	LONG_S	$9, PT_R9(sp)
#endif
	LONG_S	$10, PT_R10(sp)
	LONG_S	$11, PT_R11(sp)
	LONG_S	$12, PT_R12(sp)
#if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6)
	LONG_S	v1, PT_HI(sp)
	mflo	v1
#endif
	LONG_S	$13, PT_R13(sp)
	LONG_S	$14, PT_R14(sp)
	LONG_S	$15, PT_R15(sp)
	LONG_S	$24, PT_R24(sp)
#if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6)
	LONG_S	v1, PT_LO(sp)
#endif
#ifdef CONFIG_CPU_CAVIUM_OCTEON
	/*
	 * The Octeon multiplier state is affected by general
	 * multiply instructions. It must be saved before
	 * kernel code might corrupt it.
	 */
	jal	octeon_mult_save
#endif
	.endm

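/*
 * Save the callee-saved registers s0-s7 ($16-$23) and fp ($30) into the
 * pt_regs frame pointed to by sp.
 */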
	.macro	SAVE_STATIC
	LONG_S	$16, PT_R16(sp)
	LONG_S	$17, PT_R17(sp)
	LONG_S	$18, PT_R18(sp)
	LONG_S	$19, PT_R19(sp)
	LONG_S	$20, PT_R20(sp)
	LONG_S	$21, PT_R21(sp)
	LONG_S	$22, PT_R22(sp)
	LONG_S	$23, PT_R23(sp)
	LONG_S	$30, PT_R30(sp)
	.endm

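/*
 * get_saved_sp loads the current CPU's kernel stack pointer from kernelsp
 * into k1; set_saved_sp stores \stackp back there. On SMP the CPU number
 * is read with ASM_CPUID_MFC0 and scaled by SMP_CPUID_PTRSHIFT to index
 * the kernelsp array.
 */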
#ifdef CONFIG_SMP
	.macro	get_saved_sp	/* SMP variation */
	ASM_CPUID_MFC0	k0, ASM_SMP_CPUID_REG
#if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
	lui	k1, %hi(kernelsp)
#else
	lui	k1, %highest(kernelsp)
	daddiu	k1, %higher(kernelsp)
	dsll	k1, 16
	daddiu	k1, %hi(kernelsp)
	dsll	k1, 16
#endif
	LONG_SRL	k0, SMP_CPUID_PTRSHIFT
	LONG_ADDU	k1, k0
	LONG_L	k1, %lo(kernelsp)(k1)
	.endm

	.macro	set_saved_sp stackp temp temp2
	ASM_CPUID_MFC0	\temp, ASM_SMP_CPUID_REG
	LONG_SRL	\temp, SMP_CPUID_PTRSHIFT
	LONG_S	\stackp, kernelsp(\temp)
	.endm
#else /* !CONFIG_SMP */
	.macro	get_saved_sp	/* Uniprocessor variation */
#ifdef CONFIG_CPU_JUMP_WORKAROUNDS
	/*
	 * Clear BTB (branch target buffer), forbid RAS (return address
	 * stack) to work around the Out-of-order Issue in Loongson2F
	 * via its diagnostic register.
	 */
	move	k0, ra
	jal	1f
	 nop
1:	jal	1f
	 nop
1:	jal	1f
	 nop
1:	jal	1f
	 nop
1:	move	ra, k0
	li	k0, 3
	mtc0	k0, $22
#endif /* CONFIG_CPU_JUMP_WORKAROUNDS */
#if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
	lui	k1, %hi(kernelsp)
#else
	lui	k1, %highest(kernelsp)
	daddiu	k1, %higher(kernelsp)
	dsll	k1, k1, 16
	daddiu	k1, %hi(kernelsp)
	dsll	k1, k1, 16
#endif
	LONG_L	k1, %lo(kernelsp)(k1)
	.endm

	.macro	set_saved_sp stackp temp temp2
	LONG_S	\stackp, kernelsp
	.endm
#endif

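/*
 * Save the scratch state every exception handler needs: switch to the
 * kernel stack when coming from user mode, then store the old sp, v0/v1,
 * a0-a3 (plus $8/$9 on 64-bit), t9, gp and ra together with the CP0
 * Status, Cause and EPC registers, and point $28 at the current
 * thread_info.
 */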
	.macro	SAVE_SOME
	.set	push
	.set	noat
	.set	reorder
	mfc0	k0, CP0_STATUS
	sll	k0, 3		/* extract cu0 bit */
	.set	noreorder
	bltz	k0, 8f
	 move	k1, sp
#ifdef CONFIG_EVA
	/*
	 * Flush interAptiv's Return Prediction Stack (RPS) by writing
	 * EntryHi. Toggling Config7.RPS is slower and less portable.
	 *
	 * The RPS isn't automatically flushed when exceptions are
	 * taken, which can result in kernel mode speculative accesses
	 * to user addresses if the RPS mispredicts. That's harmless
	 * when user and kernel share the same address space, but with
	 * EVA the same user segments may be unmapped to kernel mode,
	 * even containing sensitive MMIO regions or invalid memory.
	 *
	 * This can happen when the kernel sets the return address to
	 * ret_from_* and jr's to the exception handler, which looks
	 * more like a tail call than a function call. If nested calls
	 * don't evict the last user address in the RPS, it will
	 * mispredict the return and fetch from a user controlled
	 * address into the icache.
	 *
	 * More recent EVA-capable cores with MAAR to restrict
	 * speculative accesses aren't affected.
	 */
	MFC0	k0, CP0_ENTRYHI
	MTC0	k0, CP0_ENTRYHI
#endif
	.set	reorder
	/* Called from user mode, new stack. */
	get_saved_sp
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
8:	move	k0, sp
	PTR_SUBU sp, k1, PT_SIZE
#else
	.set	at=k0
8:	PTR_SUBU k1, PT_SIZE
	.set	noat
	move	k0, sp
	move	sp, k1
#endif
	LONG_S	k0, PT_R29(sp)
	LONG_S	$3, PT_R3(sp)
	/*
	 * You might think that you don't need to save $0,
	 * but the FPU emulator and gdb remote debug stub
	 * need it to operate correctly
	 */
	LONG_S	$0, PT_R0(sp)
	mfc0	v1, CP0_STATUS
	LONG_S	$2, PT_R2(sp)
	LONG_S	v1, PT_STATUS(sp)
	LONG_S	$4, PT_R4(sp)
	mfc0	v1, CP0_CAUSE
	LONG_S	$5, PT_R5(sp)
	LONG_S	v1, PT_CAUSE(sp)
	LONG_S	$6, PT_R6(sp)
	MFC0	v1, CP0_EPC
	LONG_S	$7, PT_R7(sp)
#ifdef CONFIG_64BIT
	LONG_S	$8, PT_R8(sp)
	LONG_S	$9, PT_R9(sp)
#endif
	LONG_S	v1, PT_EPC(sp)
	LONG_S	$25, PT_R25(sp)
	LONG_S	$28, PT_R28(sp)
	LONG_S	$31, PT_R31(sp)
	ori	$28, sp, _THREAD_MASK
	xori	$28, _THREAD_MASK
#ifdef CONFIG_CPU_CAVIUM_OCTEON
	.set	mips64
	pref	0, 0($28)	/* Prefetch the current pointer */
#endif
	.set	pop
	.endm

	.macro	SAVE_ALL
	SAVE_SOME
	SAVE_AT
	SAVE_TEMP
	SAVE_STATIC
	.endm

	.macro	RESTORE_AT
	.set	push
	.set	noat
	LONG_L	$1, PT_R1(sp)
	.set	pop
	.endm

	.macro	RESTORE_TEMP
#ifdef CONFIG_CPU_CAVIUM_OCTEON
	/* Restore the Octeon multiplier state */
	jal	octeon_mult_restore
#endif
#ifdef CONFIG_CPU_HAS_SMARTMIPS
	LONG_L	$24, PT_ACX(sp)
	mtlhx	$24
	LONG_L	$24, PT_HI(sp)
	mtlhx	$24
	LONG_L	$24, PT_LO(sp)
	mtlhx	$24
#elif !defined(CONFIG_CPU_MIPSR6)
	LONG_L	$24, PT_LO(sp)
	mtlo	$24
	LONG_L	$24, PT_HI(sp)
	mthi	$24
#endif
#ifdef CONFIG_32BIT
	LONG_L	$8, PT_R8(sp)
	LONG_L	$9, PT_R9(sp)
#endif
	LONG_L	$10, PT_R10(sp)
	LONG_L	$11, PT_R11(sp)
	LONG_L	$12, PT_R12(sp)
	LONG_L	$13, PT_R13(sp)
	LONG_L	$14, PT_R14(sp)
	LONG_L	$15, PT_R15(sp)
	LONG_L	$24, PT_R24(sp)
	.endm

	.macro	RESTORE_STATIC
	LONG_L	$16, PT_R16(sp)
	LONG_L	$17, PT_R17(sp)
	LONG_L	$18, PT_R18(sp)
	LONG_L	$19, PT_R19(sp)
	LONG_L	$20, PT_R20(sp)
	LONG_L	$21, PT_R21(sp)
	LONG_L	$22, PT_R22(sp)
	LONG_L	$23, PT_R23(sp)
	LONG_L	$30, PT_R30(sp)
	.endm

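/*
 * Two flavours of RESTORE_SOME/RESTORE_SP_AND_RET follow. The R3000/TX39
 * variant reloads EPC into k0 and returns with "jr k0; rfe", while the
 * variant for later CPUs writes the saved EPC back to CP0_EPC and uses
 * eret. Both rebuild CP0 Status by taking the interrupt mask bits (0xff00)
 * from the live register and everything else from the saved PT_STATUS.
 */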
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

	.macro	RESTORE_SOME
	.set	push
	.set	reorder
	.set	noat
	mfc0	a0, CP0_STATUS
	li	v1, 0xff00
	ori	a0, STATMASK
	xori	a0, STATMASK
	mtc0	a0, CP0_STATUS
	and	a0, v1
	LONG_L	v0, PT_STATUS(sp)
	nor	v1, $0, v1
	and	v0, v1
	or	v0, a0
	mtc0	v0, CP0_STATUS
	LONG_L	$31, PT_R31(sp)
	LONG_L	$28, PT_R28(sp)
	LONG_L	$25, PT_R25(sp)
	LONG_L	$7, PT_R7(sp)
	LONG_L	$6, PT_R6(sp)
	LONG_L	$5, PT_R5(sp)
	LONG_L	$4, PT_R4(sp)
	LONG_L	$3, PT_R3(sp)
	LONG_L	$2, PT_R2(sp)
	.set	pop
	.endm

	.macro	RESTORE_SP_AND_RET
	.set	push
	.set	noreorder
	LONG_L	k0, PT_EPC(sp)
	LONG_L	sp, PT_R29(sp)
	jr	k0
	 rfe
	.set	pop
	.endm

#else
	.macro	RESTORE_SOME
	.set	push
	.set	reorder
	.set	noat
	mfc0	a0, CP0_STATUS
	ori	a0, STATMASK
	xori	a0, STATMASK
	mtc0	a0, CP0_STATUS
	li	v1, 0xff00
	and	a0, v1
	LONG_L	v0, PT_STATUS(sp)
	nor	v1, $0, v1
	and	v0, v1
	or	v0, a0
	mtc0	v0, CP0_STATUS
	LONG_L	v1, PT_EPC(sp)
	MTC0	v1, CP0_EPC
	LONG_L	$31, PT_R31(sp)
	LONG_L	$28, PT_R28(sp)
	LONG_L	$25, PT_R25(sp)
#ifdef CONFIG_64BIT
	LONG_L	$8, PT_R8(sp)
	LONG_L	$9, PT_R9(sp)
#endif
	LONG_L	$7, PT_R7(sp)
	LONG_L	$6, PT_R6(sp)
	LONG_L	$5, PT_R5(sp)
	LONG_L	$4, PT_R4(sp)
	LONG_L	$3, PT_R3(sp)
	LONG_L	$2, PT_R2(sp)
	.set	pop
	.endm

	.macro	RESTORE_SP_AND_RET
	LONG_L	sp, PT_R29(sp)
	.set	arch=r4000
	eret
	.set	mips0
	.endm

#endif

	.macro	RESTORE_SP
	LONG_L	sp, PT_R29(sp)
	.endm

	.macro	RESTORE_ALL
	RESTORE_TEMP
	RESTORE_STATIC
	RESTORE_AT
	RESTORE_SOME
	RESTORE_SP
	.endm

	.macro	RESTORE_ALL_AND_RET
	RESTORE_TEMP
	RESTORE_STATIC
	RESTORE_AT
	RESTORE_SOME
	RESTORE_SP_AND_RET
	.endm

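/*
 * For illustration only (the handler and C function names below are made
 * up): a low-level exception handler built from the macros above typically
 * saves state, switches to kernel mode and hands control to C, roughly
 *
 *	NESTED(handle_example, PT_SIZE, sp)
 *	SAVE_ALL
 *	CLI
 *	move	a0, sp
 *	PTR_LA	ra, ret_from_exception
 *	j	do_example
 *	END(handle_example)
 *
 * with the return path eventually using RESTORE_ALL_AND_RET to return to
 * the interrupted context.
 */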
/*
 * Move to kernel mode and disable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
	.macro	CLI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | STATMASK
	or	t0, t1
	xori	t0, STATMASK
	mtc0	t0, CP0_STATUS
	irq_disable_hazard
	.endm

/*
 * Move to kernel mode and enable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
	.macro	STI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | STATMASK
	or	t0, t1
	xori	t0, STATMASK & ~1
	mtc0	t0, CP0_STATUS
	irq_enable_hazard
	.endm

/*
 * Just move to kernel mode and leave interrupts as they are. Note
 * for the R3000 this means copying the previous enable from IEp.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
	.macro	KMODE
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | (STATMASK & ~1)
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
	andi	t2, t0, ST0_IEP
	srl	t2, 2
	or	t0, t2
#endif
	or	t0, t1
	xori	t0, STATMASK & ~1
	mtc0	t0, CP0_STATUS
	irq_disable_hazard
	.endm

#endif /* _ASM_STACKFRAME_H */