/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2007 Maciej W. Rozycki
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>

/* Make the addition of cfi info a little easier. */
	.macro	cfi_rel_offset reg offset=0 docfi=0
	.if	\docfi
	.cfi_rel_offset \reg, \offset
	.endif
	.endm

	.macro	cfi_st reg offset=0 docfi=0
	LONG_S	\reg, \offset(sp)
	cfi_rel_offset \reg, \offset, \docfi
	.endm

	.macro	cfi_restore reg offset=0 docfi=0
	.if	\docfi
	.cfi_restore \reg
	.endif
	.endm

	.macro	cfi_ld reg offset=0 docfi=0
	LONG_L	\reg, \offset(sp)
	cfi_restore \reg \offset \docfi
	.endm

#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
#define STATMASK 0x3f
#else
#define STATMASK 0x1f
#endif

	.macro	SAVE_AT docfi=0
	.set	push
	.set	noat
	cfi_st	$1, PT_R1, \docfi
	.set	pop
	.endm
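/*
 * SAVE_TEMP stores the caller-saved temporaries ($10-$15 and $24,
 * plus $8/$9 on 32-bit kernels; on 64-bit ABIs those two are argument
 * registers and are saved by SAVE_SOME instead) together with the
 * multiply/divide state: the hi/lo pair, or the extended hi/lo/acx
 * set read via mflhxu when CONFIG_CPU_HAS_SMARTMIPS is enabled.
 * MIPS R6 removed the hi/lo registers, so that state is skipped there.
 */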
	.macro	SAVE_TEMP docfi=0
#ifdef CONFIG_CPU_HAS_SMARTMIPS
	mflhxu	v1
	LONG_S	v1, PT_LO(sp)
	mflhxu	v1
	LONG_S	v1, PT_HI(sp)
	mflhxu	v1
	LONG_S	v1, PT_ACX(sp)
#elif !defined(CONFIG_CPU_MIPSR6)
	mfhi	v1
#endif
#ifdef CONFIG_32BIT
	cfi_st	$8, PT_R8, \docfi
	cfi_st	$9, PT_R9, \docfi
#endif
	cfi_st	$10, PT_R10, \docfi
	cfi_st	$11, PT_R11, \docfi
	cfi_st	$12, PT_R12, \docfi
#if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6)
	/*
	 * hi was read above; store it and read lo here so that the
	 * GPR saves in between hide the mfhi/mflo result latency.
	 */
	LONG_S	v1, PT_HI(sp)
	mflo	v1
#endif
	cfi_st	$13, PT_R13, \docfi
	cfi_st	$14, PT_R14, \docfi
	cfi_st	$15, PT_R15, \docfi
	cfi_st	$24, PT_R24, \docfi
#if !defined(CONFIG_CPU_HAS_SMARTMIPS) && !defined(CONFIG_CPU_MIPSR6)
	LONG_S	v1, PT_LO(sp)
#endif
#ifdef CONFIG_CPU_CAVIUM_OCTEON
	/*
	 * The Octeon multiplier state is affected by general
	 * multiply instructions. It must be saved before
	 * kernel code might corrupt it.
	 */
	jal	octeon_mult_save
#endif
	.endm
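/*
 * SAVE_STATIC stores the callee-saved registers $16-$23 (s0-s7) and
 * $30 (s8/fp). The C ABI preserves these across calls, so they are
 * only saved when the full user context must be visible, e.g. for
 * signal delivery or ptrace.
 */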
	.macro	SAVE_STATIC docfi=0
	cfi_st	$16, PT_R16, \docfi
	cfi_st	$17, PT_R17, \docfi
	cfi_st	$18, PT_R18, \docfi
	cfi_st	$19, PT_R19, \docfi
	cfi_st	$20, PT_R20, \docfi
	cfi_st	$21, PT_R21, \docfi
	cfi_st	$22, PT_R22, \docfi
	cfi_st	$23, PT_R23, \docfi
	cfi_st	$30, PT_R30, \docfi
	.endm

/*
 * get_saved_sp returns the SP for the current CPU by looking in the
 * kernelsp array for it. If tosp is set, it stores the current sp in
 * k0 and loads the new value in sp. If not, it clobbers k0 and
 * stores the new value in k1, leaving sp unaffected.
 */
#ifdef CONFIG_SMP

	/* SMP variation */
	.macro	get_saved_sp docfi=0 tosp=0
	ASM_CPUID_MFC0	k0, ASM_SMP_CPUID_REG
#if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
	lui	k1, %hi(kernelsp)
#else
	lui	k1, %highest(kernelsp)
	daddiu	k1, %higher(kernelsp)
	dsll	k1, 16
	daddiu	k1, %hi(kernelsp)
	dsll	k1, 16
#endif
	LONG_SRL	k0, SMP_CPUID_PTRSHIFT
	LONG_ADDU	k1, k0
	.if	\tosp
	move	k0, sp
	.if	\docfi
	.cfi_register sp, k0
	.endif
	LONG_L	sp, %lo(kernelsp)(k1)
	.else
	LONG_L	k1, %lo(kernelsp)(k1)
	.endif
	.endm

	.macro	set_saved_sp stackp temp temp2
	ASM_CPUID_MFC0	\temp, ASM_SMP_CPUID_REG
	LONG_SRL	\temp, SMP_CPUID_PTRSHIFT
	LONG_S	\stackp, kernelsp(\temp)
	.endm
#else /* !CONFIG_SMP */
	/* Uniprocessor variation */
	.macro	get_saved_sp docfi=0 tosp=0
#ifdef CONFIG_CPU_JUMP_WORKAROUNDS
	/*
	 * Clear BTB (branch target buffer), forbid RAS (return address
	 * stack) to work around the out-of-order issue in Loongson2F
	 * via its diagnostic register.
	 */
	move	k0, ra
	jal	1f
	 nop
1:	jal	1f
	 nop
1:	jal	1f
	 nop
1:	jal	1f
	 nop
1:	move	ra, k0
	li	k0, 3
	mtc0	k0, $22
#endif /* CONFIG_CPU_JUMP_WORKAROUNDS */
#if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
	lui	k1, %hi(kernelsp)
#else
	lui	k1, %highest(kernelsp)
	daddiu	k1, %higher(kernelsp)
	dsll	k1, k1, 16
	daddiu	k1, %hi(kernelsp)
	dsll	k1, k1, 16
#endif
	.if	\tosp
	move	k0, sp
	.if	\docfi
	.cfi_register sp, k0
	.endif
	LONG_L	sp, %lo(kernelsp)(k1)
	.else
	LONG_L	k1, %lo(kernelsp)(k1)
	.endif
	.endm

	.macro	set_saved_sp stackp temp temp2
	LONG_S	\stackp, kernelsp
	.endm
#endif
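/*
 * SAVE_SOME is the common exception prologue: it saves the volatile
 * registers every handler needs and switches to the kernel stack when
 * the exception came from user mode. The mode test relies on the
 * kernel keeping ST0_CU0 set while on a kernel stack: CU0 is bit 28
 * of CP0 Status, so "sll k0, 3" moves it into the sign bit and the
 * bltz skips the stack switch when it was already set.
 */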
	.macro	SAVE_SOME docfi=0
	.set	push
	.set	noat
	.set	reorder
	mfc0	k0, CP0_STATUS
	sll	k0, 3		/* extract cu0 bit */
	.set	noreorder
	bltz	k0, 8f
	/* delay slot: record the pre-exception sp on both paths */
	 move	k0, sp
	.if	\docfi
	.cfi_register sp, k0
	.endif
#ifdef CONFIG_EVA
	/*
	 * Flush interAptiv's Return Prediction Stack (RPS) by writing
	 * EntryHi. Toggling Config7.RPS is slower and less portable.
	 *
	 * The RPS isn't automatically flushed when exceptions are
	 * taken, which can result in kernel mode speculative accesses
	 * to user addresses if the RPS mispredicts. That's harmless
	 * when user and kernel share the same address space, but with
	 * EVA the same user segments may be unmapped to kernel mode,
	 * even containing sensitive MMIO regions or invalid memory.
	 *
	 * This can happen when the kernel sets the return address to
	 * ret_from_* and jr's to the exception handler, which looks
	 * more like a tail call than a function call. If nested calls
	 * don't evict the last user address in the RPS, it will
	 * mispredict the return and fetch from a user controlled
	 * address into the icache.
	 *
	 * More recent EVA-capable cores with MAAR to restrict
	 * speculative accesses aren't affected.
	 */
	MFC0	k0, CP0_ENTRYHI
	MTC0	k0, CP0_ENTRYHI
#endif
	.set	reorder
	/* Called from user mode, new stack. */
	get_saved_sp docfi=\docfi tosp=1
8:
#ifdef CONFIG_CPU_DADDI_WORKAROUNDS
	.set	at=k1
#endif
	PTR_SUBU sp, PT_SIZE
#ifdef CONFIG_CPU_DADDI_WORKAROUNDS
	.set	noat
#endif
	.if	\docfi
	.cfi_def_cfa sp, 0
	.endif
	cfi_st	k0, PT_R29, \docfi
	cfi_rel_offset	sp, PT_R29, \docfi
	cfi_st	v1, PT_R3, \docfi
	/*
	 * You might think that you don't need to save $0,
	 * but the FPU emulator and gdb remote debug stub
	 * need it to operate correctly
	 */
	LONG_S	$0, PT_R0(sp)
	mfc0	v1, CP0_STATUS
	cfi_st	v0, PT_R2, \docfi
	LONG_S	v1, PT_STATUS(sp)
	cfi_st	$4, PT_R4, \docfi
	mfc0	v1, CP0_CAUSE
	cfi_st	$5, PT_R5, \docfi
	LONG_S	v1, PT_CAUSE(sp)
	cfi_st	$6, PT_R6, \docfi
	cfi_st	ra, PT_R31, \docfi
	MFC0	ra, CP0_EPC
	cfi_st	$7, PT_R7, \docfi
#ifdef CONFIG_64BIT
	cfi_st	$8, PT_R8, \docfi
	cfi_st	$9, PT_R9, \docfi
#endif
	LONG_S	ra, PT_EPC(sp)
	.if	\docfi
	.cfi_rel_offset ra, PT_EPC
	.endif
	cfi_st	$25, PT_R25, \docfi
	cfi_st	$28, PT_R28, \docfi

	/* Set thread_info if we're coming from user mode */
	mfc0	k0, CP0_STATUS
	sll	k0, 3		/* extract cu0 bit */
	bltz	k0, 9f

	ori	$28, sp, _THREAD_MASK
	xori	$28, _THREAD_MASK
#ifdef CONFIG_CPU_CAVIUM_OCTEON
	.set	mips64
	pref	0, 0($28)	/* Prefetch the current pointer */
#endif
9:
	.set	pop
	.endm
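/*
 * SAVE_ALL below produces a complete struct pt_regs on the kernel
 * stack. A rough usage sketch (handler name hypothetical, ordering
 * simplified; the real handlers live in arch/mips/kernel/genex.S):
 *
 *	SAVE_ALL
 *	CLI				# kernel mode, interrupts off
 *	move	a0, sp			# pt_regs pointer for the C handler
 *	jal	do_whatever		# hypothetical C-level handler
 *	j	ret_from_exception
 */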
	.macro	SAVE_ALL docfi=0
	SAVE_SOME \docfi
	SAVE_AT \docfi
	SAVE_TEMP \docfi
	SAVE_STATIC \docfi
	.endm

	.macro	RESTORE_AT docfi=0
	.set	push
	.set	noat
	cfi_ld	$1, PT_R1, \docfi
	.set	pop
	.endm

	.macro	RESTORE_TEMP docfi=0
#ifdef CONFIG_CPU_CAVIUM_OCTEON
	/* Restore the Octeon multiplier state */
	jal	octeon_mult_restore
#endif
#ifdef CONFIG_CPU_HAS_SMARTMIPS
	LONG_L	$24, PT_ACX(sp)
	mtlhx	$24
	LONG_L	$24, PT_HI(sp)
	mtlhx	$24
	LONG_L	$24, PT_LO(sp)
	mtlhx	$24
#elif !defined(CONFIG_CPU_MIPSR6)
	LONG_L	$24, PT_LO(sp)
	mtlo	$24
	LONG_L	$24, PT_HI(sp)
	mthi	$24
#endif
#ifdef CONFIG_32BIT
	cfi_ld	$8, PT_R8, \docfi
	cfi_ld	$9, PT_R9, \docfi
#endif
	cfi_ld	$10, PT_R10, \docfi
	cfi_ld	$11, PT_R11, \docfi
	cfi_ld	$12, PT_R12, \docfi
	cfi_ld	$13, PT_R13, \docfi
	cfi_ld	$14, PT_R14, \docfi
	cfi_ld	$15, PT_R15, \docfi
	cfi_ld	$24, PT_R24, \docfi
	.endm

	.macro	RESTORE_STATIC docfi=0
	cfi_ld	$16, PT_R16, \docfi
	cfi_ld	$17, PT_R17, \docfi
	cfi_ld	$18, PT_R18, \docfi
	cfi_ld	$19, PT_R19, \docfi
	cfi_ld	$20, PT_R20, \docfi
	cfi_ld	$21, PT_R21, \docfi
	cfi_ld	$22, PT_R22, \docfi
	cfi_ld	$23, PT_R23, \docfi
	cfi_ld	$30, PT_R30, \docfi
	.endm

	.macro	RESTORE_SP docfi=0
	cfi_ld	sp, PT_R29, \docfi
	.endm
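/*
 * Two exception-return flavours follow. R3000/TX39 class CPUs have no
 * ERET: they return with a jr through the saved EPC and an RFE in the
 * delay slot to pop the Status KU/IE stack. Later CPUs restore EPC
 * into CP0 and use ERET. Both variants first splice the saved Status
 * value together with the live CU1/FR/IM bits, so FPU ownership and
 * the current interrupt mask are not clobbered by the restore.
 */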
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

	.macro	RESTORE_SOME docfi=0
	.set	push
	.set	reorder
	.set	noat
	mfc0	a0, CP0_STATUS
	li	v1, ST0_CU1 | ST0_IM
	ori	a0, STATMASK
	xori	a0, STATMASK
	mtc0	a0, CP0_STATUS
	and	a0, v1
	LONG_L	v0, PT_STATUS(sp)
	nor	v1, $0, v1
	and	v0, v1
	or	v0, a0
	mtc0	v0, CP0_STATUS
	cfi_ld	$31, PT_R31, \docfi
	cfi_ld	$28, PT_R28, \docfi
	cfi_ld	$25, PT_R25, \docfi
	cfi_ld	$7, PT_R7, \docfi
	cfi_ld	$6, PT_R6, \docfi
	cfi_ld	$5, PT_R5, \docfi
	cfi_ld	$4, PT_R4, \docfi
	cfi_ld	$3, PT_R3, \docfi
	cfi_ld	$2, PT_R2, \docfi
	.set	pop
	.endm

	.macro	RESTORE_SP_AND_RET docfi=0
	.set	push
	.set	noreorder
	LONG_L	k0, PT_EPC(sp)
	RESTORE_SP \docfi
	jr	k0
	 rfe
	.set	pop
	.endm

#else
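/*
 * Post-R3000 variant: ERET returns to the address in CP0 EPC and
 * clears Status.EXL atomically, so EPC is reloaded via MTC0 in
 * RESTORE_SOME and no jump through a GPR is needed; MIPSr6 kernels
 * use ERETNC instead.
 */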
	.macro	RESTORE_SOME docfi=0
	.set	push
	.set	reorder
	.set	noat
	mfc0	a0, CP0_STATUS
	ori	a0, STATMASK
	xori	a0, STATMASK
	mtc0	a0, CP0_STATUS
	li	v1, ST0_CU1 | ST0_FR | ST0_IM
	and	a0, v1
	LONG_L	v0, PT_STATUS(sp)
	nor	v1, $0, v1
	and	v0, v1
	or	v0, a0
	mtc0	v0, CP0_STATUS
	LONG_L	v1, PT_EPC(sp)
	MTC0	v1, CP0_EPC
	cfi_ld	$31, PT_R31, \docfi
	cfi_ld	$28, PT_R28, \docfi
	cfi_ld	$25, PT_R25, \docfi
#ifdef CONFIG_64BIT
	cfi_ld	$8, PT_R8, \docfi
	cfi_ld	$9, PT_R9, \docfi
#endif
	cfi_ld	$7, PT_R7, \docfi
	cfi_ld	$6, PT_R6, \docfi
	cfi_ld	$5, PT_R5, \docfi
	cfi_ld	$4, PT_R4, \docfi
	cfi_ld	$3, PT_R3, \docfi
	cfi_ld	$2, PT_R2, \docfi
	.set	pop
	.endm

	.macro	RESTORE_SP_AND_RET docfi=0
	RESTORE_SP \docfi
#ifdef CONFIG_CPU_MIPSR6
	eretnc
#else
	.set	arch=r4000
	eret
	.set	mips0
#endif
	.endm

#endif

	.macro	RESTORE_ALL docfi=0
	RESTORE_TEMP \docfi
	RESTORE_STATIC \docfi
	RESTORE_AT \docfi
	RESTORE_SOME \docfi
	RESTORE_SP \docfi
	.endm

/*
 * Move to kernel mode and disable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
	.macro	CLI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | STATMASK
	or	t0, t1
	xori	t0, STATMASK
	mtc0	t0, CP0_STATUS
	irq_disable_hazard
	.endm
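/*
 * The ori/xori pair in CLI, STI and KMODE is a read-modify-write
 * idiom: or'ing in STATMASK sets all of the KSU/ERL/EXL/IE bits (plus
 * the old-status bits on R3000-class CPUs), and the following xori
 * clears the ones that must end up zero. STI masks with
 * "STATMASK & ~1" so that ST0_IE (bit 0) stays set, while KMODE never
 * touches IE at all, preserving the previous interrupt state.
 */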
/*
 * Move to kernel mode and enable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
	.macro	STI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | STATMASK
	or	t0, t1
	xori	t0, STATMASK & ~1
	mtc0	t0, CP0_STATUS
	irq_enable_hazard
	.endm

/*
 * Just move to kernel mode and leave interrupts as they are. Note
 * for the R3000 this means copying the previous enable from IEp.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
	.macro	KMODE
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | (STATMASK & ~1)
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
	andi	t2, t0, ST0_IEP
	srl	t2, 2
	or	t0, t2
#endif
	or	t0, t1
	xori	t0, STATMASK & ~1
	mtc0	t0, CP0_STATUS
	irq_disable_hazard
	.endm

#endif /* _ASM_STACKFRAME_H */