/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * Exception entry code. This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */
.macro EXCEPTION_PROLOG handle_dar_dsisr=0
	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2	handle_dar_dsisr=\handle_dar_dsisr
.endm

.macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
	mtspr	SPRN_SPRG_SCRATCH0,r10
	mtspr	SPRN_SPRG_SCRATCH1,r11
#ifdef CONFIG_VMAP_STACK
	mfspr	r10, SPRN_SPRG_THREAD
	.if	\handle_dar_dsisr
	mfspr	r11, SPRN_DAR
	stw	r11, DAR(r10)
	mfspr	r11, SPRN_DSISR
	stw	r11, DSISR(r10)
	.endif
	mfspr	r11, SPRN_SRR0
	stw	r11, SRR0(r10)
#endif
	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
#ifdef CONFIG_VMAP_STACK
	stw	r11, SRR1(r10)
#endif
	mfcr	r10
	andi.	r11, r11, MSR_PR
.endm

.macro EXCEPTION_PROLOG_1
#ifdef CONFIG_VMAP_STACK
	li	r11, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r11
	isync
	subi	r11, r1, INT_FRAME_SIZE		/* use r1 if kernel */
#else
	tophys(r11,r1)			/* use tophys(r1) if kernel */
	subi	r11, r11, INT_FRAME_SIZE	/* alloc exc. frame */
#endif
	beq	1f
	mfspr	r11,SPRN_SPRG_THREAD
	tovirt_vmstack r11, r11
	lwz	r11,TASK_STACK-THREAD(r11)
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
	tophys_novmstack r11, r11
1:
#ifdef CONFIG_VMAP_STACK
	mtcrf	0x7f, r11
	bt	32 - THREAD_ALIGN_SHIFT, stack_overflow
#endif
.endm

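/*
 * Second half of the prolog: save the remaining volatile registers into the
 * frame selected by EXCEPTION_PROLOG_1, recover SRR0/SRR1 (and DAR/DSISR when
 * handle_dar_dsisr is set) either from the SPRs or from the copies stashed in
 * the thread_struct by EXCEPTION_PROLOG_0, switch r1 to the new kernel stack
 * frame and, except on 40x, set an MSR value that lets further exceptions be
 * taken.
 */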
.macro EXCEPTION_PROLOG_2 handle_dar_dsisr=0
	stw	r10,_CCR(r11)		/* save registers */
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	mfspr	r10,SPRN_SPRG_SCRATCH0
	stw	r10,GPR10(r11)
	mfspr	r12,SPRN_SPRG_SCRATCH1
	stw	r12,GPR11(r11)
	mflr	r10
	stw	r10,_LINK(r11)
#ifdef CONFIG_VMAP_STACK
	mfspr	r12, SPRN_SPRG_THREAD
	tovirt(r12, r12)
	.if	\handle_dar_dsisr
	lwz	r10, DAR(r12)
	stw	r10, _DAR(r11)
	lwz	r10, DSISR(r12)
	stw	r10, _DSISR(r11)
	.endif
	lwz	r9, SRR1(r12)
	lwz	r12, SRR0(r12)
#else
	mfspr	r12,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
#endif
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt_novmstack r1, r11	/* set new kernel sp */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	li	r10, MSR_KERNEL & ~MSR_IR /* can take exceptions */
#else
	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	stw	r0,GPR0(r11)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,8(r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
.endm

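/*
 * System call entry.  Like the exception prologs above this runs with
 * address translation turned off and builds a full exception frame on the
 * kernel stack: CR (with the SO bit cleared), LR, SRR0/SRR1, the GPRs and
 * the trap number are saved, r2 is set to the current task, and the handler
 * is entered through RFI so the MMU is enabled again at transfer_to_syscall.
 */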
.macro SYSCALL_ENTRY trapno
	mfspr	r12,SPRN_SPRG_THREAD
#ifdef CONFIG_VMAP_STACK
	mfspr	r9, SPRN_SRR0
	mfspr	r11, SPRN_SRR1
	stw	r9, SRR0(r12)
	stw	r11, SRR1(r12)
#endif
	mfcr	r10
	lwz	r11,TASK_STACK-THREAD(r12)
	rlwinm	r10,r10,0,4,2	/* Clear SO bit in CR */
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
#ifdef CONFIG_VMAP_STACK
	li	r9, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r9
	isync
#endif
	tovirt_vmstack r12, r12
	tophys_novmstack r11, r11
	mflr	r9
	stw	r10,_CCR(r11)		/* save registers */
	stw	r9, _LINK(r11)
#ifdef CONFIG_VMAP_STACK
	lwz	r10, SRR0(r12)
	lwz	r9, SRR1(r12)
#else
	mfspr	r10,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
#endif
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt_novmstack r1, r11	/* set new kernel sp */
	stw	r10,_NIP(r11)
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r9,_MSR(r11)
	li	r2, \trapno + 1
	stw	r10,8(r11)
	stw	r2,_TRAP(r11)
	SAVE_GPR(0, r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
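	/* r11 = pt_regs, r2 = current; remember the regs location in the thread_struct */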
	addi	r11,r1,STACK_FRAME_OVERHEAD
	addi	r2,r12,-THREAD
	stw	r11,PT_REGS(r12)
#if defined(CONFIG_40x)
	/* Check to see if the dbcr0 register is set up to debug.  Use the
	   internal debug mode bit to do this. */
	lwz	r12,THREAD_DBCR0(r12)
	andis.	r12,r12,DBCR0_IDM@h
#endif
	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
#if defined(CONFIG_40x)
	beq+	3f
	/* From user and task is ptraced - load up global dbcr0 */
	li	r12,-1			/* clear all pending debug events */
	mtspr	SPRN_DBSR,r12
	lis	r11,global_dbcr0@ha
	tophys(r11,r11)
	addi	r11,r11,global_dbcr0@l
	lwz	r12,0(r11)
	mtspr	SPRN_DBCR0,r12
	lwz	r12,4(r11)
	addi	r12,r12,-1
	stw	r12,4(r11)
#endif

3:
	tovirt_novmstack r2, r2		/* set r2 to current */
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this point
	 * otherwise we might risk taking an interrupt before we tell lockdep
	 * they are enabled.
	 */
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
	rlwimi	r10, r9, 0, MSR_EE
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	SYNC
	RFI				/* jump to handler, enable MMU */
.endm

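/*
 * Helpers hiding the differences between the VMAP_STACK and non-VMAP_STACK
 * configurations.  With CONFIG_VMAP_STACK, DAR/DSISR are saved early by
 * EXCEPTION_PROLOG_0 and copied into the frame by EXCEPTION_PROLOG_2, so the
 * get_and_save helper reads them back from the stack; without it they are
 * fetched from the SPRs here.  Likewise the tovirt/tophys wrappers only
 * convert addresses in the configuration that needs it and degenerate to a
 * plain register move (or nothing) otherwise.
 */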
.macro save_dar_dsisr_on_stack reg1, reg2, sp
#ifndef CONFIG_VMAP_STACK
	mfspr	\reg1, SPRN_DAR
	mfspr	\reg2, SPRN_DSISR
	stw	\reg1, _DAR(\sp)
	stw	\reg2, _DSISR(\sp)
#endif
.endm

.macro get_and_save_dar_dsisr_on_stack reg1, reg2, sp
#ifdef CONFIG_VMAP_STACK
	lwz	\reg1, _DAR(\sp)
	lwz	\reg2, _DSISR(\sp)
#else
	save_dar_dsisr_on_stack \reg1, \reg2, \sp
#endif
.endm

.macro tovirt_vmstack dst, src
#ifdef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tovirt_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tophys_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tophys(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
#ifdef CONFIG_PPC_BOOK3S
#define	START_EXCEPTION(n, label)	\
	. = n;				\
	DO_KVM n;			\
label:

#else
#define	START_EXCEPTION(n, label)	\
	. = n;				\
label:

#endif

#define EXCEPTION(n, label, hdlr, xfer)		\
	START_EXCEPTION(n, label)		\
	EXCEPTION_PROLOG;			\
	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
	xfer(n, hdlr)

#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)		\
	li	r10,trap;					\
	stw	r10,_TRAP(r11);					\
	LOAD_REG_IMMEDIATE(r10, msr);				\
	bl	tfer;						\
	.long	hdlr;						\
	.long	ret

#define EXC_XFER_STD(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
			  ret_from_except_full)

#define EXC_XFER_LITE(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
			  ret_from_except)

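/*
 * Handler for the stack_overflow branch taken in EXCEPTION_PROLOG_1: switch
 * to the per-CPU emergency stack (emergency_ctx), or to init_thread_union
 * early in boot while emergency_ctx is still NULL, then build a frame with
 * EXCEPTION_PROLOG_2 and hand over to stack_overflow_exception.
 */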
.macro vmap_stack_overflow_exception
#ifdef CONFIG_VMAP_STACK
#ifdef CONFIG_SMP
	mfspr	r11, SPRN_SPRG_THREAD
	tovirt(r11, r11)
	lwz	r11, TASK_CPU - THREAD(r11)
	slwi	r11, r11, 3
	addis	r11, r11, emergency_ctx@ha
#else
	lis	r11, emergency_ctx@ha
#endif
	lwz	r11, emergency_ctx@l(r11)
	cmpwi	cr1, r11, 0
	bne	cr1, 1f
	lis	r11, init_thread_union@ha
	addi	r11, r11, init_thread_union@l
1:	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
	EXCEPTION_PROLOG_2
	SAVE_NVGPRS(r11)
	addi	r3, r1, STACK_FRAME_OVERHEAD
	EXC_XFER_STD(0, stack_overflow_exception)
#endif
.endm

#endif /* __HEAD_32_H__ */