/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * MSR_KERNEL is > 0x8000 on 4xx/Book-E since it includes MSR_CE.
 */
.macro __LOAD_MSR_KERNEL r, x
.if \x >= 0x8000
	lis \r, (\x)@h
	ori \r, \r, (\x)@l
.else
	li \r, (\x)
.endif
.endm
#define LOAD_MSR_KERNEL(r, x) __LOAD_MSR_KERNEL r, x
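
/*
 * Illustrative expansion (for reference only): li takes a signed 16-bit
 * immediate, so values of 0x8000 and above cannot be loaded correctly
 * with a single li.  LOAD_MSR_KERNEL(r10, x) therefore emits
 *	li	r10, x
 * for small values, and
 *	lis	r10, x@h
 *	ori	r10, r10, x@l
 * for MSR values that include high bits such as MSR_CE.
 */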

/*
 * Exception entry code.  This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */

.macro EXCEPTION_PROLOG
	mtspr	SPRN_SPRG_SCRATCH0,r10
	mtspr	SPRN_SPRG_SCRATCH1,r11
	mfcr	r10
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2
.endm

.macro EXCEPTION_PROLOG_1
	mfspr	r11,SPRN_SRR1		/* check whether user or kernel */
	andi.	r11,r11,MSR_PR
	tophys(r11,r1)			/* use tophys(r1) if kernel */
	beq	1f
	mfspr	r11,SPRN_SPRG_THREAD
	lwz	r11,TASK_STACK-THREAD(r11)
	addi	r11,r11,THREAD_SIZE
	tophys(r11,r11)
1:	subi	r11,r11,INT_FRAME_SIZE	/* alloc exc. frame */
.endm

.macro EXCEPTION_PROLOG_2
	stw	r10,_CCR(r11)		/* save registers */
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	mfspr	r10,SPRN_SPRG_SCRATCH0
	stw	r10,GPR10(r11)
	mfspr	r12,SPRN_SPRG_SCRATCH1
	stw	r12,GPR11(r11)
	mflr	r10
	stw	r10,_LINK(r11)
	mfspr	r12,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt(r1,r11)			/* set new kernel sp */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
	MTMSRD(r10)			/* (except for mach check in rtas) */
#endif
	stw	r0,GPR0(r11)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,8(r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
.endm
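
/*
 * Typical use, sketched from an exception vector in head_32.S
 * (simplified and illustrative; the vector and handler names below
 * are assumed for the example):
 *
 *	. = 0x300
 *	DataAccess:
 *		EXCEPTION_PROLOG
 *		...			(vector-specific handling)
 *		EXC_XFER_LITE(0x300, handle_page_fault)
 *
 * EXCEPTION_PROLOG saves the volatile state and switches to the kernel
 * stack; the EXC_XFER_* macros defined below then transfer to the C
 * handler with the MMU re-enabled.
 */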

.macro SYSCALL_ENTRY trapno
	mfspr	r12,SPRN_SPRG_THREAD
	mfcr	r10
	lwz	r11,TASK_STACK-THREAD(r12)
	mflr	r9
	addi	r11,r11,THREAD_SIZE - INT_FRAME_SIZE
	rlwinm	r10,r10,0,4,2		/* Clear SO bit in CR */
	tophys(r11,r11)
	stw	r10,_CCR(r11)		/* save registers */
	mfspr	r10,SPRN_SRR0
	stw	r9,_LINK(r11)
	mfspr	r9,SPRN_SRR1
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt(r1,r11)			/* set new kernel sp */
	stw	r10,_NIP(r11)
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
	LOAD_MSR_KERNEL(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
	MTMSRD(r10)			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r9,_MSR(r11)
	li	r2, \trapno + 1
	stw	r10,8(r11)
	stw	r2,_TRAP(r11)
	SAVE_GPR(0, r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
	addi	r11,r1,STACK_FRAME_OVERHEAD
	addi	r2,r12,-THREAD
	stw	r11,PT_REGS(r12)
#if defined(CONFIG_40x)
	/* Check to see if the dbcr0 register is set up to debug.  Use the
	   internal debug mode bit to do this. */
	lwz	r12,THREAD_DBCR0(r12)
	andis.	r12,r12,DBCR0_IDM@h
#endif
	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
#if defined(CONFIG_40x)
	beq+	3f
	/* From user and task is ptraced - load up global dbcr0 */
	li	r12,-1			/* clear all pending debug events */
	mtspr	SPRN_DBSR,r12
	lis	r11,global_dbcr0@ha
	tophys(r11,r11)
	addi	r11,r11,global_dbcr0@l
	lwz	r12,0(r11)
	mtspr	SPRN_DBCR0,r12
	lwz	r12,4(r11)
	addi	r12,r12,-1
	stw	r12,4(r11)
#endif

3:
	tovirt(r2, r2)			/* set r2 to current */
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing, we need to keep interrupts disabled at this
	 * point; otherwise we might risk taking an interrupt before we tell
	 * lockdep they are enabled.
	 */
	LOAD_MSR_KERNEL(r10, MSR_KERNEL)
	rlwimi	r10, r9, 0, MSR_EE
#else
	LOAD_MSR_KERNEL(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	SYNC
	RFI				/* jump to handler, enable MMU */
.endm
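
/*
 * Sketch of intended use (illustrative only): the system call vector in
 * head_32.S invokes this macro with its trap number, e.g.
 *
 *	. = 0xc00
 *	SystemCall:
 *		SYSCALL_ENTRY	0xc00
 *
 * which builds the kernel stack frame and then rfis to
 * transfer_to_syscall with the MMU enabled.
 */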

/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
#ifdef CONFIG_PPC_BOOK3S
#define START_EXCEPTION(n, label)	\
	. = n;				\
	DO_KVM n;			\
label:

#else
#define START_EXCEPTION(n, label)	\
	. = n;				\
label:

#endif

#define EXCEPTION(n, label, hdlr, xfer)		\
	START_EXCEPTION(n, label)		\
	EXCEPTION_PROLOG;			\
	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
	xfer(n, hdlr)

#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)	\
	li	r10,trap;				\
	stw	r10,_TRAP(r11);				\
	LOAD_MSR_KERNEL(r10, msr);			\
	bl	tfer;					\
	.long	hdlr;					\
	.long	ret

#define EXC_XFER_STD(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full, \
			  ret_from_except_full)

#define EXC_XFER_LITE(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
			  ret_from_except)

#endif /* __HEAD_32_H__ */