xref: /openbmc/linux/arch/powerpc/kernel/head_32.h (revision 15a1fbdcfb519c2bd291ed01c6c94e0b89537a77)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef __HEAD_32_H__
3 #define __HEAD_32_H__
4 
5 #include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */
6 
7 /*
8  * Exception entry code.  This code runs with address translation
9  * turned off, i.e. using physical addresses.
10  * We assume sprg3 has the physical address of the current
11  * task's thread_struct.
12  */
13 .macro EXCEPTION_PROLOG handle_dar_dsisr=0
	/*
	 * Full exception prologue: runs stages 0, 1 and 2 in order.
	 * handle_dar_dsisr=1 additionally captures DAR/DSISR (used by
	 * stages 0 and 2 below).
	 */
14 	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
15 	EXCEPTION_PROLOG_1
16 	EXCEPTION_PROLOG_2	handle_dar_dsisr=\handle_dar_dsisr
17 .endm
18 
19 .macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
	/*
	 * Stage 0: free up r10/r11 and classify the interrupted context.
	 * On exit: r10 = saved CR, cr0.eq = 1 if we came from kernel mode.
	 */
20 	mtspr	SPRN_SPRG_SCRATCH0,r10
21 	mtspr	SPRN_SPRG_SCRATCH1,r11
22 #ifdef CONFIG_VMAP_STACK
	/*
	 * With a vmapped stack, stash DAR/DSISR/SRR0/SRR1 in the
	 * thread_struct right away: later stack accesses may fault (see
	 * "can take DTLB miss" in EXCEPTION_PROLOG_1), which would
	 * overwrite these SPRs before stage 2 reads them.
	 */
23 	mfspr	r10, SPRN_SPRG_THREAD
24 	.if	\handle_dar_dsisr
25 	mfspr	r11, SPRN_DAR
26 	stw	r11, DAR(r10)
27 	mfspr	r11, SPRN_DSISR
28 	stw	r11, DSISR(r10)
29 	.endif
30 	mfspr	r11, SPRN_SRR0
31 	stw	r11, SRR0(r10)
32 #endif
33 	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
34 #ifdef CONFIG_VMAP_STACK
35 	stw	r11, SRR1(r10)
36 #endif
37 	mfcr	r10			/* keep interrupted CR in r10 */
38 	andi.	r11, r11, MSR_PR	/* cr0.eq set when MSR_PR clear (kernel) */
39 .endm
40 
41 .macro EXCEPTION_PROLOG_1 for_rtas=0
	/*
	 * Stage 1: compute the exception frame address into r11, using the
	 * cr0.eq result from EXCEPTION_PROLOG_0: if set (kernel mode) the
	 * frame is carved below the current r1, otherwise from the top of
	 * the task's kernel stack.
	 */
42 #ifdef CONFIG_VMAP_STACK
43 	.ifeq	\for_rtas		/* MSR change skipped on the RTAS path */
44 	li	r11, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
45 	mtmsr	r11
46 	isync
47 	.endif
48 	subi	r11, r1, INT_FRAME_SIZE		/* use r1 if kernel */
49 #else
50 	tophys(r11,r1)			/* use tophys(r1) if kernel */
51 	subi	r11, r11, INT_FRAME_SIZE	/* alloc exc. frame */
52 #endif
53 	beq	1f			/* from kernel: keep the r1-based frame */
54 	mfspr	r11,SPRN_SPRG_THREAD
55 	tovirt_vmstack r11, r11
56 	lwz	r11,TASK_STACK-THREAD(r11)	/* r11 = task's kernel stack base */
57 	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
58 	tophys_novmstack r11, r11
59 1:
60 #ifdef CONFIG_VMAP_STACK
	/*
	 * Copy r11's bits into CR1..CR7 so 'bt' can test one address bit:
	 * if bit (32 - THREAD_ALIGN_SHIFT) of the frame address is set the
	 * frame has run outside the aligned thread stack -> overflow path.
	 */
61 	mtcrf	0x7f, r11
62 	bt	32 - THREAD_ALIGN_SHIFT, stack_overflow
63 #endif
64 .endm
65 
66 .macro EXCEPTION_PROLOG_2 handle_dar_dsisr=0
	/*
	 * Stage 2: populate the exception frame at r11, recover the
	 * original r10/r11 from the scratch SPRGs, and switch r1 to the
	 * new kernel stack.
	 * On exit: r9 = SRR1, r11 = frame address, r12 = SRR0 (NIP).
	 */
67 	stw	r10,_CCR(r11)		/* save registers */
68 	stw	r12,GPR12(r11)
69 	stw	r9,GPR9(r11)
70 	mfspr	r10,SPRN_SPRG_SCRATCH0	/* original r10, saved in stage 0 */
71 	stw	r10,GPR10(r11)
72 	mfspr	r12,SPRN_SPRG_SCRATCH1	/* original r11, saved in stage 0 */
73 	stw	r12,GPR11(r11)
74 	mflr	r10
75 	stw	r10,_LINK(r11)
76 #ifdef CONFIG_VMAP_STACK
	/*
	 * Read back DAR/DSISR/SRR0/SRR1 from where EXCEPTION_PROLOG_0
	 * stashed them in the thread_struct; the SPRs themselves may have
	 * been clobbered by a fault taken since then.
	 */
77 	mfspr	r12, SPRN_SPRG_THREAD
78 	tovirt(r12, r12)
79 	.if	\handle_dar_dsisr
80 	lwz	r10, DAR(r12)
81 	stw	r10, _DAR(r11)
82 	lwz	r10, DSISR(r12)
83 	stw	r10, _DSISR(r11)
84 	.endif
85 	lwz	r9, SRR1(r12)
86 	lwz	r12, SRR0(r12)
87 #else
88 	mfspr	r12,SPRN_SRR0
89 	mfspr	r9,SPRN_SRR1
90 #endif
91 	stw	r1,GPR1(r11)
92 	stw	r1,0(r11)		/* stack frame back-chain */
93 	tovirt_novmstack r1, r11	/* set new kernel sp */
94 #ifdef CONFIG_40x
95 	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
96 #else
97 #ifdef CONFIG_VMAP_STACK
98 	li	r10, MSR_KERNEL & ~MSR_IR /* can take exceptions */
99 #else
100 	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
101 #endif
102 	mtmsr	r10			/* (except for mach check in rtas) */
103 #endif
104 	stw	r0,GPR0(r11)
105 	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
106 	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
107 	stw	r10,8(r11)
108 	SAVE_4GPRS(3, r11)		/* save r3..r6 */
109 	SAVE_2GPRS(7, r11)		/* save r7, r8 */
110 .endm
111 
112 .macro SYSCALL_ENTRY trapno
	/*
	 * System call entry: build a pt_regs frame at the top of the
	 * task's kernel stack, save volatile state, then RFI to
	 * transfer_to_syscall with the MMU (re)enabled.  Entered with
	 * address translation off, like the exception prologs above.
	 */
113 	mfspr	r12,SPRN_SPRG_THREAD	/* r12 = thread_struct */
114 #ifdef CONFIG_VMAP_STACK
	/*
	 * Stash SRR0/SRR1 in the thread_struct before any stack access
	 * can fault (see the "can take DTLB miss" MSR change below).
	 */
115 	mfspr	r9, SPRN_SRR0
116 	mfspr	r11, SPRN_SRR1
117 	stw	r9, SRR0(r12)
118 	stw	r11, SRR1(r12)
119 #endif
120 	mfcr	r10
121 	lwz	r11,TASK_STACK-THREAD(r12)	/* r11 = task's kernel stack base */
122 	rlwinm	r10,r10,0,4,2	/* Clear SO bit in CR */
123 	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE	/* frame at stack top */
124 #ifdef CONFIG_VMAP_STACK
125 	li	r9, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
126 	mtmsr	r9
127 	isync
128 #endif
129 	tovirt_vmstack r12, r12
130 	tophys_novmstack r11, r11
131 	mflr	r9
132 	stw	r10,_CCR(r11)		/* save registers */
133 	stw	r9, _LINK(r11)
134 #ifdef CONFIG_VMAP_STACK
135 	lwz	r10, SRR0(r12)		/* reload values stashed above */
136 	lwz	r9, SRR1(r12)
137 #else
138 	mfspr	r10,SPRN_SRR0
139 	mfspr	r9,SPRN_SRR1
140 #endif
141 	stw	r1,GPR1(r11)
142 	stw	r1,0(r11)		/* stack frame back-chain */
143 	tovirt_novmstack r1, r11	/* set new kernel sp */
144 	stw	r10,_NIP(r11)		/* interrupted PC (from SRR0) */
145 #ifdef CONFIG_40x
146 	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
147 #else
148 #ifdef CONFIG_VMAP_STACK
149 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
150 #else
151 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
152 #endif
153 	mtmsr	r10			/* (except for mach check in rtas) */
154 #endif
155 	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
156 	stw	r2,GPR2(r11)
157 	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
158 	stw	r9,_MSR(r11)		/* interrupted MSR (from SRR1) */
159 	li	r2, \trapno + 1		/* NOTE(review): low bit set presumably flags syscall-style entry in _TRAP — confirm against the trap-flag convention */
160 	stw	r10,8(r11)
161 	stw	r2,_TRAP(r11)
162 	SAVE_GPR(0, r11)
163 	SAVE_4GPRS(3, r11)		/* save r3..r6 (syscall args) */
164 	SAVE_2GPRS(7, r11)		/* save r7, r8 */
165 	addi	r11,r1,STACK_FRAME_OVERHEAD	/* r11 = pt_regs pointer */
166 	addi	r2,r12,-THREAD		/* r2 = task_struct of current */
167 	stw	r11,PT_REGS(r12)	/* thread.regs = pt_regs */
168 #if defined(CONFIG_40x)
169 	/* Check to see if the dbcr0 register is set up to debug.  Use the
170 	   internal debug mode bit to do this. */
171 	lwz	r12,THREAD_DBCR0(r12)
172 	andis.	r12,r12,DBCR0_IDM@h
173 #endif
174 	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
175 #if defined(CONFIG_40x)
176 	beq+	3f			/* IDM clear: no debug setup needed */
177 	/* From user and task is ptraced - load up global dbcr0 */
178 	li	r12,-1			/* clear all pending debug events */
179 	mtspr	SPRN_DBSR,r12
180 	lis	r11,global_dbcr0@ha
181 	tophys(r11,r11)
182 	addi	r11,r11,global_dbcr0@l
183 	lwz	r12,0(r11)
184 	mtspr	SPRN_DBCR0,r12
185 	lwz	r12,4(r11)
186 	addi	r12,r12,-1		/* NOTE(review): decrements the word at global_dbcr0+4 — looks like a counter, confirm */
187 	stw	r12,4(r11)
188 #endif
189 
190 3:
191 	tovirt_novmstack r2, r2 	/* set r2 to current */
192 	lis	r11, transfer_to_syscall@h
193 	ori	r11, r11, transfer_to_syscall@l
194 #ifdef CONFIG_TRACE_IRQFLAGS
195 	/*
196 	 * If MSR is changing we need to keep interrupts disabled at this point
197 	 * otherwise we might risk taking an interrupt before we tell lockdep
198 	 * they are enabled.
199 	 */
200 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
201 	rlwimi	r10, r9, 0, MSR_EE	/* carry over the caller's MSR_EE */
202 #else
203 	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
204 #endif
205 #if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
206 	mtspr	SPRN_NRI, r0
207 #endif
208 	mtspr	SPRN_SRR1,r10		/* MSR for the handler */
209 	mtspr	SPRN_SRR0,r11		/* entry point: transfer_to_syscall */
210 	SYNC
211 	RFI				/* jump to handler, enable MMU */
212 .endm
213 
214 .macro save_dar_dsisr_on_stack reg1, reg2, sp
	/*
	 * Store DAR/DSISR into the exception frame at \sp, clobbering
	 * \reg1/\reg2.  A no-op with VMAP_STACK, where EXCEPTION_PROLOG_2
	 * (with handle_dar_dsisr=1) has already copied them into the frame.
	 */
215 #ifndef CONFIG_VMAP_STACK
216 	mfspr	\reg1, SPRN_DAR
217 	mfspr	\reg2, SPRN_DSISR
218 	stw	\reg1, _DAR(\sp)
219 	stw	\reg2, _DSISR(\sp)
220 #endif
221 .endm
222 
223 .macro get_and_save_dar_dsisr_on_stack reg1, reg2, sp
	/*
	 * Leave DAR in \reg1 and DSISR in \reg2, and ensure both live in
	 * the frame at \sp.  With VMAP_STACK they are already in the frame
	 * (EXCEPTION_PROLOG_2 put them there), so read them back from it;
	 * otherwise read the SPRs and store them into the frame.
	 */
224 #ifdef CONFIG_VMAP_STACK
225 	lwz	\reg1, _DAR(\sp)
226 	lwz	\reg2, _DSISR(\sp)
227 #else
228 	save_dar_dsisr_on_stack \reg1, \reg2, \sp
229 #endif
230 .endm
231 
232 .macro tovirt_vmstack dst, src
	/*
	 * \dst = tovirt(\src) only when stacks are vmapped (stack
	 * addresses are then virtual); otherwise a plain register copy,
	 * elided when \dst and \src name the same register.
	 */
233 #ifdef CONFIG_VMAP_STACK
234 	tovirt(\dst, \src)
235 #else
236 	.ifnc	\dst, \src
237 	mr	\dst, \src
238 	.endif
239 #endif
240 .endm
241 
242 .macro tovirt_novmstack dst, src
	/*
	 * Inverse configuration of tovirt_vmstack: \dst = tovirt(\src)
	 * only when stacks are NOT vmapped; otherwise a plain register
	 * copy, elided when \dst and \src name the same register.
	 */
243 #ifndef CONFIG_VMAP_STACK
244 	tovirt(\dst, \src)
245 #else
246 	.ifnc	\dst, \src
247 	mr	\dst, \src
248 	.endif
249 #endif
250 .endm
251 
252 .macro tophys_novmstack dst, src
	/*
	 * \dst = tophys(\src) only when stacks are NOT vmapped (vmapped
	 * stacks are accessed via virtual addresses); otherwise a plain
	 * register copy, elided when \dst and \src name the same register.
	 */
253 #ifndef CONFIG_VMAP_STACK
254 	tophys(\dst, \src)
255 #else
256 	.ifnc	\dst, \src
257 	mr	\dst, \src
258 	.endif
259 #endif
260 .endm
261 
262 /*
263  * Note: code which follows this uses cr0.eq (set if from kernel),
264  * r11, r12 (SRR0), and r9 (SRR1).
265  *
266  * Note2: once we have set r1 we are in a position to take exceptions
267  * again, and we could thus set MSR:RI at that point.
268  */
269 
270 /*
271  * Exception vectors.
272  */
	/*
	 * START_EXCEPTION(n, label): place 'label' at the fixed vector
	 * address n.  Book3S builds additionally emit the DO_KVM hook for
	 * this vector.  (Comments stay outside the #define bodies so the
	 * expanded text is unchanged.)
	 */
273 #ifdef CONFIG_PPC_BOOK3S
274 #define	START_EXCEPTION(n, label)		\
275 	. = n;					\
276 	DO_KVM n;				\
277 label:
278 
279 #else
280 #define	START_EXCEPTION(n, label)		\
281 	. = n;					\
282 label:
283 
284 #endif
285 
	/*
	 * EXCEPTION(n, label, hdlr, xfer): a complete simple exception
	 * vector — emit the vector at address n, run the common prolog,
	 * point r3 at the saved registers and hand off to 'hdlr' through
	 * the given transfer macro (EXC_XFER_STD or EXC_XFER_LITE).
	 */
286 #define EXCEPTION(n, label, hdlr, xfer)		\
287 	START_EXCEPTION(n, label)		\
288 	EXCEPTION_PROLOG;			\
289 	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
290 	xfer(n, hdlr)
291 
	/*
	 * EXC_XFER_TEMPLATE: record the trap number in the frame, load the
	 * MSR the handler should run with into r10, then branch to the
	 * transfer routine 'tfer'.  The handler and return addresses are
	 * emitted inline right after the 'bl', where LR points, for the
	 * transfer routine to pick up.
	 */
292 #define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)		\
293 	li	r10,trap;					\
294 	stw	r10,_TRAP(r11);					\
295 	LOAD_REG_IMMEDIATE(r10, msr);				\
296 	bl	tfer;						\
297 	.long	hdlr;						\
298 	.long	ret
299 
	/*
	 * EXC_XFER_STD: transfer via the full-save transfer/return pair.
	 * EXC_XFER_LITE: lightweight transfer/return pair; uses trap
	 * number n+1 (NOTE(review): the set low bit presumably marks the
	 * partial-save entry in _TRAP — confirm against the trap-flag
	 * convention used by the handlers).
	 */
300 #define EXC_XFER_STD(n, hdlr)		\
301 	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
302 			  ret_from_except_full)
303 
304 #define EXC_XFER_LITE(n, hdlr)		\
305 	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
306 			  ret_from_except)
307 
308 .macro vmap_stack_overflow_exception
	/*
	 * Stack-overflow path, branched to from EXCEPTION_PROLOG_1's
	 * alignment check: switch r11 to this CPU's emergency stack
	 * (falling back to init_thread_union while emergency_ctx is still
	 * NULL, i.e. early boot) and call stack_overflow_exception.
	 */
309 #ifdef CONFIG_VMAP_STACK
310 #ifdef CONFIG_SMP
311 	mfspr	r11, SPRN_SPRG_THREAD
312 	tovirt(r11, r11)
313 	lwz	r11, TASK_CPU - THREAD(r11)	/* r11 = this CPU's number */
314 	slwi	r11, r11, 3		/* NOTE(review): *8 — per-entry stride in emergency_ctx, confirm */
315 	addis	r11, r11, emergency_ctx@ha
316 #else
317 	lis	r11, emergency_ctx@ha
318 #endif
319 	lwz	r11, emergency_ctx@l(r11)	/* r11 = emergency stack pointer */
320 	cmpwi	cr1, r11, 0
321 	bne	cr1, 1f
322 	lis	r11, init_thread_union@ha	/* not set up yet: use init stack */
323 	addi	r11, r11, init_thread_union@l
324 1:	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE	/* frame at stack top */
325 	EXCEPTION_PROLOG_2
326 	SAVE_NVGPRS(r11)
327 	addi	r3, r1, STACK_FRAME_OVERHEAD	/* r3 = pt_regs argument */
328 	EXC_XFER_STD(0, stack_overflow_exception)
329 #endif
330 .endm
331 
332 #endif /* __HEAD_32_H__ */
333