/*
 * arch/sh/kernel/cpu/sh2/entry.S
 *
 * The SH-2 exception entry
 *
 * Copyright (C) 2005-2008 Yoshinori Sato
 * Copyright (C) 2005  AXE,Inc.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */

#include <linux/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <cpu/mmu_context.h>
#include <asm/unistd.h>
#include <asm/errno.h>
#include <asm/page.h>

/* Offsets to the stack */
OFF_R0  =  0		/* Return value. New ABI also arg4 */
OFF_R1  =  4		/* New ABI: arg5 */
OFF_R2  =  8		/* New ABI: arg6 */
OFF_R3  =  12		/* New ABI: syscall_nr */
OFF_R4  =  16		/* New ABI: arg0 */
OFF_R5  =  20		/* New ABI: arg1 */
OFF_R6  =  24		/* New ABI: arg2 */
OFF_R7  =  28		/* New ABI: arg3 */
OFF_SP	=  (15*4)
OFF_PC  =  (16*4)
OFF_SR	=  (16*4+2*4)
OFF_TRA	=  (16*4+6*4)
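! The offsets above describe the frame built by the entry paths below:
! r0..r15, then pc, pr, sr, gbr, mach, macl and tra, i.e. the layout of
! struct pt_regs on SH.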

#include <asm/entry-macros.S>

ENTRY(exception_handler)
	! stack on entry (r15 points at the saved r0):
	!   r0
	!   r1
	!   pc
	!   sr
	! register usage:
	!   r0 = temporary
	!   r1 = vector (pseudo EXPEVT / INTEVT / TRA)
	mov.l	r2,@-sp
	mov.l	r3,@-sp
	cli
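	! SH-2 has no MD (privilege) bit in SR, so the current mode is tracked
	! in the per-CPU word __cpu_mode (0x40000000 = kernel, 0 = user, the
	! SH-3/4 MD bit position).  It is merged into the saved SR image below
	! and decides whether a stack switch is needed.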
	mov.l	$cpu_mode,r2
#ifdef CONFIG_SMP
	mov.l	$cpuid,r3
	mov.l	@r3,r3
	mov.l	@r3,r3
	shll2	r3
	add	r3,r2
#endif
	mov.l	@r2,r0
	mov.l	@(5*4,r15),r3	! previous SR
	or	r0,r3		! set MD
	tst	r0,r0
	bf/s	1f		! previous mode check
	 mov.l	r3,@(5*4,r15)	! update SR
	! switch to kernel mode
	mov.l	__md_bit,r0
	mov.l	r0,@r2		! enter kernel mode
	mov.l	$current_thread_info,r2
#ifdef CONFIG_SMP
	mov.l	$cpuid,r0
	mov.l	@r0,r0
	mov.l	@r0,r0
	shll2	r0
	add	r0,r2
#endif
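	! coming from user mode: the kernel stack top is the current
	! thread_info (cached in __current_thread_info[cpu]) plus THREAD_SIZE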
	mov.l	@r2,r2
	mov	#(THREAD_SIZE >> 8),r0
	shll8	r0
	add	r2,r0
	mov	r15,r2		! r2 = user stack top
	mov	r0,r15		! switch kernel stack
	mov.l	r1,@-r15	! TRA
	sts.l	macl, @-r15
	sts.l	mach, @-r15
	stc.l	gbr, @-r15
	mov.l	@(5*4,r2),r0
	mov.l	r0,@-r15	! original SR
	sts.l	pr,@-r15
	mov.l	@(4*4,r2),r0
	mov.l	r0,@-r15	! original PC
	mov	r2,r3
	add	#(4+2)*4,r3	! rewind r0 - r3 + exception frame
	mov.l	r3,@-r15	! original SP
	mov.l	r14,@-r15
	mov.l	r13,@-r15
	mov.l	r12,@-r15
	mov.l	r11,@-r15
	mov.l	r10,@-r15
	mov.l	r9,@-r15
	mov.l	r8,@-r15
	mov.l	r7,@-r15
	mov.l	r6,@-r15
	mov.l	r5,@-r15
	mov.l	r4,@-r15
	mov	r1,r9		! save TRA
	mov	r2,r8		! copy user -> kernel stack
	mov.l	@(0,r8),r3
	mov.l	r3,@-r15
	mov.l	@(4,r8),r2
	mov.l	r2,@-r15
	mov.l	@(12,r8),r1
	mov.l	r1,@-r15
	mov.l	@(8,r8),r0
	bra	2f
	 mov.l	r0,@-r15
1:
	! in kernel exception
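	! (build the same pt_regs frame in place on the current stack,
	!  recovering r0-r3 and pc/sr from the words pushed at entry)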
	mov	#(22-4-4-1)*4+4,r0
	mov	r15,r2
	sub	r0,r15
	mov.l	@r2+,r0		! old R3
	mov.l	r0,@-r15
	mov.l	@r2+,r0		! old R2
	mov.l	r0,@-r15
	mov.l	@(4,r2),r0	! old R1
	mov.l	r0,@-r15
	mov.l	@r2,r0		! old R0
	mov.l	r0,@-r15
	add	#8,r2
	mov.l	@r2+,r3		! old PC
	mov.l	@r2+,r0		! old SR
	add	#-4,r2		! exception frame stub (sr)
	mov.l	r1,@-r2		! TRA
	sts.l	macl, @-r2
	sts.l	mach, @-r2
	stc.l	gbr, @-r2
	mov.l	r0,@-r2		! save old SR
	sts.l	pr,@-r2
	mov.l	r3,@-r2		! save old PC
	mov	r2,r0
	add	#8*4,r0
	mov.l	r0,@-r2		! save old SP
	mov.l	r14,@-r2
	mov.l	r13,@-r2
	mov.l	r12,@-r2
	mov.l	r11,@-r2
	mov.l	r10,@-r2
	mov.l	r9,@-r2
	mov.l	r8,@-r2
	mov.l	r7,@-r2
	mov.l	r6,@-r2
	mov.l	r5,@-r2
	mov.l	r4,@-r2
	mov	r1,r9
	mov.l	@(OFF_R0,r15),r0
	mov.l	@(OFF_R1,r15),r1
	mov.l	@(OFF_R2,r15),r2
	mov.l	@(OFF_R3,r15),r3
2:
	mov	#64,r8
	cmp/hs	r8,r9
	bt	interrupt_entry	! vec >= 64 is interrupt
	mov	#31,r8
	cmp/hs	r8,r9
	bt	trap_entry	! 64 > vec >= 31 is trap
#ifdef CONFIG_CPU_J2
	mov	#16,r8
	cmp/hs	r8,r9
	bt	interrupt_entry	! 31 > vec >= 16 is interrupt
#endif

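	! remaining vectors are CPU exceptions: look the handler up in
	! exception_handling_table (indexed by vector number), pass the vector
	! in r4 and return via ret_from_exception; unregistered vectors go to
	! exception_error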
	mov.l	4f,r8
	mov	r9,r4
	shll2	r9
	add	r9,r8
	mov.l	@r8,r8		! exception handler address
	tst	r8,r8
	bf	3f
	mov.l	8f,r8		! unhandled exception
3:
	mov.l	5f,r10
	jmp	@r8
	 lds	r10,pr

interrupt_entry:
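	! hardware interrupt: call do_IRQ(vector, regs) with the return
	! address set to ret_from_irq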
	mov	r9,r4
	mov	r15,r5
	mov.l	6f,r9
	mov.l	7f,r8
	jmp	@r8
	 lds	r9,pr

	.align	2
4:	.long	exception_handling_table
5:	.long	ret_from_exception
6:	.long	ret_from_irq
7:	.long	do_IRQ
8:	.long	exception_error

trap_entry:
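	! trapa: vectors below 0x30 are folded onto the unified SH-2/3/4 trap
	! number 0x1f; r8 is loaded with a TRA-style value (vector << 2) before
	! branching to the common system_call entry pulled in from
	! entry-common.S below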
	mov	#0x30,r8
	cmp/ge	r8,r9		! vector 0x1f-0x2f is systemcall
	bt	1f
	mov	#0x1f,r9	! convert to unified SH2/3/4 trap number
1:
	shll2	r9			! TRA
	bra	system_call	! jump common systemcall entry
	 mov	r9,r8

#if defined(CONFIG_SH_STANDARD_BIOS)
	/* Unwind the stack and jmp to the debug entry */
ENTRY(sh_bios_handler)
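	! the saved context is unwound from the stack and control is handed to
	! the debug entry whose address is stored in gdb_vbr_vector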
	mov	r15,r0
	add	#(22-4)*4-4,r0
	ldc.l	@r0+,gbr
	lds.l	@r0+,mach
	lds.l	@r0+,macl
	mov	r15,r0
	mov.l	@(OFF_SP,r0),r1
	mov	#OFF_SR,r2
	mov.l	@(r0,r2),r3
	mov.l	r3,@-r1
	mov	#OFF_SP,r2
	mov.l	@(r0,r2),r3
	mov.l	r3,@-r1
	mov	r15,r0
	add	#(22-4)*4-8,r0
	mov.l	1f,r2
	mov.l	@r2,r2
	stc	sr,r3
	mov.l	r2,@r0
	mov.l	r3,@(4,r0)
	mov.l	r1,@(8,r0)
	mov.l	@r15+, r0
	mov.l	@r15+, r1
	mov.l	@r15+, r2
	mov.l	@r15+, r3
	mov.l	@r15+, r4
	mov.l	@r15+, r5
	mov.l	@r15+, r6
	mov.l	@r15+, r7
	mov.l	@r15+, r8
	mov.l	@r15+, r9
	mov.l	@r15+, r10
	mov.l	@r15+, r11
	mov.l	@r15+, r12
	mov.l	@r15+, r13
	mov.l	@r15+, r14
	add	#8,r15
	lds.l	@r15+, pr
	mov.l	@r15+,r15
	rte
	 nop
	.align	2
1:	.long	gdb_vbr_vector
#endif /* CONFIG_SH_STANDARD_BIOS */

ENTRY(address_error_trap_handler)
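	! tail-call do_address_error() with regs in r4, writeaccess in r5
	! (unknown on SH-2, so 0) and the faulting pc in r6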
	mov	r15,r4				! regs
	mov	#OFF_PC,r0
	mov.l	@(r0,r15),r6			! pc
	mov.l	1f,r0
	jmp	@r0
	 mov	#0,r5				! writeaccess is unknown

	.align	2
1:	.long	do_address_error

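! return path: block interrupts, restore the special registers from the
! pt_regs frame, write the MD bit back to __cpu_mode (and clear it from the
! SR image, since the hardware SR has no such bit), build a two-word pc/sr
! frame below the saved stack pointer for rte to consume, then pop the
! general registers.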
restore_all:
	stc	sr,r0
	or	#0xf0,r0
	ldc	r0,sr				! block all interrupts (same effect as BL = 1)
	! restore special registers
	! overlap exception frame
	mov	r15,r0
	add	#17*4,r0
	lds.l	@r0+,pr
	add	#4,r0
	ldc.l	@r0+,gbr
	lds.l	@r0+,mach
	lds.l	@r0+,macl
	mov	r15,r0
	mov.l	$cpu_mode,r2
#ifdef CONFIG_SMP
	mov.l	$cpuid,r3
	mov.l	@r3,r3
	mov.l	@r3,r3
	shll2	r3
	add	r3,r2
#endif
	mov	#OFF_SR,r3
	mov.l	@(r0,r3),r1
	mov.l	__md_bit,r3
	and	r1,r3				! copy MD bit
	mov.l	r3,@r2
	shll2	r1				! clear MD bit
	shlr2	r1
	mov.l	@(OFF_SP,r0),r2
	add	#-8,r2
	mov.l	r2,@(OFF_SP,r0)			! point exception frame top
	mov.l	r1,@(4,r2)			! set sr
	mov	#OFF_PC,r3
	mov.l	@(r0,r3),r1
	mov.l	r1,@r2				! set pc
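	! cache the thread_info of the task we are returning to in
	! __current_thread_info[cpu]; the next exception entry uses it to
	! locate the kernel stack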
	get_current_thread_info r0, r1
	mov.l	$current_thread_info,r1
#ifdef CONFIG_SMP
	mov.l	$cpuid,r3
	mov.l	@r3,r3
	mov.l	@r3,r3
	shll2	r3
	add	r3,r1
#endif
	mov.l	r0,@r1
	mov.l	@r15+,r0
	mov.l	@r15+,r1
	mov.l	@r15+,r2
	mov.l	@r15+,r3
	mov.l	@r15+,r4
	mov.l	@r15+,r5
	mov.l	@r15+,r6
	mov.l	@r15+,r7
	mov.l	@r15+,r8
	mov.l	@r15+,r9
	mov.l	@r15+,r10
	mov.l	@r15+,r11
	mov.l	@r15+,r12
	mov.l	@r15+,r13
	mov.l	@r15+,r14
	mov.l	@r15,r15
	rte
	 nop

	.align 2
__md_bit:
	.long	0x40000000
$current_thread_info:
	.long	__current_thread_info
$cpu_mode:
	.long	__cpu_mode
#ifdef CONFIG_SMP
$cpuid:
	.long sh2_cpuid_addr
#endif

! common exception handler
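! entry-common.S provides the system_call, ret_from_exception and
! ret_from_irq paths referenced above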
#include "../../entry-common.S"

#ifdef CONFIG_NR_CPUS
#define NR_CPUS CONFIG_NR_CPUS
#else
#define NR_CPUS 1
#endif

	.data
! cpu operation mode
! bit30 = MD (compatible SH3/4)
__cpu_mode:
	.rept	NR_CPUS
	.long	0x40000000
	.endr

#ifdef CONFIG_SMP
.global sh2_cpuid_addr
sh2_cpuid_addr:
	.long	dummy_cpuid
dummy_cpuid:
	.long	0
#endif

	.section	.bss
__current_thread_info:
	.rept	NR_CPUS
	.long	0
	.endr

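! 32 four-byte handler slots, one per exception vector; entries are filled
! in at runtime by the trap setup code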
ENTRY(exception_handling_table)
	.space	4*32
377