xref: /openbmc/linux/arch/mips/kernel/r4k_switch.S (revision 12eb4683)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * Offset to the current process status flags; the first 32 bytes of the
 * stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
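
/*
 * ST_OFF is the offset, relative to the thread_info at the bottom of
 * the kernel stack, of the saved CP0 Status word inside the struct
 * pt_regs that sits at the top of that stack.  Roughly, the layout
 * this expression assumes is:
 *
 *	thread_info + _THREAD_SIZE  ->	+--------------------------+
 *					| 32 unused bytes          |
 *					+--------------------------+
 *					| struct pt_regs (PT_SIZE) |
 *					|   Status at PT_STATUS    |  <- ST_OFF
 *					+--------------------------+
 *					| kernel stack grows down  |
 */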

/*
 * FPU context is saved iff the process has used its FPU in the current
 * time slice as indicated by _TIF_USEDFPU.  In any case, the CU1 bit in
 * the saved user-space STATUS register should be 0, so that a process
 * *always* starts its userland with the FPU disabled after each context
 * switch.
 *
 * The FPU will be re-enabled as soon as the process accesses the FPU
 * again, via the do_cpu() trap.
 */

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *		       struct thread_info *next_ti, int usedfpu)
 */
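
/*
 * Called from the switch_to() macro with the outgoing task in a0, the
 * incoming task in a1, the incoming task's thread_info in a2 and a flag
 * in a3 telling whether the outgoing task used the FPU during its time
 * slice; roughly, the C side does
 *
 *	(last) = resume(prev, next, task_thread_info(next), usedfpu);
 *
 * The previous task is returned in v0 so the caller gets it back as
 * "last".
 */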
	.align	5
	LEAF(resume)
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

	/*
	 * check if we need to save FPU registers
	 */

	beqz	a3, 1f

	PTR_L	t3, TASK_THREAD_INFO(a0)
	/*
	 * clear saved user stack CU1 bit
	 */
	LONG_L	t0, ST_OFF(t3)
	li	t1, ~ST0_CU1
	and	t0, t0, t1
	LONG_S	t0, ST_OFF(t3)

	fpu_save_double a0 t0 t1		# c0_status passed in t0
						# clobbers t1
1:

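	/*
	 * With CONFIG_CC_STACKPROTECTOR the incoming task's canary is
	 * copied into the global __stack_chk_guard; presumably this is
	 * restricted to !CONFIG_SMP because a single global guard cannot
	 * be switched per task while other CPUs are running other tasks.
	 */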
#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order of restoring the registers takes care of the race
	 * when updating $28, $29 and kernelsp without disabling
	 * interrupts.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

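	/*
	 * Point the saved kernel stack pointer (kernelsp, written by
	 * set_saved_sp below) just below the 32 unused bytes at the top
	 * of the new thread's stack, so the next exception taken from
	 * user mode picks up the right kernel stack.
	 */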
	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2
#ifdef CONFIG_MIPS_MT_SMTC
	/* Read-modify-writes of Status must be atomic on a VPE */
	mfc0	t2, CP0_TCSTATUS
	ori	t1, t2, TCSTATUS_IXMT
	mtc0	t1, CP0_TCSTATUS
	andi	t2, t2, TCSTATUS_IXMT
	_ehb
	DMT	8				# dmt	t0
	move	t1, ra
	jal	mips_ihb
	move	ra, t1
#endif /* CONFIG_MIPS_MT_SMTC */
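	/*
	 * Merge the incoming thread's saved Status with the live one:
	 * the 0xff01 mask keeps the current interrupt mask (IM7..IM0)
	 * and IE bits, while everything else (KSU, EXL, CU bits, ...)
	 * is taken from the value saved in the next thread.
	 */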
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
	_ehb
	andi	t0, t0, VPECONTROL_TE
	beqz	t0, 1f
	emt
1:
	mfc0	t1, CP0_TCSTATUS
	xori	t1, t1, TCSTATUS_IXMT
	or	t1, t1, t2
	mtc0	t1, CP0_TCSTATUS
	_ehb
#endif /* CONFIG_MIPS_MT_SMTC */
	move	v0, a0
	jr	ra
	END(resume)

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
#ifdef CONFIG_64BIT
	mfc0	t0, CP0_STATUS
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#ifdef CONFIG_64BIT
	mfc0	t0, CP0_STATUS
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)
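
/*
 * On CONFIG_64BIT the current CP0 Status is passed to
 * fpu_save_double/fpu_restore_double in t0 so the macros can test
 * Status.FR and decide whether the odd-numbered FP registers have to
 * be handled as well.  These entry points are called from C with a
 * task_struct pointer in a0 (e.g. by the lazy FPU own/lose helpers).
 */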

/*
 * Load the FPU with signalling NaNs.  The all-ones bit pattern used
 * below represents a signalling NaN no matter whether it is interpreted
 * as single or as double precision: the exponent field is all ones and,
 * in the legacy MIPS NaN encoding, the set most-significant mantissa
 * bit marks the NaN as signalling.
 *
 * fcr31 is initialized to round-to-nearest with no exceptions enabled.
 */

#define FPU_DEFAULT  0x00000000

LEAF(_init_fpu)
#ifdef CONFIG_MIPS_MT_SMTC
	/* Rather than manipulate per-VPE Status, set per-TC bit in TCStatus */
	mfc0	t0, CP0_TCSTATUS
	/* Bit position is the same for Status, TCStatus */
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_TCSTATUS
#else /* Normal MIPS CU1 enable */
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS
#endif /* CONFIG_MIPS_MT_SMTC */
	enable_fpu_hazard

	li	t1, FPU_DEFAULT
	ctc1	t1, fcr31

	li	t1, -1				# SNaN

#ifdef CONFIG_64BIT
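	/*
	 * Shifting Status left by 5 moves bit 26 (ST0_FR) into the sign
	 * bit, so the bgez below is taken when FR=0, i.e. when the FPU
	 * is in 16/32 register mode and the odd-numbered registers do
	 * not need to be written.
	 */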
	sll	t0, t0, 5
	bgez	t0, 1f				# 16 / 32 register mode?

	dmtc1	t1, $f1
	dmtc1	t1, $f3
	dmtc1	t1, $f5
	dmtc1	t1, $f7
	dmtc1	t1, $f9
	dmtc1	t1, $f11
	dmtc1	t1, $f13
	dmtc1	t1, $f15
	dmtc1	t1, $f17
	dmtc1	t1, $f19
	dmtc1	t1, $f21
	dmtc1	t1, $f23
	dmtc1	t1, $f25
	dmtc1	t1, $f27
	dmtc1	t1, $f29
	dmtc1	t1, $f31
1:
#endif

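	/*
	 * Fill in the rest: on CONFIG_CPU_MIPS32 every register is
	 * written with a 32-bit mtc1, otherwise the even-numbered
	 * registers are written as 64-bit doubles with dmtc1 (.set
	 * mips3 makes the 64-bit move available to the assembler).
	 */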
#ifdef CONFIG_CPU_MIPS32
	mtc1	t1, $f0
	mtc1	t1, $f1
	mtc1	t1, $f2
	mtc1	t1, $f3
	mtc1	t1, $f4
	mtc1	t1, $f5
	mtc1	t1, $f6
	mtc1	t1, $f7
	mtc1	t1, $f8
	mtc1	t1, $f9
	mtc1	t1, $f10
	mtc1	t1, $f11
	mtc1	t1, $f12
	mtc1	t1, $f13
	mtc1	t1, $f14
	mtc1	t1, $f15
	mtc1	t1, $f16
	mtc1	t1, $f17
	mtc1	t1, $f18
	mtc1	t1, $f19
	mtc1	t1, $f20
	mtc1	t1, $f21
	mtc1	t1, $f22
	mtc1	t1, $f23
	mtc1	t1, $f24
	mtc1	t1, $f25
	mtc1	t1, $f26
	mtc1	t1, $f27
	mtc1	t1, $f28
	mtc1	t1, $f29
	mtc1	t1, $f30
	mtc1	t1, $f31
#else
	.set	mips3
	dmtc1	t1, $f0
	dmtc1	t1, $f2
	dmtc1	t1, $f4
	dmtc1	t1, $f6
	dmtc1	t1, $f8
	dmtc1	t1, $f10
	dmtc1	t1, $f12
	dmtc1	t1, $f14
	dmtc1	t1, $f16
	dmtc1	t1, $f18
	dmtc1	t1, $f20
	dmtc1	t1, $f22
	dmtc1	t1, $f24
	dmtc1	t1, $f26
	dmtc1	t1, $f28
	dmtc1	t1, $f30
#endif
	jr	ra
	END(_init_fpu)