/* xref: /openbmc/linux/arch/mips/kernel/r4k_switch.S (revision e5c86679) */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/export.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
#undef fp

/*
 * Offset to the current process status flags, the first 32 bytes of the
 * stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)

#ifndef USE_ALTERNATE_RESUME_IMPL
/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *		       struct thread_info *next_ti)
 *
 * In:  a0 = prev task, a1 = next task, a2 = next task's thread_info
 * Out: v0 = prev task (so the caller, running in next's context,
 *           still sees the task we switched away from)
 */
	.align	5
	LEAF(resume)
	/* Save current CP0 Status and the non-scratch register set of
	 * prev, including ra so we resume prev after its resume() call. */
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
	/* Switch the global stack canary to next's per-task value. */
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order of restoring the registers takes care of the race
	 * updating $28, $29 and kernelsp without disabling ints.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

	/* Point the saved kernel sp at the top of next's kernel stack. */
	PTR_ADDU	t0, $28, _THREAD_SIZE - 32
	set_saved_sp	t0, t1, t2
	/*
	 * Merge Status: keep the current interrupt mask/enable bits
	 * (0xff01 = IM7..IM0 | IE) and take everything else from next's
	 * saved THREAD_STATUS.
	 */
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
	move	v0, a0
	jr	ra
	END(resume)

#endif /* USE_ALTERNATE_RESUME_IMPL */

/*
 * Save a thread's fp context.
 *
 * a0 is passed through to fpu_save_double (presumably the task whose
 * FP context is saved — confirm against callers).  Clobbers t1, and
 * t0 on configurations that must inspect Status.FR.
 */
LEAF(_save_fp)
EXPORT_SYMBOL(_save_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)
	/* fpu_save_double reads Status (FR bit) to pick the FPR layout. */
	mfc0	t0, CP0_STATUS
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 *
 * a0 is passed through to fpu_restore_double (presumably the task whose
 * FP context is restored — confirm against callers).  Clobbers t1, and
 * t0 on configurations that must inspect Status.FR.
 */
LEAF(_restore_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)
	/* fpu_restore_double reads Status (FR bit) to pick the FPR layout. */
	mfc0	t0, CP0_STATUS
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)

#ifdef CONFIG_CPU_HAS_MSA

/*
 * Save a thread's MSA vector context.
 * a0 is forwarded to the msa_save_all macro.
 */
LEAF(_save_msa)
EXPORT_SYMBOL(_save_msa)
	msa_save_all	a0
	jr	ra
	END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 * a0 is forwarded to the msa_restore_all macro.
 */
LEAF(_restore_msa)
	msa_restore_all	a0
	jr	ra
	END(_restore_msa)

/*
 * Initialise the upper halves of the MSA vector registers to a
 * well-defined state (see msa_init_all_upper in asm/asmmacro.h).
 */
LEAF(_init_msa_upper)
	msa_init_all_upper
	jr	ra
	END(_init_msa_upper)

#endif /* CONFIG_CPU_HAS_MSA */

/*
 * Load the FPU with signalling NANS.  This bit pattern we're using has
 * the property that no matter whether considered as single or as double
 * precision represents signaling NANS.
 *
 * The value to initialize fcr31 to comes in $a0.
 */

	.set push
	SET_HARDFLOAT

LEAF(_init_fpu)
	/* Enable the coprocessor (Status.CU1) before touching FP state. */
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS
	enable_fpu_hazard

	ctc1	a0, fcr31			# caller-supplied FCSR value

	li	t1, -1				# SNaN

#ifdef CONFIG_64BIT
	/* Shift Status.FR (bit 26) into the sign bit so bgez tests it:
	 * FR clear -> 16 odd registers don't exist, skip them. */
	sll	t0, t0, 5
	bgez	t0, 1f				# 16 / 32 register mode?

	dmtc1	t1, $f1
	dmtc1	t1, $f3
	dmtc1	t1, $f5
	dmtc1	t1, $f7
	dmtc1	t1, $f9
	dmtc1	t1, $f11
	dmtc1	t1, $f13
	dmtc1	t1, $f15
	dmtc1	t1, $f17
	dmtc1	t1, $f19
	dmtc1	t1, $f21
	dmtc1	t1, $f23
	dmtc1	t1, $f25
	dmtc1	t1, $f27
	dmtc1	t1, $f29
	dmtc1	t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
	/* 32-bit kernel: write the low 32 bits of every FPR. */
	mtc1	t1, $f0
	mtc1	t1, $f1
	mtc1	t1, $f2
	mtc1	t1, $f3
	mtc1	t1, $f4
	mtc1	t1, $f5
	mtc1	t1, $f6
	mtc1	t1, $f7
	mtc1	t1, $f8
	mtc1	t1, $f9
	mtc1	t1, $f10
	mtc1	t1, $f11
	mtc1	t1, $f12
	mtc1	t1, $f13
	mtc1	t1, $f14
	mtc1	t1, $f15
	mtc1	t1, $f16
	mtc1	t1, $f17
	mtc1	t1, $f18
	mtc1	t1, $f19
	mtc1	t1, $f20
	mtc1	t1, $f21
	mtc1	t1, $f22
	mtc1	t1, $f23
	mtc1	t1, $f24
	mtc1	t1, $f25
	mtc1	t1, $f26
	mtc1	t1, $f27
	mtc1	t1, $f28
	mtc1	t1, $f29
	mtc1	t1, $f30
	mtc1	t1, $f31

#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS32_R6)
	.set    push
	.set    MIPS_ISA_LEVEL_RAW
	.set	fp=64
	sll     t0, t0, 5			# is Status.FR set?
	bgez    t0, 1f				# no: skip setting upper 32b

	/* FR=1: 64-bit FPRs; fill the upper halves too (mthc1). */
	mthc1   t1, $f0
	mthc1   t1, $f1
	mthc1   t1, $f2
	mthc1   t1, $f3
	mthc1   t1, $f4
	mthc1   t1, $f5
	mthc1   t1, $f6
	mthc1   t1, $f7
	mthc1   t1, $f8
	mthc1   t1, $f9
	mthc1   t1, $f10
	mthc1   t1, $f11
	mthc1   t1, $f12
	mthc1   t1, $f13
	mthc1   t1, $f14
	mthc1   t1, $f15
	mthc1   t1, $f16
	mthc1   t1, $f17
	mthc1   t1, $f18
	mthc1   t1, $f19
	mthc1   t1, $f20
	mthc1   t1, $f21
	mthc1   t1, $f22
	mthc1   t1, $f23
	mthc1   t1, $f24
	mthc1   t1, $f25
	mthc1   t1, $f26
	mthc1   t1, $f27
	mthc1   t1, $f28
	mthc1   t1, $f29
	mthc1   t1, $f30
	mthc1   t1, $f31
1:	.set    pop
#endif /* CONFIG_CPU_MIPS32_R2 || CONFIG_CPU_MIPS32_R6 */
#else
	/* 64-bit kernel: a doubleword write per even register covers all
	 * 32 FPRs in 32-register mode (odd regs handled above). */
	.set	MIPS_ISA_ARCH_LEVEL_RAW
	dmtc1	t1, $f0
	dmtc1	t1, $f2
	dmtc1	t1, $f4
	dmtc1	t1, $f6
	dmtc1	t1, $f8
	dmtc1	t1, $f10
	dmtc1	t1, $f12
	dmtc1	t1, $f14
	dmtc1	t1, $f16
	dmtc1	t1, $f18
	dmtc1	t1, $f20
	dmtc1	t1, $f22
	dmtc1	t1, $f24
	dmtc1	t1, $f26
	dmtc1	t1, $f28
	dmtc1	t1, $f30
#endif
	jr	ra
	END(_init_fpu)

	.set pop	/* SET_HARDFLOAT */
