/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/errno.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
#undef fp

	.macro	EX insn, reg, src
	.set	push
	SET_HARDFLOAT
	.set	nomacro
.ex\@:	\insn	\reg, \src
	.set	pop
	.section __ex_table,"a"
	PTR	.ex\@, fault
	.previous
	.endm
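
/*
 * EX wraps a single memory access to the user-supplied sigcontext and
 * records its address in the __ex_table section.  If the access faults,
 * the exception handler redirects execution to the fault: label at the
 * bottom of this file, so the whole save/restore returns -EFAULT.
 * As a rough sketch (omitting the .set/SET_HARDFLOAT bookkeeping, with
 * \@ being the assembler's unique per-invocation counter):
 *
 *	EX sdc1 $f0, 0(a0)
 *
 * expands to
 *
 * .ex\@:	sdc1	$f0, 0(a0)
 *		.section __ex_table,"a"
 *		PTR	.ex\@, fault
 *		.previous
 */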

	.set	noreorder

/**
 * _save_fp_context() - save FP context from the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Save FP context, including the 32 FP data registers and the FP
 * control & status register, from the FPU to signal context.
 *
 * Return: 0 on success, or -EFAULT if an access to the sigcontext faults.
 */
LEAF(_save_fp_context)
	.set	push
	SET_HARDFLOAT
	cfc1	t1, fcr31
	.set	pop

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)
	.set	push
	SET_HARDFLOAT
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips32r2
	.set	fp=64
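	/*
	 * Status.FR (bit 26) selects whether the odd-numbered FP registers
	 * exist as full 64-bit registers.  Shifting CP0_STATUS left by 5
	 * moves FR into the sign bit, so the bgez below skips the odd
	 * register stores when FR=0 (32-bit FPU register model).
	 */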
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip storing odd if FR=0
	 nop
#endif
	/* Store the 16 odd double precision registers */
	EX	sdc1 $f1, 8(a0)
	EX	sdc1 $f3, 24(a0)
	EX	sdc1 $f5, 40(a0)
	EX	sdc1 $f7, 56(a0)
	EX	sdc1 $f9, 72(a0)
	EX	sdc1 $f11, 88(a0)
	EX	sdc1 $f13, 104(a0)
	EX	sdc1 $f15, 120(a0)
	EX	sdc1 $f17, 136(a0)
	EX	sdc1 $f19, 152(a0)
	EX	sdc1 $f21, 168(a0)
	EX	sdc1 $f23, 184(a0)
	EX	sdc1 $f25, 200(a0)
	EX	sdc1 $f27, 216(a0)
	EX	sdc1 $f29, 232(a0)
	EX	sdc1 $f31, 248(a0)
1:	.set	pop
#endif

	.set push
	SET_HARDFLOAT
	/* Store the 16 even double precision registers */
	EX	sdc1 $f0, 0(a0)
	EX	sdc1 $f2, 16(a0)
	EX	sdc1 $f4, 32(a0)
	EX	sdc1 $f6, 48(a0)
	EX	sdc1 $f8, 64(a0)
	EX	sdc1 $f10, 80(a0)
	EX	sdc1 $f12, 96(a0)
	EX	sdc1 $f14, 112(a0)
	EX	sdc1 $f16, 128(a0)
	EX	sdc1 $f18, 144(a0)
	EX	sdc1 $f20, 160(a0)
	EX	sdc1 $f22, 176(a0)
	EX	sdc1 $f24, 192(a0)
	EX	sdc1 $f26, 208(a0)
	EX	sdc1 $f28, 224(a0)
	EX	sdc1 $f30, 240(a0)
	EX	sw t1, 0(a1)
	jr	ra
	 li	v0, 0					# success
	.set pop
	END(_save_fp_context)

/**
 * _restore_fp_context() - restore FP context to the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Restore FP context, including the 32 FP data registers and the FP
 * control & status register, from signal context to the FPU.
 *
 * Return: 0 on success, or -EFAULT if an access to the sigcontext faults.
 */
LEAF(_restore_fp_context)
	EX	lw t1, 0(a1)

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)  || \
		defined(CONFIG_CPU_MIPS32_R6)
	.set	push
	SET_HARDFLOAT
#ifdef CONFIG_CPU_MIPS32_R2
	.set	mips32r2
	.set	fp=64
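	/*
	 * Same FR test as in _save_fp_context: shift Status.FR into the
	 * sign bit and skip the odd-numbered registers when FR=0.
	 */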
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip loading odd if FR=0
	 nop
#endif
	EX	ldc1 $f1, 8(a0)
	EX	ldc1 $f3, 24(a0)
	EX	ldc1 $f5, 40(a0)
	EX	ldc1 $f7, 56(a0)
	EX	ldc1 $f9, 72(a0)
	EX	ldc1 $f11, 88(a0)
	EX	ldc1 $f13, 104(a0)
	EX	ldc1 $f15, 120(a0)
	EX	ldc1 $f17, 136(a0)
	EX	ldc1 $f19, 152(a0)
	EX	ldc1 $f21, 168(a0)
	EX	ldc1 $f23, 184(a0)
	EX	ldc1 $f25, 200(a0)
	EX	ldc1 $f27, 216(a0)
	EX	ldc1 $f29, 232(a0)
	EX	ldc1 $f31, 248(a0)
1:	.set pop
#endif
	.set push
	SET_HARDFLOAT
	EX	ldc1 $f0, 0(a0)
	EX	ldc1 $f2, 16(a0)
	EX	ldc1 $f4, 32(a0)
	EX	ldc1 $f6, 48(a0)
	EX	ldc1 $f8, 64(a0)
	EX	ldc1 $f10, 80(a0)
	EX	ldc1 $f12, 96(a0)
	EX	ldc1 $f14, 112(a0)
	EX	ldc1 $f16, 128(a0)
	EX	ldc1 $f18, 144(a0)
	EX	ldc1 $f20, 160(a0)
	EX	ldc1 $f22, 176(a0)
	EX	ldc1 $f24, 192(a0)
	EX	ldc1 $f26, 208(a0)
	EX	ldc1 $f28, 224(a0)
	EX	ldc1 $f30, 240(a0)
	ctc1	t1, fcr31
	.set pop
	jr	ra
	 li	v0, 0					# success
	END(_restore_fp_context)

#ifdef CONFIG_CPU_HAS_MSA

	.macro	op_one_wr	op, idx, base
	.align	4
\idx:	\op	\idx, 0, \base
	jr	ra
	 nop
	.endm
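
/*
 * op_msa_wr builds a jump table from 32 op_one_wr stanzas.  ".align 4"
 * pads each stanza to 16 bytes, so "sll t0, a0, 4" scales the MSA
 * register index in a0 by the stanza size and the indirect jump lands
 * on the entry for that register.  Each entry performs a single MSA
 * load or store against the buffer pointed to by a1 and returns.
 */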

	.macro	op_msa_wr	name, op
LEAF(\name)
	.set		push
	.set		noreorder
	sll		t0, a0, 4
	PTR_LA		t1, 0f
	PTR_ADDU	t0, t0, t1
	jr		t0
	  nop
	op_one_wr	\op, 0, a1
	op_one_wr	\op, 1, a1
	op_one_wr	\op, 2, a1
	op_one_wr	\op, 3, a1
	op_one_wr	\op, 4, a1
	op_one_wr	\op, 5, a1
	op_one_wr	\op, 6, a1
	op_one_wr	\op, 7, a1
	op_one_wr	\op, 8, a1
	op_one_wr	\op, 9, a1
	op_one_wr	\op, 10, a1
	op_one_wr	\op, 11, a1
	op_one_wr	\op, 12, a1
	op_one_wr	\op, 13, a1
	op_one_wr	\op, 14, a1
	op_one_wr	\op, 15, a1
	op_one_wr	\op, 16, a1
	op_one_wr	\op, 17, a1
	op_one_wr	\op, 18, a1
	op_one_wr	\op, 19, a1
	op_one_wr	\op, 20, a1
	op_one_wr	\op, 21, a1
	op_one_wr	\op, 22, a1
	op_one_wr	\op, 23, a1
	op_one_wr	\op, 24, a1
	op_one_wr	\op, 25, a1
	op_one_wr	\op, 26, a1
	op_one_wr	\op, 27, a1
	op_one_wr	\op, 28, a1
	op_one_wr	\op, 29, a1
	op_one_wr	\op, 30, a1
	op_one_wr	\op, 31, a1
	.set		pop
	END(\name)
	.endm
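
/*
 * The MSA register number is encoded in the ld/st instruction itself,
 * so a run-time index (a0) can only be handled by dispatching through
 * the jump table above.  The instantiations below generate one accessor
 * per element width: read_msa_wr_{b,h,w,d} store MSA register a0 to the
 * buffer at a1, and write_msa_wr_{b,h,w,d} load it back from there.
 */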

	op_msa_wr	read_msa_wr_b, st_b
	op_msa_wr	read_msa_wr_h, st_h
	op_msa_wr	read_msa_wr_w, st_w
	op_msa_wr	read_msa_wr_d, st_d

	op_msa_wr	write_msa_wr_b, ld_b
	op_msa_wr	write_msa_wr_h, ld_h
	op_msa_wr	write_msa_wr_w, ld_w
	op_msa_wr	write_msa_wr_d, ld_d

#endif /* CONFIG_CPU_HAS_MSA */

#ifdef CONFIG_CPU_HAS_MSA

	.macro	save_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	copy_s_d \wr, 1
	EX sd	$1, \off(\base)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	copy_s_w \wr, 2
	EX sw	$1, \off(\base)
	copy_s_w \wr, 3
	EX sw	$1, (\off+4)(\base)
#else /* CONFIG_CPU_BIG_ENDIAN */
	copy_s_w \wr, 2
	EX sw	$1, (\off+4)(\base)
	copy_s_w \wr, 3
	EX sw	$1, \off(\base)
#endif
	.set	pop
	.endm
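
/*
 * save_msa_upper moves the upper 64 bits of MSA vector register \wr out
 * through $1 (hence .set noat): as one copy_s_d of doubleword element 1
 * on 64-bit kernels, or as copy_s_w of word elements 2 and 3 on 32-bit
 * ones.  The two word stores are swapped on big-endian so the in-memory
 * layout matches what the 64-bit sd path produces.
 */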

LEAF(_save_msa_all_upper)
	save_msa_upper	0, 0x00, a0
	save_msa_upper	1, 0x08, a0
	save_msa_upper	2, 0x10, a0
	save_msa_upper	3, 0x18, a0
	save_msa_upper	4, 0x20, a0
	save_msa_upper	5, 0x28, a0
	save_msa_upper	6, 0x30, a0
	save_msa_upper	7, 0x38, a0
	save_msa_upper	8, 0x40, a0
	save_msa_upper	9, 0x48, a0
	save_msa_upper	10, 0x50, a0
	save_msa_upper	11, 0x58, a0
	save_msa_upper	12, 0x60, a0
	save_msa_upper	13, 0x68, a0
	save_msa_upper	14, 0x70, a0
	save_msa_upper	15, 0x78, a0
	save_msa_upper	16, 0x80, a0
	save_msa_upper	17, 0x88, a0
	save_msa_upper	18, 0x90, a0
	save_msa_upper	19, 0x98, a0
	save_msa_upper	20, 0xa0, a0
	save_msa_upper	21, 0xa8, a0
	save_msa_upper	22, 0xb0, a0
	save_msa_upper	23, 0xb8, a0
	save_msa_upper	24, 0xc0, a0
	save_msa_upper	25, 0xc8, a0
	save_msa_upper	26, 0xd0, a0
	save_msa_upper	27, 0xd8, a0
	save_msa_upper	28, 0xe0, a0
	save_msa_upper	29, 0xe8, a0
	save_msa_upper	30, 0xf0, a0
	save_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0
	END(_save_msa_all_upper)

	.macro	restore_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	EX ld	$1, \off(\base)
	insert_d \wr, 1
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	$1, \off(\base)
	insert_w \wr, 2
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 3
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 2
	EX lw	$1, \off(\base)
	insert_w \wr, 3
#endif
	.set	pop
	.endm
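
/*
 * Mirror image of save_msa_upper: reload the doubleword (or the two
 * words) through $1 and insert it back as the upper 64 bits of MSA
 * register \wr.
 */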

LEAF(_restore_msa_all_upper)
	restore_msa_upper	0, 0x00, a0
	restore_msa_upper	1, 0x08, a0
	restore_msa_upper	2, 0x10, a0
	restore_msa_upper	3, 0x18, a0
	restore_msa_upper	4, 0x20, a0
	restore_msa_upper	5, 0x28, a0
	restore_msa_upper	6, 0x30, a0
	restore_msa_upper	7, 0x38, a0
	restore_msa_upper	8, 0x40, a0
	restore_msa_upper	9, 0x48, a0
	restore_msa_upper	10, 0x50, a0
	restore_msa_upper	11, 0x58, a0
	restore_msa_upper	12, 0x60, a0
	restore_msa_upper	13, 0x68, a0
	restore_msa_upper	14, 0x70, a0
	restore_msa_upper	15, 0x78, a0
	restore_msa_upper	16, 0x80, a0
	restore_msa_upper	17, 0x88, a0
	restore_msa_upper	18, 0x90, a0
	restore_msa_upper	19, 0x98, a0
	restore_msa_upper	20, 0xa0, a0
	restore_msa_upper	21, 0xa8, a0
	restore_msa_upper	22, 0xb0, a0
	restore_msa_upper	23, 0xb8, a0
	restore_msa_upper	24, 0xc0, a0
	restore_msa_upper	25, 0xc8, a0
	restore_msa_upper	26, 0xd0, a0
	restore_msa_upper	27, 0xd8, a0
	restore_msa_upper	28, 0xe0, a0
	restore_msa_upper	29, 0xe8, a0
	restore_msa_upper	30, 0xf0, a0
	restore_msa_upper	31, 0xf8, a0
	jr	ra
	 li	v0, 0
	END(_restore_msa_all_upper)

#endif /* CONFIG_CPU_HAS_MSA */

	.set	reorder
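
/*
 * Landing pad for the __ex_table entries emitted by the EX macro above:
 * a faulting access to the user sigcontext resumes here and the whole
 * save/restore returns -EFAULT.
 */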
	.type	fault, @function
	.ent	fault
fault:	li	v0, -EFAULT				# failure
	jr	ra
	.end	fault