/*
 *  linux/arch/arm/vfp/vfphw.S
 *
 *  Copyright (C) 2004 ARM Limited.
 *  Written by Deep Blue Solutions Limited.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This code is called from the kernel's undefined instruction trap.
 * r9 holds the return address for successful handling.
 * lr holds the return address for unrecognised instructions.
 * r10 points at the start of the private FP workspace in the thread structure
 * sp points to a struct pt_regs (as defined in include/asm/proc/ptrace.h)
 */
#include <asm/thread_info.h>
#include <asm/vfpmacros.h>
#include "../kernel/entry-header.S"

	.macro	DBGSTR, str
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm
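	@ The DBGSTR macro above and the DBGSTR1/DBGSTR3 variants below all
	@ work the same way: the format string is assembled in-line after the
	@ "b 1f", and "add r0, pc, #4" points r0 at it (the ARM pc reads as
	@ the address of the current instruction plus 8, so pc + 4 is the
	@ .asciz word).  The arguments are moved into r1-r3 to match the
	@ printk format, e.g.
	@	DBGSTR1	"fpexc %08x", r1
	@ The macros only emit code when DEBUG is defined.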

	.macro  DBGSTR1, str, arg
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r1, \arg
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro  DBGSTR3, str, arg1, arg2, arg3
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r3, \arg3
	mov	r2, \arg2
	mov	r1, \arg1
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm


@ VFP hardware support entry point.
@
@  r0  = faulted instruction
@  r2  = faulted PC+4
@  r9  = successful return
@  r10 = vfp_state union
@  r11 = CPU number
@  lr  = failure return

	.globl	vfp_support_entry
vfp_support_entry:
	DBGSTR3	"instr %08x pc %08x state %p", r0, r2, r10

	VFPFMRX	r1, FPEXC		@ Is the VFP enabled?
	DBGSTR1	"fpexc %08x", r1
	tst	r1, #FPEXC_EN
	bne	look_for_VFP_exceptions	@ VFP is already enabled

	DBGSTR1 "enable %x", r10
	ldr	r3, last_VFP_context_address
	orr	r1, r1, #FPEXC_EN	@ user FPEXC has the enable bit set
	ldr	r4, [r3, r11, lsl #2]	@ last_VFP_context pointer
	bic	r5, r1, #FPEXC_EX	@ make sure exceptions are disabled
	cmp	r4, r10
	beq	check_for_exception	@ we are returning to the same
					@ process, so the registers are
					@ still there.  In this case, we do
					@ not want to drop a pending exception.

	VFPFMXR	FPEXC, r5		@ enable VFP, disable any pending
					@ exceptions, so we can get at the
					@ rest of it

#ifndef CONFIG_SMP
	@ Save the current registers out to the old owner's thread state.
	@ Not needed on SMP: there the save is done eagerly at context switch
	@ time rather than lazily here (see vfp_save_state below).

	DBGSTR1	"save old state %p", r4
	cmp	r4, #0
	beq	no_old_VFP_process
	VFPFSTMIA r4, r5		@ save the working registers
	VFPFMRX	r5, FPSCR		@ current status
	tst	r1, #FPEXC_EX		@ is there additional state to save?
	VFPFMRX	r6, FPINST, NE		@ FPINST (only if FPEXC.EX is set)
	tstne	r1, #FPEXC_FP2V		@ is there an FPINST2 to read?
	VFPFMRX	r8, FPINST2, NE		@ FPINST2 if needed (and present)
	stmia	r4, {r1, r5, r6, r8}	@ save FPEXC, FPSCR, FPINST, FPINST2
					@ and point r4 at the word at the
					@ start of the register dump
#endif

no_old_VFP_process:
	DBGSTR1	"load state %p", r10
	str	r10, [r3, r11, lsl #2]	@ update the last_VFP_context pointer
					@ Load the saved state back into the VFP
	VFPFLDMIA r10, r5		@ reload the working registers while
					@ FPEXC is in a safe state
	ldmia	r10, {r1, r5, r6, r8}	@ load FPEXC, FPSCR, FPINST, FPINST2
	tst	r1, #FPEXC_EX		@ is there additional state to restore?
	VFPFMXR	FPINST, r6, NE		@ restore FPINST (only if FPEXC.EX is set)
	tstne	r1, #FPEXC_FP2V		@ is there an FPINST2 to write?
	VFPFMXR	FPINST2, r8, NE		@ FPINST2 if needed (and present)
	VFPFMXR	FPSCR, r5		@ restore status

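	@ At this point the VFP register file and FPSCR hold the current
	@ thread's state and r1 holds the FPEXC value that was saved for it.
	@ Fall through to see whether that FPEXC carries a pending exception
	@ which must be handled before we return.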
check_for_exception:
	tst	r1, #FPEXC_EX
	bne	process_exception	@ handle the pending exception before
					@ retrying the instruction; branch out
					@ now, before writing an FPEXC value
					@ that stops us reading the state
	VFPFMXR	FPEXC, r1		@ restore FPEXC last
	sub	r2, r2, #4		@ r2 holds the faulted PC+4
	str	r2, [sp, #S_PC]		@ retry the instruction
	mov	pc, r9			@ we think we have handled things


look_for_VFP_exceptions:
	@ Check for synchronous or asynchronous exception
	tst	r1, #FPEXC_EX | FPEXC_DEX
	bne	process_exception
	@ On some implementations of the VFP subarch 1, setting FPSCR.IXE
	@ causes all the CDP instructions to be bounced synchronously without
	@ setting the FPEXC.EX bit
	VFPFMRX	r5, FPSCR
	tst	r5, #FPSCR_IXE
	bne	process_exception

	@ Fall through and hand on to the next handler - the coprocessor
	@ instruction was not recognised by the VFP

	DBGSTR	"not VFP"
	mov	pc, lr

process_exception:
	DBGSTR	"bounce"
	mov	r2, sp			@ nothing stacked - regdump is at TOS
	mov	lr, r9			@ set up for a return to the user code.

	@ Now call the C code to package up the bounce to the support code
	@   r0 holds the trigger instruction
	@   r1 holds the FPEXC value
	@   r2 points to the register dump
	b	VFP_bounce		@ we have handled this - the support
					@ code will raise an exception if
					@ required. If not, the user code will
					@ retry the faulted instruction
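	@ VFP_bounce is the C support code in vfpmodule.c.  Since we branch
	@ to it rather than call it, its return goes to the lr set above
	@ (r9, the "successful return" address), resuming the trapped thread.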

#ifdef CONFIG_SMP
	.globl	vfp_save_state
	.type	vfp_save_state, %function
vfp_save_state:
	@ Save the current VFP state
	@ r0 - save location
	@ r1 - FPEXC
	DBGSTR1	"save VFP state %p", r0
	VFPFSTMIA r0, r2		@ save the working registers
	VFPFMRX	r2, FPSCR		@ current status
	tst	r1, #FPEXC_EX		@ is there additional state to save?
	VFPFMRX	r3, FPINST, NE		@ FPINST (only if FPEXC.EX is set)
	tstne	r1, #FPEXC_FP2V		@ is there an FPINST2 to read?
	VFPFMRX	r12, FPINST2, NE	@ FPINST2 if needed (and present)
	stmia	r0, {r1, r2, r3, r12}	@ save FPEXC, FPSCR, FPINST, FPINST2
	mov	pc, lr
#endif

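	@ Address of the last_VFP_context array (see vfpmodule.c), kept here
	@ as a literal word so vfp_support_entry can load it with a plain
	@ pc-relative ldr.  The array holds, for each CPU, a pointer to the
	@ vfp_state that currently owns that CPU's VFP registers, which is
	@ why it is indexed above by r11 (the CPU number) scaled by 4.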
last_VFP_context_address:
	.word	last_VFP_context

	.globl	vfp_get_float
vfp_get_float:
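	@ Return the value of single-precision register r0 in r0.  The .irp
	@ block below builds a table of 8-byte entries (one mrc plus one
	@ return per register); since the ARM pc reads as the address of the
	@ current instruction plus 8, "add pc, pc, r0, lsl #3" jumps straight
	@ to entry r0, and the "mov r0, r0" is just a nop padding the slot in
	@ between.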
	add	pc, pc, r0, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mrc	p10, 0, r0, c\dr, c0, 0	@ fmrs	r0, s0
	mov	pc, lr
	mrc	p10, 0, r0, c\dr, c0, 4	@ fmrs	r0, s1
	mov	pc, lr
	.endr

	.globl	vfp_put_float
vfp_put_float:
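	@ Write the value in r0 to single-precision register r1, using the
	@ same 8-byte-per-entry jump table scheme as vfp_get_float above.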
	add	pc, pc, r1, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mcr	p10, 0, r0, c\dr, c0, 0	@ fmsr	r0, s0
	mov	pc, lr
	mcr	p10, 0, r0, c\dr, c0, 4	@ fmsr	r0, s1
	mov	pc, lr
	.endr

	.globl	vfp_get_double
vfp_get_double:
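	@ Return the value of double-precision register r0 in r0 (low word)
	@ and r1 (high word).  Entries are again 8 bytes each: d0-d15 first,
	@ then (with VFPv3) d16-d31, and finally one extra entry that reads
	@ as zero for the virtual compare-with-zero register.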
	add	pc, pc, r0, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmrrd	r0, r1, d\dr
	mov	pc, lr
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mrrc	p11, 3, r0, r1, c\dr	@ fmrrd	r0, r1, d\dr
	mov	pc, lr
	.endr
#endif

	@ virtual register 16 (or 32 if VFPv3) for compare with zero
	mov	r0, #0
	mov	r1, #0
	mov	pc, lr

	.globl	vfp_put_double
vfp_put_double:
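	@ Write the value in r0 (low word) and r1 (high word) to
	@ double-precision register r2, with the same table layout as
	@ vfp_get_double above.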
	add	pc, pc, r2, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmdrr	d\dr, r0, r1
	mov	pc, lr
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mcrr	p11, 3, r0, r1, c\dr	@ fmdrr	d\dr, r0, r1
	mov	pc, lr
	.endr
#endif