/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * FPU context handling code for KVM.
 *
 * Copyright (C) 2015 Imagination Technologies Ltd.
 */

#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/regdef.h>

/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
#undef fp

	.set	noreorder
	.set	noat

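/*
 * __kvm_save_fpu:
 * Save the hardware FPU register state into the vcpu FPU context at a0
 * (the structure that the VCPU_FPR* offsets from asm-offsets index into).
 * The final sdc1 issues in the jr delay slot (.set noreorder above).
 */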
LEAF(__kvm_save_fpu)
	.set	push
	.set	hardfloat
	.set	fp=64
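	/*
	 * Status.FR is bit 26 of CP0_STATUS; shifting left by 5 moves it
	 * into the sign bit, so bgez branches when FR=0 (32-bit FPU
	 * register mode), in which case the odd-numbered doubles do not
	 * exist and only the sixteen even-numbered doubles are saved.
	 */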
	mfc0	t0, CP0_STATUS
	sll     t0, t0, 5			# is Status.FR set?
	bgez    t0, 1f				# no: skip odd doubles
	 nop
	sdc1	$f1,  VCPU_FPR1(a0)
	sdc1	$f3,  VCPU_FPR3(a0)
	sdc1	$f5,  VCPU_FPR5(a0)
	sdc1	$f7,  VCPU_FPR7(a0)
	sdc1	$f9,  VCPU_FPR9(a0)
	sdc1	$f11, VCPU_FPR11(a0)
	sdc1	$f13, VCPU_FPR13(a0)
	sdc1	$f15, VCPU_FPR15(a0)
	sdc1	$f17, VCPU_FPR17(a0)
	sdc1	$f19, VCPU_FPR19(a0)
	sdc1	$f21, VCPU_FPR21(a0)
	sdc1	$f23, VCPU_FPR23(a0)
	sdc1	$f25, VCPU_FPR25(a0)
	sdc1	$f27, VCPU_FPR27(a0)
	sdc1	$f29, VCPU_FPR29(a0)
	sdc1	$f31, VCPU_FPR31(a0)
1:	sdc1	$f0,  VCPU_FPR0(a0)
	sdc1	$f2,  VCPU_FPR2(a0)
	sdc1	$f4,  VCPU_FPR4(a0)
	sdc1	$f6,  VCPU_FPR6(a0)
	sdc1	$f8,  VCPU_FPR8(a0)
	sdc1	$f10, VCPU_FPR10(a0)
	sdc1	$f12, VCPU_FPR12(a0)
	sdc1	$f14, VCPU_FPR14(a0)
	sdc1	$f16, VCPU_FPR16(a0)
	sdc1	$f18, VCPU_FPR18(a0)
	sdc1	$f20, VCPU_FPR20(a0)
	sdc1	$f22, VCPU_FPR22(a0)
	sdc1	$f24, VCPU_FPR24(a0)
	sdc1	$f26, VCPU_FPR26(a0)
	sdc1	$f28, VCPU_FPR28(a0)
	jr	ra
	 sdc1	$f30, VCPU_FPR30(a0)
	.set	pop
	END(__kvm_save_fpu)

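/*
 * __kvm_restore_fpu:
 * Mirror of __kvm_save_fpu: reload the hardware FPU registers from the
 * vcpu FPU context at a0. Odd-numbered doubles are only loaded when
 * Status.FR is set; the final ldc1 issues in the jr delay slot.
 */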
LEAF(__kvm_restore_fpu)
	.set	push
	.set	hardfloat
	.set	fp=64
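	/*
	 * Same Status.FR test as in __kvm_save_fpu: the odd-numbered
	 * doubles only exist when FR=1, so they are skipped otherwise.
	 */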
	mfc0	t0, CP0_STATUS
	sll     t0, t0, 5			# is Status.FR set?
	bgez    t0, 1f				# no: skip odd doubles
	 nop
	ldc1	$f1,  VCPU_FPR1(a0)
	ldc1	$f3,  VCPU_FPR3(a0)
	ldc1	$f5,  VCPU_FPR5(a0)
	ldc1	$f7,  VCPU_FPR7(a0)
	ldc1	$f9,  VCPU_FPR9(a0)
	ldc1	$f11, VCPU_FPR11(a0)
	ldc1	$f13, VCPU_FPR13(a0)
	ldc1	$f15, VCPU_FPR15(a0)
	ldc1	$f17, VCPU_FPR17(a0)
	ldc1	$f19, VCPU_FPR19(a0)
	ldc1	$f21, VCPU_FPR21(a0)
	ldc1	$f23, VCPU_FPR23(a0)
	ldc1	$f25, VCPU_FPR25(a0)
	ldc1	$f27, VCPU_FPR27(a0)
	ldc1	$f29, VCPU_FPR29(a0)
	ldc1	$f31, VCPU_FPR31(a0)
1:	ldc1	$f0,  VCPU_FPR0(a0)
	ldc1	$f2,  VCPU_FPR2(a0)
	ldc1	$f4,  VCPU_FPR4(a0)
	ldc1	$f6,  VCPU_FPR6(a0)
	ldc1	$f8,  VCPU_FPR8(a0)
	ldc1	$f10, VCPU_FPR10(a0)
	ldc1	$f12, VCPU_FPR12(a0)
	ldc1	$f14, VCPU_FPR14(a0)
	ldc1	$f16, VCPU_FPR16(a0)
	ldc1	$f18, VCPU_FPR18(a0)
	ldc1	$f20, VCPU_FPR20(a0)
	ldc1	$f22, VCPU_FPR22(a0)
	ldc1	$f24, VCPU_FPR24(a0)
	ldc1	$f26, VCPU_FPR26(a0)
	ldc1	$f28, VCPU_FPR28(a0)
	jr	ra
	 ldc1	$f30, VCPU_FPR30(a0)
	.set	pop
	END(__kvm_restore_fpu)

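/*
 * __kvm_restore_fcsr:
 * Restore the guest's FP control/status register (FCR31/FCSR) from the
 * vcpu context at a0. Writing a value whose cause and enable bits are
 * both set raises an immediate FP exception on the ctc1; see the comment
 * on the ctc1 below.
 */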
LEAF(__kvm_restore_fcsr)
	.set	push
	.set	hardfloat
	lw	t0, VCPU_FCR31(a0)
	/*
	 * The ctc1 must stay at this offset in __kvm_restore_fcsr.
	 * See kvm_mips_csr_die_notify(), which handles the case where t0
	 * holds a value that triggers an FP exception on the ctc1; that
	 * exception must be stepped over and ignored, since the set cause
	 * bits must remain there for the guest.
	 */
	ctc1	t0, fcr31
	jr	ra
	 nop
	.set	pop
	END(__kvm_restore_fcsr)