xref: /openbmc/linux/arch/arm64/kvm/hyp/hyp-entry.S (revision 8c749ce9)
/*
 * Copyright (C) 2015 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>

	.text
	.pushsection	.hyp.text, "ax"

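/*
 * Stash/unstash x0-x3 on the EL2 stack: the handlers below need a few
 * scratch registers before they know whether the trap came from the
 * guest or from the host itself.
 */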
.macro	save_x0_to_x3
	stp	x0, x1, [sp, #-16]!
	stp	x2, x3, [sp, #-16]!
.endm

.macro	restore_x0_to_x3
	ldp	x2, x3, [sp], #16
	ldp	x0, x1, [sp], #16
.endm

el1_sync:				// Guest trapped into EL2
	save_x0_to_x3

	mrs	x1, esr_el2
	lsr	x2, x1, #ESR_ELx_EC_SHIFT

	cmp	x2, #ESR_ELx_EC_HVC64
	b.ne	el1_trap

	mrs	x3, vttbr_el2		// If vttbr is valid, the 64bit guest
	cbnz	x3, el1_trap		// called HVC

	/* Here, we're pretty sure the host called HVC. */
	restore_x0_to_x3

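	/*
	 * Host HVC convention: x0 == 0 asks for the current vectors
	 * (__hyp_get_vectors), otherwise x0 is the kernel address of the
	 * hyp function to call, with x1-x3 as its arguments.
	 */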
	/* Check for __hyp_get_vectors */
	cbnz	x0, 1f
	mrs	x0, vbar_el2
	b	2f

1:	stp	lr, xzr, [sp, #-16]!

	/*
	 * Compute the function address in EL2, and shuffle the parameters.
	 */
	kern_hyp_va	x0
	mov	lr, x0
	mov	x0, x1
	mov	x1, x2
	mov	x2, x3
	blr	lr

	ldp	lr, xzr, [sp], #16
2:	eret

el1_trap:
	/*
	 * x1: ESR
	 * x2: ESR_EC
	 */

	/* Guest accessed VFP/SIMD registers, save host, restore Guest */
	cmp	x2, #ESR_ELx_EC_FP_ASIMD
	b.eq	__fpsimd_guest_restore

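	/*
	 * The ccmp only performs the IABT comparison when the previous cmp
	 * did not match DABT (the "ne" condition); if it did match, the
	 * flags are forced to #4 (Z set, i.e. "equal"). b.ne is therefore
	 * taken only when the trap is neither a data nor an instruction
	 * abort from a lower EL.
	 */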
	cmp	x2, #ESR_ELx_EC_DABT_LOW
	mov	x0, #ESR_ELx_EC_IABT_LOW
	ccmp	x2, x0, #4, ne
	b.ne	1f		// Not an abort we care about

	/* This is an abort. Check for permission fault */
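	/*
	 * Cortex-A57 erratum 834220 can report a Stage-2 fault for what is
	 * really a Stage-1 fault, so the fault type in ESR cannot be
	 * trusted. With the workaround applied, the check is NOPed out and
	 * every abort takes the slow path below.
	 */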
alternative_if_not ARM64_WORKAROUND_834220
	and	x2, x1, #ESR_ELx_FSC_TYPE
	cmp	x2, #FSC_PERM
	b.ne	1f		// Not a permission fault
alternative_else
	nop			// Use the permission fault path to
	nop			// check for a valid S1 translation,
	nop			// regardless of the ESR value.
alternative_endif

	/*
	 * Check for Stage-1 page table walk, which is guaranteed
	 * to give a valid HPFAR_EL2.
	 */
	tbnz	x1, #7, 1f	// S1PTW is set

	/* Preserve PAR_EL1 */
	mrs	x3, par_el1
	stp	x3, xzr, [sp, #-16]!

	/*
	 * Permission fault, HPFAR_EL2 is invalid.
	 * Resolve the IPA the hard way using the guest VA.
	 * Stage-1 translation already validated the memory access rights.
	 * As such, we can use the EL1 translation regime, and don't have
	 * to distinguish between EL0 and EL1 access.
	 */
	mrs	x2, far_el2
	at	s1e1r, x2
	isb

	/* Read result */
	mrs	x3, par_el1
	ldp	x0, xzr, [sp], #16	// Restore PAR_EL1 from the stack
	msr	par_el1, x0
	tbnz	x3, #0, 3f		// Bail out if we failed the translation
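	/*
	 * PAR_EL1 returns the Stage-1 output address (the IPA) in bits
	 * [47:12]; HPFAR_EL2 wants IPA[47:12] in bits [43:4], hence the
	 * extract-from-bit-12 followed by a shift-left-by-4.
	 */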
	ubfx	x3, x3, #12, #36	// Extract IPA
	lsl	x3, x3, #4		// and present it like HPFAR
	b	2f

1:	mrs	x3, hpfar_el2
	mrs	x2, far_el2

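	/*
	 * TPIDR_EL2 holds the vcpu pointer; record the syndrome, faulting
	 * VA and IPA there for the exit handler to consume.
	 */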
2:	mrs	x0, tpidr_el2
	str	w1, [x0, #VCPU_ESR_EL2]
	str	x2, [x0, #VCPU_FAR_EL2]
	str	x3, [x0, #VCPU_HPFAR_EL2]

	mov	x1, #ARM_EXCEPTION_TRAP
	b	__guest_exit

	/*
	 * Translation failed. Just return to the guest and
	 * let it fault again. Another CPU is probably playing
	 * behind our back.
	 */
3:	restore_x0_to_x3

	eret

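/* A physical IRQ fired while the guest was running: exit back to the host. */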
el1_irq:
	save_x0_to_x3
	mrs	x0, tpidr_el2
	mov	x1, #ARM_EXCEPTION_IRQ
	b	__guest_exit

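/*
 * Fake an exception return into the host's panic(): mask all exception
 * sources, pretend the exception was taken from EL1h and ERET straight
 * into panic().
 */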
ENTRY(__hyp_do_panic)
	mov	lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
		      PSR_MODE_EL1h)
	msr	spsr_el2, lr
	ldr	lr, =panic
	msr	elr_el2, lr
	eret
ENDPROC(__hyp_do_panic)

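/* Emit a vector entry that simply branches to \target (__hyp_panic by default). */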
.macro invalid_vector	label, target = __hyp_panic
	.align	2
\label:
	b \target
ENDPROC(\label)
.endm

	/* None of these should ever happen */
	invalid_vector	el2t_sync_invalid
	invalid_vector	el2t_irq_invalid
	invalid_vector	el2t_fiq_invalid
	invalid_vector	el2t_error_invalid
	invalid_vector	el2h_sync_invalid
	invalid_vector	el2h_irq_invalid
	invalid_vector	el2h_fiq_invalid
	invalid_vector	el2h_error_invalid
	invalid_vector	el1_sync_invalid
	invalid_vector	el1_irq_invalid
	invalid_vector	el1_fiq_invalid
	invalid_vector	el1_error_invalid

	.ltorg

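	/* The vector base programmed into VBAR_EL2 must be 2kB aligned. */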
	.align 11

ENTRY(__kvm_hyp_vector)
	ventry	el2t_sync_invalid		// Synchronous EL2t
	ventry	el2t_irq_invalid		// IRQ EL2t
	ventry	el2t_fiq_invalid		// FIQ EL2t
	ventry	el2t_error_invalid		// Error EL2t

	ventry	el2h_sync_invalid		// Synchronous EL2h
	ventry	el2h_irq_invalid		// IRQ EL2h
	ventry	el2h_fiq_invalid		// FIQ EL2h
	ventry	el2h_error_invalid		// Error EL2h

	ventry	el1_sync			// Synchronous 64-bit EL1
	ventry	el1_irq				// IRQ 64-bit EL1
	ventry	el1_fiq_invalid			// FIQ 64-bit EL1
	ventry	el1_error_invalid		// Error 64-bit EL1

	ventry	el1_sync			// Synchronous 32-bit EL1
	ventry	el1_irq				// IRQ 32-bit EL1
	ventry	el1_fiq_invalid			// FIQ 32-bit EL1
	ventry	el1_error_invalid		// Error 32-bit EL1
ENDPROC(__kvm_hyp_vector)