xref: /openbmc/linux/arch/arm64/kvm/hyp/hyp-entry.S (revision 2b28162cf65a6fe1c93d172675e4f2792792f17e)
/*
 * Copyright (C) 2015 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>

	.text
	.pushsection	.hyp.text, "ax"
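	/*
	 * Everything in .hyp.text is mapped into the EL2 (HYP) address
	 * space by the host at init time; code here must only rely on
	 * HYP mappings and HYP VAs.
	 */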

.macro	save_x0_to_x3
	stp	x0, x1, [sp, #-16]!
	stp	x2, x3, [sp, #-16]!
.endm

.macro	restore_x0_to_x3
	ldp	x2, x3, [sp], #16
	ldp	x0, x1, [sp], #16
.endm
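	/*
	 * Spill x0-x3 to the HYP stack so the trap handling below can use
	 * them as scratch registers; restore_x0_to_x3 pops them back in
	 * the opposite order before returning to the caller.
	 */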

el1_sync:				// Guest trapped into EL2
	save_x0_to_x3

	mrs	x1, esr_el2
	lsr	x2, x1, #ESR_ELx_EC_SHIFT

	cmp	x2, #ESR_ELx_EC_HVC64
	b.ne	el1_trap

	mrs	x3, vttbr_el2		// If vttbr is valid, the 64bit guest
	cbnz	x3, el1_trap		// called HVC

	/* Here, we're pretty sure the host called HVC. */
	restore_x0_to_x3

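	/*
	 * Host HVC convention (as issued by the host's __kvm_call_hyp
	 * stub via "hvc #0"): x0 holds the kernel VA of the HYP function
	 * to run (or 0 for __hyp_get_vectors), and x1-x3 hold its
	 * arguments.
	 */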
	/* Check for __hyp_get_vectors */
	cbnz	x0, 1f
	mrs	x0, vbar_el2
	b	2f

1:	stp	lr, xzr, [sp, #-16]!

	/*
	 * Compute the function address in EL2, and shuffle the parameters.
	 */
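	/*
	 * kern_hyp_va turns the kernel VA in x0 into a HYP VA (essentially
	 * a mask with HYP_PAGE_OFFSET_MASK here), so the branch below
	 * lands in the EL2 mapping of the function.
	 */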
	kern_hyp_va	x0
	mov	lr, x0
	mov	x0, x1
	mov	x1, x2
	mov	x2, x3
	blr	lr

	ldp	lr, xzr, [sp], #16
2:	eret

el1_trap:
	/*
	 * x1: ESR
	 * x2: ESR_EC
	 */

	/* Guest accessed VFP/SIMD registers, save host, restore Guest */
	cmp	x2, #ESR_ELx_EC_FP_ASIMD
	b.eq	__fpsimd_guest_restore
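	/*
	 * Note: __fpsimd_guest_restore is expected to clear the FP trap,
	 * save the host FPSIMD state, load the guest's and return straight
	 * to the guest, so this trap is handled entirely in EL2 without a
	 * world switch.
	 */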

	cmp	x2, #ESR_ELx_EC_DABT_LOW
	mov	x0, #ESR_ELx_EC_IABT_LOW
	ccmp	x2, x0, #4, ne
	b.ne	1f		// Not an abort we care about
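	/*
	 * The cmp/ccmp pair above folds two tests into one branch: if the
	 * EC is not DABT_LOW, ccmp compares it against IABT_LOW; if it is
	 * DABT_LOW, ccmp forces NZCV to #4 (Z set, i.e. "equal"). Either
	 * way, the b.ne is only taken when the EC is neither a data nor
	 * an instruction abort from a lower EL.
	 */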

	/* This is an abort. Check for permission fault */
alternative_if_not ARM64_WORKAROUND_834220
	and	x2, x1, #ESR_ELx_FSC_TYPE
	cmp	x2, #FSC_PERM
	b.ne	1f		// Not a permission fault
alternative_else
	nop			// Use the permission fault path to
	nop			// check for a valid S1 translation,
	nop			// regardless of the ESR value.
alternative_endif
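	/*
	 * ARM64_WORKAROUND_834220 covers Cortex-A57 erratum 834220, where
	 * a stage 2 fault can be misreported while a stage 1 fault is
	 * pending; affected CPUs therefore always fall through to the
	 * AT-based check below instead of trusting the fault status in
	 * the ESR.
	 */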

	/*
	 * Check for Stage-1 page table walk, which is guaranteed
	 * to give a valid HPFAR_EL2.
	 */
	tbnz	x1, #7, 1f	// S1PTW is set
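	/*
	 * Bit 7 of the ESR is S1PTW: the stage 2 fault happened on a
	 * stage 1 translation table walk, in which case HPFAR_EL2 already
	 * holds the faulting IPA and the AT dance below can be skipped.
	 */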

	/* Preserve PAR_EL1 */
	mrs	x3, par_el1
	stp	x3, xzr, [sp, #-16]!

	/*
	 * Permission fault, HPFAR_EL2 is invalid.
	 * Resolve the IPA the hard way using the guest VA.
	 * Stage-1 translation already validated the memory access rights.
	 * As such, we can use the EL1 translation regime, and don't have
	 * to distinguish between EL0 and EL1 access.
	 */
	mrs	x2, far_el2
	at	s1e1r, x2
	isb

	/* Read result */
	mrs	x3, par_el1
	ldp	x0, xzr, [sp], #16	// Restore PAR_EL1 from the stack
	msr	par_el1, x0
	tbnz	x3, #0, 3f		// Bail out if we failed the translation
	ubfx	x3, x3, #12, #36	// Extract IPA
	lsl	x3, x3, #4		// and present it like HPFAR
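	/*
	 * PAR_EL1 reports the PA in bits [47:12] (bit 0 set means the
	 * translation failed); HPFAR_EL2 carries the faulting IPA[47:12]
	 * in bits [39:4], hence the 36-bit extract from bit 12 followed
	 * by the shift left by 4.
	 */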
	b	2f

1:	mrs	x3, hpfar_el2
	mrs	x2, far_el2

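	/*
	 * tpidr_el2 holds the running vcpu pointer while a guest context
	 * is loaded; stash the fault information in the vcpu and hand
	 * over to __guest_exit (vcpu in x0, exit code in x1).
	 */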
2:	mrs	x0, tpidr_el2
	str	w1, [x0, #VCPU_ESR_EL2]
	str	x2, [x0, #VCPU_FAR_EL2]
	str	x3, [x0, #VCPU_HPFAR_EL2]

	mov	x1, #ARM_EXCEPTION_TRAP
	b	__guest_exit

	/*
	 * Translation failed. Just return to the guest and
	 * let it fault again. Another CPU is probably playing
	 * behind our back.
	 */
3:	restore_x0_to_x3

	eret

el1_irq:
	save_x0_to_x3
	mrs	x0, tpidr_el2
	mov	x1, #ARM_EXCEPTION_IRQ
	b	__guest_exit

.macro invalid_vector	label, target = __kvm_hyp_panic
	.align	2
\label:
	b \target
ENDPROC(\label)
.endm

	/* None of these should ever happen */
	invalid_vector	el2t_sync_invalid
	invalid_vector	el2t_irq_invalid
	invalid_vector	el2t_fiq_invalid
	invalid_vector	el2t_error_invalid
	invalid_vector	el2h_sync_invalid
	invalid_vector	el2h_irq_invalid
	invalid_vector	el2h_fiq_invalid
	invalid_vector	el2h_error_invalid
	invalid_vector	el1_sync_invalid
	invalid_vector	el1_irq_invalid
	invalid_vector	el1_fiq_invalid
	invalid_vector	el1_error_invalid

	.ltorg

	.align 11
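	/*
	 * VBAR_EL2 requires a 2kB-aligned vector table (16 slots of
	 * 0x80 bytes each), hence the .align 11 above.
	 */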

ENTRY(__kvm_hyp_vector)
	ventry	el2t_sync_invalid		// Synchronous EL2t
	ventry	el2t_irq_invalid		// IRQ EL2t
	ventry	el2t_fiq_invalid		// FIQ EL2t
	ventry	el2t_error_invalid		// Error EL2t

	ventry	el2h_sync_invalid		// Synchronous EL2h
	ventry	el2h_irq_invalid		// IRQ EL2h
	ventry	el2h_fiq_invalid		// FIQ EL2h
	ventry	el2h_error_invalid		// Error EL2h

	ventry	el1_sync			// Synchronous 64-bit EL1
	ventry	el1_irq				// IRQ 64-bit EL1
	ventry	el1_fiq_invalid			// FIQ 64-bit EL1
	ventry	el1_error_invalid		// Error 64-bit EL1

	ventry	el1_sync			// Synchronous 32-bit EL1
	ventry	el1_irq				// IRQ 32-bit EL1
	ventry	el1_fiq_invalid			// FIQ 32-bit EL1
	ventry	el1_error_invalid		// Error 32-bit EL1
ENDPROC(__kvm_hyp_vector)