/*
 * Copyright (C) 2015-2018 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>
#include <asm/mmu.h>

	.text
	.pushsection	.hyp.text, "ax"

.macro do_el2_call
	/*
	 * Shuffle the parameters before calling the function
	 * pointed to in x0. Assumes parameters in x[1,2,3].
	 */
	str	lr, [sp, #-16]!
	mov	lr, x0
	mov	x0, x1
	mov	x1, x2
	mov	x2, x3
	blr	lr
	ldr	lr, [sp], #16
.endm
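
/*
 * For example, a host-side kvm_call_hyp(func, a, b) issues an HVC that
 * lands in el1_sync below with x0 = func and x1/x2 = a/b, so do_el2_call
 * ends up performing func(a, b) at EL2.
 */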

ENTRY(__vhe_hyp_call)
	do_el2_call
	/*
	 * We used to rely on having an exception return to get
	 * an implicit isb. In the E2H case, we don't have it anymore.
	 * Rather than changing all the leaf functions, just do it here
	 * before returning to the rest of the kernel.
	 */
	isb
	ret
ENDPROC(__vhe_hyp_call)

el1_sync:				// Guest trapped into EL2

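	/*
	 * The cmp/ccmp pair checks the EC against both HVC64 and HVC32
	 * with a single branch: if the first cmp is "ne", the ccmp
	 * compares against HVC32; otherwise it forces NZCV to #4 (Z set),
	 * so b.ne only falls through for one of the two HVC ECs.
	 */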
	mrs	x0, esr_el2
	lsr	x0, x0, #ESR_ELx_EC_SHIFT
	cmp	x0, #ESR_ELx_EC_HVC64
	ccmp	x0, #ESR_ELx_EC_HVC32, #4, ne
	b.ne	el1_trap

	mrs	x1, vttbr_el2		// If vttbr is valid, the guest
	cbnz	x1, el1_hvc_guest	// called HVC

	/* Here, we're pretty sure the host called HVC. */
	ldp	x0, x1, [sp], #16

	/* Check for a stub HVC call */
	cmp	x0, #HVC_STUB_HCALL_NR
	b.hs	1f

	/*
	 * Compute the idmap address of __kvm_handle_stub_hvc and
	 * jump there. Since we use kimage_voffset, do not use the
	 * HYP VA for __kvm_handle_stub_hvc, but the kernel VA instead
	 * (by loading it from the constant pool).
	 *
	 * Preserve x0-x4, which may contain stub parameters.
	 */
	ldr	x5, =__kvm_handle_stub_hvc
	ldr_l	x6, kimage_voffset

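	/*
	 * kimage_voffset is the kernel image's (VA - PA) offset, so
	 * subtracting it converts the kernel VA into a physical address,
	 * which the idmap makes a valid branch target at EL2.
	 */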
	/* x5 = __pa(x5) */
	sub	x5, x5, x6
	br	x5

1:
	/*
	 * Perform the EL2 call
	 */
	kern_hyp_va	x0
	do_el2_call

	eret

el1_hvc_guest:
	/*
	 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
	 * The workaround has already been applied on the host,
	 * so let's quickly get back to the guest. We don't bother
	 * restoring x1, as it can be clobbered anyway.
	 */
	ldr	x1, [sp]				// Guest's x0
	eor	w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
	cbnz	w1, el1_trap
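	/* w1 is 0 at this point, so this returns SMCCC_RET_SUCCESS in x0 */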
	mov	x0, x1
	add	sp, sp, #16
	eret

el1_trap:
	get_vcpu_ptr	x1, x0

	mrs		x0, esr_el2
	lsr		x0, x0, #ESR_ELx_EC_SHIFT
	/*
	 * x0: ESR_EC
	 * x1: vcpu pointer
	 */

	/*
	 * We trap the first access to the FP/SIMD to save the host context
	 * and restore the guest context lazily.
	 * If FP/SIMD is not implemented, handle the trap and inject an
	 * undefined instruction exception to the guest.
	 */
alternative_if_not ARM64_HAS_NO_FPSIMD
	cmp	x0, #ESR_ELx_EC_FP_ASIMD
	b.eq	__fpsimd_guest_restore
alternative_else_nop_endif

	mov	x0, #ARM_EXCEPTION_TRAP
	b	__guest_exit

el1_irq:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_IRQ
	b	__guest_exit

el1_error:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_EL1_SERROR
	b	__guest_exit

el2_error:
	ldp	x0, x1, [sp], #16

	/*
	 * Only two possibilities:
	 * 1) Either we come from the exit path, having just unmasked
	 *    PSTATE.A: change the return code to an EL2 fault, and
	 *    carry on, as we're already in a sane state to handle it.
	 * 2) Or we come from anywhere else, and that's a bug: we panic.
	 *
	 * For (1), x0 contains the original return code and x1 doesn't
	 * contain anything meaningful at that stage. We can reuse them
	 * as temp registers.
	 * For (2), who cares?
	 */
	mrs	x0, elr_el2
	adr	x1, abort_guest_exit_start
	cmp	x0, x1
	adr	x1, abort_guest_exit_end
	ccmp	x0, x1, #4, ne
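	/*
	 * Fall through only if ELR_EL2 is exactly abort_guest_exit_start
	 * or abort_guest_exit_end, i.e. the SError was taken in the small
	 * window where the exit path runs with PSTATE.A unmasked.
	 */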
	b.ne	__hyp_panic
	mov	x0, #(1 << ARM_EXIT_WITH_SERROR_BIT)
	eret

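/*
 * Fake an exception return to EL1h with all of DAIF masked, "returning"
 * into panic(); the caller is expected to have loaded panic()'s
 * arguments into the argument registers beforehand.
 */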
ENTRY(__hyp_do_panic)
	mov	lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
		      PSR_MODE_EL1h)
	msr	spsr_el2, lr
	ldr	lr, =panic
	msr	elr_el2, lr
	eret
ENDPROC(__hyp_do_panic)

ENTRY(__hyp_panic)
	get_host_ctxt x0, x1
	b	hyp_panic
ENDPROC(__hyp_panic)

.macro invalid_vector	label, target = __hyp_panic
	.align	2
\label:
	b \target
ENDPROC(\label)
.endm

	/* None of these should ever happen */
	invalid_vector	el2t_sync_invalid
	invalid_vector	el2t_irq_invalid
	invalid_vector	el2t_fiq_invalid
	invalid_vector	el2t_error_invalid
	invalid_vector	el2h_sync_invalid
	invalid_vector	el2h_irq_invalid
	invalid_vector	el2h_fiq_invalid
	invalid_vector	el1_fiq_invalid

	.ltorg

	.align 11

.macro valid_vect target
	.align 7
	stp	x0, x1, [sp, #-16]!
	b	\target
.endm

.macro invalid_vect target
	.align 7
	b	\target
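	/*
	 * With ARM64_HARDEN_EL2_VECTORS, the patched hyp_ventry sequence
	 * pushes x0/x1 and branches to this slot at offset +4, skipping
	 * the "b \target" above; undo the push before branching out.
	 */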
	ldp	x0, x1, [sp], #16
	b	\target
.endm

ENTRY(__kvm_hyp_vector)
	invalid_vect	el2t_sync_invalid	// Synchronous EL2t
	invalid_vect	el2t_irq_invalid	// IRQ EL2t
	invalid_vect	el2t_fiq_invalid	// FIQ EL2t
	invalid_vect	el2t_error_invalid	// Error EL2t

	invalid_vect	el2h_sync_invalid	// Synchronous EL2h
	invalid_vect	el2h_irq_invalid	// IRQ EL2h
	invalid_vect	el2h_fiq_invalid	// FIQ EL2h
	valid_vect	el2_error		// Error EL2h

	valid_vect	el1_sync		// Synchronous 64-bit EL1
	valid_vect	el1_irq			// IRQ 64-bit EL1
	invalid_vect	el1_fiq_invalid		// FIQ 64-bit EL1
	valid_vect	el1_error		// Error 64-bit EL1

	valid_vect	el1_sync		// Synchronous 32-bit EL1
	valid_vect	el1_irq			// IRQ 32-bit EL1
	invalid_vect	el1_fiq_invalid		// FIQ 32-bit EL1
	valid_vect	el1_error		// Error 32-bit EL1
ENDPROC(__kvm_hyp_vector)

#ifdef CONFIG_KVM_INDIRECT_VECTORS
.macro hyp_ventry
	.align 7
1:	.rept 27
	nop
	.endr
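	/*
	 * The 27 nops above plus the 5 instructions in the alternative
	 * below fill the whole 128-byte (32-instruction) vector slot.
	 */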
/*
 * The default sequence is to directly branch to the KVM vectors,
 * using the computed offset. This applies for VHE as well as
 * !ARM64_HARDEN_EL2_VECTORS.
 *
 * For ARM64_HARDEN_EL2_VECTORS configurations, this gets replaced
 * with:
 *
 * stp	x0, x1, [sp, #-16]!
 * movz	x0, #(addr & 0xffff)
 * movk	x0, #((addr >> 16) & 0xffff), lsl #16
 * movk	x0, #((addr >> 32) & 0xffff), lsl #32
 * br	x0
 *
 * Where addr = kern_hyp_va(__kvm_hyp_vector) + vector-offset + 4.
 * See kvm_patch_vector_branch for details.
 */
alternative_cb	kvm_patch_vector_branch
	b	__kvm_hyp_vector + (1b - 0b)
	nop
	nop
	nop
	nop
alternative_cb_end
.endm

.macro generate_vectors
0:
	.rept 16
	hyp_ventry
	.endr
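	// 16 x 128-byte slots: each generated table is exactly SZ_2K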
	.org 0b + SZ_2K		// Safety measure
.endm

	.align	11
ENTRY(__bp_harden_hyp_vecs_start)
	.rept BP_HARDEN_EL2_SLOTS
	generate_vectors
	.endr
ENTRY(__bp_harden_hyp_vecs_end)

	.popsection

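/*
 * The code between the _start and _end labels below is copied into a
 * per-CPU slot by the branch predictor hardening code; it preserves
 * x0-x3 around the ARM_SMCCC_ARCH_WORKAROUND_1 call into firmware.
 */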
ENTRY(__smccc_workaround_1_smc_start)
	sub	sp, sp, #(8 * 4)
	stp	x2, x3, [sp, #(8 * 0)]
	stp	x0, x1, [sp, #(8 * 2)]
	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_1
	smc	#0
	ldp	x2, x3, [sp, #(8 * 0)]
	ldp	x0, x1, [sp, #(8 * 2)]
	add	sp, sp, #(8 * 4)
ENTRY(__smccc_workaround_1_smc_end)
#endif