xref: /openbmc/linux/arch/arm64/kvm/hyp/hyp-entry.S (revision 88a84ccccb3966bcc3f309cdb76092a9892c0260)
1caab277bSThomas Gleixner/* SPDX-License-Identifier: GPL-2.0-only */
22b28162cSMarc Zyngier/*
3e8b22d0fSMarc Zyngier * Copyright (C) 2015-2018 - ARM Ltd
42b28162cSMarc Zyngier * Author: Marc Zyngier <marc.zyngier@arm.com>
52b28162cSMarc Zyngier */
62b28162cSMarc Zyngier
7f72af90cSMarc Zyngier#include <linux/arm-smccc.h>
82b28162cSMarc Zyngier#include <linux/linkage.h>
92b28162cSMarc Zyngier
102b28162cSMarc Zyngier#include <asm/alternative.h>
112b28162cSMarc Zyngier#include <asm/assembler.h>
122b28162cSMarc Zyngier#include <asm/cpufeature.h>
132b28162cSMarc Zyngier#include <asm/kvm_arm.h>
142b28162cSMarc Zyngier#include <asm/kvm_asm.h>
152b28162cSMarc Zyngier#include <asm/kvm_mmu.h>
16e8b22d0fSMarc Zyngier#include <asm/mmu.h>
172b28162cSMarc Zyngier
18e9ee186bSJames Morse.macro save_caller_saved_regs_vect
	/*
	 * Push the caller-saved GPRs x2-x17 onto the hyp stack in
	 * descending pairs (x0/x1 were already stacked by the vector
	 * preamble). x18 is not saved by this macro.
	 * Must remain the exact mirror image of
	 * restore_caller_saved_regs_vect below.
	 */
19e9ee186bSJames Morse	/* x0 and x1 were saved in the vector entry */
20e9ee186bSJames Morse	stp	x2, x3,   [sp, #-16]!
21e9ee186bSJames Morse	stp	x4, x5,   [sp, #-16]!
22e9ee186bSJames Morse	stp	x6, x7,   [sp, #-16]!
23e9ee186bSJames Morse	stp	x8, x9,   [sp, #-16]!
24e9ee186bSJames Morse	stp	x10, x11, [sp, #-16]!
25e9ee186bSJames Morse	stp	x12, x13, [sp, #-16]!
26e9ee186bSJames Morse	stp	x14, x15, [sp, #-16]!
27e9ee186bSJames Morse	stp	x16, x17, [sp, #-16]!
28e9ee186bSJames Morse.endm
29e9ee186bSJames Morse
30e9ee186bSJames Morse.macro restore_caller_saved_regs_vect
	/*
	 * Pop the registers pushed by save_caller_saved_regs_vect in
	 * reverse order, and finally the x0/x1 pair that the vector
	 * preamble stacked, leaving sp back at its pre-exception value.
	 */
31e9ee186bSJames Morse	ldp	x16, x17, [sp], #16
32e9ee186bSJames Morse	ldp	x14, x15, [sp], #16
33e9ee186bSJames Morse	ldp	x12, x13, [sp], #16
34e9ee186bSJames Morse	ldp	x10, x11, [sp], #16
35e9ee186bSJames Morse	ldp	x8, x9,   [sp], #16
36e9ee186bSJames Morse	ldp	x6, x7,   [sp], #16
37e9ee186bSJames Morse	ldp	x4, x5,   [sp], #16
38e9ee186bSJames Morse	ldp	x2, x3,   [sp], #16
39e9ee186bSJames Morse	ldp	x0, x1,   [sp], #16
40e9ee186bSJames Morse.endm
41e9ee186bSJames Morse
422b28162cSMarc Zyngier	.text
432b28162cSMarc Zyngier
44b81125c7SMarc Zyngier.macro do_el2_call
45b81125c7SMarc Zyngier	/*
46b81125c7SMarc Zyngier	 * Shuffle the parameters before calling the function
47b81125c7SMarc Zyngier	 * pointed to in x0. Assumes parameters in x[1,2,3].
48b81125c7SMarc Zyngier	 */
	// Push lr in a full 16-byte slot (only 8 bytes used) so sp
	// stays 16-byte aligned as AArch64 requires for sp accesses.
496c9ae25dSMarc Zyngier	str	lr, [sp, #-16]!
	// Stash the callee in lr, then slide args down: x0/x1/x2 take
	// the values of x1/x2/x3 so the target sees a normal AAPCS call.
50b81125c7SMarc Zyngier	mov	lr, x0
51b81125c7SMarc Zyngier	mov	x0, x1
52b81125c7SMarc Zyngier	mov	x1, x2
53b81125c7SMarc Zyngier	mov	x2, x3
54b81125c7SMarc Zyngier	blr	lr
556c9ae25dSMarc Zyngier	ldr	lr, [sp], #16
56b81125c7SMarc Zyngier.endm
57b81125c7SMarc Zyngier
582b28162cSMarc Zyngierel1_sync:				// Guest trapped into EL2
592b28162cSMarc Zyngier
	// Extract the exception class (EC) field from the syndrome.
604464e210SChristoffer Dall	mrs	x0, esr_el2
614464e210SChristoffer Dall	lsr	x0, x0, #ESR_ELx_EC_SHIFT
	// Branch to el1_trap unless EC is HVC64 or HVC32: when the
	// first compare matches ('ne' is false), ccmp forces NZCV to
	// #4 (Z set) so b.ne falls through; otherwise it performs the
	// second compare against the 32-bit HVC class.
6268381b2bSShanker Donthineni	cmp	x0, #ESR_ELx_EC_HVC64
63f72af90cSMarc Zyngier	ccmp	x0, #ESR_ELx_EC_HVC32, #4, ne
642b28162cSMarc Zyngier	b.ne	el1_trap
652b28162cSMarc Zyngier
66b877e984SDavid Brazdil#ifdef __KVM_NVHE_HYPERVISOR__
67f72af90cSMarc Zyngier	mrs	x1, vttbr_el2		// If vttbr is valid, the guest
68f72af90cSMarc Zyngier	cbnz	x1, el1_hvc_guest	// called HVC
692b28162cSMarc Zyngier
702b28162cSMarc Zyngier	/* Here, we're pretty sure the host called HVC. */
	// Drop the x0/x1 pair stacked by the vector preamble; x0 now
	// holds the host's first HVC argument again.
7168381b2bSShanker Donthineni	ldp	x0, x1, [sp], #16
722b28162cSMarc Zyngier
735fbe9a59SMarc Zyngier	/* Check for a stub HVC call */
745fbe9a59SMarc Zyngier	cmp	x0, #HVC_STUB_HCALL_NR
755fbe9a59SMarc Zyngier	b.hs	1f
765fbe9a59SMarc Zyngier
775fbe9a59SMarc Zyngier	/*
785fbe9a59SMarc Zyngier	 * Compute the idmap address of __kvm_handle_stub_hvc and
795fbe9a59SMarc Zyngier	 * jump there. Since we use kimage_voffset, do not use the
805fbe9a59SMarc Zyngier	 * HYP VA for __kvm_handle_stub_hvc, but the kernel VA instead
815fbe9a59SMarc Zyngier	 * (by loading it from the constant pool).
825fbe9a59SMarc Zyngier	 *
835fbe9a59SMarc Zyngier	 * Preserve x0-x4, which may contain stub parameters.
845fbe9a59SMarc Zyngier	 */
855fbe9a59SMarc Zyngier	ldr	x5, =__kvm_handle_stub_hvc
865fbe9a59SMarc Zyngier	ldr_l	x6, kimage_voffset
875fbe9a59SMarc Zyngier
885fbe9a59SMarc Zyngier	/* x5 = __pa(x5) */
895fbe9a59SMarc Zyngier	sub	x5, x5, x6
905fbe9a59SMarc Zyngier	br	x5
912b28162cSMarc Zyngier
92b81125c7SMarc Zyngier1:
932b28162cSMarc Zyngier	/*
94b81125c7SMarc Zyngier	 * Perform the EL2 call
952b28162cSMarc Zyngier	 */
	// Convert the kernel VA function pointer in x0 to a HYP VA
	// before calling through it via do_el2_call.
962b28162cSMarc Zyngier	kern_hyp_va	x0
97b81125c7SMarc Zyngier	do_el2_call
982b28162cSMarc Zyngier
995fbe9a59SMarc Zyngier	eret
	// Speculation barrier: no straight-line speculation past eret.
100679db708SWill Deacon	sb
101b877e984SDavid Brazdil#endif /* __KVM_NVHE_HYPERVISOR__ */
1022b28162cSMarc Zyngier
103f72af90cSMarc Zyngierel1_hvc_guest:
104f72af90cSMarc Zyngier	/*
105f72af90cSMarc Zyngier	 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
106f72af90cSMarc Zyngier	 * The workaround has already been applied on the host,
107f72af90cSMarc Zyngier	 * so let's quickly get back to the guest. We don't bother
108f72af90cSMarc Zyngier	 * restoring x1, as it can be clobbered anyway.
109f72af90cSMarc Zyngier	 */
110f72af90cSMarc Zyngier	ldr	x1, [sp]				// Guest's x0
111f72af90cSMarc Zyngier	eor	w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
112b4f18c06SMarc Zyngier	cbz	w1, wa_epilogue
113b4f18c06SMarc Zyngier
114b4f18c06SMarc Zyngier	/* ARM_SMCCC_ARCH_WORKAROUND_2 handling */
	// w1 currently holds guest_x0 ^ WORKAROUND_1; XOR-ing with
	// (WORKAROUND_1 ^ WORKAROUND_2) leaves guest_x0 ^ WORKAROUND_2,
	// so a zero result means the guest asked for WORKAROUND_2.
115b4f18c06SMarc Zyngier	eor	w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_1 ^ \
116b4f18c06SMarc Zyngier			  ARM_SMCCC_ARCH_WORKAROUND_2)
117f72af90cSMarc Zyngier	cbnz	w1, el1_trap
118b4f18c06SMarc Zyngier
119b4f18c06SMarc Zyngier#ifdef CONFIG_ARM64_SSBD
	// Runtime-patched: when WA2 handling is disabled, the callback
	// keeps this branch so we skip straight to wa2_end.
120b4f18c06SMarc Zyngieralternative_cb	arm64_enable_wa2_handling
121b4f18c06SMarc Zyngier	b	wa2_end
122b4f18c06SMarc Zyngieralternative_cb_end
123b4f18c06SMarc Zyngier	get_vcpu_ptr	x2, x0
124b4f18c06SMarc Zyngier	ldr	x0, [x2, #VCPU_WORKAROUND_FLAGS]
125b4f18c06SMarc Zyngier
126b4f18c06SMarc Zyngier	// Sanitize the argument and update the guest flags
127b4f18c06SMarc Zyngier	ldr	x1, [sp, #8]			// Guest's x1
	// w1 = (guest_x1 != 0): clz yields 32 only for a zero input,
	// lsr #5 reduces that to a single bit, and eor #1 inverts it —
	// all without touching the condition flags.
128b4f18c06SMarc Zyngier	clz	w1, w1				// Murphy's device:
129b4f18c06SMarc Zyngier	lsr	w1, w1, #5			// w1 = !!w1 without using
130b4f18c06SMarc Zyngier	eor	w1, w1, #1			// the flags...
131b4f18c06SMarc Zyngier	bfi	x0, x1, #VCPU_WORKAROUND_2_FLAG_SHIFT, #1
132b4f18c06SMarc Zyngier	str	x0, [x2, #VCPU_WORKAROUND_FLAGS]
133b4f18c06SMarc Zyngier
134b4f18c06SMarc Zyngier	/* Check that we actually need to perform the call */
135b4f18c06SMarc Zyngier	hyp_ldr_this_cpu x0, arm64_ssbd_callback_required, x2
136b4f18c06SMarc Zyngier	cbz	x0, wa2_end
137b4f18c06SMarc Zyngier
	// Forward the mitigation request to the firmware via SMC.
138b4f18c06SMarc Zyngier	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_2
139b4f18c06SMarc Zyngier	smc	#0
140b4f18c06SMarc Zyngier
141b4f18c06SMarc Zyngier	/* Don't leak data from the SMC call */
142b4f18c06SMarc Zyngier	mov	x3, xzr
143b4f18c06SMarc Zyngierwa2_end:
144b4f18c06SMarc Zyngier	mov	x2, xzr
145b4f18c06SMarc Zyngier	mov	x1, xzr
146b4f18c06SMarc Zyngier#endif
147b4f18c06SMarc Zyngier
148b4f18c06SMarc Zyngierwa_epilogue:
	// Return 0 (success) in x0, drop the x0/x1 pair stacked by the
	// vector preamble, and resume the guest.
149b4f18c06SMarc Zyngier	mov	x0, xzr
150f72af90cSMarc Zyngier	add	sp, sp, #16
151f72af90cSMarc Zyngier	eret
	// Speculation barrier: no straight-line speculation past eret.
152679db708SWill Deacon	sb
153f72af90cSMarc Zyngier
1542b28162cSMarc Zyngierel1_trap:
	// Generic guest trap: hand off to __guest_exit with
	// x1 = vcpu pointer and x0 = exit reason.
1554464e210SChristoffer Dall	get_vcpu_ptr	x1, x0
15668381b2bSShanker Donthineni	mov	x0, #ARM_EXCEPTION_TRAP
1572b28162cSMarc Zyngier	b	__guest_exit
1582b28162cSMarc Zyngier
1592b28162cSMarc Zyngierel1_irq:
	// IRQ while running the guest: hand off to __guest_exit with
	// x1 = vcpu pointer and x0 = exit reason.
1604464e210SChristoffer Dall	get_vcpu_ptr	x1, x0
16168381b2bSShanker Donthineni	mov	x0, #ARM_EXCEPTION_IRQ
1622b28162cSMarc Zyngier	b	__guest_exit
1632b28162cSMarc Zyngier
1641b51e5faSMarc Zyngierel1_error:
	// SError from the guest: hand off to __guest_exit with
	// x1 = vcpu pointer and x0 = exit reason.
1654464e210SChristoffer Dall	get_vcpu_ptr	x1, x0
1661b51e5faSMarc Zyngier	mov	x0, #ARM_EXCEPTION_EL1_SERROR
1671b51e5faSMarc Zyngier	b	__guest_exit
1681b51e5faSMarc Zyngier
169e4e11cc0SChristoffer Dallel2_sync:
170*88a84cccSJames Morse	/* Check for illegal exception return */
171e4e11cc0SChristoffer Dall	mrs	x0, spsr_el2
	// Bit 20 of SPSR_EL2 is the IL (Illegal Execution state) bit;
	// when set, this synchronous exception came from an illegal ERET.
172*88a84cccSJames Morse	tbnz	x0, #20, 1f
173e4e11cc0SChristoffer Dall
	// Unexpected synchronous exception at EL2: preserve the
	// caller-saved registers plus the x29/x30 frame record, report
	// via the C handler, then restore everything and return.
174*88a84cccSJames Morse	save_caller_saved_regs_vect
175*88a84cccSJames Morse	stp     x29, x30, [sp, #-16]!
176*88a84cccSJames Morse	bl	kvm_unexpected_el2_exception
177*88a84cccSJames Morse	ldp     x29, x30, [sp], #16
178*88a84cccSJames Morse	restore_caller_saved_regs_vect
179e4e11cc0SChristoffer Dall
180*88a84cccSJames Morse	eret
181*88a84cccSJames Morse
182*88a84cccSJames Morse1:
183e4e11cc0SChristoffer Dall	/* Let's attempt a recovery from the illegal exception return */
184e4e11cc0SChristoffer Dall	get_vcpu_ptr	x1, x0
185e4e11cc0SChristoffer Dall	mov	x0, #ARM_EXCEPTION_IL
186e4e11cc0SChristoffer Dall	b	__guest_exit
187e4e11cc0SChristoffer Dall
188e4e11cc0SChristoffer Dall
189395ea79eSMarc Zyngierel2_error:
	// SError taken at EL2: preserve the caller-saved registers and
	// the x29/x30 frame record, report via the C handler, then
	// restore and return to the interrupted EL2 context.
190e9ee186bSJames Morse	save_caller_saved_regs_vect
191e9ee186bSJames Morse	stp     x29, x30, [sp, #-16]!
1927e80f637SMarc Zyngier
193e9ee186bSJames Morse	bl	kvm_unexpected_el2_exception
194e9ee186bSJames Morse
195e9ee186bSJames Morse	ldp     x29, x30, [sp], #16
196e9ee186bSJames Morse	restore_caller_saved_regs_vect
197e9ee186bSJames Morse
198395ea79eSMarc Zyngier	eret
	// Speculation barrier: no straight-line speculation past eret.
199679db708SWill Deacon	sb
200395ea79eSMarc Zyngier
20109cf57ebSDavid Brazdil#ifdef __KVM_NVHE_HYPERVISOR__
	// Fake an exception return: build an SPSR for EL1h with all
	// DAIF exceptions masked, point ELR_EL2 at the kernel's
	// panic(), and eret so panic() runs at EL1.
202617a2f39SMark BrownSYM_FUNC_START(__hyp_do_panic)
20353fd5b64SMarc Zyngier	mov	lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
20453fd5b64SMarc Zyngier		      PSR_MODE_EL1h)
20553fd5b64SMarc Zyngier	msr	spsr_el2, lr
20653fd5b64SMarc Zyngier	ldr	lr, =panic
20753fd5b64SMarc Zyngier	msr	elr_el2, lr
20853fd5b64SMarc Zyngier	eret
	// Speculation barrier: no straight-line speculation past eret.
209679db708SWill Deacon	sb
210617a2f39SMark BrownSYM_FUNC_END(__hyp_do_panic)
21109cf57ebSDavid Brazdil#endif
21253fd5b64SMarc Zyngier
213617a2f39SMark BrownSYM_CODE_START(__hyp_panic)
	// Load the host context pointer into x0 (x1 is scratch) and
	// tail-call the C-level hyp_panic() handler.
2144464e210SChristoffer Dall	get_host_ctxt x0, x1
215c97e166eSJames Morse	b	hyp_panic
216617a2f39SMark BrownSYM_CODE_END(__hyp_panic)
217c97e166eSJames Morse
21853fd5b64SMarc Zyngier.macro invalid_vector	label, target = __hyp_panic
	// Emit a named stub that simply branches to \target (the hyp
	// panic path by default). Used for vectors that must never fire.
2192b28162cSMarc Zyngier	.align	2
220617a2f39SMark BrownSYM_CODE_START(\label)
2212b28162cSMarc Zyngier	b \target
222617a2f39SMark BrownSYM_CODE_END(\label)
2232b28162cSMarc Zyngier.endm
2242b28162cSMarc Zyngier
2252b28162cSMarc Zyngier	/* None of these should ever happen */
2262b28162cSMarc Zyngier	invalid_vector	el2t_sync_invalid
2272b28162cSMarc Zyngier	invalid_vector	el2t_irq_invalid
2282b28162cSMarc Zyngier	invalid_vector	el2t_fiq_invalid
2292b28162cSMarc Zyngier	invalid_vector	el2t_error_invalid
2302b28162cSMarc Zyngier	invalid_vector	el2h_sync_invalid
2312b28162cSMarc Zyngier	invalid_vector	el2h_irq_invalid
2322b28162cSMarc Zyngier	invalid_vector	el2h_fiq_invalid
2332b28162cSMarc Zyngier	invalid_vector	el1_fiq_invalid
2342b28162cSMarc Zyngier
	// Flush the literal pool here (e.g. the =__kvm_handle_stub_hvc
	// and =panic constants above) so it stays within ldr range and
	// never lands inside the vector table below.
2352b28162cSMarc Zyngier	.ltorg
2362b28162cSMarc Zyngier
	// 2KB (2^11) alignment for the exception vector table, as
	// required for a VBAR_EL2 base address.
2372b28162cSMarc Zyngier	.align 11
2382b28162cSMarc Zyngier
2393dbf100bSJames Morse.macro check_preamble_length start, end
2403dbf100bSJames Morse/* kvm_patch_vector_branch() generates code that jumps over the preamble. */
	// Build-time assertion: the code between \start and \end must
	// be exactly KVM_VECTOR_PREAMBLE bytes, otherwise the patched
	// branches would land at the wrong offset.
2413dbf100bSJames Morse.if ((\end-\start) != KVM_VECTOR_PREAMBLE)
2423dbf100bSJames Morse	.error "KVM vector preamble length mismatch"
2433dbf100bSJames Morse.endif
2443dbf100bSJames Morse.endm
2453dbf100bSJames Morse
2467e80f637SMarc Zyngier.macro valid_vect target
	// Vector entry with the standard KVM preamble: an error
	// synchronization barrier (esb, for RAS) followed by stacking
	// x0/x1, then a branch to the real handler. The 661/662 labels
	// bound the preamble so its size can be verified against
	// KVM_VECTOR_PREAMBLE.
2477e80f637SMarc Zyngier	.align 7
2483dbf100bSJames Morse661:
2490e5b9c08SJames Morse	esb
2507e80f637SMarc Zyngier	stp	x0, x1, [sp, #-16]!
2513dbf100bSJames Morse662:
2527e80f637SMarc Zyngier	b	\target
2533dbf100bSJames Morse
2543dbf100bSJames Morsecheck_preamble_length 661b, 662b
2557e80f637SMarc Zyngier.endm
2567e80f637SMarc Zyngier
2577e80f637SMarc Zyngier.macro invalid_vect target
	// Invalid-vector entry. Entered at its start, it branches
	// straight to \target. The indirect (hardened) vectors instead
	// jump KVM_VECTOR_PREAMBLE bytes in (past label 662), after
	// their own preamble has stacked x0/x1 — so that path must pop
	// the pair before branching to \target.
2587e80f637SMarc Zyngier	.align 7
2593dbf100bSJames Morse661:
2607e80f637SMarc Zyngier	b	\target
2610e5b9c08SJames Morse	nop
2623dbf100bSJames Morse662:
26371dcb8beSMarc Zyngier	ldp	x0, x1, [sp], #16
26471dcb8beSMarc Zyngier	b	\target
2653dbf100bSJames Morse
2663dbf100bSJames Morsecheck_preamble_length 661b, 662b
2677e80f637SMarc Zyngier.endm
2687e80f637SMarc Zyngier
269617a2f39SMark BrownSYM_CODE_START(__kvm_hyp_vector)
	// AArch64 EL2 exception vector table: four groups of four
	// 128-byte entries — EL2 with SP_EL0 (EL2t), EL2 with SP_EL2
	// (EL2h), lower EL using AArch64, lower EL using AArch32.
2707e80f637SMarc Zyngier	invalid_vect	el2t_sync_invalid	// Synchronous EL2t
2717e80f637SMarc Zyngier	invalid_vect	el2t_irq_invalid	// IRQ EL2t
2727e80f637SMarc Zyngier	invalid_vect	el2t_fiq_invalid	// FIQ EL2t
2737e80f637SMarc Zyngier	invalid_vect	el2t_error_invalid	// Error EL2t
2742b28162cSMarc Zyngier
275e4e11cc0SChristoffer Dall	valid_vect	el2_sync		// Synchronous EL2h
2767e80f637SMarc Zyngier	invalid_vect	el2h_irq_invalid	// IRQ EL2h
2777e80f637SMarc Zyngier	invalid_vect	el2h_fiq_invalid	// FIQ EL2h
2787e80f637SMarc Zyngier	valid_vect	el2_error		// Error EL2h
2792b28162cSMarc Zyngier
2807e80f637SMarc Zyngier	valid_vect	el1_sync		// Synchronous 64-bit EL1
2817e80f637SMarc Zyngier	valid_vect	el1_irq			// IRQ 64-bit EL1
2827e80f637SMarc Zyngier	invalid_vect	el1_fiq_invalid		// FIQ 64-bit EL1
2837e80f637SMarc Zyngier	valid_vect	el1_error		// Error 64-bit EL1
2842b28162cSMarc Zyngier
2857e80f637SMarc Zyngier	valid_vect	el1_sync		// Synchronous 32-bit EL1
2867e80f637SMarc Zyngier	valid_vect	el1_irq			// IRQ 32-bit EL1
2877e80f637SMarc Zyngier	invalid_vect	el1_fiq_invalid		// FIQ 32-bit EL1
2887e80f637SMarc Zyngier	valid_vect	el1_error		// Error 32-bit EL1
289617a2f39SMark BrownSYM_CODE_END(__kvm_hyp_vector)
290e8b22d0fSMarc Zyngier
291e8b22d0fSMarc Zyngier#ifdef CONFIG_KVM_INDIRECT_VECTORS
292e8b22d0fSMarc Zyngier.macro hyp_ventry
	// One 128-byte vector slot (32 instructions): an esb plus 26
	// nops of padding, followed by the 5-instruction alternative
	// sequence that is patched at boot by kvm_patch_vector_branch.
293e8b22d0fSMarc Zyngier	.align 7
2940e5b9c08SJames Morse1:	esb
2950e5b9c08SJames Morse	.rept 26
296e8b22d0fSMarc Zyngier	nop
297e8b22d0fSMarc Zyngier	.endr
298e8b22d0fSMarc Zyngier/*
299e8b22d0fSMarc Zyngier * The default sequence is to directly branch to the KVM vectors,
300e8b22d0fSMarc Zyngier * using the computed offset. This applies for VHE as well as
3015d994374SJames Morse * !ARM64_HARDEN_EL2_VECTORS. The first vector must always run the preamble.
302e8b22d0fSMarc Zyngier *
303e8b22d0fSMarc Zyngier * For ARM64_HARDEN_EL2_VECTORS configurations, this gets replaced
304e8b22d0fSMarc Zyngier * with:
305e8b22d0fSMarc Zyngier *
306e8b22d0fSMarc Zyngier * stp	x0, x1, [sp, #-16]!
307e8b22d0fSMarc Zyngier * movz	x0, #(addr & 0xffff)
308e8b22d0fSMarc Zyngier * movk	x0, #((addr >> 16) & 0xffff), lsl #16
309e8b22d0fSMarc Zyngier * movk	x0, #((addr >> 32) & 0xffff), lsl #32
310e8b22d0fSMarc Zyngier * br	x0
311e8b22d0fSMarc Zyngier *
3123dbf100bSJames Morse * Where:
3133dbf100bSJames Morse * addr = kern_hyp_va(__kvm_hyp_vector) + vector-offset + KVM_VECTOR_PREAMBLE.
314e8b22d0fSMarc Zyngier * See kvm_patch_vector_branch for details.
315e8b22d0fSMarc Zyngier */
316e8b22d0fSMarc Zyngieralternative_cb	kvm_patch_vector_branch
	// Default: run the preamble's stp here, then branch into the
	// matching __kvm_hyp_vector entry just past its own preamble
	// (1b - 0b is this slot's offset within the generated table).
3175d994374SJames Morse	stp	x0, x1, [sp, #-16]!
3185d994374SJames Morse	b	__kvm_hyp_vector + (1b - 0b + KVM_VECTOR_PREAMBLE)
319e8b22d0fSMarc Zyngier	nop
320e8b22d0fSMarc Zyngier	nop
321e8b22d0fSMarc Zyngier	nop
322e8b22d0fSMarc Zyngieralternative_cb_end
323e8b22d0fSMarc Zyngier.endm
324e8b22d0fSMarc Zyngier
325e8b22d0fSMarc Zyngier.macro generate_vectors
	// Emit one complete 16-entry vector table (16 x 128B = 2KB).
326e8b22d0fSMarc Zyngier0:
327e8b22d0fSMarc Zyngier	.rept 16
328e8b22d0fSMarc Zyngier	hyp_ventry
329e8b22d0fSMarc Zyngier	.endr
	// .org cannot move the location counter backwards, so assembly
	// fails if the vectors above overflowed the 2KB slot.
330e8b22d0fSMarc Zyngier	.org 0b + SZ_2K		// Safety measure
331e8b22d0fSMarc Zyngier.endm
332e8b22d0fSMarc Zyngier
333e8b22d0fSMarc Zyngier	.align	11
3346e52aab9SMark BrownSYM_CODE_START(__bp_harden_hyp_vecs)
335e8b22d0fSMarc Zyngier	.rept BP_HARDEN_EL2_SLOTS
336e8b22d0fSMarc Zyngier	generate_vectors
337e8b22d0fSMarc Zyngier	.endr
3386e52aab9SMark Brown1:	.org __bp_harden_hyp_vecs + __BP_HARDEN_HYP_VECS_SZ
3396e52aab9SMark Brown	.org 1b
3406e52aab9SMark BrownSYM_CODE_END(__bp_harden_hyp_vecs)
341e8b22d0fSMarc Zyngier#endif
342