/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2015-2018 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 */

#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>
#include <asm/mmu.h>

	.text
	.pushsection	.hyp.text, "ax"

.macro do_el2_call
	/*
	 * Shuffle the parameters before calling the function
	 * pointed to in x0. Assumes parameters in x[1,2,3].
	 */
	str	lr, [sp, #-16]!
	mov	lr, x0
	mov	x0, x1
	mov	x1, x2
	mov	x2, x3
	blr	lr
	ldr	lr, [sp], #16
.endm
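
/*
 * For context, a rough sketch of the host-side call that lands here on
 * non-VHE (see __kvm_call_hyp): the EL2 function pointer goes in x0 and
 * up to three arguments in x1-x3 before the hypercall is issued, e.g.
 *
 *	mov	x0, <EL2 function>	// becomes lr in do_el2_call
 *	mov	x1, <arg0>		// becomes x0
 *	mov	x2, <arg1>		// becomes x1
 *	mov	x3, <arg2>		// becomes x2
 *	hvc	#0
 */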

el1_sync:				// Guest trapped into EL2

	mrs	x0, esr_el2
	lsr	x0, x0, #ESR_ELx_EC_SHIFT
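	/*
	 * Only HVC traps are handled here. The ccmp below performs its
	 * compare only when the first cmp was "ne", and otherwise forces
	 * the Z flag (#4 == nZcv with Z set), so the b.ne is only taken
	 * when the EC is neither HVC64 nor HVC32.
	 */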
	cmp	x0, #ESR_ELx_EC_HVC64
	ccmp	x0, #ESR_ELx_EC_HVC32, #4, ne
	b.ne	el1_trap

	mrs	x1, vttbr_el2		// If vttbr is valid, the guest
	cbnz	x1, el1_hvc_guest	// called HVC

	/* Here, we're pretty sure the host called HVC. */
	ldp	x0, x1, [sp], #16

	/* Check for a stub HVC call */
	cmp	x0, #HVC_STUB_HCALL_NR
	b.hs	1f

	/*
	 * Compute the idmap address of __kvm_handle_stub_hvc and
	 * jump there. Since we use kimage_voffset, do not use the
	 * HYP VA for __kvm_handle_stub_hvc, but the kernel VA instead
	 * (by loading it from the constant pool).
	 *
	 * Preserve x0-x4, which may contain stub parameters.
	 */
	ldr	x5, =__kvm_handle_stub_hvc
	ldr_l	x6, kimage_voffset

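	/*
	 * kimage_voffset is the kernel image VA - PA delta, so subtracting
	 * it yields the physical address; since __kvm_handle_stub_hvc lives
	 * in the HYP idmap (VA == PA), that address is directly branchable.
	 */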
	/* x5 = __pa(x5) */
	sub	x5, x5, x6
	br	x5

1:
	/*
	 * Perform the EL2 call
	 */
	kern_hyp_va	x0
	do_el2_call

	eret
	sb

el1_hvc_guest:
	/*
	 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
	 * The workaround has already been applied on the host,
	 * so let's quickly get back to the guest. We don't bother
	 * restoring x1, as it can be clobbered anyway.
	 */
	ldr	x1, [sp]				// Guest's x0
	eor	w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
	cbz	w1, wa_epilogue

	/* ARM_SMCCC_ARCH_WORKAROUND_2 handling */
	eor	w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_1 ^ \
			  ARM_SMCCC_ARCH_WORKAROUND_2)
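	// w1 already held x0 ^ ARM_SMCCC_ARCH_WORKAROUND_1, so it is now
	// zero exactly when the guest asked for ARM_SMCCC_ARCH_WORKAROUND_2.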
	cbnz	w1, el1_trap

#ifdef CONFIG_ARM64_SSBD
alternative_cb	arm64_enable_wa2_handling
	b	wa2_end
alternative_cb_end
	get_vcpu_ptr	x2, x0
	ldr	x0, [x2, #VCPU_WORKAROUND_FLAGS]

	// Sanitize the argument and update the guest flags
	ldr	x1, [sp, #8]			// Guest's x1
	clz	w1, w1				// Murphy's device:
	lsr	w1, w1, #5			// w1 = !!w1 without using
	eor	w1, w1, #1			// the flags...
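	// (clz returns 32 only for a zero input, so after the shift and
	// eor, w1 == 1 iff the guest passed a non-zero enable argument)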
	bfi	x0, x1, #VCPU_WORKAROUND_2_FLAG_SHIFT, #1
	str	x0, [x2, #VCPU_WORKAROUND_FLAGS]

	/* Check that we actually need to perform the call */
	hyp_ldr_this_cpu x0, arm64_ssbd_callback_required, x2
	cbz	x0, wa2_end

	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_2
	smc	#0

	/* Don't leak data from the SMC call */
	mov	x3, xzr
wa2_end:
	mov	x2, xzr
	mov	x1, xzr
#endif

wa_epilogue:
	mov	x0, xzr
	add	sp, sp, #16
	eret
	sb

el1_trap:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_TRAP
	b	__guest_exit

el1_irq:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_IRQ
	b	__guest_exit

el1_error:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_EL1_SERROR
	b	__guest_exit

el2_sync:
	/* Check for illegal exception return, otherwise panic */
	mrs	x0, spsr_el2

	/* if this was something else, then panic! */
	tst	x0, #PSR_IL_BIT
	b.eq	__hyp_panic

	/* Let's attempt a recovery from the illegal exception return */
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_IL
	b	__guest_exit


el2_error:
	ldp	x0, x1, [sp], #16

	/*
	 * Only two possibilities:
	 * 1) Either we come from the exit path, having just unmasked
	 *    PSTATE.A: change the return code to an EL2 fault, and
	 *    carry on, as we're already in a sane state to handle it.
	 * 2) Or we come from anywhere else, and that's a bug: we panic.
	 *
	 * For (1), x0 contains the original return code and x1 doesn't
	 * contain anything meaningful at that stage. We can reuse them
	 * as temp registers.
	 * For (2), who cares?
	 */
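	/*
	 * The cmp/ccmp pair below accepts ELR_EL2 pointing at either
	 * abort_guest_exit_start or abort_guest_exit_end (the #4 flags
	 * value forces "eq" when the first compare already matched);
	 * anything else is case (2) and we panic.
	 */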
	mrs	x0, elr_el2
	adr	x1, abort_guest_exit_start
	cmp	x0, x1
	adr	x1, abort_guest_exit_end
	ccmp	x0, x1, #4, ne
	b.ne	__hyp_panic
	mov	x0, #(1 << ARM_EXIT_WITH_SERROR_BIT)
	eret
	sb

ENTRY(__hyp_do_panic)
	mov	lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
		      PSR_MODE_EL1h)
	msr	spsr_el2, lr
	ldr	lr, =panic
	msr	elr_el2, lr
	eret
	sb
ENDPROC(__hyp_do_panic)

ENTRY(__hyp_panic)
	get_host_ctxt x0, x1
	b	hyp_panic
ENDPROC(__hyp_panic)

.macro invalid_vector	label, target = __hyp_panic
	.align	2
\label:
	b \target
ENDPROC(\label)
.endm

	/* None of these should ever happen */
	invalid_vector	el2t_sync_invalid
	invalid_vector	el2t_irq_invalid
	invalid_vector	el2t_fiq_invalid
	invalid_vector	el2t_error_invalid
	invalid_vector	el2h_sync_invalid
	invalid_vector	el2h_irq_invalid
	invalid_vector	el2h_fiq_invalid
	invalid_vector	el1_fiq_invalid

	.ltorg

	.align 11

.macro check_preamble_length start, end
/* kvm_patch_vector_branch() generates code that jumps over the preamble. */
.if ((\end-\start) != KVM_VECTOR_PREAMBLE)
	.error "KVM vector preamble length mismatch"
.endif
.endm

.macro valid_vect target
	.align 7
661:
	esb
	stp	x0, x1, [sp, #-16]!
662:
	b	\target

check_preamble_length 661b, 662b
.endm

.macro invalid_vect target
	.align 7
661:
	b	\target
	nop
662:
	ldp	x0, x1, [sp], #16
	b	\target

check_preamble_length 661b, 662b
.endm
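
/*
 * Note on the invalid_vect layout: when the indirect vectors (hyp_ventry
 * below) are in use, their preamble has already pushed x0/x1 and branches
 * past the first KVM_VECTOR_PREAMBLE bytes, landing at 662, which must pop
 * the pair again before heading to the target. A direct entry at 661
 * branches straight out instead.
 */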

ENTRY(__kvm_hyp_vector)
	invalid_vect	el2t_sync_invalid	// Synchronous EL2t
	invalid_vect	el2t_irq_invalid	// IRQ EL2t
	invalid_vect	el2t_fiq_invalid	// FIQ EL2t
	invalid_vect	el2t_error_invalid	// Error EL2t

	valid_vect	el2_sync		// Synchronous EL2h
	invalid_vect	el2h_irq_invalid	// IRQ EL2h
	invalid_vect	el2h_fiq_invalid	// FIQ EL2h
	valid_vect	el2_error		// Error EL2h

	valid_vect	el1_sync		// Synchronous 64-bit EL1
	valid_vect	el1_irq			// IRQ 64-bit EL1
	invalid_vect	el1_fiq_invalid		// FIQ 64-bit EL1
	valid_vect	el1_error		// Error 64-bit EL1

	valid_vect	el1_sync		// Synchronous 32-bit EL1
	valid_vect	el1_irq			// IRQ 32-bit EL1
	invalid_vect	el1_fiq_invalid		// FIQ 32-bit EL1
	valid_vect	el1_error		// Error 32-bit EL1
ENDPROC(__kvm_hyp_vector)

#ifdef CONFIG_KVM_INDIRECT_VECTORS
.macro hyp_ventry
	.align 7
1:	esb
	.rept 26
	nop
	.endr
/*
 * The default sequence is to directly branch to the KVM vectors,
 * using the computed offset. This applies for VHE as well as
 * !ARM64_HARDEN_EL2_VECTORS. The first vector must always run the preamble.
 *
 * For ARM64_HARDEN_EL2_VECTORS configurations, this gets replaced
 * with:
 *
 * stp	x0, x1, [sp, #-16]!
 * movz	x0, #(addr & 0xffff)
 * movk	x0, #((addr >> 16) & 0xffff), lsl #16
 * movk	x0, #((addr >> 32) & 0xffff), lsl #32
 * br	x0
 *
 * Where:
 * addr = kern_hyp_va(__kvm_hyp_vector) + vector-offset + KVM_VECTOR_PREAMBLE.
 * See kvm_patch_vector_branch for details.
 */
alternative_cb	kvm_patch_vector_branch
	stp	x0, x1, [sp, #-16]!
	b	__kvm_hyp_vector + (1b - 0b + KVM_VECTOR_PREAMBLE)
	nop
	nop
	nop
alternative_cb_end
.endm
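
/*
 * Size check, for reference: .align 7 gives each hyp_ventry slot 128 bytes,
 * i.e. 32 instructions: 1 esb + 26 nops + the 5-instruction alternative
 * sequence above. The esb/nop run also leaves room for a hardening sequence
 * (such as __smccc_workaround_1_smc below) to be copied over it at runtime.
 */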

.macro generate_vectors
0:
	.rept 16
	hyp_ventry
	.endr
	.org 0b + SZ_2K		// Safety measure
.endm

	.align	11
ENTRY(__bp_harden_hyp_vecs_start)
	.rept BP_HARDEN_EL2_SLOTS
	generate_vectors
	.endr
ENTRY(__bp_harden_hyp_vecs_end)

	.popsection

ENTRY(__smccc_workaround_1_smc_start)
	esb
	sub	sp, sp, #(8 * 4)
	stp	x2, x3, [sp, #(8 * 0)]
	stp	x0, x1, [sp, #(8 * 2)]
	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_1
	smc	#0
	ldp	x2, x3, [sp, #(8 * 0)]
	ldp	x0, x1, [sp, #(8 * 2)]
	add	sp, sp, #(8 * 4)
ENTRY(__smccc_workaround_1_smc_end)
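
/*
 * Note: there is deliberately no ret above. The start/end labels bracket a
 * self-contained snippet whose size can be measured by the hardening setup
 * code so it can be copied verbatim into a vector slot, where execution then
 * falls through to the rest of the patched hyp_ventry.
 */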
#endif