/*
 * Copyright (C) 2015 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>

	.text
	.pushsection	.hyp.text, "ax"
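
/*
 * Everything in this file lives in .hyp.text so that it is mapped into
 * the EL2 (HYP) address space. On non-VHE, host pointers handed to this
 * code are kernel VAs and must be converted with kern_hyp_va before use.
 */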

.macro do_el2_call
	/*
	 * Shuffle the parameters before calling the function
	 * pointed to in x0. Assumes parameters in x[1,2,3].
	 */
	str	lr, [sp, #-16]!
	mov	lr, x0
	mov	x0, x1
	mov	x1, x2
	mov	x2, x3
	blr	lr
	ldr	lr, [sp], #16
.endm

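/*
 * A sketch of the matching host-side call (cf. __kvm_call_hyp, defined
 * outside this file): the host places a function pointer in x0 and up
 * to three arguments in x1-x3, then issues
 *
 *	hvc	#0
 *
 * which traps to el1_sync below when VHE is not in use. With VHE the
 * host already runs at EL2, so __kvm_call_hyp branches straight to
 * __vhe_hyp_call and no trap is taken. Note that do_el2_call saves lr
 * in a full 16-byte slot to keep sp 16-byte aligned, as the
 * architecture requires.
 */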
ENTRY(__vhe_hyp_call)
	do_el2_call
	/*
	 * We used to rely on having an exception return to get
	 * an implicit isb. In the E2H case, we don't have it anymore.
	 * Rather than changing all the leaf functions, just do it here
	 * before returning to the rest of the kernel.
	 */
	isb
	ret
ENDPROC(__vhe_hyp_call)

el1_sync:				// Guest trapped into EL2
	stp	x0, x1, [sp, #-16]!

alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	x1, esr_el2
alternative_else
	mrs	x1, esr_el1
alternative_endif
	lsr	x0, x1, #ESR_ELx_EC_SHIFT

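	/*
	 * x0 now holds the exception class (ESR_ELx.EC). The ccmp below
	 * only performs its compare when the HVC64 test was ne; when the
	 * first compare already matched, NZCV is forced to #4 (Z set),
	 * so the b.ne is not taken for either HVC encoding.
	 */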
	cmp	x0, #ESR_ELx_EC_HVC64
	ccmp	x0, #ESR_ELx_EC_HVC32, #4, ne
	b.ne	el1_trap

	mrs	x1, vttbr_el2		// If vttbr is valid, the guest
	cbnz	x1, el1_hvc_guest	// called HVC

	/* Here, we're pretty sure the host called HVC. */
	ldp	x0, x1, [sp], #16

	/* Check for a stub HVC call */
	cmp	x0, #HVC_STUB_HCALL_NR
	b.hs	1f

	/*
	 * Compute the idmap address of __kvm_handle_stub_hvc and
	 * jump there. Since we use kimage_voffset, do not use the
	 * HYP VA for __kvm_handle_stub_hvc, but the kernel VA instead
	 * (by loading it from the constant pool).
	 *
	 * Preserve x0-x4, which may contain stub parameters.
	 */
	ldr	x5, =__kvm_handle_stub_hvc
	ldr_l	x6, kimage_voffset

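	/*
	 * kimage_voffset is the offset between the kernel's virtual and
	 * physical addresses, so the subtraction below turns the
	 * constant-pool VA into a PA. The HYP idmap maps that address
	 * 1:1, which is what makes the br legal from EL2.
	 */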
	/* x5 = __pa(x5) */
	sub	x5, x5, x6
	br	x5

1:
	/*
	 * Perform the EL2 call
	 */
	kern_hyp_va	x0
	do_el2_call

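	/* Return to the host, at the instruction following the hvc */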
	eret

el1_hvc_guest:
	/*
	 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
	 * The workaround has already been applied on the host,
	 * so let's quickly get back to the guest. We don't bother
	 * restoring x1, as it can be clobbered anyway.
	 */
	ldr	x1, [sp]				// Guest's x0
	eor	w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
	cbnz	w1, el1_trap
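	/*
	 * Fall-through: the eor cleared all 32 bits of w1 (and the write
	 * zeroed x1's upper half), so x1 is zero here. Reuse it to return
	 * SMCCC success (0) in x0.
	 */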
	mov	x0, x1
	add	sp, sp, #16
	eret

el1_trap:
	/*
	 * x0: exception class of the trap (ESR_ELx.EC)
	 */
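	/*
	 * sp still points at the x0/x1 pair saved on entry (16 bytes);
	 * __guest_enter pushed a second 16-byte frame holding the host
	 * context and vcpu pointers, with the vcpu in the upper slot,
	 * hence the #16 + 8 offset.
	 */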
	ldr	x1, [sp, #16 + 8]	// vcpu stored by __guest_enter

	/*
	 * We trap the first access to the FP/SIMD to save the host context
	 * and restore the guest context lazily.
	 * If FP/SIMD is not implemented, handle the trap and inject an
	 * undefined instruction exception to the guest.
	 */
alternative_if_not ARM64_HAS_NO_FPSIMD
	cmp	x0, #ESR_ELx_EC_FP_ASIMD
	b.eq	__fpsimd_guest_restore
alternative_else_nop_endif

	mov	x0, #ARM_EXCEPTION_TRAP
	b	__guest_exit

el1_irq:
	stp	x0, x1, [sp, #-16]!
	ldr	x1, [sp, #16 + 8]
	mov	x0, #ARM_EXCEPTION_IRQ
	b	__guest_exit

el1_error:
	stp	x0, x1, [sp, #-16]!
	ldr	x1, [sp, #16 + 8]
	mov	x0, #ARM_EXCEPTION_EL1_SERROR
	b	__guest_exit

el2_error:
	/*
	 * Only two possibilities:
	 * 1) Either we come from the exit path, having just unmasked
	 *    PSTATE.A: change the return code to an EL2 fault, and
	 *    carry on, as we're already in a sane state to handle it.
	 * 2) Or we come from anywhere else, and that's a bug: we panic.
	 *
	 * For (1), x0 contains the original return code and x1 doesn't
	 * contain anything meaningful at that stage. We can reuse them
	 * as temp registers.
	 * For (2), who cares?
	 */
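	/*
	 * Test ELR_EL2 for equality with either label bracketing the
	 * unmask window: the ccmp only runs its compare when the first
	 * cmp was ne, and #4 forces Z=1 when it already matched, so
	 * b.ne fires only when neither label matches.
	 */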
	mrs	x0, elr_el2
	adr	x1, abort_guest_exit_start
	cmp	x0, x1
	adr	x1, abort_guest_exit_end
	ccmp	x0, x1, #4, ne
	b.ne	__hyp_panic
	mov	x0, #(1 << ARM_EXIT_WITH_SERROR_BIT)
	eret

ENTRY(__hyp_do_panic)
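	/*
	 * Craft a synthetic exception return into the host: SPSR_EL2
	 * selects EL1h with all of DAIF masked, ELR_EL2 points at
	 * panic(), and the eret "returns" there with the caller's
	 * argument registers left in place for panic() to consume.
	 */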
	mov	lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
		      PSR_MODE_EL1h)
	msr	spsr_el2, lr
	ldr	lr, =panic
	msr	elr_el2, lr
	eret
ENDPROC(__hyp_do_panic)

ENTRY(__hyp_panic)
	/*
	 * '=kvm_host_cpu_state' is a host VA from the constant pool, it may
	 * not be accessible by this address from EL2, hyp_panic() converts
	 * it with kern_hyp_va() before use.
	 */
	ldr	x0, =kvm_host_cpu_state
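	/* tpidr_el2 holds this CPU's per-CPU offset while we are at EL2 */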
	mrs	x1, tpidr_el2
	add	x0, x0, x1
	b	hyp_panic
ENDPROC(__hyp_panic)

.macro invalid_vector	label, target = __hyp_panic
	.align	2
\label:
	b \target
ENDPROC(\label)
.endm

	/* None of these should ever happen */
	invalid_vector	el2t_sync_invalid
	invalid_vector	el2t_irq_invalid
	invalid_vector	el2t_fiq_invalid
	invalid_vector	el2t_error_invalid
	invalid_vector	el2h_sync_invalid
	invalid_vector	el2h_irq_invalid
	invalid_vector	el2h_fiq_invalid
	invalid_vector	el1_sync_invalid
	invalid_vector	el1_irq_invalid
	invalid_vector	el1_fiq_invalid

	.ltorg

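	/* The vector base must be 2kB-aligned: VBAR_EL2[10:0] are RES0 */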
	.align 11

ENTRY(__kvm_hyp_vector)
	ventry	el2t_sync_invalid		// Synchronous EL2t
	ventry	el2t_irq_invalid		// IRQ EL2t
	ventry	el2t_fiq_invalid		// FIQ EL2t
	ventry	el2t_error_invalid		// Error EL2t

	ventry	el2h_sync_invalid		// Synchronous EL2h
	ventry	el2h_irq_invalid		// IRQ EL2h
	ventry	el2h_fiq_invalid		// FIQ EL2h
	ventry	el2_error			// Error EL2h

	ventry	el1_sync			// Synchronous 64-bit EL1
	ventry	el1_irq				// IRQ 64-bit EL1
	ventry	el1_fiq_invalid			// FIQ 64-bit EL1
	ventry	el1_error			// Error 64-bit EL1

	ventry	el1_sync			// Synchronous 32-bit EL1
	ventry	el1_irq				// IRQ 32-bit EL1
	ventry	el1_fiq_invalid			// FIQ 32-bit EL1
	ventry	el1_error			// Error 32-bit EL1
ENDPROC(__kvm_hyp_vector)