xref: /openbmc/linux/arch/arm64/kvm/hyp/hyp-entry.S (revision 395ea79ebe55d6b01bb8f67bfad0550e6b7cd6d6)
12b28162cSMarc Zyngier/*
22b28162cSMarc Zyngier * Copyright (C) 2015 - ARM Ltd
32b28162cSMarc Zyngier * Author: Marc Zyngier <marc.zyngier@arm.com>
42b28162cSMarc Zyngier *
52b28162cSMarc Zyngier * This program is free software; you can redistribute it and/or modify
62b28162cSMarc Zyngier * it under the terms of the GNU General Public License version 2 as
72b28162cSMarc Zyngier * published by the Free Software Foundation.
82b28162cSMarc Zyngier *
92b28162cSMarc Zyngier * This program is distributed in the hope that it will be useful,
102b28162cSMarc Zyngier * but WITHOUT ANY WARRANTY; without even the implied warranty of
112b28162cSMarc Zyngier * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
122b28162cSMarc Zyngier * GNU General Public License for more details.
132b28162cSMarc Zyngier *
142b28162cSMarc Zyngier * You should have received a copy of the GNU General Public License
152b28162cSMarc Zyngier * along with this program.  If not, see <http://www.gnu.org/licenses/>.
162b28162cSMarc Zyngier */
172b28162cSMarc Zyngier
182b28162cSMarc Zyngier#include <linux/linkage.h>
192b28162cSMarc Zyngier
202b28162cSMarc Zyngier#include <asm/alternative.h>
212b28162cSMarc Zyngier#include <asm/assembler.h>
222b28162cSMarc Zyngier#include <asm/cpufeature.h>
232b28162cSMarc Zyngier#include <asm/kvm_arm.h>
242b28162cSMarc Zyngier#include <asm/kvm_asm.h>
252b28162cSMarc Zyngier#include <asm/kvm_mmu.h>
262b28162cSMarc Zyngier
272b28162cSMarc Zyngier	.text
282b28162cSMarc Zyngier	.pushsection	.hyp.text, "ax"
292b28162cSMarc Zyngier
30b81125c7SMarc Zyngier.macro do_el2_call
31b81125c7SMarc Zyngier	/*
32b81125c7SMarc Zyngier	 * Shuffle the parameters before calling the function
33b81125c7SMarc Zyngier	 * pointed to in x0. Assumes parameters in x[1,2,3].
34b81125c7SMarc Zyngier	 */
35b81125c7SMarc Zyngier	mov	lr, x0			// stash target; x0 becomes arg0 below
36b81125c7SMarc Zyngier	mov	x0, x1
37b81125c7SMarc Zyngier	mov	x1, x2
38b81125c7SMarc Zyngier	mov	x2, x3
39b81125c7SMarc Zyngier	blr	lr			// clobbers lr (x30): callers that need
					// their return address must save it first
40b81125c7SMarc Zyngier.endm
41b81125c7SMarc Zyngier
/*
 * VHE hypercall dispatch: x0 = function pointer, x1-x3 = arguments.
 * Called as a plain function (no exception taken), so lr must survive
 * the do_el2_call below, which itself clobbers lr.
 */
42b81125c7SMarc ZyngierENTRY(__vhe_hyp_call)
4300a44cdaSJames Morse	str	lr, [sp, #-16]!		// 16-byte slot keeps sp 16-aligned
44b81125c7SMarc Zyngier	do_el2_call
4500a44cdaSJames Morse	ldr	lr, [sp], #16
46b81125c7SMarc Zyngier	/*
47b81125c7SMarc Zyngier	 * We used to rely on having an exception return to get
48b81125c7SMarc Zyngier	 * an implicit isb. In the E2H case, we don't have it anymore.
49b81125c7SMarc Zyngier	 * rather than changing all the leaf functions, just do it here
50b81125c7SMarc Zyngier	 * before returning to the rest of the kernel.
51b81125c7SMarc Zyngier	 */
52b81125c7SMarc Zyngier	isb
53b81125c7SMarc Zyngier	ret
54b81125c7SMarc ZyngierENDPROC(__vhe_hyp_call)
553421e9d8SMarc Zyngier
563421e9d8SMarc Zyngier/*
573421e9d8SMarc Zyngier * Compute the idmap address of __kvm_hyp_reset based on the idmap
583421e9d8SMarc Zyngier * start passed as a parameter, and jump there.
593421e9d8SMarc Zyngier *
603421e9d8SMarc Zyngier * x0: HYP phys_idmap_start
613421e9d8SMarc Zyngier */
623421e9d8SMarc ZyngierENTRY(__kvm_hyp_teardown)
633421e9d8SMarc Zyngier	mov	x4, x0			// x4 = phys_idmap_start (page-aligned base)
643421e9d8SMarc Zyngier	adr_l	x3, __kvm_hyp_reset	// x3 = current (HYP VA) address of reset code
653421e9d8SMarc Zyngier
663421e9d8SMarc Zyngier	/* insert __kvm_hyp_reset()s offset into phys_idmap_start */
	// bfi copies bits [PAGE_SHIFT-1:0] of x3 into x4, i.e. the
	// page offset of __kvm_hyp_reset; assumes the reset code lives
	// in the idmap page — NOTE(review): not provable from this file.
673421e9d8SMarc Zyngier	bfi	x4, x3, #0, #PAGE_SHIFT
683421e9d8SMarc Zyngier	br	x4			// tail-jump to idmap'd __kvm_hyp_reset
693421e9d8SMarc ZyngierENDPROC(__kvm_hyp_teardown)
70b81125c7SMarc Zyngier
712b28162cSMarc Zyngierel1_sync:				// Guest trapped into EL2
7268381b2bSShanker Donthineni	stp	x0, x1, [sp, #-16]!	// free up x0/x1 as scratch
732b28162cSMarc Zyngier
745f05a72aSMarc Zyngieralternative_if_not ARM64_HAS_VIRT_HOST_EXTN
752b28162cSMarc Zyngier	mrs	x1, esr_el2
765f05a72aSMarc Zyngieralternative_else
	// VHE: the _el1 accessor presumably reaches ESR_EL2 via the
	// HCR_EL2.E2H register redirection — NOTE(review): confirm.
775f05a72aSMarc Zyngier	mrs	x1, esr_el1
785f05a72aSMarc Zyngieralternative_endif
7968381b2bSShanker Donthineni	lsr	x0, x1, #ESR_ELx_EC_SHIFT	// x0 = exception class
802b28162cSMarc Zyngier
8168381b2bSShanker Donthineni	cmp	x0, #ESR_ELx_EC_HVC64	// anything but a 64-bit HVC is a guest trap
822b28162cSMarc Zyngier	b.ne	el1_trap
832b28162cSMarc Zyngier
8468381b2bSShanker Donthineni	mrs	x1, vttbr_el2		// If vttbr is valid, the 64bit guest
8568381b2bSShanker Donthineni	cbnz	x1, el1_trap		// called HVC
862b28162cSMarc Zyngier
872b28162cSMarc Zyngier	/* Here, we're pretty sure the host called HVC. */
8868381b2bSShanker Donthineni	ldp	x0, x1, [sp], #16	// restore x0 (HVC arg0) and x1
892b28162cSMarc Zyngier
90ad72e59fSGeoff Levand	cmp	x0, #HVC_GET_VECTORS	// special case: return vbar_el2 directly
91ad72e59fSGeoff Levand	b.ne	1f
922b28162cSMarc Zyngier	mrs	x0, vbar_el2
932b28162cSMarc Zyngier	b	2f
942b28162cSMarc Zyngier
95b81125c7SMarc Zyngier1:
962b28162cSMarc Zyngier	/*
97b81125c7SMarc Zyngier	 * Perform the EL2 call
98b81125c7SMarc Zyngier	 */
992b28162cSMarc Zyngier	kern_hyp_va	x0		// convert kernel VA of target to HYP VA
100b81125c7SMarc Zyngier	do_el2_call		// lr clobber is fine: we leave via eret
1012b28162cSMarc Zyngier
1022b28162cSMarc Zyngier2:	eret			// back to the host, return value in x0
1032b28162cSMarc Zyngier
1042b28162cSMarc Zyngierel1_trap:
1052b28162cSMarc Zyngier	/*
10668381b2bSShanker Donthineni	 * x0: ESR_EC
1072b28162cSMarc Zyngier	 */
1082b28162cSMarc Zyngier
1092b28162cSMarc Zyngier	/* Guest accessed VFP/SIMD registers, save host, restore Guest */
11068381b2bSShanker Donthineni	cmp	x0, #ESR_ELx_EC_FP_ASIMD	// lazy FPSIMD switch: handled
1112b28162cSMarc Zyngier	b.eq	__fpsimd_guest_restore	// entirely in HYP, no guest exit
1122b28162cSMarc Zyngier
	// __guest_exit convention (defined elsewhere): x0 = exit code,
	// x1 = value held in tpidr_el2 — presumably the vcpu pointer;
	// NOTE(review): confirm against __guest_exit.
11368381b2bSShanker Donthineni	mrs	x1, tpidr_el2
11468381b2bSShanker Donthineni	mov	x0, #ARM_EXCEPTION_TRAP
1152b28162cSMarc Zyngier	b	__guest_exit
1162b28162cSMarc Zyngier
1172b28162cSMarc Zyngierel1_irq:
	// IRQ while running the guest: stash x0/x1 (as el1_sync does),
	// then exit to the host with the IRQ exit code.
11868381b2bSShanker Donthineni	stp     x0, x1, [sp, #-16]!
11968381b2bSShanker Donthineni	mrs	x1, tpidr_el2		// x1 for __guest_exit (see el1_trap)
12068381b2bSShanker Donthineni	mov	x0, #ARM_EXCEPTION_IRQ
1212b28162cSMarc Zyngier	b	__guest_exit
1222b28162cSMarc Zyngier
1231b51e5faSMarc Zyngierel1_error:
	// SError taken from the guest: exit with a dedicated code so the
	// host can distinguish it from ordinary traps.
1241b51e5faSMarc Zyngier	stp     x0, x1, [sp, #-16]!
1251b51e5faSMarc Zyngier	mrs	x1, tpidr_el2		// x1 for __guest_exit (see el1_trap)
1261b51e5faSMarc Zyngier	mov	x0, #ARM_EXCEPTION_EL1_SERROR
1271b51e5faSMarc Zyngier	b	__guest_exit
1281b51e5faSMarc Zyngier
129*395ea79eSMarc Zyngierel2_error:
130*395ea79eSMarc Zyngier	/*
131*395ea79eSMarc Zyngier	 * Only two possibilities:
132*395ea79eSMarc Zyngier	 * 1) Either we come from the exit path, having just unmasked
133*395ea79eSMarc Zyngier	 *    PSTATE.A: change the return code to an EL2 fault, and
134*395ea79eSMarc Zyngier	 *    carry on, as we're already in a sane state to handle it.
135*395ea79eSMarc Zyngier	 * 2) Or we come from anywhere else, and that's a bug: we panic.
136*395ea79eSMarc Zyngier	 *
137*395ea79eSMarc Zyngier	 * For (1), x0 contains the original return code and x1 doesn't
138*395ea79eSMarc Zyngier	 * contain anything meaningful at that stage. We can reuse them
139*395ea79eSMarc Zyngier	 * as temp registers.
140*395ea79eSMarc Zyngier	 * For (2), who cares?
141*395ea79eSMarc Zyngier	 */
	// Panic unless ELR_EL2 matches one of the two abort-window markers
	// (labels defined elsewhere, around the exit path's PSTATE.A window):
	//   cmp:  flags = (elr ?= abort_guest_exit_start)
	//   ccmp: if ne, flags = (elr ?= abort_guest_exit_end);
	//         if eq, flags = #4 (Z set), i.e. keep the "matched" result.
	// b.ne therefore fires only when elr matched neither label.
142*395ea79eSMarc Zyngier	mrs	x0, elr_el2
143*395ea79eSMarc Zyngier	adr	x1, abort_guest_exit_start
144*395ea79eSMarc Zyngier	cmp	x0, x1
145*395ea79eSMarc Zyngier	adr	x1, abort_guest_exit_end
146*395ea79eSMarc Zyngier	ccmp	x0, x1, #4, ne
147*395ea79eSMarc Zyngier	b.ne	__hyp_panic
	// Flag the SError to the exit path: or the bit into the return
	// code (x0 was clobbered above; the exit path rebuilds/merges it —
	// NOTE(review): confirm how the caller consumes this value).
148*395ea79eSMarc Zyngier	mov	x0, #(1 << ARM_EXIT_WITH_SERROR_BIT)
149*395ea79eSMarc Zyngier	eret
150*395ea79eSMarc Zyngier
/*
 * Leave HYP for good: fake an exception return into the kernel's
 * panic() at EL1h with all of DAIF masked. Never returns.
 */
15153fd5b64SMarc ZyngierENTRY(__hyp_do_panic)
15253fd5b64SMarc Zyngier	mov	lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
15353fd5b64SMarc Zyngier		      PSR_MODE_EL1h)
15453fd5b64SMarc Zyngier	msr	spsr_el2, lr
15553fd5b64SMarc Zyngier	ldr	lr, =panic		// literal-pool load (see .ltorg below)
15653fd5b64SMarc Zyngier	msr	elr_el2, lr
15753fd5b64SMarc Zyngier	eret
15853fd5b64SMarc ZyngierENDPROC(__hyp_do_panic)
15953fd5b64SMarc Zyngier
/*
 * Emit a vector stub named \label that simply branches to \target
 * (default: __hyp_panic). Used for exception slots that must never fire.
 */
16053fd5b64SMarc Zyngier.macro invalid_vector	label, target = __hyp_panic
1612b28162cSMarc Zyngier	.align	2		// 4-byte instruction alignment
1622b28162cSMarc Zyngier\label:
1632b28162cSMarc Zyngier	b \target
1642b28162cSMarc ZyngierENDPROC(\label)
1652b28162cSMarc Zyngier.endm
1662b28162cSMarc Zyngier
1672b28162cSMarc Zyngier	/* None of these should ever happen */
	// EL2t: HYP never runs with SP_EL0, so all four slots are invalid.
1682b28162cSMarc Zyngier	invalid_vector	el2t_sync_invalid
1692b28162cSMarc Zyngier	invalid_vector	el2t_irq_invalid
1702b28162cSMarc Zyngier	invalid_vector	el2t_fiq_invalid
1712b28162cSMarc Zyngier	invalid_vector	el2t_error_invalid
	// EL2h: only the error slot has a real handler (el2_error, above).
1722b28162cSMarc Zyngier	invalid_vector	el2h_sync_invalid
1732b28162cSMarc Zyngier	invalid_vector	el2h_irq_invalid
1742b28162cSMarc Zyngier	invalid_vector	el2h_fiq_invalid
	// Lower EL: sync/irq/error have real handlers; FIQ is never expected.
1752b28162cSMarc Zyngier	invalid_vector	el1_sync_invalid
1762b28162cSMarc Zyngier	invalid_vector	el1_irq_invalid
1772b28162cSMarc Zyngier	invalid_vector	el1_fiq_invalid
1782b28162cSMarc Zyngier
1792b28162cSMarc Zyngier	.ltorg
1802b28162cSMarc Zyngier
1812b28162cSMarc Zyngier	.align 11
1822b28162cSMarc Zyngier
/*
 * EL2 exception vector table: 4 groups of 4 entries (sync/IRQ/FIQ/SError)
 * for {EL2t, EL2h, lower-EL AArch64, lower-EL AArch32}. The .align 11
 * above provides the architecturally required 2 KB alignment for VBAR_EL2.
 */
183044ac37dSMarc ZyngierENTRY(__kvm_hyp_vector)
1842b28162cSMarc Zyngier	ventry	el2t_sync_invalid		// Synchronous EL2t
1852b28162cSMarc Zyngier	ventry	el2t_irq_invalid		// IRQ EL2t
1862b28162cSMarc Zyngier	ventry	el2t_fiq_invalid		// FIQ EL2t
1872b28162cSMarc Zyngier	ventry	el2t_error_invalid		// Error EL2t
1882b28162cSMarc Zyngier
1892b28162cSMarc Zyngier	ventry	el2h_sync_invalid		// Synchronous EL2h
1902b28162cSMarc Zyngier	ventry	el2h_irq_invalid		// IRQ EL2h
1912b28162cSMarc Zyngier	ventry	el2h_fiq_invalid		// FIQ EL2h
192*395ea79eSMarc Zyngier	ventry	el2_error			// Error EL2h
1932b28162cSMarc Zyngier
1942b28162cSMarc Zyngier	ventry	el1_sync			// Synchronous 64-bit EL1
1952b28162cSMarc Zyngier	ventry	el1_irq				// IRQ 64-bit EL1
1962b28162cSMarc Zyngier	ventry	el1_fiq_invalid			// FIQ 64-bit EL1
1971b51e5faSMarc Zyngier	ventry	el1_error			// Error 64-bit EL1
1982b28162cSMarc Zyngier
1992b28162cSMarc Zyngier	ventry	el1_sync			// Synchronous 32-bit EL1
2002b28162cSMarc Zyngier	ventry	el1_irq				// IRQ 32-bit EL1
2012b28162cSMarc Zyngier	ventry	el1_fiq_invalid			// FIQ 32-bit EL1
2021b51e5faSMarc Zyngier	ventry	el1_error			// Error 32-bit EL1
203044ac37dSMarc ZyngierENDPROC(__kvm_hyp_vector)
204