/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm/bitsperlong.h>
#include <asm/kvm_vcpu_regs.h>

/* Size in bytes of one native general-purpose register / stack slot. */
#define WORD_SIZE (BITS_PER_LONG / 8)

/*
 * Byte offsets of each guest GPR within the register array passed to
 * ____vmx_vcpu_run() as @regs, derived from the __VCPU_REGS_* indices.
 */
#define VCPU_RAX	__VCPU_REGS_RAX * WORD_SIZE
#define VCPU_RCX	__VCPU_REGS_RCX * WORD_SIZE
#define VCPU_RDX	__VCPU_REGS_RDX * WORD_SIZE
#define VCPU_RBX	__VCPU_REGS_RBX * WORD_SIZE
/* Intentionally omit RSP as it's context switched by hardware */
#define VCPU_RBP	__VCPU_REGS_RBP * WORD_SIZE
#define VCPU_RSI	__VCPU_REGS_RSI * WORD_SIZE
#define VCPU_RDI	__VCPU_REGS_RDI * WORD_SIZE

#ifdef CONFIG_X86_64
#define VCPU_R8		__VCPU_REGS_R8  * WORD_SIZE
#define VCPU_R9		__VCPU_REGS_R9  * WORD_SIZE
#define VCPU_R10	__VCPU_REGS_R10 * WORD_SIZE
#define VCPU_R11	__VCPU_REGS_R11 * WORD_SIZE
#define VCPU_R12	__VCPU_REGS_R12 * WORD_SIZE
#define VCPU_R13	__VCPU_REGS_R13 * WORD_SIZE
#define VCPU_R14	__VCPU_REGS_R14 * WORD_SIZE
#define VCPU_R15	__VCPU_REGS_R15 * WORD_SIZE
#endif

	.text
/**
 * vmx_vmenter - VM-Enter the current loaded VMCS
 *
 * %RFLAGS.ZF:	!VMCS.LAUNCHED, i.e. controls VMLAUNCH vs. VMRESUME
 *
 * Returns:
 *	%RFLAGS.CF is set on VM-Fail Invalid
 *	%RFLAGS.ZF is set on VM-Fail Valid
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * Note that VMRESUME/VMLAUNCH fall-through and return directly if
 * they VM-Fail, whereas a successful VM-Enter + VM-Exit will jump
 * to vmx_vmexit.
 */
ENTRY(vmx_vmenter)
	/* EFLAGS.ZF is set if VMCS.LAUNCHED == 0 */
	je 2f

1:	vmresume
	ret

2:	vmlaunch
	ret

	/*
	 * A #UD/#GP on VMRESUME/VMLAUNCH lands here via the fixup entry
	 * below.  If KVM is tearing down (kvm_rebooting set), eat the fault
	 * and return; otherwise report the spurious fault.
	 */
3:	cmpb $0, kvm_rebooting
	jne 4f
	call kvm_spurious_fault
4:	ret

	.pushsection .fixup, "ax"
5:	jmp 3b
	.popsection

	/* Route faults on either VM-Enter instruction to the fixup above. */
	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)

ENDPROC(vmx_vmenter)
68453eafbeSSean Christopherson
/**
 * vmx_vmexit - Handle a VMX VM-Exit
 *
 * Returns:
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * This is vmx_vmenter's partner in crime.  On a VM-Exit, control will jump
 * here after hardware loads the host's state, i.e. this is the destination
 * referred to by VMCS.HOST_RIP.
 */
ENTRY(vmx_vmexit)
	/* Return to the CALL in ____vmx_vcpu_run that invoked vmx_vmenter. */
	ret
ENDPROC(vmx_vmexit)
825e0781dfSSean Christopherson
/**
 * ____vmx_vcpu_run - Run a vCPU via a transition to VMX guest mode
 * @vmx:	struct vcpu_vmx *
 * @regs:	unsigned long * (to guest registers)
 * %RBX:	VMCS launched status (non-zero indicates already launched)
 *
 * Returns:
 *	%RBX is 0 on VM-Exit, 1 on VM-Fail
 */
ENTRY(____vmx_vcpu_run)
	push %_ASM_BP
	mov  %_ASM_SP, %_ASM_BP

	/*
	 * Save @regs, _ASM_ARG2 may be modified by vmx_update_host_rsp() and
	 * @regs is needed after VM-Exit to save the guest's register values.
	 */
	push %_ASM_ARG2

	/* Adjust RSP to account for the CALL to vmx_vmenter(). */
	lea -WORD_SIZE(%_ASM_SP), %_ASM_ARG2
	call vmx_update_host_rsp

	/* Load @regs to RCX. */
	mov (%_ASM_SP), %_ASM_CX

	/*
	 * Check if vmlaunch or vmresume is needed.  The resulting ZF is
	 * consumed by vmx_vmenter(), so nothing below may clobber flags.
	 */
	cmpb $0, %bl

	/* Load guest registers.  Don't clobber flags. */
	mov VCPU_RAX(%_ASM_CX), %_ASM_AX
	mov VCPU_RBX(%_ASM_CX), %_ASM_BX
	mov VCPU_RDX(%_ASM_CX), %_ASM_DX
	mov VCPU_RSI(%_ASM_CX), %_ASM_SI
	mov VCPU_RDI(%_ASM_CX), %_ASM_DI
	mov VCPU_RBP(%_ASM_CX), %_ASM_BP
#ifdef CONFIG_X86_64
	mov VCPU_R8 (%_ASM_CX),  %r8
	mov VCPU_R9 (%_ASM_CX),  %r9
	mov VCPU_R10(%_ASM_CX), %r10
	mov VCPU_R11(%_ASM_CX), %r11
	mov VCPU_R12(%_ASM_CX), %r12
	mov VCPU_R13(%_ASM_CX), %r13
	mov VCPU_R14(%_ASM_CX), %r14
	mov VCPU_R15(%_ASM_CX), %r15
#endif
	/* Load guest RCX.  This kills the vmx_vcpu pointer! */
	mov VCPU_RCX(%_ASM_CX), %_ASM_CX

	/* Enter guest mode */
	call vmx_vmenter

	/* Jump on VM-Fail, i.e. CF or ZF set by VMLAUNCH/VMRESUME. */
	jbe 2f

	/* Temporarily save guest's RCX. */
	push %_ASM_CX

	/* Reload @regs to RCX (it sits above the just-pushed guest RCX). */
	mov WORD_SIZE(%_ASM_SP), %_ASM_CX

	/* Save all guest registers, including RCX from the stack */
	mov %_ASM_AX,   VCPU_RAX(%_ASM_CX)
	mov %_ASM_BX,   VCPU_RBX(%_ASM_CX)
	__ASM_SIZE(pop) VCPU_RCX(%_ASM_CX)
	mov %_ASM_DX,   VCPU_RDX(%_ASM_CX)
	mov %_ASM_SI,   VCPU_RSI(%_ASM_CX)
	mov %_ASM_DI,   VCPU_RDI(%_ASM_CX)
	mov %_ASM_BP,   VCPU_RBP(%_ASM_CX)
#ifdef CONFIG_X86_64
	mov %r8,  VCPU_R8 (%_ASM_CX)
	mov %r9,  VCPU_R9 (%_ASM_CX)
	mov %r10, VCPU_R10(%_ASM_CX)
	mov %r11, VCPU_R11(%_ASM_CX)
	mov %r12, VCPU_R12(%_ASM_CX)
	mov %r13, VCPU_R13(%_ASM_CX)
	mov %r14, VCPU_R14(%_ASM_CX)
	mov %r15, VCPU_R15(%_ASM_CX)
#endif

	/* Clear EBX to indicate VM-Exit (as opposed to VM-Fail). */
	xor %ebx, %ebx

	/*
	 * Clear all general purpose registers except RSP and RBX to prevent
	 * speculative use of the guest's values, even those that are reloaded
	 * via the stack.  In theory, an L1 cache miss when restoring registers
	 * could lead to speculative execution with the guest's values.
	 * Zeroing XORs are dirt cheap, i.e. the extra paranoia is essentially
	 * free.  RSP and RBX are exempt as RSP is restored by hardware during
	 * VM-Exit and RBX is explicitly loaded with 0 or 1 to "return" VM-Fail.
	 */
1:
#ifdef CONFIG_X86_64
	xor %r8d,  %r8d
	xor %r9d,  %r9d
	xor %r10d, %r10d
	xor %r11d, %r11d
	xor %r12d, %r12d
	xor %r13d, %r13d
	xor %r14d, %r14d
	xor %r15d, %r15d
#endif
	xor %eax, %eax
	xor %ecx, %ecx
	xor %edx, %edx
	xor %esi, %esi
	xor %edi, %edi
	xor %ebp, %ebp

	/* "POP" @regs. */
	add $WORD_SIZE, %_ASM_SP
	pop %_ASM_BP
	ret

	/* VM-Fail.  Out-of-line to avoid a taken Jcc after VM-Exit. */
2:	mov $1, %ebx
	jmp 1b
ENDPROC(____vmx_vcpu_run)