xref: /openbmc/linux/arch/x86/kvm/vmx/vmenter.S (revision e75c3c3a)
/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm/bitsperlong.h>
#include <asm/kvm_vcpu_regs.h>

/* Size in bytes of a native GPR, i.e. of one slot in the vCPU regs array. */
#define WORD_SIZE (BITS_PER_LONG / 8)

/*
 * Byte offset of each guest GPR within the register array handed to
 * __vmx_vcpu_run via @regs, derived from the arch-generic __VCPU_REGS_*
 * indices in <asm/kvm_vcpu_regs.h>.
 */
#define VCPU_RAX	__VCPU_REGS_RAX * WORD_SIZE
#define VCPU_RCX	__VCPU_REGS_RCX * WORD_SIZE
#define VCPU_RDX	__VCPU_REGS_RDX * WORD_SIZE
#define VCPU_RBX	__VCPU_REGS_RBX * WORD_SIZE
/* Intentionally omit RSP as it's context switched by hardware */
#define VCPU_RBP	__VCPU_REGS_RBP * WORD_SIZE
#define VCPU_RSI	__VCPU_REGS_RSI * WORD_SIZE
#define VCPU_RDI	__VCPU_REGS_RDI * WORD_SIZE

#ifdef CONFIG_X86_64
/* R8-R15 exist only in 64-bit mode. */
#define VCPU_R8		__VCPU_REGS_R8  * WORD_SIZE
#define VCPU_R9		__VCPU_REGS_R9  * WORD_SIZE
#define VCPU_R10	__VCPU_REGS_R10 * WORD_SIZE
#define VCPU_R11	__VCPU_REGS_R11 * WORD_SIZE
#define VCPU_R12	__VCPU_REGS_R12 * WORD_SIZE
#define VCPU_R13	__VCPU_REGS_R13 * WORD_SIZE
#define VCPU_R14	__VCPU_REGS_R14 * WORD_SIZE
#define VCPU_R15	__VCPU_REGS_R15 * WORD_SIZE
#endif

	.text
/**
 * vmx_vmenter - VM-Enter the current loaded VMCS
 *
 * %RFLAGS.ZF:	!VMCS.LAUNCHED, i.e. controls VMLAUNCH vs. VMRESUME
 *
 * Returns:
 *	%RFLAGS.CF is set on VM-Fail Invalid
 *	%RFLAGS.ZF is set on VM-Fail Valid
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * Note that VMRESUME/VMLAUNCH fall-through and return directly if
 * they VM-Fail, whereas a successful VM-Enter + VM-Exit will jump
 * to vmx_vmexit.
 */
ENTRY(vmx_vmenter)
	/* EFLAGS.ZF is set if VMCS.LAUNCHED == 0 */
	je 2f

1:	vmresume
	/* VMRESUME VM-Failed; CF/ZF already describe the failure. */
	ret

2:	vmlaunch
	/* VMLAUNCH VM-Failed; CF/ZF already describe the failure. */
	ret

	/*
	 * VMRESUME/VMLAUNCH faulted (reached via the .fixup entry below).
	 * A fault is expected if a reboot is in flight (e.g. VMX was
	 * disabled out from under us); otherwise report a spurious fault.
	 */
3:	cmpb $0, kvm_rebooting
	jne 4f
	call kvm_spurious_fault
4:	ret

	/* Fixup landing pad: funnel faults on either VM-Enter insn to 3:. */
	.pushsection .fixup, "ax"
5:	jmp 3b
	.popsection

	/* Register both VM-Enter instructions with the exception table. */
	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)

ENDPROC(vmx_vmenter)
68453eafbeSSean Christopherson
/**
 * vmx_vmexit - Handle a VMX VM-Exit
 *
 * Returns:
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * This is vmx_vmenter's partner in crime.  On a VM-Exit, control will jump
 * here after hardware loads the host's state, i.e. this is the destination
 * referred to by VMCS.HOST_RIP.
 */
ENTRY(vmx_vmexit)
	/*
	 * Hardware restored host state and RSP; returning here lands back
	 * in vmx_vmenter's caller as if vmx_vmenter itself had returned.
	 */
	ret
ENDPROC(vmx_vmexit)
825e0781dfSSean Christopherson
/**
 * __vmx_vcpu_run - Run a vCPU via a transition to VMX guest mode
 * @vmx:	struct vcpu_vmx *
 * @regs:	unsigned long * (to guest registers)
 * @launched:	%true if the VMCS has been launched
 *
 * Returns:
 *	0 on VM-Exit, 1 on VM-Fail
 *
 * NOTE(review): this routine deliberately zeroes, and never saves, the
 * callee-saved GPRs RBX (and R12-R15 on 64-bit) — presumably its caller
 * invokes it via inline asm with an explicit clobber list rather than as
 * an ordinary C call; confirm against the call site in vmx.c.
 */
ENTRY(__vmx_vcpu_run)
	push %_ASM_BP
	mov  %_ASM_SP, %_ASM_BP

	/*
	 * Save @regs, _ASM_ARG2 may be modified by vmx_update_host_rsp() and
	 * @regs is needed after VM-Exit to save the guest's register values.
	 */
	push %_ASM_ARG2

	/* Copy @launched to BL, _ASM_ARG3 is volatile. */
	mov %_ASM_ARG3B, %bl

	/* Adjust RSP to account for the CALL to vmx_vmenter(). */
	lea -WORD_SIZE(%_ASM_SP), %_ASM_ARG2
	call vmx_update_host_rsp

	/* Load @regs to RAX. */
	mov (%_ASM_SP), %_ASM_AX

	/* Check if vmlaunch or vmresume is needed */
	cmpb $0, %bl

	/*
	 * Load guest registers.  Don't clobber flags: ZF from the cmpb above
	 * must survive until vmx_vmenter consumes it, and MOV/LEA leave
	 * flags untouched.
	 */
	mov VCPU_RBX(%_ASM_AX), %_ASM_BX
	mov VCPU_RCX(%_ASM_AX), %_ASM_CX
	mov VCPU_RDX(%_ASM_AX), %_ASM_DX
	mov VCPU_RSI(%_ASM_AX), %_ASM_SI
	mov VCPU_RDI(%_ASM_AX), %_ASM_DI
	mov VCPU_RBP(%_ASM_AX), %_ASM_BP
#ifdef CONFIG_X86_64
	mov VCPU_R8 (%_ASM_AX),  %r8
	mov VCPU_R9 (%_ASM_AX),  %r9
	mov VCPU_R10(%_ASM_AX), %r10
	mov VCPU_R11(%_ASM_AX), %r11
	mov VCPU_R12(%_ASM_AX), %r12
	mov VCPU_R13(%_ASM_AX), %r13
	mov VCPU_R14(%_ASM_AX), %r14
	mov VCPU_R15(%_ASM_AX), %r15
#endif
	/* Load guest RAX.  This kills the vmx_vcpu pointer! */
	mov VCPU_RAX(%_ASM_AX), %_ASM_AX

	/* Enter guest mode */
	call vmx_vmenter

	/* Jump on VM-Fail, i.e. if either CF or ZF is set. */
	jbe 2f

	/* Temporarily save guest's RAX. */
	push %_ASM_AX

	/* Reload @regs to RAX. */
	mov WORD_SIZE(%_ASM_SP), %_ASM_AX

	/* Save all guest registers, including RAX from the stack */
	__ASM_SIZE(pop) VCPU_RAX(%_ASM_AX)
	mov %_ASM_BX,   VCPU_RBX(%_ASM_AX)
	mov %_ASM_CX,   VCPU_RCX(%_ASM_AX)
	mov %_ASM_DX,   VCPU_RDX(%_ASM_AX)
	mov %_ASM_SI,   VCPU_RSI(%_ASM_AX)
	mov %_ASM_DI,   VCPU_RDI(%_ASM_AX)
	mov %_ASM_BP,   VCPU_RBP(%_ASM_AX)
#ifdef CONFIG_X86_64
	mov %r8,  VCPU_R8 (%_ASM_AX)
	mov %r9,  VCPU_R9 (%_ASM_AX)
	mov %r10, VCPU_R10(%_ASM_AX)
	mov %r11, VCPU_R11(%_ASM_AX)
	mov %r12, VCPU_R12(%_ASM_AX)
	mov %r13, VCPU_R13(%_ASM_AX)
	mov %r14, VCPU_R14(%_ASM_AX)
	mov %r15, VCPU_R15(%_ASM_AX)
#endif

	/* Clear RAX to indicate VM-Exit (as opposed to VM-Fail). */
	xor %eax, %eax

	/*
	 * Clear all general purpose registers except RSP and RAX to prevent
	 * speculative use of the guest's values, even those that are reloaded
	 * via the stack.  In theory, an L1 cache miss when restoring registers
	 * could lead to speculative execution with the guest's values.
	 * Zeroing XORs are dirt cheap, i.e. the extra paranoia is essentially
	 * free.  RSP and RAX are exempt as RSP is restored by hardware during
	 * VM-Exit and RAX is explicitly loaded with 0 or 1 to return VM-Fail.
	 */
1:
#ifdef CONFIG_X86_64
	xor %r8d,  %r8d
	xor %r9d,  %r9d
	xor %r10d, %r10d
	xor %r11d, %r11d
	xor %r12d, %r12d
	xor %r13d, %r13d
	xor %r14d, %r14d
	xor %r15d, %r15d
#endif
	xor %ebx, %ebx
	xor %ecx, %ecx
	xor %edx, %edx
	xor %esi, %esi
	xor %edi, %edi
	xor %ebp, %ebp

	/* "POP" @regs, i.e. discard the saved pointer pushed at entry. */
	add $WORD_SIZE, %_ASM_SP
	pop %_ASM_BP
	ret

	/* VM-Fail.  Out-of-line to avoid a taken Jcc after VM-Exit. */
2:	mov $1, %eax
	jmp 1b
ENDPROC(__vmx_vcpu_run)
205