xref: /openbmc/linux/arch/x86/kvm/vmx/vmenter.S (revision b6852ae7)
1453eafbeSSean Christopherson/* SPDX-License-Identifier: GPL-2.0 */
2453eafbeSSean Christopherson#include <linux/linkage.h>
3453eafbeSSean Christopherson#include <asm/asm.h>
45e0781dfSSean Christopherson#include <asm/bitsperlong.h>
55e0781dfSSean Christopherson#include <asm/kvm_vcpu_regs.h>
6f2fde6a5SRick Edgecombe#include <asm/nospec-branch.h>
75e0781dfSSean Christopherson
85e0781dfSSean Christopherson#define WORD_SIZE (BITS_PER_LONG / 8)
95e0781dfSSean Christopherson
105e0781dfSSean Christopherson#define VCPU_RAX	__VCPU_REGS_RAX * WORD_SIZE
115e0781dfSSean Christopherson#define VCPU_RCX	__VCPU_REGS_RCX * WORD_SIZE
125e0781dfSSean Christopherson#define VCPU_RDX	__VCPU_REGS_RDX * WORD_SIZE
135e0781dfSSean Christopherson#define VCPU_RBX	__VCPU_REGS_RBX * WORD_SIZE
145e0781dfSSean Christopherson/* Intentionally omit RSP as it's context switched by hardware */
155e0781dfSSean Christopherson#define VCPU_RBP	__VCPU_REGS_RBP * WORD_SIZE
165e0781dfSSean Christopherson#define VCPU_RSI	__VCPU_REGS_RSI * WORD_SIZE
175e0781dfSSean Christopherson#define VCPU_RDI	__VCPU_REGS_RDI * WORD_SIZE
185e0781dfSSean Christopherson
195e0781dfSSean Christopherson#ifdef CONFIG_X86_64
205e0781dfSSean Christopherson#define VCPU_R8		__VCPU_REGS_R8  * WORD_SIZE
215e0781dfSSean Christopherson#define VCPU_R9		__VCPU_REGS_R9  * WORD_SIZE
225e0781dfSSean Christopherson#define VCPU_R10	__VCPU_REGS_R10 * WORD_SIZE
235e0781dfSSean Christopherson#define VCPU_R11	__VCPU_REGS_R11 * WORD_SIZE
245e0781dfSSean Christopherson#define VCPU_R12	__VCPU_REGS_R12 * WORD_SIZE
255e0781dfSSean Christopherson#define VCPU_R13	__VCPU_REGS_R13 * WORD_SIZE
265e0781dfSSean Christopherson#define VCPU_R14	__VCPU_REGS_R14 * WORD_SIZE
275e0781dfSSean Christopherson#define VCPU_R15	__VCPU_REGS_R15 * WORD_SIZE
285e0781dfSSean Christopherson#endif
29453eafbeSSean Christopherson
30453eafbeSSean Christopherson	.text
31453eafbeSSean Christopherson
/**
 * vmx_vmenter - VM-Enter the current loaded VMCS
 *
 * %RFLAGS.ZF:	!VMCS.LAUNCHED, i.e. controls VMLAUNCH vs. VMRESUME
 *
 * Returns:
 *	%RFLAGS.CF is set on VM-Fail Invalid
 *	%RFLAGS.ZF is set on VM-Fail Valid
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * Note that VMRESUME/VMLAUNCH fall-through and return directly if
 * they VM-Fail, whereas a successful VM-Enter + VM-Exit will jump
 * to vmx_vmexit.
 */
ENTRY(vmx_vmenter)
	/* EFLAGS.ZF is set if VMCS.LAUNCHED == 0 */
	je 2f

1:	vmresume
	ret

2:	vmlaunch
	ret

	/*
	 * VMLAUNCH/VMRESUME faulted (e.g. #UD because VMX was disabled by
	 * an imminent reboot/emergency shutdown).  If a reboot is in
	 * progress the fault is expected; simply return and let the
	 * caller unwind.  Otherwise something is seriously wrong: BUG.
	 */
3:	cmpb $0, kvm_rebooting
	je 4f
	ret
4:	ud2

	/* Exception fixup: route faults on 1b/2b to the handler at 3b. */
	.pushsection .fixup, "ax"
5:	jmp 3b
	.popsection

	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)

ENDPROC(vmx_vmenter)
69453eafbeSSean Christopherson
/**
 * vmx_vmexit - Handle a VMX VM-Exit
 *
 * Returns:
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * This is vmx_vmenter's partner in crime.  On a VM-Exit, control will jump
 * here after hardware loads the host's state, i.e. this is the destination
 * referred to by VMCS.HOST_RIP.
 */
ENTRY(vmx_vmexit)
#ifdef CONFIG_RETPOLINE
	/*
	 * Overwrite the RSB so that speculation of subsequent RETs cannot
	 * consume return targets planted by the guest.  Patched to a NOP
	 * (fall through into the stuffing sequence) when retpolines are
	 * enabled; skipped entirely otherwise.
	 */
	ALTERNATIVE "jmp .Lvmexit_skip_rsb", "", X86_FEATURE_RETPOLINE
	/* Preserve guest's RAX, it's used to stuff the RSB. */
	push %_ASM_AX

	/* IMPORTANT: Stuff the RSB immediately after VM-Exit, before RET! */
	FILL_RETURN_BUFFER %_ASM_AX, RSB_CLEAR_LOOPS, X86_FEATURE_RETPOLINE

	pop %_ASM_AX
.Lvmexit_skip_rsb:
#endif
	ret
ENDPROC(vmx_vmexit)
945e0781dfSSean Christopherson
/**
 * __vmx_vcpu_run - Run a vCPU via a transition to VMX guest mode
 * @vmx:	struct vcpu_vmx * (forwarded to vmx_update_host_rsp)
 * @regs:	unsigned long * (to guest registers)
 * @launched:	%true if the VMCS has been launched
 *
 * Returns:
 *	0 on VM-Exit, 1 on VM-Fail
 */
ENTRY(__vmx_vcpu_run)
	push %_ASM_BP
	mov  %_ASM_SP, %_ASM_BP
	/* Save all callee-saved GPRs; they're clobbered by guest state. */
#ifdef CONFIG_X86_64
	push %r15
	push %r14
	push %r13
	push %r12
#else
	push %edi
	push %esi
#endif
	push %_ASM_BX

	/*
	 * Save @regs, _ASM_ARG2 may be modified by vmx_update_host_rsp() and
	 * @regs is needed after VM-Exit to save the guest's register values.
	 */
	push %_ASM_ARG2

	/* Copy @launched to BL, _ASM_ARG3 is volatile. */
	mov %_ASM_ARG3B, %bl

	/* Adjust RSP to account for the CALL to vmx_vmenter(). */
	lea -WORD_SIZE(%_ASM_SP), %_ASM_ARG2
	call vmx_update_host_rsp

	/* Load @regs to RAX. */
	mov (%_ASM_SP), %_ASM_AX

	/* Check if vmlaunch or vmresume is needed */
	cmpb $0, %bl

	/* Load guest registers.  Don't clobber flags. */
	mov VCPU_RBX(%_ASM_AX), %_ASM_BX
	mov VCPU_RCX(%_ASM_AX), %_ASM_CX
	mov VCPU_RDX(%_ASM_AX), %_ASM_DX
	mov VCPU_RSI(%_ASM_AX), %_ASM_SI
	mov VCPU_RDI(%_ASM_AX), %_ASM_DI
	mov VCPU_RBP(%_ASM_AX), %_ASM_BP
#ifdef CONFIG_X86_64
	mov VCPU_R8 (%_ASM_AX),  %r8
	mov VCPU_R9 (%_ASM_AX),  %r9
	mov VCPU_R10(%_ASM_AX), %r10
	mov VCPU_R11(%_ASM_AX), %r11
	mov VCPU_R12(%_ASM_AX), %r12
	mov VCPU_R13(%_ASM_AX), %r13
	mov VCPU_R14(%_ASM_AX), %r14
	mov VCPU_R15(%_ASM_AX), %r15
#endif
	/* Load guest RAX.  This kills the @regs pointer! */
	mov VCPU_RAX(%_ASM_AX), %_ASM_AX

	/* Enter guest mode; ZF from the cmpb above selects launch/resume. */
	call vmx_vmenter

	/* Jump on VM-Fail (CF or ZF set by vmx_vmenter). */
	jbe 2f

	/* Temporarily save guest's RAX. */
	push %_ASM_AX

	/* Reload @regs to RAX (it sits just above the saved guest RAX). */
	mov WORD_SIZE(%_ASM_SP), %_ASM_AX

	/* Save all guest registers, including RAX from the stack */
	__ASM_SIZE(pop) VCPU_RAX(%_ASM_AX)
	mov %_ASM_BX,   VCPU_RBX(%_ASM_AX)
	mov %_ASM_CX,   VCPU_RCX(%_ASM_AX)
	mov %_ASM_DX,   VCPU_RDX(%_ASM_AX)
	mov %_ASM_SI,   VCPU_RSI(%_ASM_AX)
	mov %_ASM_DI,   VCPU_RDI(%_ASM_AX)
	mov %_ASM_BP,   VCPU_RBP(%_ASM_AX)
#ifdef CONFIG_X86_64
	mov %r8,  VCPU_R8 (%_ASM_AX)
	mov %r9,  VCPU_R9 (%_ASM_AX)
	mov %r10, VCPU_R10(%_ASM_AX)
	mov %r11, VCPU_R11(%_ASM_AX)
	mov %r12, VCPU_R12(%_ASM_AX)
	mov %r13, VCPU_R13(%_ASM_AX)
	mov %r14, VCPU_R14(%_ASM_AX)
	mov %r15, VCPU_R15(%_ASM_AX)
#endif

	/* Clear RAX to indicate VM-Exit (as opposed to VM-Fail). */
	xor %eax, %eax

	/*
	 * Clear all general purpose registers except RSP and RAX to prevent
	 * speculative use of the guest's values, even those that are reloaded
	 * via the stack.  In theory, an L1 cache miss when restoring registers
	 * could lead to speculative execution with the guest's values.
	 * Zeroing XORs are dirt cheap, i.e. the extra paranoia is essentially
	 * free.  RSP and RAX are exempt as RSP is restored by hardware during
	 * VM-Exit and RAX is explicitly loaded with 0 or 1 to return VM-Fail.
	 */
1:	xor %ebx, %ebx
	xor %ecx, %ecx
	xor %edx, %edx
	xor %esi, %esi
	xor %edi, %edi
	xor %ebp, %ebp
#ifdef CONFIG_X86_64
	xor %r8d,  %r8d
	xor %r9d,  %r9d
	xor %r10d, %r10d
	xor %r11d, %r11d
	xor %r12d, %r12d
	xor %r13d, %r13d
	xor %r14d, %r14d
	xor %r15d, %r15d
#endif

	/* "POP" @regs. */
	add $WORD_SIZE, %_ASM_SP
	pop %_ASM_BX

	/* Restore callee-saved GPRs in reverse order of the prologue. */
#ifdef CONFIG_X86_64
	pop %r12
	pop %r13
	pop %r14
	pop %r15
#else
	pop %esi
	pop %edi
#endif
	pop %_ASM_BP
	ret

	/* VM-Fail.  Out-of-line to avoid a taken Jcc after VM-Exit. */
2:	mov $1, %eax
	jmp 1b
ENDPROC(__vmx_vcpu_run)
237