Lines Matching refs:vcpu_vmx (arch/x86/kvm/vmx/nested.c)

190 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_fail()
220 static void vmx_disable_shadow_vmcs(struct vcpu_vmx *vmx) in vmx_disable_shadow_vmcs()
230 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_release_evmcs()
246 static void vmx_sync_vmcs_host_state(struct vcpu_vmx *vmx, in vmx_sync_vmcs_host_state()
267 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_switch_vmcs()
296 struct vcpu_vmx *vmx = to_vmx(vcpu); in free_nested()
382 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_ept_inject_page_fault()
413 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_ept_new_eptp()
538 void nested_vmx_set_msr_##rw##_intercept(struct vcpu_vmx *vmx, \
551 static inline void nested_vmx_set_intercept_for_msr(struct vcpu_vmx *vmx, in BUILD_NVMX_MSR_INTERCEPT_HELPER()
571 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_prepare_msr_bitmap()
673 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_cache_shadow_vmcs12()
692 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_flush_cached_shadow_vmcs12()
891 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_max_atomic_switch_msrs()
947 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_get_vmexit_msr_value()
1045 struct vcpu_vmx *vmx = to_vmx(vcpu); in prepare_vmx_msr_autostore_list()
1140 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_transition_tlb_flush()
1204 static int vmx_restore_vmx_basic(struct vcpu_vmx *vmx, u64 data) in vmx_restore_vmx_basic()
1264 vmx_restore_control_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data) in vmx_restore_control_msr()
1287 static int vmx_restore_vmx_misc(struct vcpu_vmx *vmx, u64 data) in vmx_restore_vmx_misc()
1322 static int vmx_restore_vmx_ept_vpid_cap(struct vcpu_vmx *vmx, u64 data) in vmx_restore_vmx_ept_vpid_cap()
1348 static int vmx_restore_fixed0_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data) in vmx_restore_fixed0_msr()
1370 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_vmx_msr()
1518 static void copy_shadow_to_vmcs12(struct vcpu_vmx *vmx) in copy_shadow_to_vmcs12()
1545 static void copy_vmcs12_to_shadow(struct vcpu_vmx *vmx) in copy_vmcs12_to_shadow()
1579 static void copy_enlightened_to_vmcs12(struct vcpu_vmx *vmx, u32 hv_clean_fields) in copy_enlightened_to_vmcs12()
1823 static void copy_vmcs12_to_enlightened(struct vcpu_vmx *vmx) in copy_vmcs12_to_enlightened()
2004 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_handle_enlightened_vmptrld()
2089 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_sync_vmcs12_to_shadow()
2101 struct vcpu_vmx *vmx = in vmx_preemption_timer_fn()
2102 container_of(timer, struct vcpu_vmx, nested.preemption_timer); in vmx_preemption_timer_fn()
2113 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_calc_preemption_timer_value()
2130 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_start_preemption_timer()
2152 static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in nested_vmx_calc_efer()
2163 static void prepare_vmcs02_constant_state(struct vcpu_vmx *vmx) in prepare_vmcs02_constant_state()
2224 static void prepare_vmcs02_early_rare(struct vcpu_vmx *vmx, in prepare_vmcs02_early_rare()
2239 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct loaded_vmcs *vmcs01, in prepare_vmcs02_early()
2404 static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_rare()
2537 struct vcpu_vmx *vmx = to_vmx(vcpu); in prepare_vmcs02()
2689 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_check_eptp()
2738 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_check_vm_execution_controls()
2797 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_check_vm_exit_controls()
2814 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_check_vm_entry_controls()
2980 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_check_vmcs_link_ptr()
3088 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_check_vmentry_hw()
3166 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_get_evmcs_page()
3195 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_get_vmcs12_pages()
3308 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_write_pml_buffer()
3388 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_enter_non_root_mode()
3554 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_run()
3817 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_complete_nested_posted_interrupt()
4057 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_check_nested_events()
4257 struct vcpu_vmx *vmx = to_vmx(vcpu); in sync_vmcs02_to_vmcs12_rare()
4302 struct vcpu_vmx *vmx = to_vmx(vcpu); in copy_vmcs02_to_vmcs12_rare()
4330 struct vcpu_vmx *vmx = to_vmx(vcpu); in sync_vmcs02_to_vmcs12()
4590 static inline u64 nested_vmx_get_vmcs01_guest_efer(struct vcpu_vmx *vmx) in nested_vmx_get_vmcs01_guest_efer()
4616 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_restore_host_state()
4726 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_vmexit()
5071 struct vcpu_vmx *vmx = to_vmx(vcpu); in alloc_shadow_vmcs()
5094 struct vcpu_vmx *vmx = to_vmx(vcpu); in enter_vmx_operation()
5148 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmxon()
5236 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_release_vmcs12()
5278 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmclear()
5349 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmread()
5455 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmwrite()
5556 static void set_current_vmptr(struct vcpu_vmx *vmx, gpa_t vmptr) in set_current_vmptr()
5572 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmptrld()
5670 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_invept()
5750 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_invvpid()
5872 struct vcpu_vmx *vmx = to_vmx(vcpu); in handle_vmfunc()
6355 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_reflect_vmexit()
6409 struct vcpu_vmx *vmx; in vmx_get_nested_state()
6541 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_set_nested_state()
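
Nearly every hit above is the same accessor idiom: a function takes the arch-generic struct kvm_vcpu pointer and recovers the enclosing struct vcpu_vmx via to_vmx(vcpu), which is a container_of() over the embedded vcpu member; the hit at source line 2102 applies the same trick to the embedded nested preemption hrtimer. Below is a minimal, self-contained userspace sketch of that pattern; the struct layouts and field contents are simplified stand-ins, not KVM's real definitions.

/* Sketch of the container_of idiom that dominates the listing above.
 * Stand-in structs only; compile with: cc -o demo demo.c */
#include <stddef.h>
#include <stdio.h>

/* container_of: map a pointer to an embedded member back to the structure
 * that contains it (same shape as the kernel macro; GCC/Clang accept the
 * nested member designator used below). */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kvm_vcpu {		/* stand-in for the arch-generic vCPU */
	int vcpu_id;
};

struct hrtimer {		/* stand-in for the kernel hrtimer */
	long expires_ns;
};

struct nested_vmx {		/* stand-in; holds the preemption timer */
	struct hrtimer preemption_timer;
};

struct vcpu_vmx {		/* VMX-specific state wrapping the vCPU */
	struct kvm_vcpu vcpu;	/* embedded generic vCPU */
	struct nested_vmx nested;
};

/* to_vmx(): the accessor seen in almost every line of the listing. */
static struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu)
{
	return container_of(vcpu, struct vcpu_vmx, vcpu);
}

/* Same recovery from a timer callback, as in vmx_preemption_timer_fn():
 * the callback receives only the hrtimer and walks back to vcpu_vmx. */
static struct vcpu_vmx *timer_to_vmx(struct hrtimer *timer)
{
	return container_of(timer, struct vcpu_vmx,
			    nested.preemption_timer);
}

int main(void)
{
	struct vcpu_vmx vmx = { .vcpu = { .vcpu_id = 7 } };

	/* Both recoveries land on the same enclosing object. */
	printf("to_vmx:       id=%d\n", to_vmx(&vmx.vcpu)->vcpu.vcpu_id);
	printf("timer_to_vmx: id=%d\n",
	       timer_to_vmx(&vmx.nested.preemption_timer)->vcpu.vcpu_id);
	return 0;
}

The design choice is the usual kernel one: embedding the generic object inside the arch-specific one makes both conversions a constant-offset pointer adjustment, with no extra indirection or lookup table.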