Lines matching refs:nested in arch/x86/kvm/vmx/nested.c

183 if (to_vmx(vcpu)->nested.hv_evmcs_vmptr != EVMPTR_INVALID) in nested_vmx_failValid()
184 to_vmx(vcpu)->nested.need_vmcs12_to_shadow_sync = true; in nested_vmx_failValid()
197 if (vmx->nested.current_vmptr == INVALID_GPA && in nested_vmx_fail()
198 !evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in nested_vmx_fail()
225 vmx->nested.need_vmcs12_to_shadow_sync = false; in vmx_disable_shadow_vmcs()
233 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) { in nested_release_evmcs()
234 kvm_vcpu_unmap(vcpu, &vmx->nested.hv_evmcs_map, true); in nested_release_evmcs()
235 vmx->nested.hv_evmcs = NULL; in nested_release_evmcs()
238 vmx->nested.hv_evmcs_vmptr = EVMPTR_INVALID; in nested_release_evmcs()
241 hv_vcpu->nested.pa_page_gpa = INVALID_GPA; in nested_release_evmcs()
242 hv_vcpu->nested.vm_id = 0; in nested_release_evmcs()
243 hv_vcpu->nested.vp_id = 0; in nested_release_evmcs()
302 if (!vmx->nested.vmxon && !vmx->nested.smm.vmxon) in free_nested()
307 vmx->nested.vmxon = false; in free_nested()
308 vmx->nested.smm.vmxon = false; in free_nested()
309 vmx->nested.vmxon_ptr = INVALID_GPA; in free_nested()
310 free_vpid(vmx->nested.vpid02); in free_nested()
311 vmx->nested.posted_intr_nv = -1; in free_nested()
312 vmx->nested.current_vmptr = INVALID_GPA; in free_nested()
319 kfree(vmx->nested.cached_vmcs12); in free_nested()
320 vmx->nested.cached_vmcs12 = NULL; in free_nested()
321 kfree(vmx->nested.cached_shadow_vmcs12); in free_nested()
322 vmx->nested.cached_shadow_vmcs12 = NULL; in free_nested()
328 kvm_vcpu_unmap(vcpu, &vmx->nested.apic_access_page_map, false); in free_nested()
329 kvm_vcpu_unmap(vcpu, &vmx->nested.virtual_apic_map, true); in free_nested()
330 kvm_vcpu_unmap(vcpu, &vmx->nested.pi_desc_map, true); in free_nested()
331 vmx->nested.pi_desc = NULL; in free_nested()
337 free_loaded_vmcs(&vmx->nested.vmcs02); in free_nested()
387 if (vmx->nested.pml_full) { in nested_ept_inject_page_fault()
389 vmx->nested.pml_full = false; in nested_ept_inject_page_fault()
415 bool execonly = vmx->nested.msrs.ept_caps & VMX_EPT_EXECUTE_ONLY_BIT; in nested_ept_new_eptp()
416 int ept_lpage_level = ept_caps_to_lpage_level(vmx->nested.msrs.ept_caps); in nested_ept_new_eptp()
575 unsigned long *msr_bitmap_l0 = vmx->nested.vmcs02.msr_bitmap; in nested_vmx_prepare_msr_bitmap()
576 struct hv_enlightened_vmcs *evmcs = vmx->nested.hv_evmcs; in nested_vmx_prepare_msr_bitmap()
577 struct kvm_host_map *map = &vmx->nested.msr_bitmap_map; in nested_vmx_prepare_msr_bitmap()
592 if (!vmx->nested.force_msr_bitmap_recalc && evmcs && in nested_vmx_prepare_msr_bitmap()
664 kvm_vcpu_unmap(vcpu, &vmx->nested.msr_bitmap_map, false); in nested_vmx_prepare_msr_bitmap()
666 vmx->nested.force_msr_bitmap_recalc = false; in nested_vmx_prepare_msr_bitmap()
675 struct gfn_to_hva_cache *ghc = &vmx->nested.shadow_vmcs12_cache; in nested_cache_shadow_vmcs12()
694 struct gfn_to_hva_cache *ghc = &vmx->nested.shadow_vmcs12_cache; in nested_flush_cached_shadow_vmcs12()
893 u64 vmx_misc = vmx_control_msr(vmx->nested.msrs.misc_low, in nested_vmx_max_atomic_switch_msrs()
894 vmx->nested.msrs.misc_high); in nested_vmx_max_atomic_switch_msrs()
1134 (nested_cpu_has_vpid(vmcs12) && to_vmx(vcpu)->nested.vpid02); in nested_has_guest_tlb_tag()
1181 if (is_vmenter && vmcs12->virtual_processor_id != vmx->nested.last_vpid) { in nested_vmx_transition_tlb_flush()
1182 vmx->nested.last_vpid = vmcs12->virtual_processor_id; in nested_vmx_transition_tlb_flush()
1212 u64 vmx_basic = vmcs_config.nested.basic; in vmx_restore_vmx_basic()
1231 vmx->nested.msrs.basic = data; in vmx_restore_vmx_basic()
1270 vmx_get_control_msr(&vmcs_config.nested, msr_index, &lowp, &highp); in vmx_restore_control_msr()
1282 vmx_get_control_msr(&vmx->nested.msrs, msr_index, &lowp, &highp); in vmx_restore_control_msr()
1296 u64 vmx_misc = vmx_control_msr(vmcs_config.nested.misc_low, in vmx_restore_vmx_misc()
1297 vmcs_config.nested.misc_high); in vmx_restore_vmx_misc()
1302 if ((vmx->nested.msrs.pinbased_ctls_high & in vmx_restore_vmx_misc()
1317 vmx->nested.msrs.misc_low = data; in vmx_restore_vmx_misc()
1318 vmx->nested.msrs.misc_high = data >> 32; in vmx_restore_vmx_misc()
1325 u64 vmx_ept_vpid_cap = vmx_control_msr(vmcs_config.nested.ept_caps, in vmx_restore_vmx_ept_vpid_cap()
1326 vmcs_config.nested.vpid_caps); in vmx_restore_vmx_ept_vpid_cap()
1332 vmx->nested.msrs.ept_caps = data; in vmx_restore_vmx_ept_vpid_cap()
1333 vmx->nested.msrs.vpid_caps = data >> 32; in vmx_restore_vmx_ept_vpid_cap()
1351 const u64 *msr = vmx_get_fixed0_msr(&vmcs_config.nested, msr_index); in vmx_restore_fixed0_msr()
1360 *vmx_get_fixed0_msr(&vmx->nested.msrs, msr_index) = data; in vmx_restore_fixed0_msr()
1377 if (vmx->nested.vmxon) in vmx_set_vmx_msr()
1418 vmx->nested.msrs.vmcs_enum = data; in vmx_set_vmx_msr()
1421 if (data & ~vmcs_config.nested.vmfunc_controls) in vmx_set_vmx_msr()
1423 vmx->nested.msrs.vmfunc_controls = data; in vmx_set_vmx_msr()
1582 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_enlightened_to_vmcs12()
1583 struct hv_enlightened_vmcs *evmcs = vmx->nested.hv_evmcs; in copy_enlightened_to_vmcs12()
1592 hv_vcpu->nested.pa_page_gpa = evmcs->partition_assist_page; in copy_enlightened_to_vmcs12()
1593 hv_vcpu->nested.vm_id = evmcs->hv_vm_id; in copy_enlightened_to_vmcs12()
1594 hv_vcpu->nested.vp_id = evmcs->hv_vp_id; in copy_enlightened_to_vmcs12()
1826 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_vmcs12_to_enlightened()
1827 struct hv_enlightened_vmcs *evmcs = vmx->nested.hv_evmcs; in copy_vmcs12_to_enlightened()
2018 if (unlikely(evmcs_gpa != vmx->nested.hv_evmcs_vmptr)) { in nested_vmx_handle_enlightened_vmptrld()
2019 vmx->nested.current_vmptr = INVALID_GPA; in nested_vmx_handle_enlightened_vmptrld()
2024 &vmx->nested.hv_evmcs_map)) in nested_vmx_handle_enlightened_vmptrld()
2027 vmx->nested.hv_evmcs = vmx->nested.hv_evmcs_map.hva; in nested_vmx_handle_enlightened_vmptrld()
2051 if ((vmx->nested.hv_evmcs->revision_id != KVM_EVMCS_VERSION) && in nested_vmx_handle_enlightened_vmptrld()
2052 (vmx->nested.hv_evmcs->revision_id != VMCS12_REVISION)) { in nested_vmx_handle_enlightened_vmptrld()
2057 vmx->nested.hv_evmcs_vmptr = evmcs_gpa; in nested_vmx_handle_enlightened_vmptrld()
2079 vmx->nested.hv_evmcs->hv_clean_fields &= in nested_vmx_handle_enlightened_vmptrld()
2082 vmx->nested.force_msr_bitmap_recalc = true; in nested_vmx_handle_enlightened_vmptrld()
2092 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in nested_sync_vmcs12_to_shadow()
2097 vmx->nested.need_vmcs12_to_shadow_sync = false; in nested_sync_vmcs12_to_shadow()
2103 container_of(timer, struct vcpu_vmx, nested.preemption_timer); in vmx_preemption_timer_fn()
2105 vmx->nested.preemption_timer_expired = true; in vmx_preemption_timer_fn()
2120 if (!vmx->nested.has_preemption_timer_deadline) { in vmx_calc_preemption_timer_value()
2121 vmx->nested.preemption_timer_deadline = in vmx_calc_preemption_timer_value()
2123 vmx->nested.has_preemption_timer_deadline = true; in vmx_calc_preemption_timer_value()
2125 return vmx->nested.preemption_timer_deadline - l1_scaled_tsc; in vmx_calc_preemption_timer_value()
2138 vmx_preemption_timer_fn(&vmx->nested.preemption_timer); in vmx_start_preemption_timer()
2148 hrtimer_start(&vmx->nested.preemption_timer, in vmx_start_preemption_timer()
2155 if (vmx->nested.nested_run_pending && in nested_vmx_calc_efer()
2174 if (vmx->nested.vmcs02_initialized) in prepare_vmcs02_constant_state()
2176 vmx->nested.vmcs02_initialized = true; in prepare_vmcs02_constant_state()
2195 vmcs_write64(MSR_BITMAP, __pa(vmx->nested.vmcs02.msr_bitmap)); in prepare_vmcs02_constant_state()
2233 if (nested_cpu_has_vpid(vmcs12) && vmx->nested.vpid02) in prepare_vmcs02_early_rare()
2234 vmcs_write16(VIRTUAL_PROCESSOR_ID, vmx->nested.vpid02); in prepare_vmcs02_early_rare()
2246 if (vmx->nested.dirty_vmcs12 || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in prepare_vmcs02_early()
2257 vmx->nested.pi_pending = false; in prepare_vmcs02_early()
2259 vmx->nested.posted_intr_nv = vmcs12->posted_intr_nv; in prepare_vmcs02_early()
2273 vmx->nested.l1_tpr_threshold = -1; in prepare_vmcs02_early()
2389 if (vmx->nested.nested_run_pending) { in prepare_vmcs02_early()
2407 struct hv_enlightened_vmcs *hv_evmcs = vmx->nested.hv_evmcs; in prepare_vmcs02_rare()
2470 if (kvm_mpx_supported() && vmx->nested.nested_run_pending && in prepare_vmcs02_rare()
2541 if (vmx->nested.dirty_vmcs12 || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) { in prepare_vmcs02()
2543 vmx->nested.dirty_vmcs12 = false; in prepare_vmcs02()
2545 load_guest_pdptrs_vmcs12 = !evmptr_is_valid(vmx->nested.hv_evmcs_vmptr) || in prepare_vmcs02()
2546 !(vmx->nested.hv_evmcs->hv_clean_fields & in prepare_vmcs02()
2550 if (vmx->nested.nested_run_pending && in prepare_vmcs02()
2556 vmcs_write64(GUEST_IA32_DEBUGCTL, vmx->nested.pre_vmenter_debugctl); in prepare_vmcs02()
2558 if (kvm_mpx_supported() && (!vmx->nested.nested_run_pending || in prepare_vmcs02()
2560 vmcs_write64(GUEST_BNDCFGS, vmx->nested.pre_vmenter_bndcfgs); in prepare_vmcs02()
2571 if (vmx->nested.nested_run_pending && in prepare_vmcs02()
2668 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in prepare_vmcs02()
2669 vmx->nested.hv_evmcs->hv_clean_fields |= in prepare_vmcs02()
2695 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPTP_UC_BIT))) in nested_vmx_check_eptp()
2699 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPTP_WB_BIT))) in nested_vmx_check_eptp()
2709 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPT_PAGE_WALK_5_BIT))) in nested_vmx_check_eptp()
2713 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPT_PAGE_WALK_4_BIT))) in nested_vmx_check_eptp()
2726 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPT_AD_BIT))) in nested_vmx_check_eptp()
2742 vmx->nested.msrs.pinbased_ctls_low, in nested_check_vm_execution_controls()
2743 vmx->nested.msrs.pinbased_ctls_high)) || in nested_check_vm_execution_controls()
2745 vmx->nested.msrs.procbased_ctls_low, in nested_check_vm_execution_controls()
2746 vmx->nested.msrs.procbased_ctls_high))) in nested_check_vm_execution_controls()
2751 vmx->nested.msrs.secondary_ctls_low, in nested_check_vm_execution_controls()
2752 vmx->nested.msrs.secondary_ctls_high))) in nested_check_vm_execution_controls()
2779 ~vmx->nested.msrs.vmfunc_controls)) in nested_check_vm_execution_controls()
2801 vmx->nested.msrs.exit_ctls_low, in nested_check_vm_exit_controls()
2802 vmx->nested.msrs.exit_ctls_high)) || in nested_check_vm_exit_controls()
2818 vmx->nested.msrs.entry_ctls_low, in nested_check_vm_entry_controls()
2819 vmx->nested.msrs.entry_ctls_high))) in nested_check_vm_entry_controls()
2982 struct gfn_to_hva_cache *ghc = &vmx->nested.shadow_vmcs12_cache; in nested_vmx_check_vmcs_link_ptr()
3067 if (to_vmx(vcpu)->nested.nested_run_pending && in nested_vmx_check_guest_state()
3175 vmx->nested.hv_evmcs_vmptr == EVMPTR_MAP_PENDING) { in nested_get_evmcs_page()
3187 vmx->nested.need_vmcs12_to_shadow_sync = true; in nested_get_evmcs_page()
3212 map = &vmx->nested.apic_access_page_map; in nested_get_vmcs12_pages()
3228 map = &vmx->nested.virtual_apic_map; in nested_get_vmcs12_pages()
3254 map = &vmx->nested.pi_desc_map; in nested_get_vmcs12_pages()
3257 vmx->nested.pi_desc = in nested_get_vmcs12_pages()
3269 vmx->nested.pi_desc = NULL; in nested_get_vmcs12_pages()
3315 if (WARN_ON_ONCE(vmx->nested.pml_full)) in nested_vmx_write_pml_buffer()
3327 vmx->nested.pml_full = true; in nested_vmx_write_pml_buffer()
3352 if (!to_vmx(vcpu)->nested.vmxon) { in nested_vmx_check_permission()
3400 vmx->nested.current_vmptr, in nested_vmx_enter_non_root_mode()
3418 if (!vmx->nested.nested_run_pending || in nested_vmx_enter_non_root_mode()
3420 vmx->nested.pre_vmenter_debugctl = vmcs_read64(GUEST_IA32_DEBUGCTL); in nested_vmx_enter_non_root_mode()
3422 (!vmx->nested.nested_run_pending || in nested_vmx_enter_non_root_mode()
3424 vmx->nested.pre_vmenter_bndcfgs = vmcs_read64(GUEST_BNDCFGS); in nested_vmx_enter_non_root_mode()
3445 vmx_switch_vmcs(vcpu, &vmx->nested.vmcs02); in nested_vmx_enter_non_root_mode()
3510 vmx->nested.preemption_timer_expired = false; in nested_vmx_enter_non_root_mode()
3542 if (enable_shadow_vmcs || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in nested_vmx_enter_non_root_mode()
3543 vmx->nested.need_vmcs12_to_shadow_sync = true; in nested_vmx_enter_non_root_mode()
3573 if (CC(!evmptr_is_valid(vmx->nested.hv_evmcs_vmptr) && in nested_vmx_run()
3574 vmx->nested.current_vmptr == INVALID_GPA)) in nested_vmx_run()
3588 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) { in nested_vmx_run()
3589 copy_enlightened_to_vmcs12(vmx, vmx->nested.hv_evmcs->hv_clean_fields); in nested_vmx_run()
3627 vmx->nested.nested_run_pending = 1; in nested_vmx_run()
3628 vmx->nested.has_preemption_timer_deadline = false; in nested_vmx_run()
3635 kvm_apic_has_interrupt(vcpu) == vmx->nested.posted_intr_nv) { in nested_vmx_run()
3636 vmx->nested.pi_pending = true; in nested_vmx_run()
3638 kvm_apic_clear_irr(vcpu, vmx->nested.posted_intr_nv); in nested_vmx_run()
3667 vmx->nested.nested_run_pending = 0; in nested_vmx_run()
3672 vmx->nested.nested_run_pending = 0; in nested_vmx_run()
3682 vmx->nested.nested_run_pending = 0; in nested_vmx_run()
3823 if (!vmx->nested.pi_pending) in vmx_complete_nested_posted_interrupt()
3826 if (!vmx->nested.pi_desc) in vmx_complete_nested_posted_interrupt()
3829 vmx->nested.pi_pending = false; in vmx_complete_nested_posted_interrupt()
3831 if (!pi_test_and_clear_on(vmx->nested.pi_desc)) in vmx_complete_nested_posted_interrupt()
3834 max_irr = pi_find_highest_vector(vmx->nested.pi_desc); in vmx_complete_nested_posted_interrupt()
3836 vapic_page = vmx->nested.virtual_apic_map.hva; in vmx_complete_nested_posted_interrupt()
3840 __kvm_apic_update_irr(vmx->nested.pi_desc->pir, in vmx_complete_nested_posted_interrupt()
3963 to_vmx(vcpu)->nested.preemption_timer_expired; in nested_vmx_preemption_timer_pending()
3969 void *vapic = vmx->nested.virtual_apic_map.hva; in vmx_has_nested_events()
3973 vmx->nested.mtf_pending) in vmx_has_nested_events()
3994 if (vmx->nested.pi_pending && vmx->nested.pi_desc && in vmx_has_nested_events()
3995 pi_test_on(vmx->nested.pi_desc)) { in vmx_has_nested_events()
3996 max_irr = pi_find_highest_vector(vmx->nested.pi_desc); in vmx_has_nested_events()
4096 bool block_nested_exceptions = vmx->nested.nested_run_pending; in vmx_check_nested_events()
4116 vmx->nested.mtf_pending = false; in vmx_check_nested_events()
4160 if (vmx->nested.mtf_pending) { in vmx_check_nested_events()
4229 hrtimer_get_remaining(&to_vmx(vcpu)->nested.preemption_timer); in vmx_get_preemption_timer_value()
4329 vmx->nested.need_sync_vmcs02_to_vmcs12_rare = false; in sync_vmcs02_to_vmcs12_rare()
4338 if (!vmx->nested.need_sync_vmcs02_to_vmcs12_rare) in copy_vmcs02_to_vmcs12_rare()
4345 vmx->loaded_vmcs = &vmx->nested.vmcs02; in copy_vmcs02_to_vmcs12_rare()
4351 vmx_vcpu_load_vmcs(vcpu, cpu, &vmx->nested.vmcs02); in copy_vmcs02_to_vmcs12_rare()
4365 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in sync_vmcs02_to_vmcs12()
4368 vmx->nested.need_sync_vmcs02_to_vmcs12_rare = in sync_vmcs02_to_vmcs12()
4369 !evmptr_is_valid(vmx->nested.hv_evmcs_vmptr); in sync_vmcs02_to_vmcs12()
4393 !vmx->nested.nested_run_pending) in sync_vmcs02_to_vmcs12()
4763 vmx->nested.mtf_pending = false; in nested_vmx_vmexit()
4766 WARN_ON_ONCE(vmx->nested.nested_run_pending); in nested_vmx_vmexit()
4792 hrtimer_cancel(&to_vmx(vcpu)->nested.preemption_timer); in nested_vmx_vmexit()
4860 if (vmx->nested.l1_tpr_threshold != -1) in nested_vmx_vmexit()
4861 vmcs_write32(TPR_THRESHOLD, vmx->nested.l1_tpr_threshold); in nested_vmx_vmexit()
4863 if (vmx->nested.change_vmcs01_virtual_apic_mode) { in nested_vmx_vmexit()
4864 vmx->nested.change_vmcs01_virtual_apic_mode = false; in nested_vmx_vmexit()
4868 if (vmx->nested.update_vmcs01_cpu_dirty_logging) { in nested_vmx_vmexit()
4869 vmx->nested.update_vmcs01_cpu_dirty_logging = false; in nested_vmx_vmexit()
4874 kvm_vcpu_unmap(vcpu, &vmx->nested.apic_access_page_map, false); in nested_vmx_vmexit()
4875 kvm_vcpu_unmap(vcpu, &vmx->nested.virtual_apic_map, true); in nested_vmx_vmexit()
4876 kvm_vcpu_unmap(vcpu, &vmx->nested.pi_desc_map, true); in nested_vmx_vmexit()
4877 vmx->nested.pi_desc = NULL; in nested_vmx_vmexit()
4879 if (vmx->nested.reload_vmcs01_apic_access_page) { in nested_vmx_vmexit()
4880 vmx->nested.reload_vmcs01_apic_access_page = false; in nested_vmx_vmexit()
4884 if (vmx->nested.update_vmcs01_apicv_status) { in nested_vmx_vmexit()
4885 vmx->nested.update_vmcs01_apicv_status = false; in nested_vmx_vmexit()
4890 (enable_shadow_vmcs || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr))) in nested_vmx_vmexit()
4891 vmx->nested.need_vmcs12_to_shadow_sync = true; in nested_vmx_vmexit()
5130 r = alloc_loaded_vmcs(&vmx->nested.vmcs02); in enter_vmx_operation()
5134 vmx->nested.cached_vmcs12 = kzalloc(VMCS12_SIZE, GFP_KERNEL_ACCOUNT); in enter_vmx_operation()
5135 if (!vmx->nested.cached_vmcs12) in enter_vmx_operation()
5138 vmx->nested.shadow_vmcs12_cache.gpa = INVALID_GPA; in enter_vmx_operation()
5139 vmx->nested.cached_shadow_vmcs12 = kzalloc(VMCS12_SIZE, GFP_KERNEL_ACCOUNT); in enter_vmx_operation()
5140 if (!vmx->nested.cached_shadow_vmcs12) in enter_vmx_operation()
5146 hrtimer_init(&vmx->nested.preemption_timer, CLOCK_MONOTONIC, in enter_vmx_operation()
5148 vmx->nested.preemption_timer.function = vmx_preemption_timer_fn; in enter_vmx_operation()
5150 vmx->nested.vpid02 = allocate_vpid(); in enter_vmx_operation()
5152 vmx->nested.vmcs02_initialized = false; in enter_vmx_operation()
5153 vmx->nested.vmxon = true; in enter_vmx_operation()
5163 kfree(vmx->nested.cached_shadow_vmcs12); in enter_vmx_operation()
5166 kfree(vmx->nested.cached_vmcs12); in enter_vmx_operation()
5169 free_loaded_vmcs(&vmx->nested.vmcs02); in enter_vmx_operation()
5221 if (vmx->nested.vmxon) in handle_vmxon()
5259 vmx->nested.vmxon_ptr = vmptr; in handle_vmxon()
5271 if (vmx->nested.current_vmptr == INVALID_GPA) in nested_release_vmcs12()
5282 vmx->nested.posted_intr_nv = -1; in nested_release_vmcs12()
5286 vmx->nested.current_vmptr >> PAGE_SHIFT, in nested_release_vmcs12()
5287 vmx->nested.cached_vmcs12, 0, VMCS12_SIZE); in nested_release_vmcs12()
5291 vmx->nested.current_vmptr = INVALID_GPA; in nested_release_vmcs12()
5325 if (vmptr == vmx->nested.vmxon_ptr) in handle_vmclear()
5340 if (vmptr == vmx->nested.current_vmptr) in handle_vmclear()
5356 } else if (vmx->nested.hv_evmcs && vmptr == vmx->nested.hv_evmcs_vmptr) { in handle_vmclear()
5396 if (!evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) { in handle_vmread()
5401 if (vmx->nested.current_vmptr == INVALID_GPA || in handle_vmread()
5434 value = evmcs_read_any(vmx->nested.hv_evmcs, field, offset); in handle_vmread()
5511 if (vmx->nested.current_vmptr == INVALID_GPA || in handle_vmwrite()
5583 vmx->nested.dirty_vmcs12 = true; in handle_vmwrite()
5591 vmx->nested.current_vmptr = vmptr; in set_current_vmptr()
5596 vmx->nested.need_vmcs12_to_shadow_sync = true; in set_current_vmptr()
5598 vmx->nested.dirty_vmcs12 = true; in set_current_vmptr()
5599 vmx->nested.force_msr_bitmap_recalc = true; in set_current_vmptr()
5618 if (vmptr == vmx->nested.vmxon_ptr) in handle_vmptrld()
5622 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in handle_vmptrld()
5625 if (vmx->nested.current_vmptr != vmptr) { in handle_vmptrld()
5626 struct gfn_to_hva_cache *ghc = &vmx->nested.vmcs12_cache; in handle_vmptrld()
5660 if (kvm_read_guest_cached(vcpu->kvm, ghc, vmx->nested.cached_vmcs12, in handle_vmptrld()
5677 gpa_t current_vmptr = to_vmx(vcpu)->nested.current_vmptr; in handle_vmptrst()
5685 if (unlikely(evmptr_is_valid(to_vmx(vcpu)->nested.hv_evmcs_vmptr))) in handle_vmptrst()
5714 if (!(vmx->nested.msrs.secondary_ctls_high & in handle_invept()
5716 !(vmx->nested.msrs.ept_caps & VMX_EPT_INVEPT_BIT)) { in handle_invept()
5728 types = (vmx->nested.msrs.ept_caps >> VMX_EPT_EXTENT_SHIFT) & 6; in handle_invept()
5795 if (!(vmx->nested.msrs.secondary_ctls_high & in handle_invvpid()
5797 !(vmx->nested.msrs.vpid_caps & VMX_VPID_INVVPID_BIT)) { in handle_invvpid()
5809 types = (vmx->nested.msrs.vpid_caps & in handle_invvpid()
6393 WARN_ON_ONCE(vmx->nested.nested_run_pending); in nested_vmx_reflect_vmexit()
6463 (vmx->nested.vmxon || vmx->nested.smm.vmxon)) { in vmx_get_nested_state()
6464 kvm_state.hdr.vmx.vmxon_pa = vmx->nested.vmxon_ptr; in vmx_get_nested_state()
6465 kvm_state.hdr.vmx.vmcs12_pa = vmx->nested.current_vmptr; in vmx_get_nested_state()
6471 if (vmx->nested.hv_evmcs_vmptr != EVMPTR_INVALID) in vmx_get_nested_state()
6480 if (vmx->nested.smm.vmxon) in vmx_get_nested_state()
6483 if (vmx->nested.smm.guest_mode) in vmx_get_nested_state()
6489 if (vmx->nested.nested_run_pending) in vmx_get_nested_state()
6492 if (vmx->nested.mtf_pending) in vmx_get_nested_state()
6496 vmx->nested.has_preemption_timer_deadline) { in vmx_get_nested_state()
6500 vmx->nested.preemption_timer_deadline; in vmx_get_nested_state()
6526 if (!vmx->nested.need_vmcs12_to_shadow_sync) { in vmx_get_nested_state()
6527 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in vmx_get_nested_state()
6564 to_vmx(vcpu)->nested.nested_run_pending = 0; in vmx_leave_nested()
6638 !vmx->nested.enlightened_vmcs_enabled)) in vmx_set_nested_state()
6646 vmx->nested.vmxon_ptr = kvm_state->hdr.vmx.vmxon_pa; in vmx_set_nested_state()
6675 vmx->nested.hv_evmcs_vmptr = EVMPTR_MAP_PENDING; in vmx_set_nested_state()
6682 vmx->nested.smm.vmxon = true; in vmx_set_nested_state()
6683 vmx->nested.vmxon = false; in vmx_set_nested_state()
6686 vmx->nested.smm.guest_mode = true; in vmx_set_nested_state()
6699 vmx->nested.nested_run_pending = in vmx_set_nested_state()
6702 vmx->nested.mtf_pending = in vmx_set_nested_state()
6727 vmx->nested.has_preemption_timer_deadline = false; in vmx_set_nested_state()
6729 vmx->nested.has_preemption_timer_deadline = true; in vmx_set_nested_state()
6730 vmx->nested.preemption_timer_deadline = in vmx_set_nested_state()
6739 vmx->nested.dirty_vmcs12 = true; in vmx_set_nested_state()
6740 vmx->nested.force_msr_bitmap_recalc = true; in vmx_set_nested_state()
6745 if (vmx->nested.mtf_pending) in vmx_set_nested_state()
6751 vmx->nested.nested_run_pending = 0; in vmx_set_nested_state()
7038 struct nested_vmx_msrs *msrs = &vmcs_conf->nested; in nested_vmx_setup_ctls_msrs()