Searched refs:is_guest_mode (Results 1 – 19 of 19) sorted by relevance

/openbmc/linux/arch/x86/kvm/
hyperv.h 182 bool is_guest_mode) in kvm_hv_get_tlb_flush_fifo() argument
185 int i = is_guest_mode ? HV_L2_TLB_FLUSH_FIFO : in kvm_hv_get_tlb_flush_fifo()
198 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_purge_flush_tlb()
irq.c 87 if (!is_guest_mode(v) && kvm_vcpu_apicv_active(v)) in kvm_cpu_has_injectable_intr()
kvm_cache_regs.h 226 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) in is_guest_mode() function
hyperv.c 1924 tlb_flush_fifo = kvm_hv_get_tlb_flush_fifo(vcpu, is_guest_mode(vcpu)); in kvm_hv_vcpu_flush_tlb()
1987 if (!hc->fast && is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2009 is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2040 flush_ex.flags, is_guest_mode(vcpu)); in kvm_hv_flush_tlb()
2084 if (all_cpus && !is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2092 } else if (!is_guest_mode(vcpu)) { in kvm_hv_flush_tlb()
2334 if (hv_result_success(result) && is_guest_mode(vcpu) && in kvm_hv_hypercall_complete()
kvm_emulate.h 223 bool (*is_guest_mode)(struct x86_emulate_ctxt *ctxt); member
x86.c 662 if (!reinject && is_guest_mode(vcpu) && in kvm_multiple_exception()
698 if (!is_guest_mode(vcpu)) in kvm_multiple_exception()
787 if (is_guest_mode(vcpu) && fault->async_page_fault) in kvm_inject_page_fault()
2628 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_offset()
2644 if (is_guest_mode(vcpu)) in kvm_vcpu_write_tsc_multiplier()
8311 return is_guest_mode(emul_to_vcpu(ctxt)); in emulator_is_guest_mode()
8380 .is_guest_mode = emulator_is_guest_mode,
8575 if (!is_guest_mode(vcpu) && static_call(kvm_x86_get_cpl)(vcpu) == 0) { in handle_emulation_failure()
8592 if (WARN_ON_ONCE(is_guest_mode(vcpu)) || in reexecute_instruction()
8684 if (WARN_ON_ONCE(is_guest_mode(vcpu)) || in retry_instruction()
[all …]
emulate.c 5142 bool is_guest_mode = ctxt->ops->is_guest_mode(ctxt); in x86_emulate_insn() local
5190 if (unlikely(is_guest_mode) && ctxt->intercept) { in x86_emulate_insn()
5219 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) { in x86_emulate_insn()
5273 if (unlikely(is_guest_mode) && (ctxt->d & Intercept)) { in x86_emulate_insn()
lapic.c 3319 if (is_guest_mode(vcpu)) { in kvm_apic_accept_events()
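
The kvm_cache_regs.h hit above (line 226) is the definition of the helper itself: KVM answers "is this vCPU currently running a nested (L2) guest?" by testing a flag in the vCPU's hidden-flags word, and the instruction emulator, which does not see struct kvm_vcpu directly, gets the same answer through the ctxt->ops->is_guest_mode() callback registered in x86.c (lines 8311 and 8380). The following is a minimal, self-contained C model of that check; the HF_GUEST_MASK value and the struct layout are simplified stand-ins for illustration, not the kernel headers verbatim.

    /* Toy model of is_guest_mode(): a flag in the vCPU's hidden-flags word. */
    #include <stdbool.h>
    #include <stdio.h>

    #define HF_GUEST_MASK (1UL << 0)   /* assumed flag value, for illustration only */

    struct kvm_vcpu_arch { unsigned long hflags; };
    struct kvm_vcpu { struct kvm_vcpu_arch arch; };

    static inline bool is_guest_mode(struct kvm_vcpu *vcpu)
    {
        return vcpu->arch.hflags & HF_GUEST_MASK;
    }

    int main(void)
    {
        struct kvm_vcpu vcpu = { .arch = { .hflags = 0 } };

        printf("L1 only:    is_guest_mode=%d\n", is_guest_mode(&vcpu));
        vcpu.arch.hflags |= HF_GUEST_MASK;   /* a nested VM-entry would set this */
        printf("running L2: is_guest_mode=%d\n", is_guest_mode(&vcpu));
        return 0;
    }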
/openbmc/linux/arch/x86/kvm/vmx/
vmx.c 913 if (is_guest_mode(vcpu)) in vmx_update_exception_bitmap()
1763 if (!is_guest_mode(vcpu)) in vmx_update_emulated_instruction()
2218 if (is_guest_mode(vcpu)) in vmx_set_msr()
2223 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2230 if (is_guest_mode(vcpu)) { in vmx_set_msr()
2249 if (is_guest_mode(vcpu) && get_vmcs12(vcpu)->vm_exit_controls & in vmx_set_msr()
2268 if (is_guest_mode(vcpu) && in vmx_set_msr()
2325 if (is_guest_mode(vcpu) && in vmx_set_msr()
3089 WARN_ON_ONCE(is_guest_mode(vcpu)); in enter_rmode()
3198 if (is_guest_mode(vcpu) && nested_cpu_has_vpid(get_vmcs12(vcpu))) in vmx_get_current_vpid()
[all …]
nested.c 3314 if (is_guest_mode(vcpu) && !nested_get_vmcs12_pages(vcpu)) in vmx_get_nested_state_pages()
3326 if (WARN_ON_ONCE(!is_guest_mode(vcpu))) in nested_vmx_write_pml_buffer()
5392 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmread()
5416 (is_guest_mode(vcpu) && in handle_vmread()
5424 if (!is_guest_mode(vcpu) && is_vmcs12_ext_field(field)) in handle_vmread()
5440 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in handle_vmread()
5498 struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu) in handle_vmwrite()
5526 (is_guest_mode(vcpu) && in handle_vmwrite()
5560 if (!is_guest_mode(vcpu) && !is_shadow_field_rw(field)) in handle_vmwrite()
5582 if (!is_guest_mode(vcpu) && !is_shadow_field_rw(field)) { in handle_vmwrite()
[all …]
sgx.c 503 if (!vmcs12 && is_guest_mode(vcpu)) in vmx_write_encls_bitmap()
vmx.h 726 return enable_unrestricted_guest && (!is_guest_mode(vcpu) || in is_unrestricted_guest()
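
The VMX hits share one pattern: when is_guest_mode(vcpu) is true, the code consults vmcs12, the VMCS that L1 supplied for L2, instead of (or in addition to) L1's own state, as in vmx_get_current_vpid() and the handle_vmread()/handle_vmwrite() paths above. Here is a small self-contained sketch of that pattern, using simplified stand-in types rather than the real vcpu_vmx/vmcs12 layouts.

    /* Sketch of the recurring VMX pattern: in guest (L2) mode, prefer vmcs12 state. */
    #include <stdbool.h>
    #include <stdio.h>

    struct vmcs12  { bool has_vpid; int vpid02; };
    struct vcpu_vmx { bool guest_mode; int vpid; struct vmcs12 vmcs12; };

    static bool is_guest_mode(struct vcpu_vmx *vmx) { return vmx->guest_mode; }

    /* Loosely modeled on the vmx_get_current_vpid() hit above. */
    static int vmx_get_current_vpid(struct vcpu_vmx *vmx)
    {
        if (is_guest_mode(vmx) && vmx->vmcs12.has_vpid)
            return vmx->vmcs12.vpid02;   /* L2 runs with its own VPID */
        return vmx->vpid;                /* otherwise, L1's VPID */
    }

    int main(void)
    {
        struct vcpu_vmx vmx = { .guest_mode = false, .vpid = 1,
                                .vmcs12 = { .has_vpid = true, .vpid02 = 2 } };

        printf("L1: vpid=%d\n", vmx_get_current_vpid(&vmx));
        vmx.guest_mode = true;
        printf("L2: vpid=%d\n", vmx_get_current_vpid(&vmx));
        return 0;
    }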
/openbmc/linux/arch/x86/kvm/svm/
avic.c 131 if (is_guest_mode(&svm->vcpu) && in avic_deactivate_vmcb()
544 if (is_guest_mode(vcpu)) in avic_vcpu_get_apicv_inhibit_reasons()
941 pi.is_guest_mode = true; in avic_pi_update_irte()
952 if (!ret && pi.is_guest_mode) in avic_pi_update_irte()
964 pi.is_guest_mode = false; in avic_pi_update_irte()
svm.c 829 msrpm = is_guest_mode(vcpu) ? to_svm(vcpu)->nested.msrpm: in msr_write_intercepted()
1026 if (is_guest_mode(vcpu)) in svm_enable_lbrv()
1046 if (is_guest_mode(vcpu)) in svm_disable_lbrv()
1066 (is_guest_mode(vcpu) && guest_can_use(vcpu, X86_FEATURE_LBRV) && in svm_update_lbrv()
1693 if (is_guest_mode(&svm->vcpu)) { in svm_clear_vintr()
2373 if (is_guest_mode(vcpu)) { in emulate_svm_instr()
2415 if (!is_guest_mode(vcpu)) in gp_interception()
2633 if (!is_guest_mode(vcpu) || in check_selective_cr0_intercepted()
3037 is_guest_mode(vcpu)) in svm_set_msr()
3047 if (is_guest_mode(vcpu) in svm_set_msr()
[all …]
svm.h 456 if (is_guest_mode(&svm->vcpu) && !nested_vgif_enabled(svm)) in get_vgif_vmcb()
517 if (is_guest_mode(&svm->vcpu)) in get_vnmi_vmcb_l1()
573 return is_guest_mode(vcpu) && (svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK); in nested_svm_virtualize_tpr()
nested.c 134 if (!is_guest_mode(&svm->vcpu)) in recalc_intercepts()
1235 if (is_guest_mode(vcpu)) { in svm_leave_nested()
1604 if (is_guest_mode(vcpu)) { in svm_get_nested_state()
1619 if (!is_guest_mode(vcpu)) in svm_get_nested_state()
1741 if (is_guest_mode(vcpu)) in svm_set_nested_state()
1786 if (WARN_ON(!is_guest_mode(vcpu))) in svm_get_nested_state_pages()
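
The SVM hits follow the same idea with VMCBs and nested control state: while L2 runs, helpers such as msr_write_intercepted() (svm.c line 829) and get_vgif_vmcb()/get_vnmi_vmcb_l1() (svm.h) pick the nested structures rather than L1's. A self-contained sketch of that selection, again with simplified stand-in types:

    /* Sketch of the recurring SVM pattern: in guest (L2) mode, use nested state. */
    #include <stdbool.h>
    #include <stdio.h>

    struct vcpu_svm {
        bool guest_mode;
        unsigned long *msrpm;                      /* L1's MSR permission map */
        struct { unsigned long *msrpm; } nested;   /* map in effect while L2 runs */
    };

    static bool is_guest_mode(struct vcpu_svm *svm) { return svm->guest_mode; }

    /* Loosely modeled on the msr_write_intercepted() hit above (svm.c line 829). */
    static unsigned long *current_msrpm(struct vcpu_svm *svm)
    {
        return is_guest_mode(svm) ? svm->nested.msrpm : svm->msrpm;
    }

    int main(void)
    {
        unsigned long l1_map[1] = { 0 }, l2_map[1] = { 0 };
        struct vcpu_svm svm = { .guest_mode = false, .msrpm = l1_map,
                                .nested = { .msrpm = l2_map } };

        printf("L1 map selected: %d\n", current_msrpm(&svm) == l1_map);
        svm.guest_mode = true;
        printf("L2 map selected: %d\n", current_msrpm(&svm) == l2_map);
        return 0;
    }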
/openbmc/linux/include/linux/
amd-iommu.h 24 bool is_guest_mode; member
/openbmc/linux/drivers/iommu/amd/
iommu.c 3634 pi_data->is_guest_mode = false; in amd_ir_set_vcpu_affinity()
3638 if (pi_data->is_guest_mode) { in amd_ir_set_vcpu_affinity()
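
Note that these two groups use a different is_guest_mode: it is a bool member of struct amd_iommu_pi_data (include/linux/amd-iommu.h line 24) that KVM's AVIC code fills in (avic.c lines 941-964) and the AMD IOMMU driver reads in amd_ir_set_vcpu_affinity() to decide whether the interrupt remapping entry is programmed for guest-mode (posted) delivery or left in legacy remapped mode. A minimal sketch of that handshake follows; the struct shown contains only the one field relevant to these hits, and program_irte() is a hypothetical stand-in for the driver side.

    /* Sketch of the AVIC <-> AMD IOMMU handshake around pi_data->is_guest_mode. */
    #include <stdbool.h>
    #include <stdio.h>

    struct amd_iommu_pi_data {
        bool is_guest_mode;   /* the member seen in include/linux/amd-iommu.h */
    };

    /* Hypothetical stand-in for the driver path that honors the flag. */
    static void program_irte(const struct amd_iommu_pi_data *pi)
    {
        if (pi->is_guest_mode)
            printf("IRTE -> guest (posted) delivery\n");
        else
            printf("IRTE -> legacy remapped delivery\n");
    }

    int main(void)
    {
        struct amd_iommu_pi_data pi = { .is_guest_mode = true };

        program_irte(&pi);           /* vCPU targetable: post directly to it */
        pi.is_guest_mode = false;
        program_irte(&pi);           /* vCPU not targetable: fall back to remapping */
        return 0;
    }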
/openbmc/linux/arch/x86/kvm/mmu/
mmu.c 4269 if (is_guest_mode(vcpu)) { in __kvm_faultin_pfn()
5123 role.base.guest_mode = is_guest_mode(vcpu); in kvm_calc_cpu_role()
5766 if (!mmio_info_in_cache(vcpu, cr2_or_gpa, direct) && !is_guest_mode(vcpu)) in kvm_mmu_page_fault()
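
Finally, the MMU hits fold is_guest_mode() into the MMU role (mmu.c line 5123): because guest_mode is part of the role key, L1 and L2 never share cached page-table roots. A toy illustration of why encoding the bit in the key keeps the contexts distinct; the packed layout here is invented for the example, whereas upstream KVM uses the kvm_mmu_page_role/kvm_cpu_role bitfields.

    /* Toy role key: low bits = paging level, bit 4 = guest_mode. */
    #include <stdbool.h>
    #include <stdio.h>

    static unsigned int make_mmu_role(unsigned int level, bool guest_mode)
    {
        return (level & 0xf) | ((unsigned int)guest_mode << 4);
    }

    int main(void)
    {
        /* Same paging setup, but one vCPU is currently running L2. */
        unsigned int l1_role = make_mmu_role(4, false);
        unsigned int l2_role = make_mmu_role(4, true);

        /* Different keys -> separate cached MMU contexts and roots. */
        printf("l1_role=%#x l2_role=%#x distinct=%d\n",
               l1_role, l2_role, l1_role != l2_role);
        return 0;
    }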