Lines matching references to svm

313 void recalc_intercepts(struct vcpu_svm *svm);
402 static inline void set_exception_intercept(struct vcpu_svm *svm, u32 bit) in set_exception_intercept() argument
404 struct vmcb *vmcb = svm->vmcb01.ptr; in set_exception_intercept()
409 recalc_intercepts(svm); in set_exception_intercept()
412 static inline void clr_exception_intercept(struct vcpu_svm *svm, u32 bit) in clr_exception_intercept() argument
414 struct vmcb *vmcb = svm->vmcb01.ptr; in clr_exception_intercept()
419 recalc_intercepts(svm); in clr_exception_intercept()
422 static inline void svm_set_intercept(struct vcpu_svm *svm, int bit) in svm_set_intercept() argument
424 struct vmcb *vmcb = svm->vmcb01.ptr; in svm_set_intercept()
428 recalc_intercepts(svm); in svm_set_intercept()
431 static inline void svm_clr_intercept(struct vcpu_svm *svm, int bit) in svm_clr_intercept() argument
433 struct vmcb *vmcb = svm->vmcb01.ptr; in svm_clr_intercept()
437 recalc_intercepts(svm); in svm_clr_intercept()
440 static inline bool svm_is_intercept(struct vcpu_svm *svm, int bit) in svm_is_intercept() argument
442 return vmcb_is_intercept(&svm->vmcb->control, bit); in svm_is_intercept()
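Note the asymmetry in this group: the set/clr helpers always edit svm->vmcb01.ptr (L1's VMCB) and then call recalc_intercepts(), which re-merges L1's and L2's intercepts into the active VMCB when the vCPU is in guest mode, while svm_is_intercept() reads svm->vmcb, i.e. whichever VMCB is currently live. A minimal, self-contained user-space model of the bitmap plumbing underneath these helpers, assuming one u32 intercept word per 32 intercept bits; the word count and EXCEPTION_OFFSET value here are illustrative stand-ins, not the kernel's definitions:

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	#define MAX_INTERCEPT_WORDS 6	/* illustrative word count */
	#define EXCEPTION_OFFSET    64	/* assumption: exception bits start in word 2 */

	struct vmcb_control_area {
		uint32_t intercepts[MAX_INTERCEPT_WORDS];
	};

	static void vmcb_set_intercept(struct vmcb_control_area *c, int bit)
	{
		c->intercepts[bit / 32] |= 1u << (bit % 32);
	}

	static void vmcb_clr_intercept(struct vmcb_control_area *c, int bit)
	{
		c->intercepts[bit / 32] &= ~(1u << (bit % 32));
	}

	static bool vmcb_is_intercept(struct vmcb_control_area *c, int bit)
	{
		return c->intercepts[bit / 32] & (1u << (bit % 32));
	}

	int main(void)
	{
		struct vmcb_control_area ctl = { 0 };

		vmcb_set_intercept(&ctl, EXCEPTION_OFFSET + 14);	/* vector 14 = #PF */
		printf("#PF intercepted: %d\n", vmcb_is_intercept(&ctl, EXCEPTION_OFFSET + 14));
		vmcb_clr_intercept(&ctl, EXCEPTION_OFFSET + 14);
		printf("#PF intercepted: %d\n", vmcb_is_intercept(&ctl, EXCEPTION_OFFSET + 14));
		return 0;
	}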
445 static inline bool nested_vgif_enabled(struct vcpu_svm *svm) in nested_vgif_enabled() argument
447 return guest_can_use(&svm->vcpu, X86_FEATURE_VGIF) && in nested_vgif_enabled()
448 (svm->nested.ctl.int_ctl & V_GIF_ENABLE_MASK); in nested_vgif_enabled()
451 static inline struct vmcb *get_vgif_vmcb(struct vcpu_svm *svm) in get_vgif_vmcb() argument
456 if (is_guest_mode(&svm->vcpu) && !nested_vgif_enabled(svm)) in get_vgif_vmcb()
457 return svm->nested.vmcb02.ptr; in get_vgif_vmcb()
459 return svm->vmcb01.ptr; in get_vgif_vmcb()
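The listing elides the lines of get_vgif_vmcb() that do not contain "svm". Reconstructed from the surrounding context, the selection logic reads roughly as below; the leading module-parameter guard is an assumption, inferred from the NULL-tolerant GIF helpers that follow:

	static inline struct vmcb *get_vgif_vmcb(struct vcpu_svm *svm)
	{
		if (!vgif)				/* assumed guard: vGIF unsupported or disabled */
			return NULL;

		if (is_guest_mode(&svm->vcpu) && !nested_vgif_enabled(svm))
			return svm->nested.vmcb02.ptr;	/* hardware vGIF tracks L2's GIF */

		return svm->vmcb01.ptr;			/* otherwise vmcb01 holds the GIF */
	}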
462 static inline void enable_gif(struct vcpu_svm *svm) in enable_gif() argument
464 struct vmcb *vmcb = get_vgif_vmcb(svm); in enable_gif()
469 svm->guest_gif = true; in enable_gif()
472 static inline void disable_gif(struct vcpu_svm *svm) in disable_gif() argument
474 struct vmcb *vmcb = get_vgif_vmcb(svm); in disable_gif()
479 svm->guest_gif = false; in disable_gif()
482 static inline bool gif_set(struct vcpu_svm *svm) in gif_set() argument
484 struct vmcb *vmcb = get_vgif_vmcb(svm); in gif_set()
489 return svm->guest_gif; in gif_set()
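All three GIF helpers share one shape: if get_vgif_vmcb() yields a VMCB, the hardware-tracked virtual GIF flag in int_ctl is updated or tested; otherwise the code falls back to the software bit svm->guest_gif. A sketch of the elided branches, assuming a V_GIF flag in int_ctl (disable_gif mirrors enable_gif with the flag cleared and svm->guest_gif set to false):

	static inline void enable_gif(struct vcpu_svm *svm)
	{
		struct vmcb *vmcb = get_vgif_vmcb(svm);

		if (vmcb)
			vmcb->control.int_ctl |= V_GIF_MASK;	/* hardware-tracked GIF */
		else
			svm->guest_gif = true;			/* software fallback */
	}

	static inline bool gif_set(struct vcpu_svm *svm)
	{
		struct vmcb *vmcb = get_vgif_vmcb(svm);

		if (vmcb)
			return !!(vmcb->control.int_ctl & V_GIF_MASK);

		return svm->guest_gif;
	}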
492 static inline bool nested_npt_enabled(struct vcpu_svm *svm) in nested_npt_enabled() argument
494 return svm->nested.ctl.nested_ctl & SVM_NESTED_CTL_NP_ENABLE; in nested_npt_enabled()
497 static inline bool nested_vnmi_enabled(struct vcpu_svm *svm) in nested_vnmi_enabled() argument
499 return guest_can_use(&svm->vcpu, X86_FEATURE_VNMI) && in nested_vnmi_enabled()
500 (svm->nested.ctl.int_ctl & V_NMI_ENABLE_MASK); in nested_vnmi_enabled()
512 static inline struct vmcb *get_vnmi_vmcb_l1(struct vcpu_svm *svm) in get_vnmi_vmcb_l1() argument
517 if (is_guest_mode(&svm->vcpu)) in get_vnmi_vmcb_l1()
520 return svm->vmcb01.ptr; in get_vnmi_vmcb_l1()
523 static inline bool is_vnmi_enabled(struct vcpu_svm *svm) in is_vnmi_enabled() argument
525 struct vmcb *vmcb = get_vnmi_vmcb_l1(svm); in is_vnmi_enabled()
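The vNMI pair follows the same pattern as vGIF, with one difference visible in the listing: get_vnmi_vmcb_l1() only ever hands back vmcb01 and returns NULL while L2 runs, so L1's vNMI state is never read through vmcb02. A reconstructed sketch, reusing the V_NMI_ENABLE_MASK flag seen in nested_vnmi_enabled() above; the module-parameter guard is again an assumption:

	static inline struct vmcb *get_vnmi_vmcb_l1(struct vcpu_svm *svm)
	{
		if (!vnmi)			/* assumed guard, as with vgif */
			return NULL;

		if (is_guest_mode(&svm->vcpu))
			return NULL;		/* L1's vNMI state is not live while L2 runs */

		return svm->vmcb01.ptr;
	}

	static inline bool is_vnmi_enabled(struct vcpu_svm *svm)
	{
		struct vmcb *vmcb = get_vnmi_vmcb_l1(svm);

		return vmcb && (vmcb->control.int_ctl & V_NMI_ENABLE_MASK);
	}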
551 void disable_nmi_singlestep(struct vcpu_svm *svm);
555 void svm_set_gif(struct vcpu_svm *svm, bool value);
559 void svm_set_x2apic_msr_interception(struct vcpu_svm *svm, bool disable);
571 struct vcpu_svm *svm = to_svm(vcpu); in nested_svm_virtualize_tpr() local
573 return is_guest_mode(vcpu) && (svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK); in nested_svm_virtualize_tpr()
576 static inline bool nested_exit_on_smi(struct vcpu_svm *svm) in nested_exit_on_smi() argument
578 return vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_SMI); in nested_exit_on_smi()
581 static inline bool nested_exit_on_intr(struct vcpu_svm *svm) in nested_exit_on_intr() argument
583 return vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_INTR); in nested_exit_on_intr()
586 static inline bool nested_exit_on_nmi(struct vcpu_svm *svm) in nested_exit_on_nmi() argument
588 return vmcb12_is_intercept(&svm->nested.ctl, INTERCEPT_NMI); in nested_exit_on_nmi()
594 void svm_free_nested(struct vcpu_svm *svm);
595 int svm_allocate_nested(struct vcpu_svm *svm);
600 int nested_svm_vmexit(struct vcpu_svm *svm);
602 static inline int nested_svm_simple_vmexit(struct vcpu_svm *svm, u32 exit_code) in nested_svm_simple_vmexit() argument
604 svm->vmcb->control.exit_code = exit_code; in nested_svm_simple_vmexit()
605 svm->vmcb->control.exit_info_1 = 0; in nested_svm_simple_vmexit()
606 svm->vmcb->control.exit_info_2 = 0; in nested_svm_simple_vmexit()
607 return nested_svm_vmexit(svm); in nested_svm_simple_vmexit()
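nested_svm_simple_vmexit() synthesizes an exit with no exit_info and hands off to nested_svm_vmexit(); the nested_exit_on_*() predicates above decide whether a physical event should become such a synthetic L2-to-L1 exit. A usage sketch, modeled on KVM's SMI handling but not verbatim kernel code:

	/* Sketch: reflect an incoming SMI into a nested exit when L1 intercepts SMI. */
	static int svm_smi_sketch(struct kvm_vcpu *vcpu)
	{
		struct vcpu_svm *svm = to_svm(vcpu);

		if (is_guest_mode(vcpu) && nested_exit_on_smi(svm))
			return nested_svm_simple_vmexit(svm, SVM_EXIT_SMI);

		return 0;	/* otherwise handle the SMI in L0/L1 as usual */
	}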
610 int nested_svm_exit_handled(struct vcpu_svm *svm);
612 int nested_svm_check_exception(struct vcpu_svm *svm, unsigned nr,
614 int nested_svm_exit_special(struct vcpu_svm *svm);
617 void nested_copy_vmcb_control_to_cache(struct vcpu_svm *svm,
619 void nested_copy_vmcb_save_to_cache(struct vcpu_svm *svm,
621 void nested_sync_control_from_vmcb02(struct vcpu_svm *svm);
622 void nested_vmcb02_compute_g_pat(struct vcpu_svm *svm);
623 void svm_switch_vmcb(struct vcpu_svm *svm, struct kvm_vmcb_info *target_vmcb);
648 void avic_init_vmcb(struct vcpu_svm *svm, struct vmcb *vmcb);
651 int avic_init_vcpu(struct vcpu_svm *svm);
683 void pre_sev_run(struct vcpu_svm *svm, int cpu);
688 void sev_init_vmcb(struct vcpu_svm *svm);
689 void sev_vcpu_after_set_cpuid(struct vcpu_svm *svm);
692 int sev_es_string_io(struct vcpu_svm *svm, int size, unsigned int port, int in);
693 void sev_es_vcpu_reset(struct vcpu_svm *svm);
696 void sev_es_unmap_ghcb(struct vcpu_svm *svm);
700 void __svm_sev_es_vcpu_run(struct vcpu_svm *svm, bool spec_ctrl_intercepted);
701 void __svm_vcpu_run(struct vcpu_svm *svm, bool spec_ctrl_intercepted);
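__svm_vcpu_run() and __svm_sev_es_vcpu_run() are the low-level VMRUN entry points; the spec_ctrl_intercepted flag tells them whether MSR_IA32_SPEC_CTRL writes are intercepted and therefore whether the MSR must be swapped in software around VMRUN. A dispatch sketch modeled on KVM's svm_vcpu_enter_exit(); the helpers sev_es_guest() and msr_write_intercepted() do not appear in this listing and are assumptions about the surrounding code:

	static void vmrun_dispatch_sketch(struct kvm_vcpu *vcpu)
	{
		struct vcpu_svm *svm = to_svm(vcpu);
		bool spec_ctrl_intercepted = msr_write_intercepted(vcpu, MSR_IA32_SPEC_CTRL);

		if (sev_es_guest(vcpu->kvm))
			__svm_sev_es_vcpu_run(svm, spec_ctrl_intercepted);	/* SEV-ES: state lives in the VMSA */
		else
			__svm_vcpu_run(svm, spec_ctrl_intercepted);
	}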
704 static __always_inline bool kvm_ghcb_##field##_is_valid(const struct vcpu_svm *svm) \
707 (unsigned long *)&svm->sev_es.valid_bitmap); \
710 	static __always_inline u64 kvm_ghcb_get_##field##_if_valid(struct vcpu_svm *svm, struct ghcb *ghcb) \ in kvm_ghcb_get_##field##_if_valid()
712 return kvm_ghcb_##field##_is_valid(svm) ? ghcb->save.field : 0; \
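Lines 704-712 are the body of a per-field accessor macro; the listing shows only the lines containing "svm". Expanded, the generated pair tests a per-field bit in svm->sev_es.valid_bitmap before trusting the guest-supplied GHCB value, returning 0 for fields the guest never marked valid. A sketch of the macro plus a hypothetical instantiation; GHCB_BITMAP_IDX() is assumed to map a field name to its bit index:

	#define DEFINE_KVM_GHCB_ACCESSORS(field)					\
		static __always_inline bool						\
		kvm_ghcb_##field##_is_valid(const struct vcpu_svm *svm)			\
		{									\
			return test_bit(GHCB_BITMAP_IDX(field),				\
					(unsigned long *)&svm->sev_es.valid_bitmap);	\
		}									\
											\
		static __always_inline u64						\
		kvm_ghcb_get_##field##_if_valid(struct vcpu_svm *svm, struct ghcb *ghcb) \
		{									\
			return kvm_ghcb_##field##_is_valid(svm) ? ghcb->save.field : 0;	\
		}

	DEFINE_KVM_GHCB_ACCESSORS(rax)	/* generates kvm_ghcb_rax_is_valid() and kvm_ghcb_get_rax_if_valid() */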