Searched refs:hflags2 (Results 1 – 16 of 16) sorted by relevance
34   if (ferr_irq && !(env->hflags2 & HF2_IGNNE_MASK)) {  in fpu_check_raise_ferr_irq()
45   env->hflags2 &= ~HF2_IGNNE_MASK;  in cpu_clear_ignne()
54   env->hflags2 |= HF2_IGNNE_MASK;  in cpu_set_ignne()
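The three hits above are the FERR/IGNNE gate: the FERR interrupt is only raised while HF2_IGNNE_MASK is clear, and cpu_set_ignne()/cpu_clear_ignne() simply toggle that bit. A minimal stand-alone sketch of the same gate, with a toy env struct and an assumed bit position instead of QEMU's real definitions:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define HF2_IGNNE_MASK (1u << 1)   /* assumed bit position, sketch only */

    struct toy_env { uint32_t hflags2; };

    /* Raise FERR only while IGNNE is not asserted, mirroring the hits above. */
    static void toy_fpu_check_raise_ferr_irq(struct toy_env *env, bool ferr_irq)
    {
        if (ferr_irq && !(env->hflags2 & HF2_IGNNE_MASK)) {
            puts("FERR raised");            /* stand-in for the real IRQ line */
        }
    }

    int main(void)
    {
        struct toy_env env = { .hflags2 = 0 };
        toy_fpu_check_raise_ferr_irq(&env, true);   /* fires */
        env.hflags2 |= HF2_IGNNE_MASK;              /* cpu_set_ignne() equivalent */
        toy_fpu_check_raise_ferr_irq(&env, true);   /* suppressed */
        return 0;
    }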
296  env->hflags2 |= HF2_NPT_MASK;  in helper_vmrun()
338  env->hflags2 |= HF2_VINTR_MASK;  in helper_vmrun()
403  env->hflags2 |= HF2_GIF_MASK;  in helper_vmrun()
410  env->hflags2 |= HF2_VGIF_MASK;  in helper_vmrun()
595  env->hflags2 |= HF2_VGIF_MASK;  in helper_stgi()
597  env->hflags2 |= HF2_GIF_MASK;  in helper_stgi()
607  env->hflags2 &= ~HF2_VGIF_MASK;  in helper_clgi()
609  env->hflags2 &= ~HF2_GIF_MASK;  in helper_clgi()
765  env->hflags2 &= ~HF2_NPT_MASK;  in do_vmexit()
911  env->hflags2 &= ~HF2_GIF_MASK;  in do_vmexit()
[all …]
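The helper_stgi()/helper_clgi() hits set and clear either the plain GIF bit or the virtual-GIF bit, depending on whether the guest has virtual GIF available. A sketch of that shape, not the QEMU implementation; the bit values and the have_vgif flag are assumptions for the demo:

    #include <stdbool.h>
    #include <stdint.h>

    /* Assumed bit positions, sketch only; QEMU defines these in cpu.h. */
    #define HF2_GIF_MASK   (1u << 0)
    #define HF2_VGIF_MASK  (1u << 7)

    struct toy_env { uint32_t hflags2; };

    /* STGI: re-enable interrupt delivery, via the virtual GIF when available. */
    void toy_stgi(struct toy_env *env, bool have_vgif)
    {
        if (have_vgif) {
            env->hflags2 |= HF2_VGIF_MASK;
        } else {
            env->hflags2 |= HF2_GIF_MASK;
        }
    }

    /* CLGI: block interrupt delivery, symmetric to toy_stgi(). */
    void toy_clgi(struct toy_env *env, bool have_vgif)
    {
        if (have_vgif) {
            env->hflags2 &= ~HF2_VGIF_MASK;
        } else {
            env->hflags2 &= ~HF2_GIF_MASK;
        }
    }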
48   if (env->hflags2 & HF2_NMI_MASK) {  in do_smm_enter()
49   env->hflags2 |= HF2_SMM_INSIDE_NMI_MASK;  in do_smm_enter()
51   env->hflags2 |= HF2_NMI_MASK;  in do_smm_enter()
311  if ((env->hflags2 & HF2_SMM_INSIDE_NMI_MASK) == 0) {  in helper_rsm()
312  env->hflags2 &= ~HF2_NMI_MASK;  in helper_rsm()
314  env->hflags2 &= ~HF2_SMM_INSIDE_NMI_MASK;  in helper_rsm()
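The do_smm_enter()/helper_rsm() hits show the NMI-inside-SMM bookkeeping: entering SMM blocks NMIs, but remembers whether they were already blocked so that RSM only unblocks them when the SMI did not arrive inside an NMI handler. A compact sketch of that bookkeeping, with placeholder bit positions:

    #include <stdint.h>

    #define HF2_NMI_MASK            (1u << 2)   /* assumed values, sketch only */
    #define HF2_SMM_INSIDE_NMI_MASK (1u << 4)

    struct toy_env { uint32_t hflags2; };

    void toy_smm_enter(struct toy_env *env)
    {
        if (env->hflags2 & HF2_NMI_MASK) {
            env->hflags2 |= HF2_SMM_INSIDE_NMI_MASK;  /* remember: NMIs already blocked */
        } else {
            env->hflags2 |= HF2_NMI_MASK;             /* block NMIs for the SMM handler */
        }
    }

    void toy_rsm(struct toy_env *env)
    {
        if ((env->hflags2 & HF2_SMM_INSIDE_NMI_MASK) == 0) {
            env->hflags2 &= ~HF2_NMI_MASK;            /* SMI was not nested in an NMI */
        }
        env->hflags2 &= ~HF2_SMM_INSIDE_NMI_MASK;
    }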
68   if (!(env->hflags2 & HF2_VINTR_MASK)) {  in helper_read_cr8()
111  if (!(env->hflags2 & HF2_VINTR_MASK)) {  in helper_write_crN()
195 env->hflags2 |= HF2_NMI_MASK; in x86_cpu_exec_interrupt()
549 bool use_stage2 = env->hflags2 & HF2_NPT_MASK; in get_physical_address()
49   uint32_t hflags2 = env->hflags2;  in cpu_sync_bndcs_hflags() local
67   hflags2 |= HF2_MPX_PR_MASK;  in cpu_sync_bndcs_hflags()
69   hflags2 &= ~HF2_MPX_PR_MASK;  in cpu_sync_bndcs_hflags()
73   env->hflags2 = hflags2;  in cpu_sync_bndcs_hflags()
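These hits follow a recompute-then-publish pattern: cpu_sync_bndcs_hflags() copies the flags into a local, derives the HF2_MPX_PR_MASK bit from the current MPX configuration, and writes the whole word back once. A sketch of that pattern, where mpx_bnd_preserved() is a hypothetical stand-in for the real BNDCFGS/BNDCFGU checks:

    #include <stdbool.h>
    #include <stdint.h>

    #define HF2_MPX_PR_MASK (1u << 5)   /* assumed bit position, sketch only */

    struct toy_env { uint32_t hflags2; };

    static bool mpx_bnd_preserved(const struct toy_env *env)
    {
        (void)env;
        return true;   /* placeholder for the BNDCFGS/BNDCFGU enable logic */
    }

    void toy_sync_bndcs_hflags(struct toy_env *env)
    {
        uint32_t hflags2 = env->hflags2;

        if (mpx_bnd_preserved(env)) {
            hflags2 |= HF2_MPX_PR_MASK;
        } else {
            hflags2 &= ~HF2_MPX_PR_MASK;
        }

        env->hflags2 = hflags2;   /* publish the recomputed word in one store */
    }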
1405  return !!(env->hflags2 & HF2_NPT_MASK);  in svm_npt_needed()
1705  VMSTATE_UINT32(env.hflags2, X86CPU),
7214  env->hflags2 |= HF2_GIF_MASK;  in x86_cpu_reset_hold()
7215  env->hflags2 |= HF2_VGIF_MASK;  in x86_cpu_reset_hold()
8283  if (env->hflags2 & HF2_GIF_MASK) {  in x86_cpu_pending_interrupt()
8288  !(env->hflags2 & HF2_NMI_MASK)) {  in x86_cpu_pending_interrupt()
8293  (((env->hflags2 & HF2_VINTR_MASK) &&  in x86_cpu_pending_interrupt()
8294  (env->hflags2 & HF2_HIF_MASK)) ||  in x86_cpu_pending_interrupt()
8295  (!(env->hflags2 & HF2_VINTR_MASK) &&  in x86_cpu_pending_interrupt()
8300  } else if (env->hflags2 & HF2_VGIF_MASK) {  in x86_cpu_pending_interrupt()
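The x86_cpu_pending_interrupt() hits outline how delivery is gated: nothing is taken while GIF is clear, NMIs additionally require HF2_NMI_MASK to be clear, and external interrupts are gated either by the host's saved IF (HF2_HIF_MASK, when HF2_VINTR_MASK indicates V_INTR_MASKING) or by the guest's own IF. A reduced model of those conditions, not the real function; bit values and the eflags_if field are assumptions:

    #include <stdbool.h>
    #include <stdint.h>

    #define HF2_GIF_MASK   (1u << 0)   /* assumed values, sketch only */
    #define HF2_HIF_MASK   (1u << 1)
    #define HF2_NMI_MASK   (1u << 2)
    #define HF2_VINTR_MASK (1u << 3)

    struct toy_env { uint32_t hflags2; bool eflags_if; };

    /* NMIs need GIF set and the NMI-blocked bit clear. */
    bool toy_nmi_deliverable(const struct toy_env *env)
    {
        return (env->hflags2 & HF2_GIF_MASK) && !(env->hflags2 & HF2_NMI_MASK);
    }

    /* External interrupts: HIF decides under V_INTR_MASKING, else the guest's IF. */
    bool toy_irq_deliverable(const struct toy_env *env)
    {
        if (!(env->hflags2 & HF2_GIF_MASK)) {
            return false;
        }
        if (env->hflags2 & HF2_VINTR_MASK) {
            return env->hflags2 & HF2_HIF_MASK;
        }
        return env->eflags_if;
    }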
1752 uint32_t hflags2; /* various other flags, see HF2_xxx constants. */ member
201  env->hflags2 &= ~HF2_NMI_MASK;  in vmx_clear_nmi_blocking()
212  env->hflags2 |= HF2_NMI_MASK;  in vmx_set_nmi_blocking()
382  env->hflags2 |= HF2_NMI_MASK;  in hvf_store_events()
384  env->hflags2 &= ~HF2_NMI_MASK;  in hvf_store_events()
380  if (!(env->hflags2 & HF2_NMI_MASK) || intr_type != VMCS_INTR_T_NMI) {  in hvf_inject_interrupts()
399  if (!(env->hflags2 & HF2_NMI_MASK) && !(info & VMCS_INTR_VALID)) {  in hvf_inject_interrupts()
135 if (!(env->hflags2 & HF2_MPX_PR_MASK)) { in helper_bnd_jmp()
1966  env->hflags2 &= ~HF2_NMI_MASK;  in helper_iret_real()
2246  env->hflags2 &= ~HF2_NMI_MASK;  in helper_iret_protected()
5017  events.nmi.masked = !!(env->hflags2 & HF2_NMI_MASK);  in kvm_put_vcpu_events()
5024  events.smi.smm_inside_nmi = !!(env->hflags2 & HF2_SMM_INSIDE_NMI_MASK);  in kvm_put_vcpu_events()
5088  env->hflags2 |= HF2_NMI_MASK;  in kvm_get_vcpu_events()
5090  env->hflags2 &= ~HF2_NMI_MASK;  in kvm_get_vcpu_events()
5105  env->hflags2 |= HF2_SMM_INSIDE_NMI_MASK;  in kvm_get_vcpu_events()
5107  env->hflags2 &= ~HF2_SMM_INSIDE_NMI_MASK;  in kvm_get_vcpu_events()
5180  if (cpu_has_svm(env) && (env->hflags2 & HF2_GIF_MASK)) {  in kvm_put_nested_state()
5226  env->hflags2 |= HF2_GIF_MASK;  in kvm_get_nested_state()
5228  env->hflags2 &= ~HF2_GIF_MASK;  in kvm_get_nested_state()
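The kvm_put/get_vcpu_events() hits flatten the NMI-blocked and SMM-inside-NMI bits of hflags2 into booleans for the kernel and fold them back in on the return path. A sketch of that round trip, where toy_events is a stand-in for struct kvm_vcpu_events and the bit values are assumed:

    #include <stdbool.h>
    #include <stdint.h>

    #define HF2_NMI_MASK            (1u << 2)   /* assumed values, sketch only */
    #define HF2_SMM_INSIDE_NMI_MASK (1u << 4)

    struct toy_env { uint32_t hflags2; };
    struct toy_events { bool nmi_masked; bool smm_inside_nmi; };

    /* Flatten the relevant hflags2 bits into the event struct for the kernel. */
    void toy_put_events(const struct toy_env *env, struct toy_events *ev)
    {
        ev->nmi_masked     = !!(env->hflags2 & HF2_NMI_MASK);
        ev->smm_inside_nmi = !!(env->hflags2 & HF2_SMM_INSIDE_NMI_MASK);
    }

    /* Fold the kernel's view back into hflags2 after reading the events. */
    void toy_get_events(struct toy_env *env, const struct toy_events *ev)
    {
        if (ev->nmi_masked) {
            env->hflags2 |= HF2_NMI_MASK;
        } else {
            env->hflags2 &= ~HF2_NMI_MASK;
        }
        if (ev->smm_inside_nmi) {
            env->hflags2 |= HF2_SMM_INSIDE_NMI_MASK;
        } else {
            env->hflags2 &= ~HF2_SMM_INSIDE_NMI_MASK;
        }
    }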