Searched refs: static_call_cond (Results 1 – 8 of 8) sorted by relevance
209 #define static_call_cond(name) (void)__static_call(name)	macro
251 #define static_call_cond(name) (void)__static_call(name)	macro
328 #define static_call_cond(name) (void)__static_call_cond(name)	macro
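For context, static_call_cond() is the NULL-tolerant form of the static call API: it forces the return value to void and leaves the call as a no-op while no target is installed, so callers need no NULL check. A minimal sketch of the usual pattern, assuming a hypothetical my_hook call (the names below are illustrative, not taken from these results):

#include <linux/static_call.h>

static void my_hook_backend(int cpu)
{
	/* Optional implementation, installed at runtime. */
}

/* Define a static call whose initial target is NULL. */
DEFINE_STATIC_CALL_NULL(my_hook, my_hook_backend);

void my_hook_register(void)
{
	/* Patch the call site(s) to jump to the real implementation. */
	static_call_update(my_hook, my_hook_backend);
}

void my_hook_invoke(int cpu)
{
	/*
	 * Direct call once a target is installed, no-op while the target
	 * is NULL; only usable for void calls, hence the (void) cast in
	 * the macro definitions above.
	 */
	static_call_cond(my_hook)(cpu);
}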
158 static_call_cond(kvm_x86_migrate_timers)(vcpu); in __kvm_migrate_timers()
728 static_call_cond(kvm_x86_hwapic_irr_update)(apic->vcpu,	in apic_clear_irr()
755 static_call_cond(kvm_x86_hwapic_isr_update)(vec);	in apic_set_isr()
800 static_call_cond(kvm_x86_hwapic_isr_update)(apic_find_highest_isr(apic));	in apic_clear_isr()
2589 static_call_cond(kvm_x86_set_virtual_apic_mode)(vcpu);	in kvm_lapic_set_base()
2699 static_call_cond(kvm_x86_apicv_pre_state_restore)(vcpu);	in kvm_lapic_reset()
2754 static_call_cond(kvm_x86_apicv_post_state_restore)(vcpu);	in kvm_lapic_reset()
2755 static_call_cond(kvm_x86_hwapic_irr_update)(vcpu, -1);	in kvm_lapic_reset()
2756 static_call_cond(kvm_x86_hwapic_isr_update)(-1);	in kvm_lapic_reset()
3013 static_call_cond(kvm_x86_apicv_pre_state_restore)(vcpu);	in kvm_apic_set_state()
3040 static_call_cond(kvm_x86_apicv_post_state_restore)(vcpu);	in kvm_apic_set_state()
[all …]
548 static_call_cond(kvm_x86_pmu_deliver_pmi)(vcpu);	in kvm_pmu_deliver_pmi()
684 static_call_cond(kvm_x86_pmu_reset)(vcpu);	in kvm_pmu_reset()
766 static_call_cond(kvm_x86_pmu_cleanup)(vcpu);	in kvm_pmu_cleanup()
4936 static_call_cond(kvm_x86_sync_pir_to_irr)(vcpu);	in kvm_vcpu_ioctl_get_lapic()
9060 static_call_cond(kvm_x86_update_emulated_instruction)(vcpu);	in x86_emulate_instruction()
10461 static_call_cond(kvm_x86_sync_pir_to_irr)(vcpu);	in vcpu_scan_ioapic()
10489 static_call_cond(kvm_x86_load_eoi_exitmap)(	in vcpu_load_eoi_exitmap()
10495 static_call_cond(kvm_x86_guest_memory_reclaimed)(kvm);	in kvm_arch_guest_memory_reclaimed()
10503 static_call_cond(kvm_x86_set_apic_access_page_addr)(vcpu);	in kvm_vcpu_reload_apic_access_page()
10745 static_call_cond(kvm_x86_sync_pir_to_irr)(vcpu);	in vcpu_enter_guest()
10796 static_call_cond(kvm_x86_sync_pir_to_irr)(vcpu);	in vcpu_enter_guest()
11543 static_call_cond(kvm_x86_post_set_cr3)(vcpu, sregs->cr3);	in __set_sregs_common()
12516 static_call_cond(kvm_x86_vm_destroy)(kvm);	in kvm_arch_destroy_vm()
[all …]
998 static_call_cond(x86_pmu_start_scheduling)(cpuc);	in x86_schedule_events()
1105 static_call_cond(x86_pmu_put_event_constraints)(cpuc, e);	in x86_schedule_events()
1111 static_call_cond(x86_pmu_stop_scheduling)(cpuc);	in x86_schedule_events()
1224 static_call_cond(x86_pmu_assign)(event, idx);	in x86_assign_hw_event()
1402 static_call_cond(x86_pmu_limit_period)(event, &left);	in x86_perf_event_set_period()
1490 static_call_cond(x86_pmu_add)(event);	in x86_pmu_add()
1643 static_call_cond(x86_pmu_put_event_constraints)(cpuc, event);	in x86_pmu_del()
1664 static_call_cond(x86_pmu_del)(event);	in x86_pmu_del()
2631 static_call_cond(x86_pmu_sched_task)(pmu_ctx, sched_in);	in x86_pmu_sched_task()
2637 static_call_cond(x86_pmu_swap_task_ctx)(prev_epc, next_epc);	in x86_pmu_swap_task_ctx()
[all …]
458 static_call_cond(apic_call_wait_icr_idle)(); in apic_wait_icr_idle()
2202 static_call_cond(kvm_x86_vcpu_blocking)(vcpu);	in kvm_arch_vcpu_blocking()
2207 static_call_cond(kvm_x86_vcpu_unblocking)(vcpu);	in kvm_arch_vcpu_unblocking()
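The kvm_x86_* hits above all route through KVM's vendor ops table, where optional callbacks may legitimately be left NULL by the VMX/SVM modules. A rough sketch of that pattern, simplified for illustration rather than copied verbatim from arch/x86/kvm (details vary across kernel versions):

#include <linux/static_call.h>

struct kvm_vcpu;

struct kvm_x86_ops {
	/* Optional callback: a vendor module may leave this NULL. */
	void (*migrate_timers)(struct kvm_vcpu *vcpu);
};

/* One static call per op, typed from the ops member, starting as NULL. */
DEFINE_STATIC_CALL_NULL(kvm_x86_migrate_timers,
			*(((struct kvm_x86_ops *)0)->migrate_timers));

/*
 * At init, patch in whatever the vendor module provided; a NULL member
 * simply leaves the conditional call as a no-op.
 */
void ops_update_example(struct kvm_x86_ops *runtime_ops)
{
	static_call_update(kvm_x86_migrate_timers,
			   runtime_ops->migrate_timers);
}

/* Call site, as in __kvm_migrate_timers() above: no NULL check needed. */
void migrate_timers_example(struct kvm_vcpu *vcpu)
{
	static_call_cond(kvm_x86_migrate_timers)(vcpu);
}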