/openbmc/linux/arch/x86/kvm/

smm.c
    206: static_call(kvm_x86_get_gdt)(vcpu, &dt);  in enter_smm_save_state_32()
    210: static_call(kvm_x86_get_idt)(vcpu, &dt);  in enter_smm_save_state_32()
    226: smram->int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu);  in enter_smm_save_state_32()
    260: static_call(kvm_x86_get_idt)(vcpu, &dt);  in enter_smm_save_state_64()
    266: static_call(kvm_x86_get_gdt)(vcpu, &dt);  in enter_smm_save_state_64()
    277: smram->int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu);  in enter_smm_save_state_64()
    307: if (static_call(kvm_x86_enter_smm)(vcpu, &smram))  in enter_smm()
    315: if (static_call(kvm_x86_get_nmi_mask)(vcpu))  in enter_smm()
    318: static_call(kvm_x86_set_nmi_mask)(vcpu, true);  in enter_smm()
    323: static_call(kvm_x86_set_interrupt_shadow)(vcpu, 0);  in enter_smm()
    [all …]

pmu.c
    395: static_call(kvm_x86_pmu_hw_event_available)(pmc) &&  in pmc_event_is_allowed()
    460: struct kvm_pmc *pmc = static_call(kvm_x86_pmu_pmc_idx_to_pmc)(pmu, bit);  in kvm_pmu_handle_event()
    482: return static_call(kvm_x86_pmu_is_valid_rdpmc_ecx)(vcpu, idx);  in kvm_pmu_is_valid_rdpmc_ecx()
    532: pmc = static_call(kvm_x86_pmu_rdpmc_ecx_to_pmc)(vcpu, idx, &mask);  in kvm_pmu_rdpmc()
    537: (static_call(kvm_x86_get_cpl)(vcpu) != 0) &&  in kvm_pmu_rdpmc()
    563: return static_call(kvm_x86_pmu_msr_idx_to_pmc)(vcpu, msr) ||  in kvm_pmu_is_valid_msr()
    564: static_call(kvm_x86_pmu_is_valid_msr)(vcpu, msr);  in kvm_pmu_is_valid_msr()
    570: struct kvm_pmc *pmc = static_call(kvm_x86_pmu_msr_idx_to_pmc)(vcpu, msr);  in kvm_pmu_mark_pmc_in_use()
    595: return static_call(kvm_x86_pmu_get_msr)(vcpu, msr_info);  in kvm_pmu_get_msr()
    654: return static_call(kvm_x86_pmu_set_msr)(vcpu, msr_info);  in kvm_pmu_set_msr()
    [all …]

kvm_cache_regs.h
    101: static_call(kvm_x86_cache_reg)(vcpu, reg);  in kvm_register_read_raw()
    141: static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_PDPTR);  in kvm_pdptr_read()
    156: static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_CR0);  in kvm_read_cr0_bits()
    178: static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_CR4);  in kvm_read_cr4_bits()
    193: static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_CR3);  in kvm_read_cr3()

x86.c
    842: if (static_call(kvm_x86_get_cpl)(vcpu) <= required_cpl)  in kvm_require_cpl()
    926: return static_call(kvm_x86_is_valid_cr0)(vcpu, cr0);  in kvm_is_valid_cr0()
    990: static_call(kvm_x86_get_cs_db_l_bits)(vcpu, &cs_db, &cs_l);  in kvm_set_cr0()
    1004: static_call(kvm_x86_set_cr0)(vcpu, cr0);  in kvm_set_cr0()
    1122: if (static_call(kvm_x86_get_cpl)(vcpu) != 0 ||  in kvm_emulate_xsetbv()
    1147: static_call(kvm_x86_is_valid_cr4)(vcpu, cr4);  in kvm_is_valid_cr4()
    1215: static_call(kvm_x86_set_cr4)(vcpu, cr4);  in kvm_set_cr4()
    1354: static_call(kvm_x86_set_dr7)(vcpu, dr7);  in kvm_update_dr7()
    1697: return static_call(kvm_x86_get_msr_feature)(msr);  in kvm_get_msr_feature()
    1776: r = static_call(kvm_x86_set_efer)(vcpu, efer);  in set_efer()
    [all …]

mmu.h
    156: static_call(kvm_x86_load_mmu_pgd)(vcpu, root_hpa,  in kvm_mmu_load_pgd()
    192: unsigned long rflags = static_call(kvm_x86_get_rflags)(vcpu);  in permission_fault()

lapic.h
    239: !static_call(kvm_x86_apic_init_signal_blocked)(vcpu);  in kvm_apic_init_sipi_allowed()

/openbmc/linux/arch/x86/include/asm/

apic.h
    399: return static_call(apic_call_read)(reg);  in apic_read()
    404: static_call(apic_call_write)(reg, val);  in apic_write()
    409: static_call(apic_call_eoi)();  in apic_eoi()
    414: static_call(apic_call_native_eoi)();  in apic_native_eoi()
    419: return static_call(apic_call_icr_read)();  in apic_icr_read()
    424: static_call(apic_call_icr_write)(low, high);  in apic_icr_write()
    429: static_call(apic_call_send_IPI)(cpu, vector);  in __apic_send_IPI()
    439: static_call(apic_call_send_IPI_mask_allbutself)(mask, vector);  in __apic_send_IPI_mask_allbutself()
    444: static_call(apic_call_send_IPI_allbutself)(vector);  in __apic_send_IPI_allbutself()
    449: static_call(apic_call_send_IPI_all)(vector);  in __apic_send_IPI_all()

paravirt.h
    31: return static_call(pv_sched_clock)();  in paravirt_sched_clock()
    45: return static_call(pv_steal_clock)(cpu);  in paravirt_steal_clock()

/openbmc/linux/include/linux/

static_call_types.h
    90: #define static_call(name) __static_call(name)  (macro)
    98: #define static_call(name) \  (macro)
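The two hits above are the configuration-dependent definitions of the static_call() macro itself; every other entry in this listing is a call site. As a minimal sketch of how the pieces fit together (the hook functions and the my_hook key below are hypothetical; DEFINE_STATIC_CALL(), static_call() and static_call_update() are the interfaces pulled in via <linux/static_call.h>):

    /*
     * Minimal illustrative sketch, not code from any of the files listed here.
     * "my_hook", "hook_default" and "hook_optimized" are made-up names.
     */
    #include <linux/static_call.h>

    static int hook_default(int x)
    {
            return x + 1;
    }

    static int hook_optimized(int x)
    {
            return x << 1;
    }

    /* Create the call key/trampoline and bind it to an initial target. */
    DEFINE_STATIC_CALL(my_hook, hook_default);

    static int use_hook(int x)
    {
            /*
             * On architectures with HAVE_STATIC_CALL this expands to a direct,
             * patchable call; the generic fallback calls through the key's
             * function pointer.
             */
            return static_call(my_hook)(x);
    }

    static void retarget_hook(void)
    {
            /* Repatch every static_call(my_hook) site to the new target. */
            static_call_update(my_hook, hook_optimized);
    }
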
entry-common.h
    423: #define irqentry_exit_cond_resched() static_call(irqentry_exit_cond_resched)()

perf_event.h
    1514: return static_call(__perf_guest_state)();  in perf_guest_state()
    1518: return static_call(__perf_guest_get_ip)();  in perf_guest_get_ip()
    1522: return static_call(__perf_guest_handle_intel_pt_intr)();  in perf_guest_handle_intel_pt_intr()

tracepoint.h
    173: static_call(tp_func_##name)(__data, args); \

/openbmc/linux/tools/include/linux/

static_call_types.h
    90: #define static_call(name) __static_call(name)  (macro)
    98: #define static_call(name) \  (macro)

/openbmc/linux/security/keys/trusted-keys/

trusted_core.c
    181: ret = static_call(trusted_key_unseal)(payload, datablob);  in trusted_instantiate()
    188: ret = static_call(trusted_key_get_random)(payload->key,  in trusted_instantiate()
    199: ret = static_call(trusted_key_seal)(payload, datablob);  in trusted_instantiate()
    268: ret = static_call(trusted_key_seal)(new_p, datablob);  in trusted_update()

/openbmc/linux/tools/objtool/

builtin-check.c
    81: OPT_BOOLEAN('t', "static-call", &opts.static_call, "annotate static calls"),
    144: opts.static_call ||  in opts_valid()

/openbmc/linux/arch/arm/include/asm/

paravirt.h
    18: return static_call(pv_steal_clock)(cpu);  in paravirt_steal_clock()

/openbmc/linux/arch/arm64/include/asm/

paravirt.h
    18: return static_call(pv_steal_clock)(cpu);  in paravirt_steal_clock()

/openbmc/linux/tools/objtool/include/objtool/

builtin.h
    25: bool static_call;  (member)

/openbmc/linux/arch/x86/events/

core.c
    702: return static_call(x86_pmu_guest_get_msrs)(nr, data);  in perf_guest_get_msrs()
    733: static_call(x86_pmu_disable_all)();  in x86_pmu_disable()
    1015: c = static_call(x86_pmu_get_event_constraints)(cpuc, i, cpuc->event_list[i]);  in x86_schedule_events()
    1358: static_call(x86_pmu_enable_all)(added);  in x86_pmu_enable()
    1468: ret = static_call(x86_pmu_schedule_events)(cpuc, n, assign);  in x86_pmu_add()
    1510: static_call(x86_pmu_set_period)(event);  in x86_pmu_start()
    1517: static_call(x86_pmu_enable)(event);  in x86_pmu_start()
    1590: static_call(x86_pmu_disable)(event);  in x86_pmu_stop()
    1602: static_call(x86_pmu_update)(event);  in x86_pmu_stop()
    1693: val = static_call(x86_pmu_update)(event);  in x86_pmu_handle_irq()
    [all …]

/openbmc/linux/arch/x86/events/amd/

core.c
    368: return static_call(amd_pmu_branch_hw_config)(event);  in amd_core_hw_config()
    543: static_call(amd_pmu_branch_reset)();  in amd_pmu_cpu_reset()
    696: if (!static_call(amd_pmu_test_overflow)(idx))  in amd_pmu_wait_on_overflow()
    819: static_call(amd_pmu_branch_add)(event);  in amd_pmu_add_event()
    827: static_call(amd_pmu_branch_del)(event);  in amd_pmu_del_event()

/openbmc/linux/arch/powerpc/kernel/

irq.c
    242: irq = static_call(ppc_get_irq)();  in __do_irq()

/openbmc/linux/kernel/

Makefile
    114: obj-$(CONFIG_HAVE_STATIC_CALL) += static_call.o

/openbmc/linux/arch/x86/kernel/

Makefile
    68: obj-y += static_call.o

/openbmc/linux/arch/x86/kvm/mmu/

spte.c
    193: spte |= static_call(kvm_x86_get_mt_mask)(vcpu, gfn,  in make_spte()

/openbmc/linux/drivers/cpufreq/

amd-pstate.c
    198: static_call(amd_pstate_update_perf)(cpudata, min_perf, des_perf,  in amd_pstate_update_perf()
    314: return static_call(amd_pstate_enable)(enable);  in amd_pstate_enable()
    405: return static_call(amd_pstate_init_perf)(cpudata);  in amd_pstate_init_perf()