
Searched refs:vcpu_is_preempted (Results 1 – 16 of 16) sorted by relevance

/openbmc/linux/arch/arm64/include/asm/
spinlock.h:21 #define vcpu_is_preempted vcpu_is_preempted macro
spinlock.h:22 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
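
The arm64 definition above is worth spelling out: the architecture exposes no paravirtualized preemption state, so the hook is a stub that always answers "not preempted". A rough sketch, not the verbatim source:

    /* arch/arm64/include/asm/spinlock.h -- approximate sketch */
    #define vcpu_is_preempted vcpu_is_preempted
    static inline bool vcpu_is_preempted(int cpu)
    {
            /* No PV preemption information on arm64: never report preempted. */
            return false;
    }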
/openbmc/linux/arch/powerpc/include/asm/
paravirt.h:105 #define vcpu_is_preempted vcpu_is_preempted macro
paravirt.h:106 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
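
The powerpc variant only matters on shared-processor LPARs. A heavily simplified approximation of the logic (the real function adds further KVM-guest and dispatch checks; is_shared_processor() and yield_count_of() are the helpers it builds on):

    /* arch/powerpc/include/asm/paravirt.h -- simplified approximation */
    #define vcpu_is_preempted vcpu_is_preempted
    static inline bool vcpu_is_preempted(int cpu)
    {
            /* Dedicated-processor partitions never see hypervisor preemption. */
            if (!is_shared_processor())
                    return false;

            /* An odd yield count means the hypervisor has the vCPU off the core. */
            if (yield_count_of(cpu) & 1)
                    return true;

            return false;
    }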
/openbmc/linux/arch/x86/include/asm/
qspinlock.h:60 #define vcpu_is_preempted vcpu_is_preempted macro
qspinlock.h:61 static inline bool vcpu_is_preempted(long cpu) in vcpu_is_preempted() function
paravirt_types.h:237 struct paravirt_callee_save vcpu_is_preempted; member
paravirt.h:608 return PVOP_ALT_CALLEE1(bool, lock.vcpu_is_preempted, cpu, in pv_vcpu_is_preempted()
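
On x86 the generic name is routed through pv_ops: qspinlock.h defines the inline wrapper, paravirt_types.h declares the callee-save slot it targets, and paravirt.h:608 is the PVOP_ALT_CALLEE1 thunk that is patched down to a constant false on bare metal. Approximately:

    /* arch/x86/include/asm/qspinlock.h -- approximate sketch,
     * only under CONFIG_PARAVIRT_SPINLOCKS */
    #define vcpu_is_preempted vcpu_is_preempted
    static inline bool vcpu_is_preempted(long cpu)
    {
            /* Indirects through pv_ops.lock.vcpu_is_preempted, which boot
             * code points at the hypervisor-specific helper. */
            return pv_vcpu_is_preempted(cpu);
    }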
/openbmc/linux/arch/x86/kernel/
paravirt-spinlocks.c:32 return pv_ops.lock.vcpu_is_preempted.func == in pv_is_native_vcpu_is_preempted()
paravirt.c:307 .lock.vcpu_is_preempted =
kvm.c:640 if (!idle_cpu(cpu) && vcpu_is_preempted(cpu)) { in kvm_smp_send_call_func_ipi()
kvm.c:823 pv_ops.lock.vcpu_is_preempted = in kvm_guest_init()
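
Reading these hits together: paravirt.c:307 installs the native default, paravirt-spinlocks.c:32 lets callers check whether the slot is still native, kvm.c:640 uses the hook to decide when to yield to a preempted IPI target, and kvm.c:823 is where a KVM guest overrides the slot once steal time is available. The KVM helper reads the preempted flag the host writes into the shared steal-time record; roughly:

    /* arch/x86/kernel/kvm.c -- approximate sketch */
    static bool __kvm_vcpu_is_preempted(long cpu)
    {
            struct kvm_steal_time *src = &per_cpu(steal_time, cpu);

            /* The host sets KVM_VCPU_PREEMPTED here whenever it
             * deschedules that vCPU. */
            return !!(src->preempted & KVM_VCPU_PREEMPTED);
    }

    /* ... later, in kvm_guest_init(), when steal time is supported: */
    pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(__kvm_vcpu_is_preempted);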
/openbmc/linux/arch/x86/hyperv/
hv_spinlock.c:84 pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(hv_vcpu_is_preempted); in hv_init_spinlocks()
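
The Hyper-V hook is registered alongside the PV qspinlock wait/kick callbacks in hv_init_spinlocks(); the helper itself appears to be a stub that always reports "not preempted":

    /* arch/x86/hyperv/hv_spinlock.c -- approximate sketch */
    static bool hv_vcpu_is_preempted(int vcpu)
    {
            /* Hyper-V does not expose per-vCPU preemption state here. */
            return false;
    }
    PV_CALLEE_SAVE_REGS_THUNK(hv_vcpu_is_preempted);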
/openbmc/linux/arch/s390/include/asm/
spinlock.h:25 #define vcpu_is_preempted arch_vcpu_is_preempted macro
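
s390 simply aliases the generic name to arch_vcpu_is_preempted(), which is implemented out of line (in arch/s390/lib/spinlock.c, outside these results) on top of the sense-running-status facility. A rough sketch of its shape; treat the details as an approximation:

    /* arch/s390/lib/spinlock.c -- rough approximation */
    bool arch_vcpu_is_preempted(int cpu)
    {
            /* A CPU sitting in enabled wait is idle, not preempted. */
            if (test_cpu_flag_of(CIF_ENABLED_WAIT, cpu))
                    return false;
            /* Still running according to the hypervisor: not preempted. */
            if (smp_vcpu_scheduled(cpu))
                    return false;
            return true;
    }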
/openbmc/linux/arch/x86/xen/
spinlock.c:144 pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(xen_vcpu_stolen); in xen_init_spinlocks()
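
Xen reuses its runstate accounting: xen_vcpu_stolen() (defined under drivers/xen/, outside these results) reports whether the vCPU is runnable but not actually running, which is exactly the preempted case. Approximately:

    /* drivers/xen/time.c -- approximate sketch */
    bool xen_vcpu_stolen(int vcpu)
    {
            /* RUNSTATE_runnable: wants to run, but the hypervisor has not
             * scheduled it, i.e. the vCPU has been preempted. */
            return per_cpu(xen_runstate, vcpu).state == RUNSTATE_runnable;
    }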
/openbmc/linux/kernel/locking/
osq_lock.c:144 vcpu_is_preempted(node_cpu(node->prev)))) in osq_lock()
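
The osq_lock() hit is a consumer rather than a provider: an optimistic spinner gives up and unqueues when the CPU it is spinning behind has had its vCPU preempted, rather than burning cycles waiting on a vCPU that is not running. The context around the hit looks roughly like:

    /* kernel/locking/osq_lock.c -- approximate context for the hit above */
    if (smp_cond_load_relaxed(&node->locked, VAL || need_resched() ||
                              vcpu_is_preempted(node_cpu(node->prev))))
            return true;    /* got the lock */

    /* otherwise fall through and unqueue this node */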
/openbmc/linux/arch/powerpc/lib/
qspinlock.c:374 if (vcpu_is_preempted(owner)) { in propagate_yield_cpu()
qspinlock.c:709 if (vcpu_is_preempted(next_cpu)) in queued_spin_lock_mcs_queue()
/openbmc/linux/arch/s390/kvm/
diag.c:191 if (!vcpu_is_preempted(tcpu_cpu)) in __diag_time_slice_end_directed()
/openbmc/linux/include/linux/
sched.h:2295 #ifndef vcpu_is_preempted
sched.h:2296 static inline bool vcpu_is_preempted(int cpu) in vcpu_is_preempted() function
sched.h:2316 return READ_ONCE(owner->on_cpu) && !vcpu_is_preempted(task_cpu(owner)); in owner_on_cpu()
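
include/linux/sched.h carries both the fallback that every architecture without its own definition picks up and owner_on_cpu(), the helper the spin-on-owner paths use. Roughly:

    /* include/linux/sched.h -- approximate sketch */
    #ifndef vcpu_is_preempted
    static inline bool vcpu_is_preempted(int cpu)
    {
            /* Default: assume bare metal, vCPUs are never preempted. */
            return false;
    }
    #endif

    static inline bool owner_on_cpu(struct task_struct *owner)
    {
            /* Stop optimistic spinning if the owner is off-CPU or its
             * (virtual) CPU has been preempted by the hypervisor. */
            return READ_ONCE(owner->on_cpu) &&
                   !vcpu_is_preempted(task_cpu(owner));
    }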
/openbmc/linux/kernel/sched/
core.c:7356 if (vcpu_is_preempted(cpu)) in available_idle_cpu()
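
Finally, the scheduler consumer: available_idle_cpu() refuses to treat a CPU as a wake-up target when its vCPU is currently preempted, even if it looks idle from inside the guest. Approximately:

    /* kernel/sched/core.c -- approximate sketch */
    int available_idle_cpu(int cpu)
    {
            if (!idle_cpu(cpu))
                    return 0;

            /* Idle inside the guest, but the host has the vCPU descheduled:
             * not actually available right now. */
            if (vcpu_is_preempted(cpu))
                    return 0;

            return 1;
    }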