
Searched refs:cpu_smt_mask (Results 1 – 12 of 12) sorted by relevance

/openbmc/linux/drivers/platform/x86/intel/ifs/
runtest.c 64 cpumask_pr_args(cpu_smt_mask(cpu)), in message_not_tested()
68 cpumask_pr_args(cpu_smt_mask(cpu))); in message_not_tested()
71 cpumask_pr_args(cpu_smt_mask(cpu)), in message_not_tested()
75 cpumask_pr_args(cpu_smt_mask(cpu)), status.data); in message_not_tested()
90 cpumask_pr_args(cpu_smt_mask(cpu)), ifsd->cur_batch, ifsd->loaded_version); in message_fail()
102 cpumask_pr_args(cpu_smt_mask(cpu)), ifsd->cur_batch, ifsd->loaded_version); in message_fail()
142 first = cpumask_first(cpu_smt_mask(cpu)); in doscan()
256 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in wait_for_sibling_cpu()
278 first = cpumask_first(cpu_smt_mask(cpu)); in do_array_test()
/openbmc/linux/arch/powerpc/include/asm/
smp.h 139 #define cpu_smt_mask cpu_smt_mask macro
141 static inline const struct cpumask *cpu_smt_mask(int cpu) in cpu_smt_mask() function
/openbmc/linux/include/linux/
topology.h 236 #if defined(CONFIG_SCHED_SMT) && !defined(cpu_smt_mask)
237 static inline const struct cpumask *cpu_smt_mask(int cpu) in cpu_smt_mask() function
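
The topology.h hit is the generic fallback definition: when CONFIG_SCHED_SMT is enabled and the architecture has not supplied its own cpu_smt_mask (as powerpc does above), it resolves to the topology sibling mask. Paraphrased sketch, believed to match lines 236ff but not copied verbatim:

#if defined(CONFIG_SCHED_SMT) && !defined(cpu_smt_mask)
static inline const struct cpumask *cpu_smt_mask(int cpu)
{
	/* default: the SMT mask is the set of hardware threads on cpu's core */
	return topology_sibling_cpumask(cpu);
}
#endif
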
/openbmc/linux/kernel/sched/
core_sched.c 242 const struct cpumask *smt_mask = cpu_smt_mask(cpu_of(rq)); in __sched_core_account_forceidle()
core.c 334 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_lock()
344 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_unlock()
364 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in __sched_core_flip()
6121 smt_mask = cpu_smt_mask(cpu); in pick_next_task()
6410 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_cpu_starting()
6449 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in sched_core_cpu_deactivate()
9673 if (cpumask_weight(cpu_smt_mask(cpu)) == 2) in sched_smt_present_inc()
9681 if (cpumask_weight(cpu_smt_mask(cpu)) == 2) in sched_smt_present_dec()
topology.c 1290 cpumask_andnot(mask, mask, cpu_smt_mask(cpu)); in init_sched_groups_capacity()
1672 { cpu_smt_mask, cpu_smt_flags, SD_INIT_NAME(SMT) },
fair.c 1358 for_each_cpu(sibling, cpu_smt_mask(cpu)) { in is_core_idle()
7297 for_each_cpu(cpu, cpu_smt_mask(core)) { in __update_idle_core()
7320 for_each_cpu(cpu, cpu_smt_mask(core)) { in select_idle_core()
7339 cpumask_andnot(cpus, cpus, cpu_smt_mask(core)); in select_idle_core()
7350 for_each_cpu_and(cpu, cpu_smt_mask(target), p->cpus_ptr) { in select_idle_smt()
11258 cpumask_andnot(swb_cpus, swb_cpus, cpu_smt_mask(cpu)); in should_we_balance()
sched.h 1280 for_each_cpu(cpu, cpu_smt_mask(cpu_of(rq))) { in sched_core_cookie_match()
/openbmc/linux/kernel/
stop_machine.c 637 const struct cpumask *smt_mask = cpu_smt_mask(cpu); in stop_core_cpuslocked()
workqueue.c 6798 return cpumask_test_cpu(cpu0, cpu_smt_mask(cpu1)); in cpus_share_smt()
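
The workqueue hit reduces SMT-sibling detection to a single mask test: two CPUs share a core exactly when one is set in the other's cpu_smt_mask(). A hedged standalone sketch of that check (example_cpus_share_smt() is an illustrative name; the kernel's own helper is the cpus_share_smt() shown above):

#include <linux/cpumask.h>
#include <linux/topology.h>

static bool example_cpus_share_smt(int cpu0, int cpu1)
{
	/* cpu_smt_mask(cpu1) holds every hardware thread on cpu1's core */
	return cpumask_test_cpu(cpu0, cpu_smt_mask(cpu1));
}
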
/openbmc/linux/arch/x86/kernel/
smpboot.c 618 cpu_smt_mask, x86_smt_flags, SD_INIT_NAME(SMT) in build_sched_topology()
/openbmc/linux/arch/powerpc/kernel/
smp.c 1050 { cpu_smt_mask, powerpc_smt_flags, SD_INIT_NAME(SMT) },
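
Across the scheduler hits (is_core_idle(), __update_idle_core(), select_idle_core()), the common consumer pattern is iterating every hardware thread that shares a core with a given CPU. A simplified, hedged sketch of that pattern, loosely modeled on kernel/sched/fair.c's is_core_idle() but not verbatim:

#include <linux/cpumask.h>
#include <linux/sched.h>
#include <linux/topology.h>

/* Returns true when every SMT sibling of @cpu (excluding @cpu itself) is idle. */
static bool example_core_is_idle(int cpu)
{
	int sibling;

	for_each_cpu(sibling, cpu_smt_mask(cpu)) {
		if (sibling == cpu)
			continue;		/* skip the CPU being asked about */
		if (!idle_cpu(sibling))
			return false;		/* one busy sibling makes the core busy */
	}

	return true;
}
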