Symbol: sched_group_span
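sched_group_span() returns the cpumask of CPUs covered by a sched_group, the basic unit the load balancer moves tasks between. Every site below either iterates that mask, intersects it with a constraint mask, or copies it while building the topology.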
kernel/sched/ext_idle.c:292: return sched_group_span(sg);
kernel/sched/fair.c:7457: return cpumask_first(sched_group_span(group));
kernel/sched/fair.c:7460: for_each_cpu_and(i, sched_group_span(group), p->cpus_ptr) {
kernel/sched/fair.c:7724: for_each_cpu_wrap(cpu, sched_group_span(sg), target + 1) {
kernel/sched/fair.c:7740: cpumask_andnot(cpus, cpus, sched_group_span(sg));
kernel/sched/fair.c:10157: for_each_cpu(cpu, sched_group_span(sdg)) {
kernel/sched/fair.c:10477: for_each_cpu_and(i, sched_group_span(group), env->cpus) {
kernel/sched/fair.c:10796: for_each_cpu_and(i, sched_group_span(group), p->cpus_ptr) {
kernel/sched/fair.c:10913: if (!cpumask_intersects(sched_group_span(group),
kernel/sched/fair.c:10922: sched_group_span(group));
kernel/sched/fair.c:11020: idlest_cpu = cpumask_first(sched_group_span(idlest));
kernel/sched/fair.c:11035: sched_group_span(local));
kernel/sched/fair.c:11151: local_group = cpumask_test_cpu(env->dst_cpu, sched_group_span(sg));
kernel/sched/fair.c:11555: for_each_cpu_and(i, sched_group_span(group), env->cpus) {
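The fair.c sites share one idiom: intersect the group's span with a constraining mask (the task's affinity p->cpus_ptr, or the balance mask env->cpus) and walk only the overlap. A minimal sketch of that pattern, assuming kernel context; the helper name is hypothetical, but the loop mirrors the call sites at fair.c:7457 and fair.c:7460 above:

    /*
     * Hypothetical helper: visit only the CPUs that are both in the
     * group's span and allowed by @p's affinity mask.
     */
    static int first_allowed_idle_cpu(struct sched_group *group,
                                      struct task_struct *p)
    {
            int i;

            for_each_cpu_and(i, sched_group_span(group), p->cpus_ptr) {
                    if (idle_cpu(i))
                            return i;
            }

            /* No allowed idle CPU; fall back to the group's first CPU. */
            return cpumask_first(sched_group_span(group));
    }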
kernel/sched/sched.h:1431: static inline struct cpumask *sched_group_span(struct sched_group *sg)
kernel/sched/sched.h:1522: for_each_cpu_and(cpu, sched_group_span(group), p->cpus_ptr) {
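The sched.h hit at 1431 is the inline definition itself; the index captures only its first line. As a sketch of how it reads in recent mainline (struct fields abridged, exact layout varies by version): the span lives in a flexible array at the tail of struct sched_group, sized at allocation for the booted CPU count, and the accessor simply casts it to a struct cpumask.

    /* Abridged sketch after kernel/sched/sched.h; fields vary by version. */
    struct sched_group {
            struct sched_group      *next;  /* Must be a circular list */
            atomic_t                ref;
            unsigned int            group_weight;
            struct sched_group_capacity *sgc;
            /* ... */
            /*
             * The CPUs this group covers. Variable length: extra space
             * is attached to the end of the structure at allocation,
             * depending on how many CPUs are present.
             */
            unsigned long           cpumask[];
    };

    static inline struct cpumask *sched_group_span(struct sched_group *sg)
    {
            return to_cpumask(sg->cpumask);
    }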
kernel/sched/topology.c:59: if (group && !cpumask_test_cpu(cpu, sched_group_span(group))) {
kernel/sched/topology.c:86: if (cpumask_empty(sched_group_span(group))) {
kernel/sched/topology.c:93: cpumask_intersects(groupmask, sched_group_span(group))) {
kernel/sched/topology.c:99: cpumask_or(groupmask, groupmask, sched_group_span(group));
kernel/sched/topology.c:103: cpumask_pr_args(sched_group_span(group)));
kernel/sched/topology.c:106: !cpumask_equal(group_balance_mask(group), sched_group_span(group))) {
kernel/sched/topology.c:116: sched_group_span(group))) {
kernel/sched/topology.c:918: const struct cpumask *sg_span = sched_group_span(sg);
kernel/sched/topology.c:964: sg_span = sched_group_span(sg);
kernel/sched/topology.c:998: sg_span = sched_group_span(sg);
kernel/sched/topology.c:1101: sg_span = sched_group_span(sg);
kernel/sched/topology.c:1218: cpumask_copy(sched_group_span(sg), sched_domain_span(child));
kernel/sched/topology.c:1219: cpumask_copy(group_balance_mask(sg), sched_group_span(sg));
kernel/sched/topology.c:1222: cpumask_set_cpu(cpu, sched_group_span(sg));
kernel/sched/topology.c:1226: sg->sgc->capacity = SCHED_CAPACITY_SCALE * cpumask_weight(sched_group_span(sg));
kernel/sched/topology.c:1262: cpumask_or(covered, covered, sched_group_span(sg));
kernel/sched/topology.c:1296: sg->group_weight = cpumask_weight(sched_group_span(sg));
kernel/sched/topology.c:1298: cpumask_copy(mask, sched_group_span(sg));
kernel/sched/topology.c:1310: for_each_cpu(cpu, sched_group_span(sg)) {
kernel/sched/topology.c:1373: for_each_cpu(group_cpu, sched_group_span(sg)) {
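The topology.c references split into debug checks (lines 59-116, validating that every group is non-empty, covers its domain exactly once, and keeps its balance mask consistent with its span) and group construction (lines 918 onward). Lines 1218-1226 seed the span, in get_group() in current trees: a group inherits its child domain's span, or covers just one CPU at the lowest level, and its raw capacity scales with the span's weight. Paraphrased from those call sites, with surrounding logic elided:

    /* Sketch of the pattern at topology.c:1218-1226 (from get_group()). */
    if (child) {
            /* Non-leaf level: the group spans its child domain. */
            cpumask_copy(sched_group_span(sg), sched_domain_span(child));
            cpumask_copy(group_balance_mask(sg), sched_group_span(sg));
    } else {
            /* Lowest level: one CPU per group. */
            cpumask_set_cpu(cpu, sched_group_span(sg));
            cpumask_set_cpu(cpu, group_balance_mask(sg));
    }

    /* Initial capacity: SCHED_CAPACITY_SCALE per CPU in the span. */
    sg->sgc->capacity = SCHED_CAPACITY_SCALE * cpumask_weight(sched_group_span(sg));

The equality the debug path asserts at topology.c:106 follows directly: for non-overlapping domains, group_balance_mask() and sched_group_span() are copies of the same mask.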