Lines matching refs: CAKE_QUEUES
80 #define CAKE_QUEUES (1024) macro
151 struct cake_flow flows[CAKE_QUEUES];
152 u32 backlogs[CAKE_QUEUES];
153 u32 tags[CAKE_QUEUES]; /* for set association */
154 u16 overflow_idx[CAKE_QUEUES];
155 struct cake_host hosts[CAKE_QUEUES]; /* for triple isolation */
205 struct cake_heap_entry overflow_heap[CAKE_QUEUES * CAKE_MAX_TINS];
300 static u16 quantum_div[CAKE_QUEUES + 1] = {0};
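CAKE_QUEUES fixes the per-tin flow count at 1024, and every array above is statically sized by it; the overflow heap spans all tins at once. The sketch below restates that layout with the structs trimmed to just the members listed here (CAKE_MAX_TINS is 8 in the same file; all other fields omitted):

    #define CAKE_QUEUES   (1024)
    #define CAKE_MAX_TINS (8)

    struct cake_tin_data {                        /* trimmed to the fields above */
            struct cake_flow flows[CAKE_QUEUES];  /* one queue per hashed flow */
            u32 backlogs[CAKE_QUEUES];            /* bytes queued in each flow */
            u32 tags[CAKE_QUEUES];                /* full hash, for set association */
            u16 overflow_idx[CAKE_QUEUES];        /* each flow's slot in the drop heap */
            struct cake_host hosts[CAKE_QUEUES];  /* per-host state, triple isolation */
    };

    struct cake_sched_data {                      /* likewise trimmed */
            struct cake_tin_data *tins;
            /* one heap entry per (tin, flow) pair across the whole qdisc */
            struct cake_heap_entry overflow_heap[CAKE_QUEUES * CAKE_MAX_TINS];
    };

quantum_div[] at line 300 is sized CAKE_QUEUES + 1 so an active-flow count of 1..1024 can index it directly; cake_init() fills it (see the sketch after the line 2766 entry below).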
661 q->hosts[flow->srchost].srchost_bulk_flow_count < CAKE_QUEUES)) in cake_inc_srchost_bulk_flow_count()
679 q->hosts[flow->dsthost].dsthost_bulk_flow_count < CAKE_QUEUES)) in cake_inc_dsthost_bulk_flow_count()
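The two increment helpers at 661/679 saturate the per-host bulk-flow counters at CAKE_QUEUES: a single host can never own more bulk flows than there are flow queues in a tin, so counting past that would be meaningless. A sketch of the source-host side, assuming the cake_dsrc() flow-mode check from the same file (the dsthost variant at 679 mirrors it):

    static void cake_inc_srchost_bulk_flow_count(struct cake_tin_data *q,
                                                 struct cake_flow *flow,
                                                 int flow_mode)
    {
            /* saturate at CAKE_QUEUES == 1024 rather than counting on */
            if (likely(cake_dsrc(flow_mode) &&
                       q->hosts[flow->srchost].srchost_bulk_flow_count < CAKE_QUEUES))
                    q->hosts[flow->srchost].srchost_bulk_flow_count++;
    }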
796 reduced_hash = flow_hash % CAKE_QUEUES; in cake_hash()
859 srchost_idx = srchost_hash % CAKE_QUEUES; in cake_hash()
883 dsthost_idx = dsthost_hash % CAKE_QUEUES; in cake_hash()
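The three hash sites (796, 859, 883) fold full 32-bit hashes into table indices by taking them modulo CAKE_QUEUES, giving one flow-queue index and two host indices in 0..1023. A minimal sketch of that reduction, leaving out the set-associative collision handling that follows in cake_hash():

    u32 flow_hash    = flow_hash_from_keys(&keys);   /* 32-bit flow hash */
    u32 reduced_hash = flow_hash % CAKE_QUEUES;       /* 0..1023: index into flows[] */

    u32 srchost_idx  = srchost_hash % CAKE_QUEUES;    /* index into hosts[] (source) */
    u32 dsthost_idx  = dsthost_hash % CAKE_QUEUES;    /* index into hosts[] (destination) */

Since 1024 is a power of two, the modulo keeps just the low 10 bits of the hash; the full hash is stored in tags[] (line 153) so later lookups can tell a genuine match from a collision.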
1478 static const u32 a = CAKE_MAX_TINS * CAKE_QUEUES; in cake_heapify()
1515 while (i > 0 && i < CAKE_MAX_TINS * CAKE_QUEUES) { in cake_heapify_up()
1575 for (i = CAKE_MAX_TINS * CAKE_QUEUES / 2; i >= 0; i--) in cake_drop()
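The heap sites (1478, 1515, 1575) treat overflow_heap[] as one binary max-heap of CAKE_MAX_TINS * CAKE_QUEUES entries keyed by per-queue backlog; cake_drop() rebuilds it bottom-up from the last internal node (size / 2) before dropping from the fattest queue at the root. Below is a correct-by-construction sift-down sketch rather than the kernel's exact loop; cake_heap_get_backlog() and cake_heap_swap() are the helpers the real code uses:

    static void cake_heapify(struct cake_sched_data *q, u16 i)
    {
            static const u32 a = CAKE_MAX_TINS * CAKE_QUEUES;  /* total heap size */

            while (i < a) {
                    u32 l = 2 * i + 1;                          /* left child  */
                    u32 r = l + 1;                              /* right child */
                    u32 largest = i;
                    u32 best = cake_heap_get_backlog(q, i);

                    if (l < a && cake_heap_get_backlog(q, l) > best) {
                            largest = l;
                            best = cake_heap_get_backlog(q, l);
                    }
                    if (r < a && cake_heap_get_backlog(q, r) > best)
                            largest = r;
                    if (largest == i)
                            break;                              /* heap property holds */

                    cake_heap_swap(q, i, largest);              /* also updates overflow_idx[] */
                    i = largest;
            }
    }

cake_heapify_up() at 1515 is the mirror operation, walking an entry toward the root instead of away from it.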
1743 if (TC_H_MIN(res.classid) <= CAKE_QUEUES) in cake_classify()
1745 if (TC_H_MAJ(res.classid) <= (CAKE_QUEUES << 16)) in cake_classify()
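At 1743/1745 a tc filter result can override the hash: the classid's minor number picks a flow queue (valid values 1..CAKE_QUEUES) and its major number, shifted down 16 bits, picks a host index, each applied only if it passes its bound check. A sketch of the surrounding logic in cake_classify() as I read it, with flow/host defaulting to 0, meaning "no override":

    u16 flow = 0, host = 0;

    if (TC_H_MIN(res.classid) <= CAKE_QUEUES)           /* minor: flow queue override */
            flow = TC_H_MIN(res.classid);
    if (TC_H_MAJ(res.classid) <= (CAKE_QUEUES << 16))   /* major: host index override */
            host = TC_H_MAJ(res.classid) >> 16;

    return cake_hash(*t, skb, flow_mode, flow, host) + 1;  /* class handles are 1-based */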
1983 for (q->cur_flow = 0; q->cur_flow < CAKE_QUEUES; q->cur_flow++) in cake_clear_tin()
2748 for (i = 1; i <= CAKE_QUEUES; i++) in cake_init()
2766 for (j = 0; j < CAKE_QUEUES; j++) { in cake_init()
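The two init loops (2748, 2766) set up the tables above: the first fills quantum_div[] with a 16-bit reciprocal per possible flow count, the second visits every (tin, flow) pair to initialise the flow and record its slot in the overflow heap. A sketch under those assumptions; the heap-entry member names (.t, .b) and the 65535 constant are as I recall them from the source, not guaranteed:

    /* reciprocal table: quantum_div[n] is roughly 65535 / n */
    for (i = 1; i <= CAKE_QUEUES; i++)
            quantum_div[i] = 65535 / i;

    for (i = 0; i < CAKE_MAX_TINS; i++) {
            struct cake_tin_data *b = q->tins + i;

            for (j = 0; j < CAKE_QUEUES; j++) {
                    struct cake_flow *flow = b->flows + j;
                    u32 k = j * CAKE_MAX_TINS + i;     /* interleave tins in the heap */

                    INIT_LIST_HEAD(&flow->flowchain);
                    q->overflow_heap[k].t = i;         /* heap slot -> (tin, flow) */
                    q->overflow_heap[k].b = j;
                    b->overflow_idx[j] = k;            /* flow -> heap slot, kept in sync */
            }
    }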
3005 if (idx < CAKE_QUEUES * q->tin_cnt) { in cake_dump_class_stats()
3007 &q->tins[q->tin_order[idx / CAKE_QUEUES]]; in cake_dump_class_stats()
3010 flow = &b->flows[idx % CAKE_QUEUES]; in cake_dump_class_stats()
3021 qs.backlog = b->backlogs[idx % CAKE_QUEUES]; in cake_dump_class_stats()
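cake_dump_class_stats() (3005-3021) decodes a linear class index into a (tin, flow) pair: idx / CAKE_QUEUES selects the tin through the tin_order[] permutation, and idx % CAKE_QUEUES selects the flow and its backlog counter within that tin. A worked decomposition:

    /* tin-major, flow-minor layout; with CAKE_QUEUES == 1024 and tin_cnt == 3:
     *   idx = 2050  ->  tin index  2050 / 1024 = 2  (third tin in tin_order[])
     *                   flow index 2050 % 1024 = 2
     * anything >= CAKE_QUEUES * tin_cnt (3072 here) fails the bound check
     */
    u32 tin  = idx / CAKE_QUEUES;
    u32 flow = idx % CAKE_QUEUES;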
3081 for (j = 0; j < CAKE_QUEUES; j++) { in cake_walk()
3086 if (!tc_qdisc_stats_dump(sch, i * CAKE_QUEUES + j + 1, in cake_walk()
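cake_walk() (3081-3086) is the inverse mapping: it enumerates every (tin, flow) pair and reports the 1-based class handle i * CAKE_QUEUES + j + 1 (0 is reserved for "not classified", which is also why cake_classify() returns the flow index plus one). The round trip, assuming cake_dump_class_stats() starts from idx = cl - 1:

    u32 handle = i * CAKE_QUEUES + j + 1;   /* 1-based handle for (tin i, flow j) */
    u32 idx    = handle - 1;                /* what cake_dump_class_stats() works from */
    /* idx / CAKE_QUEUES == i  and  idx % CAKE_QUEUES == j */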