Lines matching refs: q. Each entry gives the source line number, the matching code, the enclosing function, and the role of q there (local variable or function argument).

99 		struct gred_sched_data *q = table->tab[i];  in gred_wred_mode_check()  local
102 if (q == NULL) in gred_wred_mode_check()
106 if (table->tab[n] && table->tab[n]->prio == q->prio) in gred_wred_mode_check()
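These references are from the GRED queueing discipline (net/sched/sch_gred.c in the Linux kernel). Read together, the three lines above implement a duplicate-priority scan: WRED mode is only valid when no two virtual queues (DPs) share a priority. A sketch of the surrounding function, reconstructed around the listed lines; the loop bounds and return convention are assumptions:

    static int gred_wred_mode_check(struct Qdisc *sch)
    {
        struct gred_sched *table = qdisc_priv(sch);
        int i, n;

        /* O(n^2) scan: two virtual queues with the same prio
         * would make a shared WRED average ambiguous. */
        for (i = 0; i < table->DPs; i++) {
            struct gred_sched_data *q = table->tab[i];

            if (q == NULL)
                continue;

            for (n = i + 1; n < table->DPs; n++)
                if (table->tab[n] && table->tab[n]->prio == q->prio)
                    return 1;
        }
        return 0;
    }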
114 struct gred_sched_data *q, in gred_backlog() argument
120 return q->backlog; in gred_backlog()
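gred_backlog() picks which backlog feeds the RED average: the qdisc-wide count when all virtual queues share one WRED state, the per-queue count otherwise. A minimal sketch, assuming the gred_wred_mode() test used elsewhere in this file:

    static unsigned int gred_backlog(struct gred_sched *table,
                                     struct gred_sched_data *q,
                                     struct Qdisc *sch)
    {
        if (gred_wred_mode(table))
            return sch->qstats.backlog;   /* shared across all VQs */
        else
            return q->backlog;            /* per-virtual-queue */
    }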
129 struct gred_sched_data *q) in gred_load_wred_set() argument
131 q->vars.qavg = table->wred_set.qavg; in gred_load_wred_set()
132 q->vars.qidlestart = table->wred_set.qidlestart; in gred_load_wred_set()
136 struct gred_sched_data *q) in gred_store_wred_set() argument
138 table->wred_set.qavg = q->vars.qavg; in gred_store_wred_set()
139 table->wred_set.qidlestart = q->vars.qidlestart; in gred_store_wred_set()
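This pair shuttles the shared WRED averaging state in and out of a virtual queue's private red_vars, so the generic RED helpers can operate on q->vars regardless of mode; gred_enqueue() brackets its average update with them (lines 210 and 220 below). A sketch with the signatures filled in as assumptions:

    static void gred_load_wred_set(const struct gred_sched *table,
                                   struct gred_sched_data *q)
    {
        q->vars.qavg = table->wred_set.qavg;
        q->vars.qidlestart = table->wred_set.qidlestart;
    }

    static void gred_store_wred_set(struct gred_sched *table,
                                    struct gred_sched_data *q)
    {
        table->wred_set.qavg = q->vars.qavg;
        table->wred_set.qidlestart = q->vars.qidlestart;
    }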
142 static int gred_use_ecn(struct gred_sched_data *q) in gred_use_ecn() argument
144 return q->red_flags & TC_RED_ECN; in gred_use_ecn()
147 static int gred_use_harddrop(struct gred_sched_data *q) in gred_use_harddrop() argument
149 return q->red_flags & TC_RED_HARDDROP; in gred_use_harddrop()
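Both predicates test per-virtual-queue flags; line 500 below shows a freshly allocated VQ inheriting them from the table, masked by GRED_VQ_RED_FLAGS. Spelled out, with the flag semantics from the TC uapi added as comments:

    static int gred_use_ecn(struct gred_sched_data *q)
    {
        /* TC_RED_ECN: mark ECN-capable packets instead of dropping */
        return q->red_flags & TC_RED_ECN;
    }

    static int gred_use_harddrop(struct gred_sched_data *q)
    {
        /* TC_RED_HARDDROP: past qth_max, drop even ECN-capable packets */
        return q->red_flags & TC_RED_HARDDROP;
    }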
168 struct gred_sched_data *q = NULL; in gred_enqueue() local
173 if (dp >= t->DPs || (q = t->tab[dp]) == NULL) { in gred_enqueue()
176 q = t->tab[dp]; in gred_enqueue()
177 if (!q) { in gred_enqueue()
199 if (t->tab[i] && t->tab[i]->prio < q->prio && in gred_enqueue()
206 q->packetsin++; in gred_enqueue()
207 q->bytesin += qdisc_pkt_len(skb); in gred_enqueue()
210 gred_load_wred_set(t, q); in gred_enqueue()
212 q->vars.qavg = red_calc_qavg(&q->parms, in gred_enqueue()
213 &q->vars, in gred_enqueue()
214 gred_backlog(t, q, sch)); in gred_enqueue()
216 if (red_is_idling(&q->vars)) in gred_enqueue()
217 red_end_of_idle_period(&q->vars); in gred_enqueue()
220 gred_store_wred_set(t, q); in gred_enqueue()
222 switch (red_action(&q->parms, &q->vars, q->vars.qavg + qavg)) { in gred_enqueue()
228 if (!gred_use_ecn(q) || !INET_ECN_set_ce(skb)) { in gred_enqueue()
229 q->stats.prob_drop++; in gred_enqueue()
233 q->stats.prob_mark++; in gred_enqueue()
238 if (gred_use_harddrop(q) || !gred_use_ecn(q) || in gred_enqueue()
240 q->stats.forced_drop++; in gred_enqueue()
243 q->stats.forced_mark++; in gred_enqueue()
247 if (gred_backlog(t, q, sch) + qdisc_pkt_len(skb) <= q->limit) { in gred_enqueue()
248 q->backlog += qdisc_pkt_len(skb); in gred_enqueue()
252 q->stats.pdrop++; in gred_enqueue()
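In order, the gred_enqueue() references trace the whole admission path: resolve the VQ from skb->tc_index with a fall-back to the default DP (lines 173-177), in RIO mode fold in the averages of VQs with numerically lower prio (line 199), update the RED average (lines 210-220), act on red_action() (lines 222-243), and finally tail-admit or drop against the VQ limit (lines 247-252). A condensed sketch of that flow; the drop labels, locals, and the RIO-mode guard are reconstructions:

    unsigned long qavg = 0;
    int i;

    /* RIO mode: fold in the averages of more important VQs */
    if (!gred_wred_mode(t) && gred_rio_mode(t))
        for (i = 0; i < t->DPs; i++)
            if (t->tab[i] && t->tab[i]->prio < q->prio &&
                !red_is_idling(&t->tab[i]->vars))
                qavg += t->tab[i]->vars.qavg;

    q->packetsin++;
    q->bytesin += qdisc_pkt_len(skb);

    if (gred_wred_mode(t))                  /* shared average in */
        gred_load_wred_set(t, q);

    q->vars.qavg = red_calc_qavg(&q->parms, &q->vars,
                                 gred_backlog(t, q, sch));

    if (red_is_idling(&q->vars))
        red_end_of_idle_period(&q->vars);

    if (gred_wred_mode(t))                  /* shared average out */
        gred_store_wred_set(t, q);

    switch (red_action(&q->parms, &q->vars, q->vars.qavg + qavg)) {
    case RED_DONT_MARK:
        break;
    case RED_PROB_MARK:     /* average between qth_min and qth_max */
        if (!gred_use_ecn(q) || !INET_ECN_set_ce(skb)) {
            q->stats.prob_drop++;
            goto congestion_drop;
        }
        q->stats.prob_mark++;
        break;
    case RED_HARD_MARK:     /* average at or beyond qth_max */
        if (gred_use_harddrop(q) || !gred_use_ecn(q) ||
            !INET_ECN_set_ce(skb)) {
            q->stats.forced_drop++;
            goto congestion_drop;
        }
        q->stats.forced_mark++;
        break;
    }

    if (gred_backlog(t, q, sch) + qdisc_pkt_len(skb) <= q->limit) {
        q->backlog += qdisc_pkt_len(skb);
        return qdisc_enqueue_tail(skb, sch);
    }
    q->stats.pdrop++;       /* over the VQ limit: plain tail drop */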
269 struct gred_sched_data *q; in gred_dequeue() local
272 if (dp >= t->DPs || (q = t->tab[dp]) == NULL) { in gred_dequeue()
276 q->backlog -= qdisc_pkt_len(skb); in gred_dequeue()
282 if (!q->backlog) in gred_dequeue()
283 red_start_of_idle_period(&q->vars); in gred_dequeue()
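On dequeue the VQ is recovered the same way, from skb->tc_index; its private backlog is decremented, and an emptied queue starts a RED idle period so the average decays during silence. A sketch of the non-WRED path (in WRED mode the kernel checks the qdisc-wide backlog and the shared wred_set instead):

    skb = qdisc_dequeue_head(sch);
    if (skb) {
        u16 dp = tc_index_to_dp(skb);    /* skb->tc_index & GRED_VQ_MASK */

        if (dp < t->DPs && (q = t->tab[dp]) != NULL) {
            q->backlog -= qdisc_pkt_len(skb);

            if (!q->backlog)
                red_start_of_idle_period(&q->vars);
        }
        return skb;
    }
    return NULL;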
301 struct gred_sched_data *q = t->tab[i]; in gred_reset() local
303 if (!q) in gred_reset()
306 red_restart(&q->vars); in gred_reset()
307 q->backlog = 0; in gred_reset()
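gred_reset() flushes the real queue, then rewinds every virtual queue's RED state. A sketch assuming the usual qdisc_reset_queue() helper:

    static void gred_reset(struct Qdisc *sch)
    {
        struct gred_sched *t = qdisc_priv(sch);
        int i;

        qdisc_reset_queue(sch);          /* drop all queued skbs */

        for (i = 0; i < t->DPs; i++) {
            struct gred_sched_data *q = t->tab[i];

            if (!q)
                continue;
            red_restart(&q->vars);       /* clear qavg and idle marks */
            q->backlog = 0;
        }
    }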
334 struct gred_sched_data *q = table->tab[i]; in gred_offload() local
336 if (!q) in gred_offload()
339 opt->set.tab[i].limit = q->limit; in gred_offload()
340 opt->set.tab[i].prio = q->prio; in gred_offload()
341 opt->set.tab[i].min = q->parms.qth_min >> q->parms.Wlog; in gred_offload()
342 opt->set.tab[i].max = q->parms.qth_max >> q->parms.Wlog; in gred_offload()
343 opt->set.tab[i].is_ecn = gred_use_ecn(q); in gred_offload()
344 opt->set.tab[i].is_harddrop = gred_use_harddrop(q); in gred_offload()
345 opt->set.tab[i].probability = q->parms.max_P; in gred_offload()
346 opt->set.tab[i].backlog = &q->backlog; in gred_offload()
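gred_offload() mirrors each configured VQ into the offload request handed to the driver. Two details worth noting: qth_min/qth_max are stored pre-scaled by 2^Wlog, so they are shifted back to plain units here, and the driver receives a pointer to q->backlog so hardware counters can be reported in place. A sketch of the fill loop; the present flag and the MAX_DPs bound are assumptions taken from the surrounding kernel headers:

    for (i = 0; i < MAX_DPs; i++) {
        struct gred_sched_data *q = table->tab[i];

        if (!q)
            continue;
        opt->set.tab[i].present = true;
        opt->set.tab[i].limit = q->limit;
        opt->set.tab[i].prio = q->prio;
        /* unscale the Wlog-shifted thresholds for the driver */
        opt->set.tab[i].min = q->parms.qth_min >> q->parms.Wlog;
        opt->set.tab[i].max = q->parms.qth_max >> q->parms.Wlog;
        opt->set.tab[i].is_ecn = gred_use_ecn(q);
        opt->set.tab[i].is_harddrop = gred_use_harddrop(q);
        opt->set.tab[i].probability = q->parms.max_P;
        opt->set.tab[i].backlog = &q->backlog;   /* written by the driver */
    }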
403 static inline void gred_destroy_vq(struct gred_sched_data *q) in gred_destroy_vq() argument
405 kfree(q); in gred_destroy_vq()
488 struct gred_sched_data *q = table->tab[dp]; in gred_change_vq() local
495 if (!q) { in gred_change_vq()
496 table->tab[dp] = q = *prealloc; in gred_change_vq()
498 if (!q) in gred_change_vq()
500 q->red_flags = table->red_flags & GRED_VQ_RED_FLAGS; in gred_change_vq()
503 q->DP = dp; in gred_change_vq()
504 q->prio = prio; in gred_change_vq()
506 q->limit = sch->limit; in gred_change_vq()
508 q->limit = ctl->limit; in gred_change_vq()
510 if (q->backlog == 0) in gred_change_vq()
511 red_end_of_idle_period(&q->vars); in gred_change_vq()
513 red_set_parms(&q->parms, in gred_change_vq()
516 red_set_vars(&q->vars); in gred_change_vq()
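gred_change_vq() either reuses an existing VQ or adopts the caller's preallocated one, then (re)applies the RED parameters. The limit is clamped to the qdisc-wide limit (lines 506/508 above), and an idle VQ has its idle period ended so the next average starts clean. A condensed sketch; the error handling and the stab/max_P plumbing are assumptions:

    struct gred_sched_data *q = table->tab[dp];

    if (!q) {                            /* first configuration of this DP */
        table->tab[dp] = q = *prealloc;
        *prealloc = NULL;                /* ownership moves into the table */
        if (!q)
            return -ENOMEM;
        q->red_flags = table->red_flags & GRED_VQ_RED_FLAGS;
    }

    q->DP = dp;
    q->prio = prio;
    if (ctl->limit > sch->limit)
        q->limit = sch->limit;           /* clamp to the qdisc limit */
    else
        q->limit = ctl->limit;

    if (q->backlog == 0)
        red_end_of_idle_period(&q->vars);

    red_set_parms(&q->parms, ctl->qth_min, ctl->qth_max, ctl->Wlog,
                  ctl->Plog, ctl->Scell_log, stab, max_P);
    red_set_vars(&q->vars);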
791 struct gred_sched_data *q = table->tab[i]; in gred_dump() local
793 max_p[i] = q ? q->parms.max_P : 0; in gred_dump()
807 struct gred_sched_data *q = table->tab[i]; in gred_dump() local
813 if (!q) { in gred_dump()
822 opt.limit = q->limit; in gred_dump()
823 opt.DP = q->DP; in gred_dump()
824 opt.backlog = gred_backlog(table, q, sch); in gred_dump()
825 opt.prio = q->prio; in gred_dump()
826 opt.qth_min = q->parms.qth_min >> q->parms.Wlog; in gred_dump()
827 opt.qth_max = q->parms.qth_max >> q->parms.Wlog; in gred_dump()
828 opt.Wlog = q->parms.Wlog; in gred_dump()
829 opt.Plog = q->parms.Plog; in gred_dump()
830 opt.Scell_log = q->parms.Scell_log; in gred_dump()
831 opt.early = q->stats.prob_drop; in gred_dump()
832 opt.forced = q->stats.forced_drop; in gred_dump()
833 opt.pdrop = q->stats.pdrop; in gred_dump()
834 opt.packets = q->packetsin; in gred_dump()
835 opt.bytesin = q->bytesin; in gred_dump()
838 gred_load_wred_set(table, q); in gred_dump()
840 qavg = red_calc_qavg(&q->parms, &q->vars, in gred_dump()
841 q->vars.qavg >> q->parms.Wlog); in gred_dump()
842 opt.qave = qavg >> q->parms.Wlog; in gred_dump()
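The legacy dump rebuilds a struct tc_gred_qopt per VQ. The subtle part is lines 838-842: in WRED mode the shared average is loaded first, and since qavg is kept left-shifted by Wlog internally, it is recomputed through red_calc_qavg() (so idle-time decay shows up) and shifted back before being reported. That step in isolation:

    if (gred_wred_mode(table))
        gred_load_wred_set(table, q);    /* report the shared average */

    /* qavg is stored scaled by 2^Wlog; refresh it against the current
     * idle time, then unscale it for userspace */
    qavg = red_calc_qavg(&q->parms, &q->vars,
                         q->vars.qavg >> q->parms.Wlog);
    opt.qave = qavg >> q->parms.Wlog;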
857 struct gred_sched_data *q = table->tab[i]; in gred_dump() local
860 if (!q) in gred_dump()
867 if (nla_put_u32(skb, TCA_GRED_VQ_DP, q->DP)) in gred_dump()
870 if (nla_put_u32(skb, TCA_GRED_VQ_FLAGS, q->red_flags)) in gred_dump()
874 if (nla_put_u64_64bit(skb, TCA_GRED_VQ_STAT_BYTES, q->bytesin, in gred_dump()
877 if (nla_put_u32(skb, TCA_GRED_VQ_STAT_PACKETS, q->packetsin)) in gred_dump()
880 gred_backlog(table, q, sch))) in gred_dump()
883 q->stats.prob_drop)) in gred_dump()
886 q->stats.prob_mark)) in gred_dump()
889 q->stats.forced_drop)) in gred_dump()
892 q->stats.forced_mark)) in gred_dump()
894 if (nla_put_u32(skb, TCA_GRED_VQ_STAT_PDROP, q->stats.pdrop)) in gred_dump()
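The newer per-VQ dump emits the same state as nested netlink attributes, one TCA_GRED_VQ_ENTRY nest per configured DP. A sketch of one nest; the nest start/end calls and the TCA_GRED_VQ_PAD padding attribute are assumptions based on the current netlink helpers:

    vq = nla_nest_start_noflag(skb, TCA_GRED_VQ_ENTRY);
    if (!vq)
        goto nla_put_failure;

    if (nla_put_u32(skb, TCA_GRED_VQ_DP, q->DP))
        goto nla_put_failure;
    if (nla_put_u32(skb, TCA_GRED_VQ_FLAGS, q->red_flags))
        goto nla_put_failure;
    /* 64-bit counters need an explicit padding attribute */
    if (nla_put_u64_64bit(skb, TCA_GRED_VQ_STAT_BYTES, q->bytesin,
                          TCA_GRED_VQ_PAD))
        goto nla_put_failure;
    if (nla_put_u32(skb, TCA_GRED_VQ_STAT_PACKETS, q->packetsin))
        goto nla_put_failure;
    if (nla_put_u32(skb, TCA_GRED_VQ_STAT_BACKLOG,
                    gred_backlog(table, q, sch)))
        goto nla_put_failure;

    nla_nest_end(skb, vq);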