Lines matching refs:vsi
35 struct ice_vsi *vsi; in ice_is_pfc_causing_hung_q() local
38 vsi = ice_get_main_vsi(pf); in ice_is_pfc_causing_hung_q()
39 if (!vsi) in ice_is_pfc_causing_hung_q()
43 if (vsi->tc_cfg.ena_tc & BIT(i)) in ice_is_pfc_causing_hung_q()
48 if (ice_find_q_in_range(vsi->tc_cfg.tc_info[tc].qoffset, in ice_is_pfc_causing_hung_q()
49 vsi->tc_cfg.tc_info[tc + 1].qoffset, in ice_is_pfc_causing_hung_q()
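
The matches above (lines 35-49) fall inside ice_is_pfc_causing_hung_q(), which looks up the main VSI and maps a hung Tx queue back to its traffic class. A minimal sketch of that portion, reconstructed from the matched lines; the variable names, loop bounds, and the PFC/XOFF checking that follows are assumptions, not shown in the matches:

static bool ice_is_pfc_causing_hung_q(struct ice_pf *pf, unsigned int txqueue)
{
        struct ice_vsi *vsi;
        u8 num_tcs = 0, i, tc;

        vsi = ice_get_main_vsi(pf);
        if (!vsi)
                return false;

        /* count the traffic classes enabled on the main VSI */
        ice_for_each_traffic_class(i)
                if (vsi->tc_cfg.ena_tc & BIT(i))
                        num_tcs++;

        /* find the TC whose queue range contains the hung queue */
        for (tc = 0; tc < num_tcs - 1; tc++)
                if (ice_find_q_in_range(vsi->tc_cfg.tc_info[tc].qoffset,
                                        vsi->tc_cfg.tc_info[tc + 1].qoffset,
                                        txqueue))
                        break;

        /* ... the rest of the function (not matched here) checks whether
         * PFC XOFF counters for that TC's priorities keep advancing ...
         */
        return false;
}
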
149 static u8 ice_get_first_droptc(struct ice_vsi *vsi) in ice_get_first_droptc() argument
151 struct ice_dcbx_cfg *cfg = &vsi->port_info->qos_cfg.local_dcbx_cfg; in ice_get_first_droptc()
152 struct device *dev = ice_pf_to_dev(vsi->back); in ice_get_first_droptc()
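
ice_get_first_droptc() (lines 149-152) appears here only through its two local handles: the port's local DCBX config and the PF's struct device. A hedged sketch of how those are derived from a VSI; the selection logic below (first TC without PFC enabled) is an assumption made for illustration, not taken from the matches:

static u8 ice_get_first_droptc(struct ice_vsi *vsi)
{
        /* local DCBX configuration hangs off the port_info QoS config */
        struct ice_dcbx_cfg *cfg = &vsi->port_info->qos_cfg.local_dcbx_cfg;
        /* vsi->back is the owning PF; ice_pf_to_dev() yields its struct device */
        struct device *dev = ice_pf_to_dev(vsi->back);
        u8 num_tc = ice_dcb_get_num_tc(cfg);
        u8 t;

        /* assumed selection: first TC that is not PFC (no-drop) enabled */
        for (t = 0; t < num_tc; t++)
                if (!(cfg->pfc.pfcena & BIT(t))) {
                        dev_dbg(dev, "first drop tc = %u\n", t);
                        return t;
                }

        dev_dbg(dev, "first drop tc = 0\n");
        return 0;
}
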
180 void ice_vsi_set_dcb_tc_cfg(struct ice_vsi *vsi) in ice_vsi_set_dcb_tc_cfg() argument
182 struct ice_dcbx_cfg *cfg = &vsi->port_info->qos_cfg.local_dcbx_cfg; in ice_vsi_set_dcb_tc_cfg()
184 switch (vsi->type) { in ice_vsi_set_dcb_tc_cfg()
186 vsi->tc_cfg.ena_tc = ice_dcb_get_ena_tc(cfg); in ice_vsi_set_dcb_tc_cfg()
187 vsi->tc_cfg.numtc = ice_dcb_get_num_tc(cfg); in ice_vsi_set_dcb_tc_cfg()
190 vsi->tc_cfg.ena_tc = BIT(ice_get_first_droptc(vsi)); in ice_vsi_set_dcb_tc_cfg()
191 vsi->tc_cfg.numtc = 1; in ice_vsi_set_dcb_tc_cfg()
196 vsi->tc_cfg.ena_tc = ICE_DFLT_TRAFFIC_CLASS; in ice_vsi_set_dcb_tc_cfg()
197 vsi->tc_cfg.numtc = 1; in ice_vsi_set_dcb_tc_cfg()
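
Lines 180-197 cover most of ice_vsi_set_dcb_tc_cfg(), which seeds a VSI's TC configuration from the local DCBX config based on the VSI type. A sketch assembled from the matched lines; the case labels on the first two arms and the break statements are assumed from context:

void ice_vsi_set_dcb_tc_cfg(struct ice_vsi *vsi)
{
        struct ice_dcbx_cfg *cfg = &vsi->port_info->qos_cfg.local_dcbx_cfg;

        switch (vsi->type) {
        case ICE_VSI_PF:
                /* PF VSI mirrors whatever DCBX negotiated */
                vsi->tc_cfg.ena_tc = ice_dcb_get_ena_tc(cfg);
                vsi->tc_cfg.numtc = ice_dcb_get_num_tc(cfg);
                break;
        case ICE_VSI_CHNL:
                /* channel (ADQ) VSIs ride on the first drop TC only */
                vsi->tc_cfg.ena_tc = BIT(ice_get_first_droptc(vsi));
                vsi->tc_cfg.numtc = 1;
                break;
        default:
                /* everything else gets the single default TC */
                vsi->tc_cfg.ena_tc = ICE_DFLT_TRAFFIC_CLASS;
                vsi->tc_cfg.numtc = 1;
                break;
        }
}
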
206 u8 ice_dcb_get_tc(struct ice_vsi *vsi, int queue_index) in ice_dcb_get_tc() argument
208 return vsi->tx_rings[queue_index]->dcb_tc; in ice_dcb_get_tc()
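
ice_dcb_get_tc() (lines 206-208) is a one-line accessor returning the TC recorded on a Tx ring. A hypothetical caller, only to show the shape of the API; the helper name and netdev_dbg() usage here are illustrative, not from the matches:

/* illustrative only: report which TC a given Tx queue was mapped to */
static void example_report_queue_tc(struct ice_vsi *vsi, int queue_index)
{
        u8 tc = ice_dcb_get_tc(vsi, queue_index);

        netdev_dbg(vsi->netdev, "txq %d -> TC %u\n", queue_index, tc);
}
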
215 void ice_vsi_cfg_dcb_rings(struct ice_vsi *vsi) in ice_vsi_cfg_dcb_rings() argument
222 if (!test_bit(ICE_FLAG_DCB_ENA, vsi->back->flags)) { in ice_vsi_cfg_dcb_rings()
224 ice_for_each_txq(vsi, i) { in ice_vsi_cfg_dcb_rings()
225 tx_ring = vsi->tx_rings[i]; in ice_vsi_cfg_dcb_rings()
228 ice_for_each_rxq(vsi, i) { in ice_vsi_cfg_dcb_rings()
229 rx_ring = vsi->rx_rings[i]; in ice_vsi_cfg_dcb_rings()
236 if (!(vsi->tc_cfg.ena_tc & BIT(n))) in ice_vsi_cfg_dcb_rings()
239 qoffset = vsi->tc_cfg.tc_info[n].qoffset; in ice_vsi_cfg_dcb_rings()
240 qcount = vsi->tc_cfg.tc_info[n].qcount_tx; in ice_vsi_cfg_dcb_rings()
242 vsi->tx_rings[i]->dcb_tc = n; in ice_vsi_cfg_dcb_rings()
244 qcount = vsi->tc_cfg.tc_info[n].qcount_rx; in ice_vsi_cfg_dcb_rings()
246 vsi->rx_rings[i]->dcb_tc = n; in ice_vsi_cfg_dcb_rings()
251 if (vsi->all_enatc) { in ice_vsi_cfg_dcb_rings()
252 u8 first_droptc = ice_get_first_droptc(vsi); in ice_vsi_cfg_dcb_rings()
258 if (!(vsi->all_enatc & BIT(n))) in ice_vsi_cfg_dcb_rings()
261 qoffset = vsi->mqprio_qopt.qopt.offset[n]; in ice_vsi_cfg_dcb_rings()
262 qcount = vsi->mqprio_qopt.qopt.count[n]; in ice_vsi_cfg_dcb_rings()
264 vsi->tx_rings[i]->dcb_tc = first_droptc; in ice_vsi_cfg_dcb_rings()
265 vsi->rx_rings[i]->dcb_tc = first_droptc; in ice_vsi_cfg_dcb_rings()
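
Lines 215-265 span ice_vsi_cfg_dcb_rings(), which stamps a dcb_tc value onto every Tx/Rx ring: zero for all rings when DCB is disabled, otherwise per-TC from tc_cfg, with ADQ (all_enatc/mqprio) queues forced onto the first drop TC. A sketch reconstructed from the matched lines; the loop macros and bounds between the matches are assumptions:

void ice_vsi_cfg_dcb_rings(struct ice_vsi *vsi)
{
        u16 qoffset, qcount;
        int i, n;

        if (!test_bit(ICE_FLAG_DCB_ENA, vsi->back->flags)) {
                /* DCB off: reset every ring to TC 0 */
                ice_for_each_txq(vsi, i)
                        vsi->tx_rings[i]->dcb_tc = 0;
                ice_for_each_rxq(vsi, i)
                        vsi->rx_rings[i]->dcb_tc = 0;
                return;
        }

        ice_for_each_traffic_class(n) {
                if (!(vsi->tc_cfg.ena_tc & BIT(n)))
                        break;

                qoffset = vsi->tc_cfg.tc_info[n].qoffset;
                qcount = vsi->tc_cfg.tc_info[n].qcount_tx;
                for (i = qoffset; i < qoffset + qcount; i++)
                        vsi->tx_rings[i]->dcb_tc = n;

                qcount = vsi->tc_cfg.tc_info[n].qcount_rx;
                for (i = qoffset; i < qoffset + qcount; i++)
                        vsi->rx_rings[i]->dcb_tc = n;
        }

        /* ADQ: channel queues are pinned to the first drop TC */
        if (vsi->all_enatc) {
                u8 first_droptc = ice_get_first_droptc(vsi);

                ice_for_each_traffic_class(n) {
                        if (!(vsi->all_enatc & BIT(n)))
                                break;

                        qoffset = vsi->mqprio_qopt.qopt.offset[n];
                        qcount = vsi->mqprio_qopt.qopt.count[n];
                        for (i = qoffset; i < qoffset + qcount; i++) {
                                vsi->tx_rings[i]->dcb_tc = first_droptc;
                                vsi->rx_rings[i]->dcb_tc = first_droptc;
                        }
                }
        }
}
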
287 struct ice_vsi *vsi = pf->vsi[i]; in ice_dcb_ena_dis_vsi() local
289 if (!vsi) in ice_dcb_ena_dis_vsi()
292 switch (vsi->type) { in ice_dcb_ena_dis_vsi()
297 ice_ena_vsi(vsi, locked); in ice_dcb_ena_dis_vsi()
299 ice_dis_vsi(vsi, locked); in ice_dcb_ena_dis_vsi()
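
ice_dcb_ena_dis_vsi() (lines 287-299) walks pf->vsi[] and enables or disables each relevant VSI around a DCB reconfiguration. A sketch consistent with the matched lines; the exact set of VSI types handled by the switch is an assumption:

static void ice_dcb_ena_dis_vsi(struct ice_pf *pf, bool ena, bool locked)
{
        int i;

        ice_for_each_vsi(pf, i) {
                struct ice_vsi *vsi = pf->vsi[i];

                if (!vsi)
                        continue;

                switch (vsi->type) {
                case ICE_VSI_PF:        /* assumed case labels */
                case ICE_VSI_CHNL:
                        if (ena)
                                ice_ena_vsi(vsi, locked);
                        else
                                ice_dis_vsi(vsi, locked);
                        break;
                default:
                        continue;
                }
        }
}
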
749 struct ice_vsi *vsi = pf->vsi[v]; in ice_pf_dcb_recfg() local
751 if (!vsi) in ice_pf_dcb_recfg()
754 if (vsi->type == ICE_VSI_PF) { in ice_pf_dcb_recfg()
764 } else if (vsi->type == ICE_VSI_CHNL) { in ice_pf_dcb_recfg()
765 tc_map = BIT(ice_get_first_droptc(vsi)); in ice_pf_dcb_recfg()
770 ret = ice_vsi_cfg_tc(vsi, tc_map); in ice_pf_dcb_recfg()
773 vsi->idx); in ice_pf_dcb_recfg()
779 if (vsi->type == ICE_VSI_CHNL || in ice_pf_dcb_recfg()
780 vsi->type == ICE_VSI_SWITCHDEV_CTRL) in ice_pf_dcb_recfg()
783 ice_vsi_map_rings_to_vectors(vsi); in ice_pf_dcb_recfg()
784 if (vsi->type == ICE_VSI_PF) in ice_pf_dcb_recfg()
785 ice_dcbnl_set_all(vsi); in ice_pf_dcb_recfg()
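
The matches at lines 749-785 are from ice_pf_dcb_recfg(), which reapplies TC configuration to every VSI after a DCB change: PF VSIs take the full enabled-TC map, channel VSIs take only the first drop TC, and rings are remapped to vectors afterwards (skipped for channel and switchdev control VSIs). A hedged sketch of that loop with a simplified signature; the tc_map source for the PF case and the error message text are assumptions:

static void ice_pf_dcb_recfg(struct ice_pf *pf)
{
        struct ice_dcbx_cfg *dcbcfg = &pf->hw.port_info->qos_cfg.local_dcbx_cfg;
        struct device *dev = ice_pf_to_dev(pf);
        int v, ret;

        ice_for_each_vsi(pf, v) {
                struct ice_vsi *vsi = pf->vsi[v];
                u8 tc_map;

                if (!vsi)
                        continue;

                if (vsi->type == ICE_VSI_PF)
                        tc_map = ice_dcb_get_ena_tc(dcbcfg);    /* assumed */
                else if (vsi->type == ICE_VSI_CHNL)
                        tc_map = BIT(ice_get_first_droptc(vsi));
                else
                        tc_map = ICE_DFLT_TRAFFIC_CLASS;

                ret = ice_vsi_cfg_tc(vsi, tc_map);
                if (ret) {
                        dev_err(dev, "Failed to config TC for VSI index: %d\n",
                                vsi->idx);
                        continue;
                }

                /* no vector remap for channel / switchdev control VSIs */
                if (vsi->type == ICE_VSI_CHNL ||
                    vsi->type == ICE_VSI_SWITCHDEV_CTRL)
                        continue;

                ice_vsi_map_rings_to_vectors(vsi);
                if (vsi->type == ICE_VSI_PF)
                        ice_dcbnl_set_all(vsi);
        }
}
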
928 if (!test_bit(ICE_FLAG_DCB_ENA, tx_ring->vsi->back->flags)) in ice_tx_prepare_vlan_flags_dcb()
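
The final match (line 928) is the early-exit guard in ice_tx_prepare_vlan_flags_dcb(): per-packet DCB VLAN/priority handling is skipped entirely unless DCB is enabled on the PF owning the ring's VSI. A sketch of that guard pattern; the parameter types and the tagging logic after the check are assumptions:

void ice_tx_prepare_vlan_flags_dcb(struct ice_tx_ring *tx_ring,
                                   struct ice_tx_buf *first)
{
        /* cheap per-packet bail-out when DCB is not enabled on the PF */
        if (!test_bit(ICE_FLAG_DCB_ENA, tx_ring->vsi->back->flags))
                return;

        /* ... otherwise set up the VLAN priority (PCP) bits for this
         * packet; that part is not covered by the matched lines ...
         */
}
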