Lines matching refs: hdev

138 static void hclge_pfc_stats_get(struct hclge_dev *hdev, bool tx, u64 *stats)  in hclge_pfc_stats_get()  argument
149 stats[i] = HCLGE_STATS_READ(&hdev->mac_stats, offset[i]); in hclge_pfc_stats_get()
152 void hclge_pfc_rx_stats_get(struct hclge_dev *hdev, u64 *stats) in hclge_pfc_rx_stats_get() argument
154 hclge_pfc_stats_get(hdev, false, stats); in hclge_pfc_rx_stats_get()
157 void hclge_pfc_tx_stats_get(struct hclge_dev *hdev, u64 *stats) in hclge_pfc_tx_stats_get() argument
159 hclge_pfc_stats_get(hdev, true, stats); in hclge_pfc_tx_stats_get()
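The two exported getters above are thin direction wrappers: both funnel into hclge_pfc_stats_get(), which walks a per-direction table of field offsets into hdev->mac_stats (line 149). A minimal stand-alone model of that offset-table read; the struct field names here are invented for illustration, the driver's PFC counters are named differently:

#include <stddef.h>
#include <stdint.h>

struct mac_stats {
	uint64_t rx_pfc_pri0;	/* invented names; the driver has one per priority */
	uint64_t tx_pfc_pri0;
};

/* model of HCLGE_STATS_READ: fetch a u64 field by its byte offset */
#define STATS_READ(base, off) \
	(*(const uint64_t *)((const uint8_t *)(base) + (off)))

void pfc_stats_get(const struct mac_stats *s, int tx, uint64_t *stats)
{
	static const size_t rx_off[] = { offsetof(struct mac_stats, rx_pfc_pri0) };
	static const size_t tx_off[] = { offsetof(struct mac_stats, tx_pfc_pri0) };
	const size_t *offset = tx ? tx_off : rx_off;
	size_t i;

	for (i = 0; i < 1; i++)	/* the driver iterates all 8 priorities */
		stats[i] = STATS_READ(s, offset[i]);
}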
162 int hclge_mac_pause_en_cfg(struct hclge_dev *hdev, bool tx, bool rx) in hclge_mac_pause_en_cfg() argument
171 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_mac_pause_en_cfg()
174 int hclge_pfc_pause_en_cfg(struct hclge_dev *hdev, u8 tx_rx_bitmap, in hclge_pfc_pause_en_cfg() argument
185 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pfc_pause_en_cfg()
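Nearly every setter in this listing ends in the same one-descriptor firmware command idiom: set up a hclge_desc for an opcode, fill its payload, and post it with hclge_cmd_send(&hdev->hw, &desc, 1). A sketch of that shape; hclge_cmd_setup_basic_desc() and hclge_cmd_send() are the real driver calls, while the opcode and payload struct below are hypothetical stand-ins:

static int hclge_example_en_cfg(struct hclge_dev *hdev, bool en)
{
	struct hclge_example_cmd *cmd;	/* hypothetical payload layout */
	struct hclge_desc desc;

	/* third argument false = a write (config) command */
	hclge_cmd_setup_basic_desc(&desc, HCLGE_OPC_EXAMPLE_CFG, false);

	cmd = (struct hclge_example_cmd *)desc.data;
	cmd->en = en ? 1 : 0;

	return hclge_cmd_send(&hdev->hw, &desc, 1);
}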
188 static int hclge_pause_param_cfg(struct hclge_dev *hdev, const u8 *addr, in hclge_pause_param_cfg() argument
203 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pause_param_cfg()
206 int hclge_pause_addr_cfg(struct hclge_dev *hdev, const u8 *mac_addr) in hclge_pause_addr_cfg() argument
218 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pause_addr_cfg()
225 return hclge_pause_param_cfg(hdev, mac_addr, trans_gap, trans_time); in hclge_pause_addr_cfg()
228 static int hclge_fill_pri_array(struct hclge_dev *hdev, u8 *pri, u8 pri_id) in hclge_fill_pri_array() argument
232 tc = hdev->tm_info.prio_tc[pri_id]; in hclge_fill_pri_array()
234 if (tc >= hdev->tm_info.num_tc) in hclge_fill_pri_array()
251 int hclge_up_to_tc_map(struct hclge_dev *hdev) in hclge_up_to_tc_map() argument
261 ret = hclge_fill_pri_array(hdev, pri, pri_id); in hclge_up_to_tc_map()
266 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_up_to_tc_map()
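hclge_fill_pri_array() rejects any user priority whose TC lies outside the active range (line 234) and packs the surviving TC ids two per byte into the pri[] buffer that line 266 sends to firmware. A compilable model of that validate-and-pack step, assuming the low-nibble/high-nibble layout of the upstream helper:

#include <stdint.h>
#include <stdio.h>

static int fill_pri_array(uint8_t *pri, const uint8_t *prio_tc,
			  uint8_t num_tc, uint8_t pri_id)
{
	uint8_t tc = prio_tc[pri_id];

	if (tc >= num_tc)	/* priority mapped to a disabled TC */
		return -1;

	/* even priority -> low nibble, odd priority -> high nibble */
	pri[pri_id >> 1] |= tc << ((pri_id & 1) * 4);
	return 0;
}

int main(void)
{
	uint8_t prio_tc[8] = { 0, 1, 2, 3, 0, 1, 2, 3 };
	uint8_t pri[4] = { 0 };

	for (uint8_t p = 0; p < 8; p++)
		if (fill_pri_array(pri, prio_tc, 4, p))
			return 1;
	printf("%02x %02x %02x %02x\n", pri[0], pri[1], pri[2], pri[3]);
	return 0;	/* prints: 10 32 10 32 */
}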
269 static void hclge_dscp_to_prio_map_init(struct hclge_dev *hdev) in hclge_dscp_to_prio_map_init() argument
273 hdev->vport[0].nic.kinfo.tc_map_mode = HNAE3_TC_MAP_MODE_PRIO; in hclge_dscp_to_prio_map_init()
274 hdev->vport[0].nic.kinfo.dscp_app_cnt = 0; in hclge_dscp_to_prio_map_init()
276 hdev->vport[0].nic.kinfo.dscp_prio[i] = HNAE3_PRIO_ID_INVALID; in hclge_dscp_to_prio_map_init()
279 int hclge_dscp_to_tc_map(struct hclge_dev *hdev) in hclge_dscp_to_tc_map() argument
292 pri_id = hdev->vport[0].nic.kinfo.dscp_prio[i]; in hclge_dscp_to_tc_map()
294 tc_id = hdev->tm_info.prio_tc[pri_id]; in hclge_dscp_to_tc_map()
301 pri_id = hdev->vport[0].nic.kinfo.dscp_prio[j]; in hclge_dscp_to_tc_map()
303 tc_id = hdev->tm_info.prio_tc[pri_id]; in hclge_dscp_to_tc_map()
307 return hclge_cmd_send(&hdev->hw, desc, HCLGE_DSCP_MAP_TC_BD_NUM); in hclge_dscp_to_tc_map()
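hclge_dscp_to_tc_map() resolves each DSCP value in two hops before packing the results into HCLGE_DSCP_MAP_TC_BD_NUM descriptors: DSCP -> priority via the vport's dscp_prio[] table, then priority -> TC via tm_info.prio_tc[] (lines 292-303). The lookup in isolation; the fallback for unmapped entries is an inference from the init routine above, not visible in this listing:

u8 pri_id = hdev->vport[0].nic.kinfo.dscp_prio[dscp];

/* entries still at HNAE3_PRIO_ID_INVALID carry no user mapping */
if (pri_id == HNAE3_PRIO_ID_INVALID)
	pri_id = 0;

u8 tc_id = hdev->tm_info.prio_tc[pri_id];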
310 static int hclge_tm_pg_to_pri_map_cfg(struct hclge_dev *hdev, in hclge_tm_pg_to_pri_map_cfg() argument
323 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_to_pri_map_cfg()
326 static int hclge_tm_qs_to_pri_map_cfg(struct hclge_dev *hdev, u16 qs_id, u8 pri, in hclge_tm_qs_to_pri_map_cfg() argument
340 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_to_pri_map_cfg()
343 static int hclge_tm_q_to_qs_map_cfg(struct hclge_dev *hdev, in hclge_tm_q_to_qs_map_cfg() argument
374 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_q_to_qs_map_cfg()
377 static int hclge_tm_pg_weight_cfg(struct hclge_dev *hdev, u8 pg_id, in hclge_tm_pg_weight_cfg() argument
390 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_weight_cfg()
393 static int hclge_tm_pri_weight_cfg(struct hclge_dev *hdev, u8 pri_id, in hclge_tm_pri_weight_cfg() argument
406 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_weight_cfg()
409 static int hclge_tm_qs_weight_cfg(struct hclge_dev *hdev, u16 qs_id, in hclge_tm_qs_weight_cfg() argument
422 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_weight_cfg()
439 static int hclge_tm_pg_shapping_cfg(struct hclge_dev *hdev, in hclge_tm_pg_shapping_cfg() argument
461 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_shapping_cfg()
464 int hclge_tm_port_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_port_shaper_cfg() argument
472 ret = hclge_shaper_para_calc(hdev->hw.mac.speed, HCLGE_SHAPER_LVL_PORT, in hclge_tm_port_shaper_cfg()
474 hdev->ae_dev->dev_specs.max_tm_rate); in hclge_tm_port_shaper_cfg()
490 shap_cfg_cmd->port_rate = cpu_to_le32(hdev->hw.mac.speed); in hclge_tm_port_shaper_cfg()
492 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_port_shaper_cfg()
495 static int hclge_tm_pri_shapping_cfg(struct hclge_dev *hdev, in hclge_tm_pri_shapping_cfg() argument
518 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_shapping_cfg()
521 static int hclge_tm_pg_schd_mode_cfg(struct hclge_dev *hdev, u8 pg_id) in hclge_tm_pg_schd_mode_cfg() argument
527 if (hdev->tm_info.pg_info[pg_id].pg_sch_mode == HCLGE_SCH_MODE_DWRR) in hclge_tm_pg_schd_mode_cfg()
534 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_schd_mode_cfg()
537 static int hclge_tm_pri_schd_mode_cfg(struct hclge_dev *hdev, u8 pri_id) in hclge_tm_pri_schd_mode_cfg() argument
543 if (hdev->tm_info.tc_info[pri_id].tc_sch_mode == HCLGE_SCH_MODE_DWRR) in hclge_tm_pri_schd_mode_cfg()
550 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_schd_mode_cfg()
553 static int hclge_tm_qs_schd_mode_cfg(struct hclge_dev *hdev, u16 qs_id, u8 mode) in hclge_tm_qs_schd_mode_cfg() argument
566 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_schd_mode_cfg()
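The three *_schd_mode_cfg helpers share one decision: if the node's stored mode is HCLGE_SCH_MODE_DWRR, set the DWRR mask bit in the descriptor, otherwise leave it clear for strict priority (SP). A sketch of that branch for the PG case; the opcode name and the descriptor word index are assumptions here:

hclge_cmd_setup_basic_desc(&desc, HCLGE_OPC_TM_PG_SCH_MODE_CFG, false);
desc.data[0] = cpu_to_le32(pg_id);
if (hdev->tm_info.pg_info[pg_id].pg_sch_mode == HCLGE_SCH_MODE_DWRR)
	desc.data[1] = cpu_to_le32(HCLGE_TM_TX_SCHD_DWRR_MSK);
else
	desc.data[1] = 0;	/* strict priority */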
569 static int hclge_tm_qs_bp_cfg(struct hclge_dev *hdev, u8 tc, u8 grp_id, in hclge_tm_qs_bp_cfg() argument
584 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_bp_cfg()
592 struct hclge_dev *hdev = vport->back; in hclge_tm_qs_shaper_cfg() local
598 max_tx_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_qs_shaper_cfg()
602 hdev->ae_dev->dev_specs.max_tm_rate); in hclge_tm_qs_shaper_cfg()
622 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_shaper_cfg()
624 dev_err(&hdev->pdev->dev, in hclge_tm_qs_shaper_cfg()
639 struct hclge_dev *hdev = vport->back; in hclge_vport_get_max_rss_size() local
647 if (!(hdev->hw_tc_map & BIT(i)) || i >= tc_info->num_tc) in hclge_vport_get_max_rss_size()
660 struct hclge_dev *hdev = vport->back; in hclge_vport_get_tqp_num() local
668 if (hdev->hw_tc_map & BIT(i) && i < tc_info->num_tc) in hclge_vport_get_tqp_num()
678 struct hclge_dev *hdev = vport->back; in hclge_tm_update_kinfo_rss_size() local
690 vport_max_rss_size = hdev->vf_rss_size_max; in hclge_tm_update_kinfo_rss_size()
692 kinfo->tc_info.max_tc = hdev->tc_max; in hclge_tm_update_kinfo_rss_size()
694 min_t(u16, vport->alloc_tqps, hdev->tm_info.num_tc); in hclge_tm_update_kinfo_rss_size()
696 vport_max_rss_size = hdev->pf_rss_size_max; in hclge_tm_update_kinfo_rss_size()
705 dev_info(&hdev->pdev->dev, "rss changes from %u to %u\n", in hclge_tm_update_kinfo_rss_size()
718 struct hclge_dev *hdev = vport->back; in hclge_tm_vport_tc_info_update() local
724 vport->bw_limit = hdev->tm_info.pg_info[0].bw_limit; in hclge_tm_vport_tc_info_update()
727 hdev->rss_cfg.rss_size = kinfo->rss_size; in hclge_tm_vport_tc_info_update()
734 if (hdev->hw_tc_map & BIT(i) && i < kinfo->tc_info.num_tc) { in hclge_tm_vport_tc_info_update()
744 memcpy(kinfo->tc_info.prio_tc, hdev->tm_info.prio_tc, in hclge_tm_vport_tc_info_update()
748 static void hclge_tm_vport_info_update(struct hclge_dev *hdev) in hclge_tm_vport_info_update() argument
750 struct hclge_vport *vport = hdev->vport; in hclge_tm_vport_info_update()
753 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_vport_info_update()
760 static void hclge_tm_tc_info_init(struct hclge_dev *hdev) in hclge_tm_tc_info_init() argument
765 for (i = 0; i < hdev->tc_max; i++) { in hclge_tm_tc_info_init()
766 if (i < hdev->tm_info.num_tc) { in hclge_tm_tc_info_init()
768 bw_limit = hdev->tm_info.pg_info[0].bw_limit; in hclge_tm_tc_info_init()
774 hdev->tm_info.tc_info[i].tc_id = i; in hclge_tm_tc_info_init()
775 hdev->tm_info.tc_info[i].tc_sch_mode = tc_sch_mode; in hclge_tm_tc_info_init()
776 hdev->tm_info.tc_info[i].pgid = 0; in hclge_tm_tc_info_init()
777 hdev->tm_info.tc_info[i].bw_limit = bw_limit; in hclge_tm_tc_info_init()
781 hdev->tm_info.prio_tc[i] = in hclge_tm_tc_info_init()
782 (i >= hdev->tm_info.num_tc) ? 0 : i; in hclge_tm_tc_info_init()
785 static void hclge_tm_pg_info_init(struct hclge_dev *hdev) in hclge_tm_pg_info_init() argument
792 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_info_init()
795 hdev->tm_info.pg_dwrr[i] = i ? 0 : BW_PERCENT; in hclge_tm_pg_info_init()
797 hdev->tm_info.pg_info[i].pg_id = i; in hclge_tm_pg_info_init()
798 hdev->tm_info.pg_info[i].pg_sch_mode = HCLGE_SCH_MODE_DWRR; in hclge_tm_pg_info_init()
800 hdev->tm_info.pg_info[i].bw_limit = in hclge_tm_pg_info_init()
801 hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pg_info_init()
806 hdev->tm_info.pg_info[i].tc_bit_map = hdev->hw_tc_map; in hclge_tm_pg_info_init()
807 for (k = 0; k < hdev->tm_info.num_tc; k++) in hclge_tm_pg_info_init()
808 hdev->tm_info.pg_info[i].tc_dwrr[k] = BW_PERCENT; in hclge_tm_pg_info_init()
810 hdev->tm_info.pg_info[i].tc_dwrr[k] = DEFAULT_BW_WEIGHT; in hclge_tm_pg_info_init()
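Lines 795-810 establish the default bandwidth split: PG 0 takes the whole DWRR budget (BW_PERCENT, i.e. 100) and every other PG gets 0; within a PG, each active TC is weighted BW_PERCENT while inactive TCs are parked at DEFAULT_BW_WEIGHT. A stand-alone model of that initialization; the DEFAULT_BW_WEIGHT value below is a placeholder, the real constant lives in the driver headers:

#include <stdint.h>

#define BW_PERCENT		100
#define DEFAULT_BW_WEIGHT	0	/* placeholder value */

void pg_weights_init(uint8_t *pg_dwrr, uint8_t tc_dwrr[][8],
		     int num_pg, int num_tc)
{
	int i, k;

	for (i = 0; i < num_pg; i++) {
		/* only PG 0 carries DWRR bandwidth by default */
		pg_dwrr[i] = i ? 0 : BW_PERCENT;

		for (k = 0; k < 8; k++)	/* 8 standing in for HNAE3_MAX_TC */
			tc_dwrr[i][k] = k < num_tc ? BW_PERCENT
						   : DEFAULT_BW_WEIGHT;
	}
}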
814 static void hclge_update_fc_mode_by_dcb_flag(struct hclge_dev *hdev) in hclge_update_fc_mode_by_dcb_flag() argument
816 if (hdev->tm_info.num_tc == 1 && !hdev->tm_info.pfc_en) { in hclge_update_fc_mode_by_dcb_flag()
817 if (hdev->fc_mode_last_time == HCLGE_FC_PFC) in hclge_update_fc_mode_by_dcb_flag()
818 dev_warn(&hdev->pdev->dev, in hclge_update_fc_mode_by_dcb_flag()
821 hdev->tm_info.fc_mode = hdev->fc_mode_last_time; in hclge_update_fc_mode_by_dcb_flag()
822 } else if (hdev->tm_info.fc_mode != HCLGE_FC_PFC) { in hclge_update_fc_mode_by_dcb_flag()
827 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_update_fc_mode_by_dcb_flag()
828 hdev->tm_info.fc_mode = HCLGE_FC_PFC; in hclge_update_fc_mode_by_dcb_flag()
832 static void hclge_update_fc_mode(struct hclge_dev *hdev) in hclge_update_fc_mode() argument
834 if (!hdev->tm_info.pfc_en) { in hclge_update_fc_mode()
835 hdev->tm_info.fc_mode = hdev->fc_mode_last_time; in hclge_update_fc_mode()
839 if (hdev->tm_info.fc_mode != HCLGE_FC_PFC) { in hclge_update_fc_mode()
840 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_update_fc_mode()
841 hdev->tm_info.fc_mode = HCLGE_FC_PFC; in hclge_update_fc_mode()
845 void hclge_tm_pfc_info_update(struct hclge_dev *hdev) in hclge_tm_pfc_info_update() argument
847 if (hdev->ae_dev->dev_version >= HNAE3_DEVICE_VERSION_V3) in hclge_tm_pfc_info_update()
848 hclge_update_fc_mode(hdev); in hclge_tm_pfc_info_update()
850 hclge_update_fc_mode_by_dcb_flag(hdev); in hclge_tm_pfc_info_update()
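Both fc-mode updaters implement the same save/restore contract around PFC: when PFC turns on, the current flow-control mode is stashed in fc_mode_last_time and fc_mode is forced to HCLGE_FC_PFC; when PFC turns off, the stashed mode is restored. The V3+ variant at line 832 simply drops the legacy single-TC condition. The transition, reduced to its core:

/* PFC on: remember what we had, then switch to PFC */
if (hdev->tm_info.pfc_en && hdev->tm_info.fc_mode != HCLGE_FC_PFC) {
	hdev->fc_mode_last_time = hdev->tm_info.fc_mode;
	hdev->tm_info.fc_mode = HCLGE_FC_PFC;
}

/* PFC off: fall back to the pre-PFC mode */
if (!hdev->tm_info.pfc_en)
	hdev->tm_info.fc_mode = hdev->fc_mode_last_time;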
853 static void hclge_tm_schd_info_init(struct hclge_dev *hdev) in hclge_tm_schd_info_init() argument
855 hclge_tm_pg_info_init(hdev); in hclge_tm_schd_info_init()
857 hclge_tm_tc_info_init(hdev); in hclge_tm_schd_info_init()
859 hclge_tm_vport_info_update(hdev); in hclge_tm_schd_info_init()
861 hclge_tm_pfc_info_update(hdev); in hclge_tm_schd_info_init()
864 static int hclge_tm_pg_to_pri_map(struct hclge_dev *hdev) in hclge_tm_pg_to_pri_map() argument
869 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_to_pri_map()
872 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_to_pri_map()
875 hdev, i, hdev->tm_info.pg_info[i].tc_bit_map); in hclge_tm_pg_to_pri_map()
883 static int hclge_tm_pg_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pg_shaper_cfg() argument
885 u32 max_tm_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pg_shaper_cfg()
892 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_shaper_cfg()
896 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_shaper_cfg()
897 u32 rate = hdev->tm_info.pg_info[i].bw_limit; in hclge_tm_pg_shaper_cfg()
908 ret = hclge_tm_pg_shapping_cfg(hdev, in hclge_tm_pg_shaper_cfg()
919 ret = hclge_tm_pg_shapping_cfg(hdev, in hclge_tm_pg_shaper_cfg()
929 static int hclge_tm_pg_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pg_dwrr_cfg() argument
935 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_dwrr_cfg()
939 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_dwrr_cfg()
941 ret = hclge_tm_pg_weight_cfg(hdev, i, hdev->tm_info.pg_dwrr[i]); in hclge_tm_pg_dwrr_cfg()
949 static int hclge_vport_q_to_qs_map(struct hclge_dev *hdev, in hclge_vport_q_to_qs_map() argument
962 ret = hclge_tm_q_to_qs_map_cfg(hdev, in hclge_vport_q_to_qs_map()
973 static int hclge_tm_pri_q_qs_cfg_tc_base(struct hclge_dev *hdev) in hclge_tm_pri_q_qs_cfg_tc_base() argument
975 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_q_qs_cfg_tc_base()
980 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_pri_q_qs_cfg_tc_base()
987 ret = hclge_tm_qs_to_pri_map_cfg(hdev, in hclge_tm_pri_q_qs_cfg_tc_base()
998 static int hclge_tm_pri_q_qs_cfg_vnet_base(struct hclge_dev *hdev) in hclge_tm_pri_q_qs_cfg_vnet_base() argument
1000 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_q_qs_cfg_vnet_base()
1005 for (k = 0; k < hdev->num_alloc_vport; k++) in hclge_tm_pri_q_qs_cfg_vnet_base()
1007 ret = hclge_tm_qs_to_pri_map_cfg(hdev, in hclge_tm_pri_q_qs_cfg_vnet_base()
1017 static int hclge_tm_pri_q_qs_cfg(struct hclge_dev *hdev) in hclge_tm_pri_q_qs_cfg() argument
1019 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_q_qs_cfg()
1023 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pri_q_qs_cfg()
1024 ret = hclge_tm_pri_q_qs_cfg_tc_base(hdev); in hclge_tm_pri_q_qs_cfg()
1025 else if (hdev->tx_sch_mode == HCLGE_FLAG_VNET_BASE_SCH_MODE) in hclge_tm_pri_q_qs_cfg()
1026 ret = hclge_tm_pri_q_qs_cfg_vnet_base(hdev); in hclge_tm_pri_q_qs_cfg()
1034 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_q_qs_cfg()
1035 ret = hclge_vport_q_to_qs_map(hdev, vport); in hclge_tm_pri_q_qs_cfg()
1045 static int hclge_tm_pri_tc_base_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_tc_base_shaper_cfg() argument
1047 u32 max_tm_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pri_tc_base_shaper_cfg()
1053 for (i = 0; i < hdev->tc_max; i++) { in hclge_tm_pri_tc_base_shaper_cfg()
1054 u32 rate = hdev->tm_info.tc_info[i].bw_limit; in hclge_tm_pri_tc_base_shaper_cfg()
1075 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_C_BUCKET, i, in hclge_tm_pri_tc_base_shaper_cfg()
1080 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_P_BUCKET, i, in hclge_tm_pri_tc_base_shaper_cfg()
1091 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_shaper_pri_cfg() local
1098 hdev->ae_dev->dev_specs.max_tm_rate); in hclge_tm_pri_vnet_base_shaper_pri_cfg()
1105 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_C_BUCKET, in hclge_tm_pri_vnet_base_shaper_pri_cfg()
1115 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_P_BUCKET, in hclge_tm_pri_vnet_base_shaper_pri_cfg()
1127 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_shaper_qs_cfg() local
1128 u32 max_tm_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pri_vnet_base_shaper_qs_cfg()
1134 ret = hclge_shaper_para_calc(hdev->tm_info.tc_info[i].bw_limit, in hclge_tm_pri_vnet_base_shaper_qs_cfg()
1144 static int hclge_tm_pri_vnet_base_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_vnet_base_shaper_cfg() argument
1146 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_vnet_base_shaper_cfg()
1151 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_vnet_base_shaper_cfg()
1166 static int hclge_tm_pri_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_shaper_cfg() argument
1170 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_shaper_cfg()
1171 ret = hclge_tm_pri_tc_base_shaper_cfg(hdev); in hclge_tm_pri_shaper_cfg()
1175 ret = hclge_tm_pri_vnet_base_shaper_cfg(hdev); in hclge_tm_pri_shaper_cfg()
1183 static int hclge_tm_pri_tc_base_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_tc_base_dwrr_cfg() argument
1185 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_tc_base_dwrr_cfg()
1191 for (i = 0; i < hdev->tc_max; i++) { in hclge_tm_pri_tc_base_dwrr_cfg()
1193 &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_pri_tc_base_dwrr_cfg()
1196 ret = hclge_tm_pri_weight_cfg(hdev, i, dwrr); in hclge_tm_pri_tc_base_dwrr_cfg()
1200 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_pri_tc_base_dwrr_cfg()
1208 hdev, vport[k].qs_offset + i, in hclge_tm_pri_tc_base_dwrr_cfg()
1218 static int hclge_tm_ets_tc_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_ets_tc_dwrr_cfg() argument
1232 pg_info = &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_ets_tc_dwrr_cfg()
1238 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_ets_tc_dwrr_cfg()
1244 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_dwrr_pri_cfg() local
1249 ret = hclge_tm_pri_weight_cfg(hdev, vport->vport_id, vport->dwrr); in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1256 hdev, vport->qs_offset + i, in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1257 hdev->tm_info.pg_info[0].tc_dwrr[i]); in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1265 static int hclge_tm_pri_vnet_base_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_vnet_base_dwrr_cfg() argument
1267 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_vnet_base_dwrr_cfg()
1271 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_vnet_base_dwrr_cfg()
1282 static int hclge_tm_pri_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_dwrr_cfg() argument
1286 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_dwrr_cfg()
1287 ret = hclge_tm_pri_tc_base_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1291 if (!hnae3_dev_dcb_supported(hdev)) in hclge_tm_pri_dwrr_cfg()
1294 ret = hclge_tm_ets_tc_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1296 dev_warn(&hdev->pdev->dev, in hclge_tm_pri_dwrr_cfg()
1298 hdev->fw_version); in hclge_tm_pri_dwrr_cfg()
1304 ret = hclge_tm_pri_vnet_base_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1312 static int hclge_tm_map_cfg(struct hclge_dev *hdev) in hclge_tm_map_cfg() argument
1316 ret = hclge_up_to_tc_map(hdev); in hclge_tm_map_cfg()
1320 if (hdev->vport[0].nic.kinfo.tc_map_mode == HNAE3_TC_MAP_MODE_DSCP) { in hclge_tm_map_cfg()
1321 ret = hclge_dscp_to_tc_map(hdev); in hclge_tm_map_cfg()
1326 ret = hclge_tm_pg_to_pri_map(hdev); in hclge_tm_map_cfg()
1330 return hclge_tm_pri_q_qs_cfg(hdev); in hclge_tm_map_cfg()
1333 static int hclge_tm_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_shaper_cfg() argument
1337 ret = hclge_tm_port_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1341 ret = hclge_tm_pg_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1345 return hclge_tm_pri_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1348 int hclge_tm_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_dwrr_cfg() argument
1352 ret = hclge_tm_pg_dwrr_cfg(hdev); in hclge_tm_dwrr_cfg()
1356 return hclge_tm_pri_dwrr_cfg(hdev); in hclge_tm_dwrr_cfg()
1359 static int hclge_tm_lvl2_schd_mode_cfg(struct hclge_dev *hdev) in hclge_tm_lvl2_schd_mode_cfg() argument
1365 if (hdev->tx_sch_mode == HCLGE_FLAG_VNET_BASE_SCH_MODE) in hclge_tm_lvl2_schd_mode_cfg()
1368 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_lvl2_schd_mode_cfg()
1369 ret = hclge_tm_pg_schd_mode_cfg(hdev, i); in hclge_tm_lvl2_schd_mode_cfg()
1377 static int hclge_tm_schd_mode_tc_base_cfg(struct hclge_dev *hdev, u8 pri_id) in hclge_tm_schd_mode_tc_base_cfg() argument
1379 struct hclge_vport *vport = hdev->vport; in hclge_tm_schd_mode_tc_base_cfg()
1384 ret = hclge_tm_pri_schd_mode_cfg(hdev, pri_id); in hclge_tm_schd_mode_tc_base_cfg()
1388 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_schd_mode_tc_base_cfg()
1396 ret = hclge_tm_qs_schd_mode_cfg(hdev, in hclge_tm_schd_mode_tc_base_cfg()
1409 struct hclge_dev *hdev = vport->back; in hclge_tm_schd_mode_vnet_base_cfg() local
1416 ret = hclge_tm_pri_schd_mode_cfg(hdev, vport->vport_id); in hclge_tm_schd_mode_vnet_base_cfg()
1421 u8 sch_mode = hdev->tm_info.tc_info[i].tc_sch_mode; in hclge_tm_schd_mode_vnet_base_cfg()
1423 ret = hclge_tm_qs_schd_mode_cfg(hdev, vport->qs_offset + i, in hclge_tm_schd_mode_vnet_base_cfg()
1432 static int hclge_tm_lvl34_schd_mode_cfg(struct hclge_dev *hdev) in hclge_tm_lvl34_schd_mode_cfg() argument
1434 struct hclge_vport *vport = hdev->vport; in hclge_tm_lvl34_schd_mode_cfg()
1438 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_lvl34_schd_mode_cfg()
1439 for (i = 0; i < hdev->tc_max; i++) { in hclge_tm_lvl34_schd_mode_cfg()
1440 ret = hclge_tm_schd_mode_tc_base_cfg(hdev, i); in hclge_tm_lvl34_schd_mode_cfg()
1445 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_lvl34_schd_mode_cfg()
1457 static int hclge_tm_schd_mode_hw(struct hclge_dev *hdev) in hclge_tm_schd_mode_hw() argument
1461 ret = hclge_tm_lvl2_schd_mode_cfg(hdev); in hclge_tm_schd_mode_hw()
1465 return hclge_tm_lvl34_schd_mode_cfg(hdev); in hclge_tm_schd_mode_hw()
1468 int hclge_tm_schd_setup_hw(struct hclge_dev *hdev) in hclge_tm_schd_setup_hw() argument
1473 ret = hclge_tm_map_cfg(hdev); in hclge_tm_schd_setup_hw()
1478 ret = hclge_tm_shaper_cfg(hdev); in hclge_tm_schd_setup_hw()
1483 ret = hclge_tm_dwrr_cfg(hdev); in hclge_tm_schd_setup_hw()
1488 ret = hclge_tm_schd_mode_hw(hdev); in hclge_tm_schd_setup_hw()
1492 return hclge_tm_flush_cfg(hdev, false); in hclge_tm_schd_setup_hw()
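hclge_tm_schd_setup_hw() shows the file's orchestration idiom: each stage is committed to hardware in dependency order, with an early return on the first failure, and TM flush is switched back off only once everything has landed. The skeleton, reconstructed from lines 1473-1492:

int hclge_tm_schd_setup_hw(struct hclge_dev *hdev)
{
	int ret;

	ret = hclge_tm_map_cfg(hdev);		/* up/DSCP/PG/qset maps */
	if (ret)
		return ret;

	ret = hclge_tm_shaper_cfg(hdev);	/* port/PG/priority shapers */
	if (ret)
		return ret;

	ret = hclge_tm_dwrr_cfg(hdev);		/* PG and priority weights */
	if (ret)
		return ret;

	ret = hclge_tm_schd_mode_hw(hdev);	/* DWRR vs. SP per node */
	if (ret)
		return ret;

	return hclge_tm_flush_cfg(hdev, false);	/* turn TM flush back off */
}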
1495 static int hclge_pause_param_setup_hw(struct hclge_dev *hdev) in hclge_pause_param_setup_hw() argument
1497 struct hclge_mac *mac = &hdev->hw.mac; in hclge_pause_param_setup_hw()
1499 return hclge_pause_param_cfg(hdev, mac->mac_addr, in hclge_pause_param_setup_hw()
1504 static int hclge_pfc_setup_hw(struct hclge_dev *hdev) in hclge_pfc_setup_hw() argument
1508 if (hdev->tm_info.fc_mode == HCLGE_FC_PFC) in hclge_pfc_setup_hw()
1512 return hclge_pfc_pause_en_cfg(hdev, enable_bitmap, in hclge_pfc_setup_hw()
1513 hdev->tm_info.pfc_en); in hclge_pfc_setup_hw()
1519 static int hclge_bp_setup_hw(struct hclge_dev *hdev, u8 tc) in hclge_bp_setup_hw() argument
1526 if (hdev->num_tqps > HCLGE_TQP_MAX_SIZE_DEV_V2) { in hclge_bp_setup_hw()
1536 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_bp_setup_hw()
1537 struct hclge_vport *vport = &hdev->vport[k]; in hclge_bp_setup_hw()
1548 ret = hclge_tm_qs_bp_cfg(hdev, tc, i, qs_bitmap); in hclge_bp_setup_hw()
1556 int hclge_mac_pause_setup_hw(struct hclge_dev *hdev) in hclge_mac_pause_setup_hw() argument
1560 switch (hdev->tm_info.fc_mode) { in hclge_mac_pause_setup_hw()
1586 return hclge_mac_pause_en_cfg(hdev, tx_en, rx_en); in hclge_mac_pause_setup_hw()
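The switch at line 1560 reduces the stored fc_mode to a MAC pause enable pair for line 1586. Reconstructed from the enum names (note that HCLGE_FC_PFC keeps MAC-level pause off, since per-priority pause supersedes it):

bool tx_en, rx_en;

switch (hdev->tm_info.fc_mode) {
case HCLGE_FC_NONE:
	tx_en = false;
	rx_en = false;
	break;
case HCLGE_FC_RX_PAUSE:
	tx_en = false;
	rx_en = true;
	break;
case HCLGE_FC_TX_PAUSE:
	tx_en = true;
	rx_en = false;
	break;
case HCLGE_FC_FULL:
	tx_en = true;
	rx_en = true;
	break;
case HCLGE_FC_PFC:
	tx_en = false;	/* PFC replaces link-level pause */
	rx_en = false;
	break;
default:
	tx_en = true;
	rx_en = true;
}

return hclge_mac_pause_en_cfg(hdev, tx_en, rx_en);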
1589 static int hclge_tm_bp_setup(struct hclge_dev *hdev) in hclge_tm_bp_setup() argument
1594 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_bp_setup()
1595 ret = hclge_bp_setup_hw(hdev, i); in hclge_tm_bp_setup()
1603 int hclge_pause_setup_hw(struct hclge_dev *hdev, bool init) in hclge_pause_setup_hw() argument
1607 ret = hclge_pause_param_setup_hw(hdev); in hclge_pause_setup_hw()
1611 ret = hclge_mac_pause_setup_hw(hdev); in hclge_pause_setup_hw()
1616 if (!hnae3_dev_dcb_supported(hdev)) in hclge_pause_setup_hw()
1623 ret = hclge_pfc_setup_hw(hdev); in hclge_pause_setup_hw()
1625 dev_warn(&hdev->pdev->dev, "GE MAC does not support pfc\n"); in hclge_pause_setup_hw()
1627 dev_err(&hdev->pdev->dev, "config pfc failed! ret = %d\n", in hclge_pause_setup_hw()
1632 return hclge_tm_bp_setup(hdev); in hclge_pause_setup_hw()
1635 void hclge_tm_prio_tc_info_update(struct hclge_dev *hdev, u8 *prio_tc) in hclge_tm_prio_tc_info_update() argument
1637 struct hclge_vport *vport = hdev->vport; in hclge_tm_prio_tc_info_update()
1642 hdev->tm_info.prio_tc[i] = prio_tc[i]; in hclge_tm_prio_tc_info_update()
1644 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_prio_tc_info_update()
1651 void hclge_tm_schd_info_update(struct hclge_dev *hdev, u8 num_tc) in hclge_tm_schd_info_update() argument
1656 hdev->tm_info.num_tc = num_tc; in hclge_tm_schd_info_update()
1658 for (i = 0; i < hdev->tm_info.num_tc; i++) in hclge_tm_schd_info_update()
1663 hdev->tm_info.num_tc = 1; in hclge_tm_schd_info_update()
1666 hdev->hw_tc_map = bit_map; in hclge_tm_schd_info_update()
1668 hclge_tm_schd_info_init(hdev); in hclge_tm_schd_info_update()
1671 int hclge_tm_init_hw(struct hclge_dev *hdev, bool init) in hclge_tm_init_hw() argument
1675 if ((hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) && in hclge_tm_init_hw()
1676 (hdev->tx_sch_mode != HCLGE_FLAG_VNET_BASE_SCH_MODE)) in hclge_tm_init_hw()
1679 ret = hclge_tm_schd_setup_hw(hdev); in hclge_tm_init_hw()
1683 ret = hclge_pause_setup_hw(hdev, init); in hclge_tm_init_hw()
1690 int hclge_tm_schd_init(struct hclge_dev *hdev) in hclge_tm_schd_init() argument
1693 hdev->tm_info.fc_mode = HCLGE_FC_FULL; in hclge_tm_schd_init()
1694 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_tm_schd_init()
1696 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE && in hclge_tm_schd_init()
1697 hdev->tm_info.num_pg != 1) in hclge_tm_schd_init()
1700 hclge_tm_schd_info_init(hdev); in hclge_tm_schd_init()
1701 hclge_dscp_to_prio_map_init(hdev); in hclge_tm_schd_init()
1703 return hclge_tm_init_hw(hdev, true); in hclge_tm_schd_init()
1706 int hclge_tm_vport_map_update(struct hclge_dev *hdev) in hclge_tm_vport_map_update() argument
1708 struct hclge_vport *vport = hdev->vport; in hclge_tm_vport_map_update()
1713 ret = hclge_vport_q_to_qs_map(hdev, vport); in hclge_tm_vport_map_update()
1717 if (hdev->tm_info.num_tc == 1 && !hdev->tm_info.pfc_en) in hclge_tm_vport_map_update()
1720 return hclge_tm_bp_setup(hdev); in hclge_tm_vport_map_update()
1723 int hclge_tm_get_qset_num(struct hclge_dev *hdev, u16 *qset_num) in hclge_tm_get_qset_num() argument
1729 if (hdev->ae_dev->dev_version <= HNAE3_DEVICE_VERSION_V2) { in hclge_tm_get_qset_num()
1731 *qset_num = HCLGE_TM_PF_MAX_QSET_NUM + pci_num_vf(hdev->pdev); in hclge_tm_get_qset_num()
1736 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_num()
1738 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_num()
1748 int hclge_tm_get_pri_num(struct hclge_dev *hdev, u8 *pri_num) in hclge_tm_get_pri_num() argument
1754 if (hdev->ae_dev->dev_version <= HNAE3_DEVICE_VERSION_V2) { in hclge_tm_get_pri_num()
1760 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_num()
1762 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_num()
1772 int hclge_tm_get_qset_map_pri(struct hclge_dev *hdev, u16 qset_id, u8 *priority, in hclge_tm_get_qset_map_pri() argument
1782 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_map_pri()
1784 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_map_pri()
1794 int hclge_tm_get_qset_sch_mode(struct hclge_dev *hdev, u16 qset_id, u8 *mode) in hclge_tm_get_qset_sch_mode() argument
1803 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_sch_mode()
1805 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_sch_mode()
1814 int hclge_tm_get_qset_weight(struct hclge_dev *hdev, u16 qset_id, u8 *weight) in hclge_tm_get_qset_weight() argument
1823 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_weight()
1825 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_weight()
1834 int hclge_tm_get_qset_shaper(struct hclge_dev *hdev, u16 qset_id, in hclge_tm_get_qset_shaper() argument
1845 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_shaper()
1847 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_shaper()
1864 int hclge_tm_get_pri_sch_mode(struct hclge_dev *hdev, u8 pri_id, u8 *mode) in hclge_tm_get_pri_sch_mode() argument
1873 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_sch_mode()
1875 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_sch_mode()
1884 int hclge_tm_get_pri_weight(struct hclge_dev *hdev, u8 pri_id, u8 *weight) in hclge_tm_get_pri_weight() argument
1893 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_weight()
1895 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_weight()
1904 int hclge_tm_get_pri_shaper(struct hclge_dev *hdev, u8 pri_id, in hclge_tm_get_pri_shaper() argument
1920 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_shaper()
1922 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_shaper()
1939 int hclge_tm_get_q_to_qs_map(struct hclge_dev *hdev, u16 q_id, u16 *qset_id) in hclge_tm_get_q_to_qs_map() argument
1950 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_q_to_qs_map()
1952 dev_err(&hdev->pdev->dev, in hclge_tm_get_q_to_qs_map()
1977 int hclge_tm_get_q_to_tc(struct hclge_dev *hdev, u16 q_id, u8 *tc_id) in hclge_tm_get_q_to_tc() argument
1988 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_q_to_tc()
1990 dev_err(&hdev->pdev->dev, in hclge_tm_get_q_to_tc()
1999 int hclge_tm_get_pg_to_pri_map(struct hclge_dev *hdev, u8 pg_id, in hclge_tm_get_pg_to_pri_map() argument
2009 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_to_pri_map()
2011 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_to_pri_map()
2020 int hclge_tm_get_pg_weight(struct hclge_dev *hdev, u8 pg_id, u8 *weight) in hclge_tm_get_pg_weight() argument
2029 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_weight()
2031 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_weight()
2040 int hclge_tm_get_pg_sch_mode(struct hclge_dev *hdev, u8 pg_id, u8 *mode) in hclge_tm_get_pg_sch_mode() argument
2047 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_sch_mode()
2049 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_sch_mode()
2058 int hclge_tm_get_pg_shaper(struct hclge_dev *hdev, u8 pg_id, in hclge_tm_get_pg_shaper() argument
2074 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_shaper()
2076 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_shaper()
2093 int hclge_tm_get_port_shaper(struct hclge_dev *hdev, in hclge_tm_get_port_shaper() argument
2102 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_port_shaper()
2104 dev_err(&hdev->pdev->dev, in hclge_tm_get_port_shaper()
2122 int hclge_tm_flush_cfg(struct hclge_dev *hdev, bool enable) in hclge_tm_flush_cfg() argument
2127 if (!hnae3_ae_dev_tm_flush_supported(hdev)) in hclge_tm_flush_cfg()
2134 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_flush_cfg()
2136 dev_err(&hdev->pdev->dev, in hclge_tm_flush_cfg()
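The hclge_tm_get_* run that closes this listing mirrors the setter idiom in reverse: the descriptor is set up with is_read = true, sent, and the result parsed out of desc.data, with a dev_err on failure. A sketch of the shape; the opcode, payload word, and mask below are illustrative, not the driver's:

static int hclge_tm_get_example(struct hclge_dev *hdev, u8 id, u8 *value)
{
	struct hclge_desc desc;
	int ret;

	/* is_read = true: firmware fills desc.data on completion */
	hclge_cmd_setup_basic_desc(&desc, HCLGE_OPC_EXAMPLE_GET, true);
	desc.data[0] = cpu_to_le32(id);

	ret = hclge_cmd_send(&hdev->hw, &desc, 1);
	if (ret) {
		dev_err(&hdev->pdev->dev,
			"failed to get example cfg, ret = %d\n", ret);
		return ret;
	}

	*value = le32_to_cpu(desc.data[1]) & 0xff;
	return 0;
}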