Lines matching refs:vsi in drivers/net/ethernet/intel/ice/ice_arfs.c (ice driver aRFS support)
10 static bool ice_is_arfs_active(struct ice_vsi *vsi) in ice_is_arfs_active() argument
12 return !!vsi->arfs_fltr_list; in ice_is_arfs_active()
28 struct ice_vsi *vsi; in ice_is_arfs_using_perfect_flow() local
30 vsi = ice_get_main_vsi(pf); in ice_is_arfs_using_perfect_flow()
31 if (!vsi) in ice_is_arfs_using_perfect_flow()
34 arfs_fltr_cntrs = vsi->arfs_fltr_cntrs; in ice_is_arfs_using_perfect_flow()
59 ice_arfs_update_active_fltr_cntrs(struct ice_vsi *vsi, in ice_arfs_update_active_fltr_cntrs() argument
62 struct ice_arfs_active_fltr_cntrs *fltr_cntrs = vsi->arfs_fltr_cntrs; in ice_arfs_update_active_fltr_cntrs()
90 dev_err(ice_pf_to_dev(vsi->back), "aRFS: Failed to update filter counters, invalid filter type %d\n", in ice_arfs_update_active_fltr_cntrs()
105 ice_arfs_del_flow_rules(struct ice_vsi *vsi, struct hlist_head *del_list_head) in ice_arfs_del_flow_rules() argument
111 dev = ice_pf_to_dev(vsi->back); in ice_arfs_del_flow_rules()
116 result = ice_fdir_write_fltr(vsi->back, &e->fltr_info, false, in ice_arfs_del_flow_rules()
119 ice_arfs_update_active_fltr_cntrs(vsi, e, false); in ice_arfs_del_flow_rules()
142 ice_arfs_add_flow_rules(struct ice_vsi *vsi, struct hlist_head *add_list_head) in ice_arfs_add_flow_rules() argument
148 dev = ice_pf_to_dev(vsi->back); in ice_arfs_add_flow_rules()
153 result = ice_fdir_write_fltr(vsi->back, in ice_arfs_add_flow_rules()
157 ice_arfs_update_active_fltr_cntrs(vsi, ep->arfs_entry, in ice_arfs_add_flow_rules()
181 ice_arfs_is_flow_expired(struct ice_vsi *vsi, struct ice_arfs_entry *arfs_entry) in ice_arfs_is_flow_expired() argument
184 if (rps_may_expire_flow(vsi->netdev, arfs_entry->fltr_info.q_index, in ice_arfs_is_flow_expired()
214 ice_arfs_update_flow_rules(struct ice_vsi *vsi, u16 idx, in ice_arfs_update_flow_rules() argument
222 dev = ice_pf_to_dev(vsi->back); in ice_arfs_update_flow_rules()
225 hlist_for_each_entry_safe(e, n, &vsi->arfs_fltr_list[idx], list_entry) in ice_arfs_update_flow_rules()
245 if (ice_arfs_is_flow_expired(vsi, e)) { in ice_arfs_update_flow_rules()
300 ice_arfs_build_entry(struct ice_vsi *vsi, const struct flow_keys *fk, in ice_arfs_build_entry() argument
307 arfs_entry = devm_kzalloc(ice_pf_to_dev(vsi->back), in ice_arfs_build_entry()
316 fltr_info->dest_vsi = vsi->idx; in ice_arfs_build_entry()
343 atomic_inc_return(vsi->arfs_last_fltr_id) % RPS_NO_FILTER; in ice_arfs_build_entry()
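
The ice_arfs_build_entry() lines above assign each new aRFS entry its filter ID via atomic_inc_return(vsi->arfs_last_fltr_id) % RPS_NO_FILTER, so the counter wraps and never hands out the reserved "no filter" value itself. Below is a minimal user-space sketch of that wrap-around ID scheme; the NO_FILTER_SENTINEL constant and next_filter_id() helper are illustrative stand-ins, not driver code.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for RPS_NO_FILTER: a value reserved to mean
 * "no filter assigned", so real IDs must never equal it. */
#define NO_FILTER_SENTINEL 0xffffu

static atomic_uint last_fltr_id;

/* Yields IDs in [0, NO_FILTER_SENTINEL - 1]; the modulo keeps the
 * wrapping counter from ever producing the sentinel. */
static uint16_t next_filter_id(void)
{
	return (uint16_t)(atomic_fetch_add(&last_fltr_id, 1) % NO_FILTER_SENTINEL);
}

int main(void)
{
	for (int i = 0; i < 5; i++)
		printf("filter id %d: %u\n", i, next_filter_id());
	return 0;
}
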
401 struct ice_vsi *vsi = np->vsi; in ice_rx_flow_steer() local
410 if (unlikely(!vsi->arfs_fltr_list)) in ice_rx_flow_steer()
413 pf = vsi->back; in ice_rx_flow_steer()
440 spin_lock_bh(&vsi->arfs_lock); in ice_rx_flow_steer()
441 hlist_for_each_entry(arfs_entry, &vsi->arfs_fltr_list[idx], in ice_rx_flow_steer()
459 ice_arfs_update_active_fltr_cntrs(vsi, arfs_entry, false); in ice_rx_flow_steer()
463 arfs_entry = ice_arfs_build_entry(vsi, &fk, rxq_idx, flow_id); in ice_rx_flow_steer()
471 hlist_add_head(&arfs_entry->list_entry, &vsi->arfs_fltr_list[idx]); in ice_rx_flow_steer()
475 spin_unlock_bh(&vsi->arfs_lock); in ice_rx_flow_steer()
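
The ice_rx_flow_steer() lines above show the per-VSI pattern: take arfs_lock, scan one bucket of arfs_fltr_list for the flow, reuse the existing entry if found, otherwise build a new one and link it at the bucket head, then unlock. Below is a simplified user-space analog of that search-or-insert-under-a-lock step, using invented struct flow_entry and steer_flow() names rather than the driver's types.

#include <pthread.h>
#include <stdint.h>
#include <stdlib.h>

#define NUM_BUCKETS 64			/* hypothetical stand-in for ICE_MAX_ARFS_LIST */

struct flow_entry {
	uint32_t flow_id;		/* key used to match an existing entry */
	uint16_t queue;			/* steering target, updated on a re-steer */
	struct flow_entry *next;
};

static struct flow_entry *buckets[NUM_BUCKETS];
static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;

/* Search the flow's bucket; update the queue if the flow is already
 * tracked, otherwise allocate a new entry and link it at the head.
 * Everything happens under one lock, as in ice_rx_flow_steer(). */
static int steer_flow(uint32_t flow_id, uint16_t queue)
{
	unsigned int idx = flow_id % NUM_BUCKETS;
	struct flow_entry *e;
	int ret = 0;

	pthread_mutex_lock(&table_lock);
	for (e = buckets[idx]; e; e = e->next) {
		if (e->flow_id == flow_id) {
			e->queue = queue;
			goto out;
		}
	}

	e = calloc(1, sizeof(*e));
	if (!e) {
		ret = -1;
		goto out;
	}
	e->flow_id = flow_id;
	e->queue = queue;
	e->next = buckets[idx];
	buckets[idx] = e;
out:
	pthread_mutex_unlock(&table_lock);
	return ret;
}

int main(void)
{
	return steer_flow(0x1234, 3);
}
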
483 static int ice_init_arfs_cntrs(struct ice_vsi *vsi) in ice_init_arfs_cntrs() argument
485 if (!vsi || vsi->type != ICE_VSI_PF) in ice_init_arfs_cntrs()
488 vsi->arfs_fltr_cntrs = kzalloc(sizeof(*vsi->arfs_fltr_cntrs), in ice_init_arfs_cntrs()
490 if (!vsi->arfs_fltr_cntrs) in ice_init_arfs_cntrs()
493 vsi->arfs_last_fltr_id = kzalloc(sizeof(*vsi->arfs_last_fltr_id), in ice_init_arfs_cntrs()
495 if (!vsi->arfs_last_fltr_id) { in ice_init_arfs_cntrs()
496 kfree(vsi->arfs_fltr_cntrs); in ice_init_arfs_cntrs()
497 vsi->arfs_fltr_cntrs = NULL; in ice_init_arfs_cntrs()
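
The ice_init_arfs_cntrs() lines above make two small allocations and, if the second fails, free and NULL the first so the VSI is never left half-initialized. Below is a compact user-space sketch of that all-or-nothing rollback pattern, with placeholder struct fake_vsi and init_cntrs() names.

#include <stdlib.h>

struct fltr_cntrs { int active_tcp, active_udp; };	/* placeholder fields */

struct fake_vsi {
	struct fltr_cntrs *fltr_cntrs;
	unsigned int *last_fltr_id;
};

/* Allocate both members or neither: if the second allocation fails,
 * undo the first so callers see an all-or-nothing result. */
static int init_cntrs(struct fake_vsi *vsi)
{
	vsi->fltr_cntrs = calloc(1, sizeof(*vsi->fltr_cntrs));
	if (!vsi->fltr_cntrs)
		return -1;

	vsi->last_fltr_id = calloc(1, sizeof(*vsi->last_fltr_id));
	if (!vsi->last_fltr_id) {
		free(vsi->fltr_cntrs);
		vsi->fltr_cntrs = NULL;
		return -1;
	}
	return 0;
}

int main(void)
{
	struct fake_vsi vsi = { 0 };

	return init_cntrs(&vsi);
}
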
508 void ice_init_arfs(struct ice_vsi *vsi) in ice_init_arfs() argument
513 if (!vsi || vsi->type != ICE_VSI_PF) in ice_init_arfs()
521 if (ice_init_arfs_cntrs(vsi)) in ice_init_arfs()
527 spin_lock_init(&vsi->arfs_lock); in ice_init_arfs()
529 vsi->arfs_fltr_list = arfs_fltr_list; in ice_init_arfs()
541 void ice_clear_arfs(struct ice_vsi *vsi) in ice_clear_arfs() argument
546 if (!vsi || vsi->type != ICE_VSI_PF || !vsi->back || in ice_clear_arfs()
547 !vsi->arfs_fltr_list) in ice_clear_arfs()
550 dev = ice_pf_to_dev(vsi->back); in ice_clear_arfs()
555 spin_lock_bh(&vsi->arfs_lock); in ice_clear_arfs()
556 hlist_for_each_entry_safe(r, n, &vsi->arfs_fltr_list[i], in ice_clear_arfs()
561 spin_unlock_bh(&vsi->arfs_lock); in ice_clear_arfs()
564 kfree(vsi->arfs_fltr_list); in ice_clear_arfs()
565 vsi->arfs_fltr_list = NULL; in ice_clear_arfs()
566 kfree(vsi->arfs_last_fltr_id); in ice_clear_arfs()
567 vsi->arfs_last_fltr_id = NULL; in ice_clear_arfs()
568 kfree(vsi->arfs_fltr_cntrs); in ice_clear_arfs()
569 vsi->arfs_fltr_cntrs = NULL; in ice_clear_arfs()
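
The ice_clear_arfs() lines above drain every bucket with the safe hlist iterator (entries are unlinked and freed while walking, with arfs_lock taken per bucket), then free the list array, the last-filter-id counter, and the counter struct, NULLing each pointer. Below is a simplified user-space version of the bucket-drain part of that teardown; struct fake_vsi, clear_flows(), and the singly linked list stand in for the driver's types.

#include <pthread.h>
#include <stdlib.h>

#define NUM_BUCKETS 64			/* hypothetical stand-in for ICE_MAX_ARFS_LIST */

struct flow_entry { struct flow_entry *next; };

struct fake_vsi {
	struct flow_entry **buckets;	/* array of NUM_BUCKETS list heads */
	pthread_mutex_t lock;
};

/* Free every entry in every bucket under the lock, then release the
 * bucket array itself and NULL the pointer so a later clear is a no-op. */
static void clear_flows(struct fake_vsi *vsi)
{
	if (!vsi->buckets)
		return;

	for (unsigned int i = 0; i < NUM_BUCKETS; i++) {
		struct flow_entry *e, *next;

		pthread_mutex_lock(&vsi->lock);
		for (e = vsi->buckets[i]; e; e = next) {
			next = e->next;	/* remember the successor before freeing */
			free(e);
		}
		vsi->buckets[i] = NULL;
		pthread_mutex_unlock(&vsi->lock);
	}

	free(vsi->buckets);
	vsi->buckets = NULL;
}

int main(void)
{
	struct fake_vsi vsi = { 0 };

	vsi.buckets = calloc(NUM_BUCKETS, sizeof(*vsi.buckets));
	pthread_mutex_init(&vsi.lock, NULL);
	clear_flows(&vsi);
	return 0;
}
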
576 void ice_free_cpu_rx_rmap(struct ice_vsi *vsi) in ice_free_cpu_rx_rmap() argument
580 if (!vsi || vsi->type != ICE_VSI_PF) in ice_free_cpu_rx_rmap()
583 netdev = vsi->netdev; in ice_free_cpu_rx_rmap()
595 int ice_set_cpu_rx_rmap(struct ice_vsi *vsi) in ice_set_cpu_rx_rmap() argument
601 if (!vsi || vsi->type != ICE_VSI_PF) in ice_set_cpu_rx_rmap()
604 pf = vsi->back; in ice_set_cpu_rx_rmap()
605 netdev = vsi->netdev; in ice_set_cpu_rx_rmap()
606 if (!pf || !netdev || !vsi->num_q_vectors) in ice_set_cpu_rx_rmap()
610 vsi->type, netdev->name, vsi->num_q_vectors); in ice_set_cpu_rx_rmap()
612 netdev->rx_cpu_rmap = alloc_irq_cpu_rmap(vsi->num_q_vectors); in ice_set_cpu_rx_rmap()
616 ice_for_each_q_vector(vsi, i) in ice_set_cpu_rx_rmap()
618 vsi->q_vectors[i]->irq.virq)) { in ice_set_cpu_rx_rmap()
619 ice_free_cpu_rx_rmap(vsi); in ice_set_cpu_rx_rmap()
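
The ice_set_cpu_rx_rmap() lines above allocate a CPU reverse-map sized to vsi->num_q_vectors, register each queue vector's Linux IRQ with it, and on any registration failure call the same ice_free_cpu_rx_rmap() teardown used during normal shutdown, which copes with the partially built state. Below is a generic user-space sketch of that register-all-or-undo shape, with invented fake_vector, register_vector(), and free_rx_rmap() helpers.

#include <stdlib.h>

/* Hypothetical per-queue-vector state; the real driver stores an IRQ
 * number per vector and registers it in a CPU reverse-map. */
struct fake_vector { int irq; int registered; };

struct fake_vsi {
	struct fake_vector *vectors;
	int num_vectors;
};

/* Stand-in for per-vector registration; pretend it can fail. */
static int register_vector(struct fake_vector *v)
{
	v->registered = 1;
	return 0;
}

/* Teardown used on both the error path and normal shutdown: it copes
 * with partially registered state, mirroring ice_free_cpu_rx_rmap(). */
static void free_rx_rmap(struct fake_vsi *vsi)
{
	for (int i = 0; i < vsi->num_vectors; i++)
		vsi->vectors[i].registered = 0;
}

static int set_rx_rmap(struct fake_vsi *vsi)
{
	for (int i = 0; i < vsi->num_vectors; i++) {
		if (register_vector(&vsi->vectors[i])) {
			free_rx_rmap(vsi);	/* undo whatever did register */
			return -1;
		}
	}
	return 0;
}

int main(void)
{
	struct fake_vector vecs[4] = { 0 };
	struct fake_vsi vsi = { .vectors = vecs, .num_vectors = 4 };

	return set_rx_rmap(&vsi);
}
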