Searched refs:netdev_txq (Results 1 – 7 of 7) sorted by relevance
/openbmc/linux/drivers/net/ethernet/huawei/hinic/
hinic_tx.c
    495  struct netdev_queue *netdev_txq;  in hinic_lb_xmit_frame() local
    537  netdev_txq = netdev_get_tx_queue(netdev, q_id);  in hinic_lb_xmit_frame()
    538  if ((!netdev_xmit_more()) || (netif_xmit_stopped(netdev_txq)))  in hinic_lb_xmit_frame()
    556  struct netdev_queue *netdev_txq;  in hinic_xmit_frame() local
    626  netdev_txq = netdev_get_tx_queue(netdev, q_id);  in hinic_xmit_frame()
    627  if ((!netdev_xmit_more()) || (netif_xmit_stopped(netdev_txq)))  in hinic_xmit_frame()
    702  struct netdev_queue *netdev_txq;  in free_tx_poll() local
    746  netdev_txq = netdev_get_tx_queue(txq->netdev, qp->q_id);  in free_tx_poll()
    748  __netif_tx_lock(netdev_txq, smp_processor_id());  in free_tx_poll()
    752  __netif_tx_unlock(netdev_txq);  in free_tx_poll()
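The hinic hits show the usual "batched doorbell" test on the transmit path: the driver only kicks the hardware when the stack signals no further packets are pending (netdev_xmit_more()) or the software queue has been stopped. A minimal sketch of that check, assuming a hypothetical my_hw_ring_doorbell() and a q_id for the hardware send queue:

    #include <linux/netdevice.h>

    static void my_hw_ring_doorbell(u16 q_id);	/* hypothetical HW write */

    static void my_flush_tx(struct net_device *netdev, u16 q_id)
    {
    	struct netdev_queue *netdev_txq;

    	netdev_txq = netdev_get_tx_queue(netdev, q_id);

    	/* Kick the NIC only when no further skbs are batched behind
    	 * this one, or when the stack already stopped the queue. */
    	if (!netdev_xmit_more() || netif_xmit_stopped(netdev_txq))
    		my_hw_ring_doorbell(q_id);
    }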
/openbmc/linux/drivers/net/ethernet/qlogic/qede/
qede_fp.c
    435   struct netdev_queue *netdev_txq;  in qede_tx_int() local
    439   netdev_txq = netdev_get_tx_queue(edev->ndev, txq->ndev_txq_id);  in qede_tx_int()
    461   netdev_tx_completed_queue(netdev_txq, pkts_compl, bytes_compl);  in qede_tx_int()
    474   if (unlikely(netif_tx_queue_stopped(netdev_txq))) {  in qede_tx_int()
    485   __netif_tx_lock(netdev_txq, smp_processor_id());  in qede_tx_int()
    487   if ((netif_tx_queue_stopped(netdev_txq)) &&  in qede_tx_int()
    491   netif_tx_wake_queue(netdev_txq);  in qede_tx_int()
    496   __netif_tx_unlock(netdev_txq);  in qede_tx_int()
    1481  struct netdev_queue *netdev_txq;  in qede_start_xmit() local
    1500  netdev_txq = netdev_get_tx_queue(ndev, txq_index);  in qede_start_xmit()
    [all …]
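The qede_tx_int() hits follow the standard completion-side sequence: report freed packets and bytes to BQL with netdev_tx_completed_queue(), then, if the queue had been stopped, re-check ring occupancy under the per-queue xmit lock before waking it. A sketch of that sequence, with my_ring_has_room() as a hypothetical occupancy check:

    #include <linux/netdevice.h>

    static bool my_ring_has_room(void);	/* hypothetical: enough free descriptors */

    static void my_tx_int(struct net_device *ndev, u16 txq_id,
    		      unsigned int pkts_compl, unsigned int bytes_compl)
    {
    	struct netdev_queue *netdev_txq = netdev_get_tx_queue(ndev, txq_id);

    	/* Let BQL account for the completed work. */
    	netdev_tx_completed_queue(netdev_txq, pkts_compl, bytes_compl);

    	if (unlikely(netif_tx_queue_stopped(netdev_txq))) {
    		/* Re-check under the tx lock so the xmit path cannot
    		 * race a stop against this wake. */
    		__netif_tx_lock(netdev_txq, smp_processor_id());
    		if (netif_tx_queue_stopped(netdev_txq) && my_ring_has_room())
    			netif_tx_wake_queue(netdev_txq);
    		__netif_tx_unlock(netdev_txq);
    	}
    }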
qede_main.c
    1791  struct netdev_queue *netdev_txq;  in qede_empty_tx_queue() local
    1794  netdev_txq = netdev_get_tx_queue(edev->ndev, txq->ndev_txq_id);  in qede_empty_tx_queue()
    1818  netdev_tx_completed_queue(netdev_txq, pkts_compl, bytes_compl);  in qede_empty_tx_queue()
    2809  struct netdev_queue *netdev_txq;  in qede_is_txq_full() local
    2811  netdev_txq = netdev_get_tx_queue(edev->ndev, txq->ndev_txq_id);  in qede_is_txq_full()
    2812  if (netif_xmit_stopped(netdev_txq))  in qede_is_txq_full()
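qede_main.c uses the same queue lookup for two smaller jobs: treating a stack-stopped queue as "full", and reporting everything freed while a ring is drained so BQL accounting stays consistent. A rough sketch, with hypothetical names:

    #include <linux/netdevice.h>

    static bool my_txq_is_full(struct net_device *ndev, u16 txq_id)
    {
    	/* If the stack stopped the queue, report it as full. */
    	return netif_xmit_stopped(netdev_get_tx_queue(ndev, txq_id));
    }

    static void my_empty_txq(struct net_device *ndev, u16 txq_id,
    			 unsigned int pkts_compl, unsigned int bytes_compl)
    {
    	/* Account the drained packets/bytes to BQL during teardown. */
    	netdev_tx_completed_queue(netdev_get_tx_queue(ndev, txq_id),
    				  pkts_compl, bytes_compl);
    }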
/openbmc/linux/drivers/net/ethernet/google/gve/
gve_tx_dqo.c
    267   tx->netdev_txq = netdev_get_tx_queue(priv->dev, idx);  in gve_tx_alloc_ring_dqo()
    380   netdev_tx_reset_queue(tx->netdev_txq);  in gve_tx_free_rings_dqo()
    420   netif_tx_stop_queue(tx->netdev_txq);  in gve_maybe_stop_tx_dqo()
    433   netif_tx_start_queue(tx->netdev_txq);  in gve_maybe_stop_tx_dqo()
    919   netdev_tx_sent_queue(tx->netdev_txq, skb->len);  in gve_try_tx_skb()
    945   if (!netif_xmit_stopped(tx->netdev_txq) && netdev_xmit_more())  in gve_tx_dqo()
    1247  netdev_tx_completed_queue(tx->netdev_txq,  in gve_clean_tx_done_dqo()
    1274  if (netif_tx_queue_stopped(tx->netdev_txq) &&  in gve_tx_poll_dqo()
    1277  netif_tx_wake_queue(tx->netdev_txq);  in gve_tx_poll_dqo()
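gve_maybe_stop_tx_dqo() illustrates the stop/re-check/restart idiom for TX flow control: stop the queue when the ring looks full, check again in case a concurrent completion just freed space, and restart if it did. A sketch under those assumptions, with my_ring_can_fit() and struct my_tx as hypothetical stand-ins:

    #include <linux/errno.h>
    #include <linux/netdevice.h>

    struct my_tx {
    	struct netdev_queue *netdev_txq;	/* cached at ring setup */
    };

    static bool my_ring_can_fit(struct my_tx *tx, int needed_descs);	/* hypothetical */

    static int my_maybe_stop_tx(struct my_tx *tx, int needed_descs)
    {
    	if (likely(my_ring_can_fit(tx, needed_descs)))
    		return 0;

    	netif_tx_stop_queue(tx->netdev_txq);

    	/* A completion may have freed descriptors after the first check;
    	 * re-check before giving up so the queue is not left stopped. */
    	if (!my_ring_can_fit(tx, needed_descs))
    		return -EBUSY;

    	netif_tx_start_queue(tx->netdev_txq);
    	return 0;
    }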
gve_tx.c
    211  netdev_tx_reset_queue(tx->netdev_txq);  in gve_tx_free_ring()
    284  tx->netdev_txq = netdev_get_tx_queue(priv->dev, idx);  in gve_tx_alloc_ring()
    437  netif_tx_stop_queue(tx->netdev_txq);  in gve_maybe_stop_tx()
    709  netdev_tx_sent_queue(tx->netdev_txq, skb->len);  in gve_tx()
    716  if (!netif_xmit_stopped(tx->netdev_txq) && netdev_xmit_more())  in gve_tx()
    868  netdev_tx_completed_queue(tx->netdev_txq, pkts, bytes);  in gve_clean_tx_done()
    875  if (try_to_wake && netif_tx_queue_stopped(tx->netdev_txq) &&  in gve_clean_tx_done()
    878  netif_tx_wake_queue(tx->netdev_txq);  in gve_clean_tx_done()
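Both gve TX paths keep the struct netdev_queue pointer cached in the ring (see the gve.h hit below) and run the full BQL lifecycle against it: netdev_tx_sent_queue() when a packet is posted, netdev_tx_completed_queue() when it is cleaned, and netdev_tx_reset_queue() when the ring is freed. A condensed sketch of that lifecycle, with hypothetical names:

    #include <linux/netdevice.h>

    struct my_tx {
    	struct netdev_queue *netdev_txq;
    };

    static void my_tx_alloc_ring(struct my_tx *tx, struct net_device *dev, int idx)
    {
    	/* Look the queue up once instead of per packet. */
    	tx->netdev_txq = netdev_get_tx_queue(dev, idx);
    }

    static netdev_tx_t my_xmit(struct my_tx *tx, struct sk_buff *skb)
    {
    	/* ... descriptors posted to hardware here ... */
    	netdev_tx_sent_queue(tx->netdev_txq, skb->len);

    	/* Defer the doorbell if more packets are about to follow. */
    	if (!netif_xmit_stopped(tx->netdev_txq) && netdev_xmit_more())
    		return NETDEV_TX_OK;

    	/* ... doorbell write here ... */
    	return NETDEV_TX_OK;
    }

    static void my_clean_tx_done(struct my_tx *tx, unsigned int pkts, unsigned int bytes)
    {
    	netdev_tx_completed_queue(tx->netdev_txq, pkts, bytes);
    }

    static void my_tx_free_ring(struct my_tx *tx)
    {
    	/* Clear BQL state before the ring goes away. */
    	netdev_tx_reset_queue(tx->netdev_txq);
    }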
gve.h
    553  struct netdev_queue *netdev_txq;  member
/openbmc/linux/drivers/net/ethernet/pensando/ionic/
ionic_txrx.c
    28  struct netdev_queue *netdev_txq;  in ionic_txq_poke_doorbell() local
    32  netdev_txq = netdev_get_tx_queue(netdev, q->index);  in ionic_txq_poke_doorbell()
    34  HARD_TX_LOCK(netdev, netdev_txq, smp_processor_id());  in ionic_txq_poke_doorbell()
    37  HARD_TX_UNLOCK(netdev, netdev_txq);  in ionic_txq_poke_doorbell()
    52  HARD_TX_UNLOCK(netdev, netdev_txq);  in ionic_txq_poke_doorbell()
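ionic_txq_poke_doorbell() rings the TX doorbell from outside ndo_start_xmit, so it takes the same per-queue transmit lock the stack uses, via HARD_TX_LOCK()/HARD_TX_UNLOCK(). A minimal sketch of that pattern, with my_write_doorbell() as a hypothetical hardware write:

    #include <linux/netdevice.h>

    static void my_write_doorbell(void);	/* hypothetical HW write */

    static void my_txq_poke_doorbell(struct net_device *netdev, unsigned int q_index)
    {
    	struct netdev_queue *netdev_txq;

    	netdev_txq = netdev_get_tx_queue(netdev, q_index);

    	/* Serialize against the regular xmit path on this queue. */
    	HARD_TX_LOCK(netdev, netdev_txq, smp_processor_id());
    	my_write_doorbell();
    	HARD_TX_UNLOCK(netdev, netdev_txq);
    }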