Lines Matching refs:tqueue (drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c)

640 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in dma_free_tx_skbufs() local
641 tx_free_ring_skbufs(tqueue); in dma_free_tx_skbufs()
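The first pair of hits (lines 640-641) is the teardown path: dma_free_tx_skbufs() visits every TX queue and delegates the per-ring skb release to tx_free_ring_skbufs(). A minimal sketch of that shape, assuming a SXGBE_TX_QUEUES loop bound (the bound is not visible in these hits):

static void dma_free_tx_skbufs(struct sxgbe_priv_data *priv)
{
        int queue_num;

        /* Free any skbs still held by each TX ring. Only the two
         * lines inside the loop appear in the listing above;
         * SXGBE_TX_QUEUES is an assumed loop bound.
         */
        for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
                struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

                tx_free_ring_skbufs(tqueue);
        }
}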
735 static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue) in sxgbe_tx_queue_clean() argument
737 struct sxgbe_priv_data *priv = tqueue->priv_ptr; in sxgbe_tx_queue_clean()
740 u8 queue_no = tqueue->queue_no; in sxgbe_tx_queue_clean()
747 while (tqueue->dirty_tx != tqueue->cur_tx) { in sxgbe_tx_queue_clean()
748 unsigned int entry = tqueue->dirty_tx % tx_rsize; in sxgbe_tx_queue_clean()
749 struct sk_buff *skb = tqueue->tx_skbuff[entry]; in sxgbe_tx_queue_clean()
752 p = tqueue->dma_tx + entry; in sxgbe_tx_queue_clean()
760 __func__, tqueue->cur_tx, tqueue->dirty_tx); in sxgbe_tx_queue_clean()
762 if (likely(tqueue->tx_skbuff_dma[entry])) { in sxgbe_tx_queue_clean()
764 tqueue->tx_skbuff_dma[entry], in sxgbe_tx_queue_clean()
767 tqueue->tx_skbuff_dma[entry] = 0; in sxgbe_tx_queue_clean()
772 tqueue->tx_skbuff[entry] = NULL; in sxgbe_tx_queue_clean()
777 tqueue->dirty_tx++; in sxgbe_tx_queue_clean()
782 sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv))) { in sxgbe_tx_queue_clean()
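The sxgbe_tx_queue_clean() hits (lines 735-782) show the TX completion loop: dirty_tx chases cur_tx around the ring, each finished slot has its DMA mapping released and its skb freed, and the netdev queue is woken once enough descriptors are reclaimed. A condensed sketch of that loop; desc_get_tx_len() is a hypothetical stand-in for the driver's descriptor-ops length read, and fields outside the listing (priv->dev, priv->device, priv->dma_tx_size) are assumptions:

static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue)
{
        struct sxgbe_priv_data *priv = tqueue->priv_ptr;
        unsigned int tx_rsize = priv->dma_tx_size;      /* assumed field */
        struct netdev_queue *dev_txq = netdev_get_tx_queue(priv->dev,
                                                           tqueue->queue_no);

        while (tqueue->dirty_tx != tqueue->cur_tx) {
                /* Fold the free-running dirty_tx counter into a slot index. */
                unsigned int entry = tqueue->dirty_tx % tx_rsize;
                struct sk_buff *skb = tqueue->tx_skbuff[entry];

                if (likely(tqueue->tx_skbuff_dma[entry])) {
                        /* desc_get_tx_len() is hypothetical; the driver
                         * reads the mapped length back from the descriptor.
                         */
                        dma_unmap_single(priv->device,
                                         tqueue->tx_skbuff_dma[entry],
                                         desc_get_tx_len(tqueue->dma_tx + entry),
                                         DMA_TO_DEVICE);
                        tqueue->tx_skbuff_dma[entry] = 0;
                }

                /* Fragment slots carry a NULL skb; only the head frees it. */
                if (likely(skb)) {
                        dev_kfree_skb(skb);
                        tqueue->tx_skbuff[entry] = NULL;
                }

                tqueue->dirty_tx++;
        }

        /* Wake the queue once enough descriptors are free again (line 782). */
        if (unlikely(netif_tx_queue_stopped(dev_txq) &&
                     sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv)))
                netif_tx_wake_queue(dev_txq);
}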
801 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in sxgbe_tx_all_clean() local
803 sxgbe_tx_queue_clean(tqueue); in sxgbe_tx_all_clean()
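sxgbe_tx_all_clean() (lines 801-803) is the all-queues wrapper around the per-queue cleaner above. A sketch, again assuming a SXGBE_TX_QUEUES bound:

static void sxgbe_tx_all_clean(struct sxgbe_priv_data *priv)
{
        u8 queue_num;

        for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
                struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

                sxgbe_tx_queue_clean(tqueue);
        }
}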
1283 struct sxgbe_tx_queue *tqueue = priv->txq[txq_index]; in sxgbe_xmit() local
1295 if (unlikely(skb_is_gso(skb) && tqueue->prev_mss != cur_mss)) in sxgbe_xmit()
1300 tqueue->hwts_tx_en))) in sxgbe_xmit()
1306 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) { in sxgbe_xmit()
1315 entry = tqueue->cur_tx % tx_rsize; in sxgbe_xmit()
1316 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1323 tqueue->tx_skbuff[entry] = skb; in sxgbe_xmit()
1328 if (unlikely(tqueue->prev_mss != cur_mss)) { in sxgbe_xmit()
1340 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1341 first_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1343 tqueue->prev_mss = cur_mss; in sxgbe_xmit()
1362 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1363 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1367 tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01; in sxgbe_xmit()
1368 tqueue->tx_skbuff[entry] = NULL; in sxgbe_xmit()
1386 tqueue->tx_count_frames += nr_frags + 1; in sxgbe_xmit()
1387 if (tqueue->tx_count_frames > tqueue->tx_coal_frames) { in sxgbe_xmit()
1390 mod_timer(&tqueue->txtimer, in sxgbe_xmit()
1391 SXGBE_COAL_TIMER(tqueue->tx_coal_timer)); in sxgbe_xmit()
1393 tqueue->tx_count_frames = 0; in sxgbe_xmit()
1402 tqueue->cur_tx++; in sxgbe_xmit()
1406 __func__, tqueue->cur_tx % tx_rsize, in sxgbe_xmit()
1407 tqueue->dirty_tx % tx_rsize, entry, in sxgbe_xmit()
1410 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1))) { in sxgbe_xmit()
1419 tqueue->hwts_tx_en)) { in sxgbe_xmit()
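The remaining hits (lines 1283-1419) are the hot path, sxgbe_xmit(). Read together they show the ring-index arithmetic (cur_tx % tx_rsize), the TSO context descriptor emitted when the GSO MSS differs from prev_mss, per-frame interrupt coalescing via tx_count_frames/tx_coal_frames and the txtimer, and the back-pressure stop when fewer than MAX_SKB_FRAGS + 1 slots remain. A condensed sketch of that control flow; descriptor mapping, the TSO context write, and the hwts_tx_en timestamp path are reduced to comments, and field names outside the listing (priv->dma_tx_size) are assumptions:

static netdev_tx_t sxgbe_xmit(struct sk_buff *skb, struct net_device *dev)
{
        struct sxgbe_priv_data *priv = netdev_priv(dev);
        u16 txq_index = skb_get_queue_mapping(skb);
        struct sxgbe_tx_queue *tqueue = priv->txq[txq_index];
        unsigned int tx_rsize = priv->dma_tx_size;      /* assumed field */
        int nr_frags = skb_shinfo(skb)->nr_frags;
        unsigned int entry;

        /* Back-pressure: refuse the skb if head + frags cannot fit (line 1306). */
        if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) {
                netif_tx_stop_queue(netdev_get_tx_queue(dev, txq_index));
                return NETDEV_TX_BUSY;
        }

        entry = tqueue->cur_tx % tx_rsize;      /* slot for the head (line 1315) */
        tqueue->tx_skbuff[entry] = skb;

        /* For GSO skbs whose MSS differs from prev_mss, a context
         * descriptor is written first and cur_tx advances past it
         * (lines 1328-1343).
         */

        /* Each fragment consumes one more slot:
         *      entry = (++tqueue->cur_tx) % tx_rsize;
         * with the DMA address recorded in tx_skbuff_dma[entry] and
         * tx_skbuff[entry] left NULL so cleanup frees the skb once
         * (lines 1362-1368).
         */

        /* Interrupt coalescing: only take a completion interrupt every
         * tx_coal_frames frames; the txtimer backstops reclamation
         * (lines 1386-1393).
         */
        tqueue->tx_count_frames += nr_frags + 1;
        if (tqueue->tx_count_frames > tqueue->tx_coal_frames) {
                mod_timer(&tqueue->txtimer,
                          SXGBE_COAL_TIMER(tqueue->tx_coal_timer));
                tqueue->tx_count_frames = 0;
        }

        tqueue->cur_tx++;       /* publish the last descriptor (line 1402) */

        /* Stop the queue while fewer than MAX_SKB_FRAGS + 1 slots remain,
         * so a worst-case skb can never overrun the ring (line 1410).
         */
        if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= MAX_SKB_FRAGS + 1))
                netif_tx_stop_queue(netdev_get_tx_queue(dev, txq_index));

        return NETDEV_TX_OK;
}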