Lines matching refs:cur_tx. All hits are in the stmmac Ethernet driver, drivers/net/ethernet/stmicro/stmmac/stmmac_main.c; each entry gives the source line number, the matching line, and the enclosing function.

367 if (tx_q->dirty_tx > tx_q->cur_tx) in stmmac_tx_avail()
368 avail = tx_q->dirty_tx - tx_q->cur_tx - 1; in stmmac_tx_avail()
370 avail = priv->dma_conf.dma_tx_size - tx_q->cur_tx + tx_q->dirty_tx - 1; in stmmac_tx_avail()
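
Throughout this listing, cur_tx is the producer index (the next descriptor the driver will fill) and dirty_tx is the consumer index (the next descriptor the cleaner will reclaim). The hits at 367-370 compute the free space in the circular TX ring, keeping one slot unused so a full ring can be told apart from an empty one; the EEE check just below relies on the same pair, treating the ring as idle only once dirty_tx has caught up with cur_tx. A minimal standalone sketch of the availability computation (the harness is illustrative, not the driver's code):

    #include <stdio.h>

    /* Model of the index arithmetic in stmmac_tx_avail(): cur_tx is
     * where the producer writes next, dirty_tx is the first entry not
     * yet reclaimed, and one slot is sacrificed so "full" and "empty"
     * are distinguishable.
     */
    static unsigned int tx_avail(unsigned int cur_tx, unsigned int dirty_tx,
                                 unsigned int ring_size)
    {
        if (dirty_tx > cur_tx)
            return dirty_tx - cur_tx - 1;
        return ring_size - cur_tx + dirty_tx - 1;
    }

    int main(void)
    {
        /* 512-entry ring, producer at 510, cleaner at 5:
         * 512 - 510 + 5 - 1 = 6 free descriptors.
         */
        printf("%u\n", tx_avail(510, 5, 512));
        return 0;
    }
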
418 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_enable_eee_mode()
2425 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
2499 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_xdp_xmit_zc()
2500 entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
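
The zero-copy XDP transmit path at 2425-2500 advances the producer with STMMAC_GET_ENTRY, the wrap-around increment behind every hit in this listing. A sketch of that increment, assuming a power-of-two ring size (which is how the driver sizes its DMA rings) so the modulo reduces to a mask; GET_ENTRY here is a local stand-in, and the driver's own macro remains authoritative:

    #include <stdio.h>

    /* Wrap-around increment: with size a power of two, (x + 1) modulo
     * size is just a mask.
     */
    #define GET_ENTRY(x, size)  (((x) + 1) & ((size) - 1))

    int main(void)
    {
        unsigned int cur_tx = 511;

        /* In a 512-entry ring the producer wraps from 511 back to 0. */
        cur_tx = GET_ENTRY(cur_tx, 512);
        printf("%u\n", cur_tx);
        return 0;
    }
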
2556 while ((entry != tx_q->cur_tx) && count < priv->dma_conf.dma_tx_size) { in stmmac_tx_clean()
2700 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_tx_clean()
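
Line 2556 is the consumer side of the ring: stmmac_tx_clean() walks entry from dirty_tx toward cur_tx, bounded by one full ring, and line 2700 re-checks the pair afterward to decide whether transmit work is still pending. A skeletal model of that walk, with the completion test and the actual unmap/free stubbed out:

    #include <stdio.h>

    #define RING_SIZE           512U
    #define GET_ENTRY(x, size)  (((x) + 1) & ((size) - 1))

    /* Stand-in for "has the DMA engine finished this descriptor?". */
    static int desc_done(unsigned int entry)
    {
        (void)entry;
        return 1;
    }

    /* Skeleton of the reclaim walk: advance dirty_tx toward cur_tx,
     * never scanning more than one full ring per invocation.
     */
    static unsigned int tx_clean(unsigned int *dirty_tx, unsigned int cur_tx)
    {
        unsigned int entry = *dirty_tx, count = 0;

        while ((entry != cur_tx) && count < RING_SIZE) {
            if (!desc_done(entry))
                break;
            /* ...unmap the buffer, free the skb, etc... */
            count++;
            entry = GET_ENTRY(entry, RING_SIZE);
        }
        *dirty_tx = entry;
        return count;
    }

    int main(void)
    {
        unsigned int dirty_tx = 508;

        /* Entries 508..511 and 0..3 are reclaimed: 8 in total. */
        printf("reclaimed %u\n", tx_clean(&dirty_tx, 4));
        return 0;
    }
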
3998 p = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_vlan_insert()
4000 p = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_vlan_insert()
4006 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_vlan_insert()
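
stmmac_vlan_insert() spends one ring slot on a VLAN context descriptor, and lines 3998-4000 show a selection that recurs through the rest of the listing: the descriptor at cur_tx comes from the dma_entx array in the enhanced layout and from dma_tx otherwise. A compact model of that selection; the struct layouts below are simplified stand-ins, not the driver's types:

    #include <stdio.h>

    /* Simplified stand-ins: the driver keeps parallel descriptor
     * arrays and picks one according to the descriptor mode in use.
     */
    struct dma_desc  { unsigned int des[4]; };
    struct dma_edesc { struct dma_desc basic; unsigned int ext[4]; };

    struct tx_queue {
        struct dma_desc  *dma_tx;    /* normal layout */
        struct dma_edesc *dma_entx;  /* enhanced layout */
        unsigned int cur_tx;
    };

    /* Same slot index, different backing array. */
    static struct dma_desc *desc_at_cur(struct tx_queue *q, int enhanced)
    {
        if (enhanced)
            return &q->dma_entx[q->cur_tx].basic;
        return &q->dma_tx[q->cur_tx];
    }

    int main(void)
    {
        static struct dma_edesc ring[8];
        struct tx_queue q = { .dma_entx = ring, .cur_tx = 3 };

        printf("%p\n", (void *)desc_at_cur(&q, 1));
        return 0;
    }
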
4034 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_allocator()
4036 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_allocator()
4039 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_allocator()
4041 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_allocator()
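
Lines 4034-4041 sit in stmmac_tso_allocator()'s chunking loop: the remaining TSO payload is carved into per-descriptor-sized pieces, each taking one fresh ring entry, and the WARN_ON asserts that the slot's skb pointer was already reclaimed before the slot is reused. A skeletal version of the loop; CHUNK stands in for the driver's per-descriptor buffer limit, and the descriptor programming is elided:

    #include <stdio.h>

    #define RING_SIZE           512U
    #define CHUNK               16384U  /* stand-in for the real limit */
    #define GET_ENTRY(x, size)  (((x) + 1) & ((size) - 1))

    /* One ring entry per payload chunk, advancing the producer index
     * before each use, as the listing's loop does.
     */
    static unsigned int tso_allocate(unsigned int cur_tx, unsigned int total)
    {
        unsigned int left = total;

        while (left) {
            unsigned int len = left > CHUNK ? CHUNK : left;

            cur_tx = GET_ENTRY(cur_tx, RING_SIZE);
            /* ...write the buffer address and len into the descriptor
             * at cur_tx; the driver also warns here if the slot still
             * holds a stale skb pointer...
             */
            left -= len;
        }
        return cur_tx;
    }

    int main(void)
    {
        /* 40000 bytes -> three chunks -> the producer moves 3 slots. */
        printf("%u\n", tso_allocate(10, 40000));
        return 0;
    }
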
4079 tx_q->tx_tail_addr = tx_q->dma_tx_phy + (tx_q->cur_tx * desc_size); in stmmac_flush_tx_descriptors()
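
Line 4079 turns the producer index into the bus address written to the DMA tail-pointer register: the ring's base address plus cur_tx descriptor-sized strides. A worked example of the arithmetic; the base address and the 16-byte stride below are hypothetical, since the driver picks desc_size from whichever descriptor layout is active:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t dma_tx_phy = 0x80000000ULL;  /* hypothetical ring base */
        unsigned int cur_tx = 42;
        unsigned int desc_size = 16;          /* hypothetical stride */

        /* tail = base + index * stride, as at line 4079 */
        printf("tail = %#llx\n",
               (unsigned long long)(dma_tx_phy + (uint64_t)cur_tx * desc_size));
        return 0;
    }
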
4128 first_tx = tx_q->cur_tx; in stmmac_tso_xmit()
4160 mss_desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4162 mss_desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4166 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_xmit()
4168 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_xmit()
4181 first_entry = tx_q->cur_tx; in stmmac_tso_xmit()
4235 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf = des; in stmmac_tso_xmit()
4236 tx_q->tx_skbuff_dma[tx_q->cur_tx].len = skb_frag_size(frag); in stmmac_tso_xmit()
4237 tx_q->tx_skbuff_dma[tx_q->cur_tx].map_as_page = true; in stmmac_tso_xmit()
4238 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4241 tx_q->tx_skbuff_dma[tx_q->cur_tx].last_segment = true; in stmmac_tso_xmit()
4244 tx_q->tx_skbuff[tx_q->cur_tx] = skb; in stmmac_tso_xmit()
4245 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4248 tx_packets = (tx_q->cur_tx + 1) - first_tx; in stmmac_tso_xmit()
4265 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4267 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4278 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_tso_xmit()
4326 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_tso_xmit()
4327 tx_q->cur_tx, first, nfrags); in stmmac_tso_xmit()
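
The stmmac_tso_xmit() hits follow one packet through the ring: first_tx is snapshotted on entry (4128), an optional MSS context descriptor is consumed (4160-4168), each fragment's mapping is recorded at cur_tx (4235-4241), the skb itself is parked on the last slot for the cleaner (4244-4245), and line 4248 derives the slot count from the distance travelled. An illustrative subset of the per-slot bookkeeping those lines fill in; the field names follow the listing, the types are simplified:

    #include <stdio.h>
    #include <string.h>

    enum { TXBUF_T_SKB = 1 };  /* stand-in for STMMAC_TXBUF_T_SKB */

    /* Everything the cleaner later needs to unmap and free a slot. */
    struct tx_slot_dma {
        unsigned long long buf;  /* DMA address of the mapped fragment */
        unsigned int len;        /* bytes mapped at buf */
        int map_as_page;         /* page vs. single unmap semantics */
        int last_segment;        /* completion point of the packet */
        int buf_type;            /* skb-backed, XDP-backed, ... */
    };

    int main(void)
    {
        struct tx_slot_dma slot;

        /* What lines 4235-4241 record for a packet's final fragment. */
        memset(&slot, 0, sizeof(slot));
        slot.buf = 0xdeadb000ULL;  /* hypothetical DMA address */
        slot.len = 1448;
        slot.map_as_page = 1;
        slot.buf_type = TXBUF_T_SKB;
        slot.last_segment = 1;

        printf("len=%u last=%d\n", slot.len, slot.last_segment);
        return 0;
    }
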
4395 first_tx = tx_q->cur_tx; in stmmac_xmit()
4423 entry = tx_q->cur_tx; in stmmac_xmit()
4542 tx_q->cur_tx = entry; in stmmac_xmit()
4547 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_xmit()
4773 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_xdpf()
4838 tx_q->cur_tx = entry; in stmmac_xdp_xmit_xdpf()
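
stmmac_xmit() and stmmac_xdp_xmit_xdpf() share the producer convention visible above: snapshot cur_tx into a local entry (4423, 4773), program descriptors through the local index, and publish the final position back to tx_q->cur_tx only once everything is in place (4542, 4838); stmmac_reset_tx_queue() below simply rewinds the index to slot zero. A minimal sketch of the fill-then-publish pattern:

    #include <stdio.h>

    #define RING_SIZE           512U
    #define GET_ENTRY(x, size)  (((x) + 1) & ((size) - 1))

    struct tx_queue {
        unsigned int cur_tx;
        /* descriptors, bookkeeping arrays, ... */
    };

    /* Fill nfrags + 1 descriptors through a local index, then publish
     * the new producer position once, mirroring the listing's shape.
     */
    static void xmit(struct tx_queue *q, unsigned int nfrags)
    {
        unsigned int entry = q->cur_tx;  /* snapshot */
        unsigned int i;

        for (i = 0; i <= nfrags; i++) {
            /* ...program the descriptor at entry... */
            entry = GET_ENTRY(entry, RING_SIZE);
        }
        q->cur_tx = entry;               /* publish */
    }

    int main(void)
    {
        struct tx_queue q = { .cur_tx = 509 };

        /* Three descriptors (509, 510, 511): cur_tx wraps to 0. */
        xmit(&q, 2);
        printf("%u\n", q.cur_tx);
        return 0;
    }
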
7769 tx_q->cur_tx = 0; in stmmac_reset_tx_queue()