Lines Matching refs:tx — cross-reference hits for the struct vnic_txreq *tx pointer in the hfi1 VNIC SDMA transmit path.

41 struct vnic_txreq *tx = container_of(txreq, struct vnic_txreq, txreq); in vnic_sdma_complete() local
42 struct hfi1_vnic_sdma *vnic_sdma = tx->sdma; in vnic_sdma_complete()
45 dev_kfree_skb_any(tx->skb); in vnic_sdma_complete()
46 kmem_cache_free(vnic_sdma->dd->vnic.txreq_cache, tx); in vnic_sdma_complete()
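
Taken together, the hits in vnic_sdma_complete() form the SDMA completion callback: container_of() maps the generic sdma_txreq back to its enclosing vnic_txreq, the skb is released, and the request goes back to the driver's slab cache. A minimal sketch of that callback, assuming the hfi1 driver's vnic_txreq layout (txreq, sdma, skb members) and that an sdma_txclean() unmap runs before the frees:

static void vnic_sdma_complete(struct sdma_txreq *txreq, int status)
{
	struct vnic_txreq *tx = container_of(txreq, struct vnic_txreq, txreq);
	struct hfi1_vnic_sdma *vnic_sdma = tx->sdma;

	/* unmap the descriptor list, then release the packet and the txreq */
	sdma_txclean(vnic_sdma->dd, txreq);
	dev_kfree_skb_any(tx->skb);
	kmem_cache_free(vnic_sdma->dd->vnic.txreq_cache, tx);
}

Freeing through dev_kfree_skb_any() matters here because the completion may run from interrupt context.
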
50 struct vnic_txreq *tx) in build_vnic_ulp_payload() argument
56 &tx->txreq, in build_vnic_ulp_payload()
57 tx->skb->data, in build_vnic_ulp_payload()
58 skb_headlen(tx->skb)); in build_vnic_ulp_payload()
62 for (i = 0; i < skb_shinfo(tx->skb)->nr_frags; i++) { in build_vnic_ulp_payload()
63 skb_frag_t *frag = &skb_shinfo(tx->skb)->frags[i]; in build_vnic_ulp_payload()
67 &tx->txreq, in build_vnic_ulp_payload()
76 if (tx->plen) in build_vnic_ulp_payload()
77 ret = sdma_txadd_kvaddr(sde->dd, &tx->txreq, in build_vnic_ulp_payload()
78 tx->pad + HFI1_VNIC_MAX_PAD - tx->plen, in build_vnic_ulp_payload()
79 tx->plen); in build_vnic_ulp_payload()
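
These hits cover build_vnic_ulp_payload(): the skb's linear data, each page fragment, and any trailing pad bytes are added as SDMA descriptors against tx->txreq. A hedged reconstruction, assuming the standard skb_frag accessors and a bail_txadd error label; the page/offset/size arguments to sdma_txadd_page() are inferred rather than taken from the listing:

static noinline int build_vnic_ulp_payload(struct sdma_engine *sde,
					   struct vnic_txreq *tx)
{
	int i, ret;

	/* linear part of the skb first */
	ret = sdma_txadd_kvaddr(sde->dd, &tx->txreq,
				tx->skb->data, skb_headlen(tx->skb));
	if (unlikely(ret))
		goto bail_txadd;

	/* one descriptor per page fragment */
	for (i = 0; i < skb_shinfo(tx->skb)->nr_frags; i++) {
		skb_frag_t *frag = &skb_shinfo(tx->skb)->frags[i];

		ret = sdma_txadd_page(sde->dd, &tx->txreq,
				      skb_frag_page(frag),
				      skb_frag_off(frag),
				      skb_frag_size(frag));
		if (unlikely(ret))
			goto bail_txadd;
	}

	/* trailing pad, if the packet needs one */
	if (tx->plen)
		ret = sdma_txadd_kvaddr(sde->dd, &tx->txreq,
					tx->pad + HFI1_VNIC_MAX_PAD - tx->plen,
					tx->plen);

bail_txadd:
	return ret;
}

Pointing at tx->pad + HFI1_VNIC_MAX_PAD - tx->plen sends the last tx->plen bytes of the fixed-size pad buffer, so the final byte on the wire is always pad[HFI1_VNIC_MAX_PAD - 1].
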
86 struct vnic_txreq *tx, in build_vnic_tx_desc() argument
93 &tx->txreq, in build_vnic_tx_desc()
95 hdrbytes + tx->skb->len + tx->plen, in build_vnic_tx_desc()
105 tx->pbc_val = cpu_to_le64(pbc); in build_vnic_tx_desc()
108 &tx->txreq, in build_vnic_tx_desc()
109 &tx->pbc_val, in build_vnic_tx_desc()
115 ret = build_vnic_ulp_payload(sde, tx); in build_vnic_tx_desc()
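
build_vnic_tx_desc() stitches the whole packet descriptor together: the txreq is initialized for the total length (PBC header + skb->len + pad), the PBC word is staged little-endian in tx->pbc_val so the buffer stays valid for DMA until completion, added as the first kvaddr descriptor, and the ULP payload is appended last. A sketch along those lines; the exact sdma_txinit_ahg() argument list (no AHG fields, vnic_sdma_complete as callback) and the 8-byte hdrbytes value are assumptions:

static int build_vnic_tx_desc(struct sdma_engine *sde,
			      struct vnic_txreq *tx, u64 pbc)
{
	u16 hdrbytes = sizeof(tx->pbc_val);	/* assumed: 8-byte PBC */
	int ret;

	/* total packet length covers PBC + skb data + pad */
	ret = sdma_txinit_ahg(&tx->txreq, 0,
			      hdrbytes + tx->skb->len + tx->plen,
			      0, 0, NULL, 0, vnic_sdma_complete);
	if (unlikely(ret))
		goto bail_txadd;

	/* PBC goes out first; staged inside the txreq so it outlives this call */
	tx->pbc_val = cpu_to_le64(pbc);
	ret = sdma_txadd_kvaddr(sde->dd, &tx->txreq, &tx->pbc_val, hdrbytes);
	if (unlikely(ret))
		goto bail_txadd;

	/* then the skb data, fragments and pad */
	ret = build_vnic_ulp_payload(sde, tx);

bail_txadd:
	return ret;
}
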
132 struct vnic_txreq *tx; in hfi1_vnic_send_dma() local
141 tx = kmem_cache_alloc(dd->vnic.txreq_cache, GFP_ATOMIC); in hfi1_vnic_send_dma()
142 if (unlikely(!tx)) { in hfi1_vnic_send_dma()
147 tx->sdma = vnic_sdma; in hfi1_vnic_send_dma()
148 tx->skb = skb; in hfi1_vnic_send_dma()
149 hfi1_vnic_update_pad(tx->pad, plen); in hfi1_vnic_send_dma()
150 tx->plen = plen; in hfi1_vnic_send_dma()
151 ret = build_vnic_tx_desc(sde, tx, pbc); in hfi1_vnic_send_dma()
156 &tx->txreq, vnic_sdma->pkts_sent); in hfi1_vnic_send_dma()
168 sdma_txclean(dd, &tx->txreq); in hfi1_vnic_send_dma()
169 kmem_cache_free(dd->vnic.txreq_cache, tx); in hfi1_vnic_send_dma()
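
hfi1_vnic_send_dma() is the transmit entry point visible in the remaining hits: a txreq is taken from the per-device slab cache with GFP_ATOMIC, filled with the skb, pad length and owning queue, built into a descriptor, and handed to the SDMA engine; the free_desc path unwinds with sdma_txclean() plus kmem_cache_free() if anything fails after allocation. A hedged sketch of that flow; the parameter list, the sdma_running() check, and the exact form of the wait argument to sdma_send_txreq() are inferred and vary across kernel versions:

int hfi1_vnic_send_dma(struct hfi1_devdata *dd, u8 q_idx,
		       struct hfi1_vnic_vport_info *vinfo,
		       struct sk_buff *skb, u64 pbc, u8 plen)
{
	struct hfi1_vnic_sdma *vnic_sdma = &vinfo->sdma[q_idx];
	struct sdma_engine *sde = vnic_sdma->sde;
	struct vnic_txreq *tx;
	int ret = -ECOMM;

	if (unlikely(!sde || !sdma_running(sde)))
		goto tx_err;

	/* txreqs come from a per-device slab cache, allocated atomically */
	tx = kmem_cache_alloc(dd->vnic.txreq_cache, GFP_ATOMIC);
	if (unlikely(!tx)) {
		ret = -ENOMEM;
		goto tx_err;
	}

	tx->sdma = vnic_sdma;
	tx->skb = skb;
	hfi1_vnic_update_pad(tx->pad, plen);	/* stage the pad/ICRC tail bytes */
	tx->plen = plen;

	ret = build_vnic_tx_desc(sde, tx, pbc);
	if (unlikely(ret))
		goto free_desc;

	/* queue the request on the SDMA engine (wait argument form is assumed) */
	ret = sdma_send_txreq(sde, &vnic_sdma->wait,
			      &tx->txreq, vnic_sdma->pkts_sent);
	if (likely(!ret))
		return 0;

free_desc:
	sdma_txclean(dd, &tx->txreq);
	kmem_cache_free(dd->vnic.txreq_cache, tx);
tx_err:
	if (ret != -EBUSY)
		dev_kfree_skb_any(skb);	/* assumed: -EBUSY means the skb was queued for retry */
	return ret;
}

Note that on success the skb is not freed here; ownership passes to the txreq and it is released in vnic_sdma_complete() once the engine finishes with it.
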