Lines Matching refs:ring

449 		struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue];  in _rtl_pci_tx_chk_waitq()  local
459 (ring->entries - skb_queue_len(&ring->queue) > in _rtl_pci_tx_chk_waitq()
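
The two matches above (449, 459) show _rtl_pci_tx_chk_waitq() gating on the number of free descriptors, computed as ring->entries - skb_queue_len(&ring->queue). A minimal standalone sketch of that arithmetic follows; the struct, the "needed" threshold, and main() are illustrative only, since the right-hand side of the comparison at line 459 is cut off in this listing.

/*
 * Toy model of the free-slot check at line 459.  The fields mirror the ones
 * visible above (entries, queue length); the threshold is a placeholder for
 * whatever the driver compares against on the truncated right-hand side.
 */
#include <stdbool.h>
#include <stdio.h>

struct toy_tx_ring {
	unsigned int entries;	/* total descriptors in the ring */
	unsigned int queued;	/* stand-in for skb_queue_len(&ring->queue) */
};

/* True when more than `needed` descriptors are still free. */
static bool tx_ring_has_room(const struct toy_tx_ring *ring,
			     unsigned int needed)
{
	return ring->entries - ring->queued > needed;
}

int main(void)
{
	struct toy_tx_ring ring = { .entries = 128, .queued = 125 };

	printf("room for 4 more frames? %s\n",
	       tx_ring_has_room(&ring, 4) ? "yes" : "no");
	return 0;
}
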
486 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio]; in _rtl_pci_tx_isr() local
488 while (skb_queue_len(&ring->queue)) { in _rtl_pci_tx_isr()
496 entry = (u8 *)(&ring->buffer_desc[ring->idx]); in _rtl_pci_tx_isr()
498 entry = (u8 *)(&ring->desc[ring->idx]); in _rtl_pci_tx_isr()
500 if (!rtlpriv->cfg->ops->is_tx_desc_closed(hw, prio, ring->idx)) in _rtl_pci_tx_isr()
502 ring->idx = (ring->idx + 1) % ring->entries; in _rtl_pci_tx_isr()
504 skb = __skb_dequeue(&ring->queue); in _rtl_pci_tx_isr()
516 ring->idx, in _rtl_pci_tx_isr()
517 skb_queue_len(&ring->queue), in _rtl_pci_tx_isr()
564 if ((ring->entries - skb_queue_len(&ring->queue)) <= 4) { in _rtl_pci_tx_isr()
567 prio, ring->idx, in _rtl_pci_tx_isr()
568 skb_queue_len(&ring->queue)); in _rtl_pci_tx_isr()
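
The _rtl_pci_tx_isr() matches (486-568) outline the reclaim path: pick the descriptor at ring->idx (buffer_desc[] on newer chips, desc[] on older ones, lines 496/498), stop at the first one the hardware has not closed (line 500), otherwise advance the index modulo ring->entries (line 502) and dequeue the matching skb (line 504). Below is a standalone sketch of that loop, with is_tx_desc_closed() modelled as a flag array and the skb queue as a counter; the reading of the "<= 4" wake check at line 564 is an assumption based on the matching "< 2" stop check at line 1635.

/*
 * Toy model of the reclaim loop in _rtl_pci_tx_isr().  closed[] stands in
 * for rtlpriv->cfg->ops->is_tx_desc_closed(); queued stands in for
 * skb_queue_len(&ring->queue); the DMA unmap and skb free are omitted.
 */
#include <stdbool.h>

struct toy_tx_ring {
	unsigned int idx;	/* oldest descriptor still owned by hardware */
	unsigned int entries;
	unsigned int queued;
};

static void tx_isr_reclaim(struct toy_tx_ring *ring, const bool *closed,
			   bool *wake_queue)
{
	while (ring->queued) {
		if (!closed[ring->idx])	/* hardware has not finished this one */
			break;

		ring->idx = (ring->idx + 1) % ring->entries;	/* line 502 */
		ring->queued--;					/* line 504 */
	}

	/*
	 * Line 564: with four or fewer slots free the ring was very likely
	 * full a moment ago, so the mac80211 queue (stopped by the "< 2"
	 * check at line 1635) is woken again.
	 */
	if (ring->entries - ring->queued <= 4)
		*wake_queue = true;
}
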
1032 struct rtl8192_tx_ring *ring = NULL; in _rtl_pci_prepare_bcn_tasklet() local
1044 ring = &rtlpci->tx_ring[BEACON_QUEUE]; in _rtl_pci_prepare_bcn_tasklet()
1045 pskb = __skb_dequeue(&ring->queue); in _rtl_pci_prepare_bcn_tasklet()
1047 entry = (u8 *)(&ring->buffer_desc[ring->idx]); in _rtl_pci_prepare_bcn_tasklet()
1049 entry = (u8 *)(&ring->desc[ring->idx]); in _rtl_pci_prepare_bcn_tasklet()
1064 pdesc = &ring->desc[0]; in _rtl_pci_prepare_bcn_tasklet()
1066 pbuffer_desc = &ring->buffer_desc[0]; in _rtl_pci_prepare_bcn_tasklet()
1072 __skb_queue_tail(&ring->queue, pskb); in _rtl_pci_prepare_bcn_tasklet()
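
The _rtl_pci_prepare_bcn_tasklet() matches (1032-1072) show the beacon ring holding a single frame: the stale beacon skb is dequeued (line 1045), descriptor 0 is refilled (desc[0] or buffer_desc[0], lines 1064/1066), and the fresh skb is queued (line 1072). A simplified sketch under that reading; free_frame() and fill_desc0() are hypothetical stand-ins for the skb free and the chip-specific descriptor fill, which are not part of this listing.

#include <stddef.h>

/* Toy model: the beacon ring keeps at most one outstanding frame. */
struct toy_beacon_ring {
	void *queued_frame;	/* models the single entry on ring->queue */
};

static void refresh_beacon(struct toy_beacon_ring *ring, void *new_frame,
			   void (*free_frame)(void *),
			   void (*fill_desc0)(void *frame))
{
	if (ring->queued_frame) {		/* drop the stale beacon */
		free_frame(ring->queued_frame);
		ring->queued_frame = NULL;
	}

	fill_desc0(new_frame);			/* beacon always uses slot 0 */
	ring->queued_frame = new_frame;		/* models __skb_queue_tail() */
}
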
1290 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio]; in _rtl_pci_free_tx_ring() local
1293 while (skb_queue_len(&ring->queue)) { in _rtl_pci_free_tx_ring()
1295 struct sk_buff *skb = __skb_dequeue(&ring->queue); in _rtl_pci_free_tx_ring()
1298 entry = (u8 *)(&ring->buffer_desc[ring->idx]); in _rtl_pci_free_tx_ring()
1300 entry = (u8 *)(&ring->desc[ring->idx]); in _rtl_pci_free_tx_ring()
1307 ring->idx = (ring->idx + 1) % ring->entries; in _rtl_pci_free_tx_ring()
1312 sizeof(*ring->desc) * ring->entries, ring->desc, in _rtl_pci_free_tx_ring()
1313 ring->dma); in _rtl_pci_free_tx_ring()
1314 ring->desc = NULL; in _rtl_pci_free_tx_ring()
1317 sizeof(*ring->buffer_desc) * ring->entries, in _rtl_pci_free_tx_ring()
1318 ring->buffer_desc, ring->buffer_desc_dma); in _rtl_pci_free_tx_ring()
1319 ring->buffer_desc = NULL; in _rtl_pci_free_tx_ring()
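
The _rtl_pci_free_tx_ring() matches (1290-1319) combine the same drain loop with the release of the coherent descriptor memory: legacy chips free desc[] with ring->dma, newer ones free buffer_desc[] with ring->buffer_desc_dma. A kernel-flavoured sketch follows, assuming a simplified ring struct with explicit per-descriptor size fields (the real code uses sizeof(*ring->desc) and sizeof(*ring->buffer_desc)) and omitting the per-skb DMA unmap that precedes the skb free in the driver.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

/* Simplified stand-in for struct rtl8192_tx_ring; fields match the listing. */
struct toy_tx_ring {
	struct sk_buff_head queue;
	void *desc;			/* legacy descriptor array */
	void *buffer_desc;		/* new-generation buffer descriptors */
	dma_addr_t dma;
	dma_addr_t buffer_desc_dma;
	unsigned int idx, entries;
	size_t desc_size, buffer_desc_size;	/* per-descriptor sizes */
};

static void toy_free_tx_ring(struct device *dev, struct toy_tx_ring *ring,
			     bool new_trx_flow)
{
	/* Drain queued frames, stepping the ring index as at line 1307. */
	while (skb_queue_len(&ring->queue)) {
		struct sk_buff *skb = __skb_dequeue(&ring->queue);

		/* The driver also dma_unmap_single()s the skb buffer here. */
		kfree_skb(skb);
		ring->idx = (ring->idx + 1) % ring->entries;
	}

	/* Release the coherent descriptor memory (lines 1312-1319). */
	if (!new_trx_flow) {
		dma_free_coherent(dev, ring->desc_size * ring->entries,
				  ring->desc, ring->dma);
		ring->desc = NULL;
	} else {
		dma_free_coherent(dev, ring->buffer_desc_size * ring->entries,
				  ring->buffer_desc, ring->buffer_desc_dma);
		ring->buffer_desc = NULL;
	}
}
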
1469 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[i]; in rtl_pci_reset_trx_ring() local
1471 while (skb_queue_len(&ring->queue)) { in rtl_pci_reset_trx_ring()
1474 __skb_dequeue(&ring->queue); in rtl_pci_reset_trx_ring()
1476 entry = (u8 *)(&ring->buffer_desc in rtl_pci_reset_trx_ring()
1477 [ring->idx]); in rtl_pci_reset_trx_ring()
1479 entry = (u8 *)(&ring->desc[ring->idx]); in rtl_pci_reset_trx_ring()
1486 ring->idx = (ring->idx + 1) % ring->entries; in rtl_pci_reset_trx_ring()
1494 ring->idx = 0; in rtl_pci_reset_trx_ring()
1495 ring->entries = rtlpci->txringcount[i]; in rtl_pci_reset_trx_ring()
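
rtl_pci_reset_trx_ring() (1469-1495) repeats the drain walk and then restarts the ring: idx goes back to 0 and entries is reloaded from rtlpci->txringcount[i]. A compact sketch of just that reset, with the skb free hidden behind a caller-supplied drop_one() callback because the unmap/free details are not in this listing.

/* Toy model of the per-ring reset; drop_one() stands in for the skb free. */
struct toy_tx_ring {
	unsigned int idx, entries, queued;
};

static void reset_tx_ring(struct toy_tx_ring *ring, unsigned int ringcount,
			  void (*drop_one)(void))
{
	while (ring->queued) {			/* drain as at line 1486 */
		drop_one();
		ring->queued--;
		ring->idx = (ring->idx + 1) % ring->entries;
	}

	ring->idx = 0;				/* line 1494 */
	ring->entries = ringcount;		/* line 1495 */
}
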
1549 struct rtl8192_tx_ring *ring; in rtl_pci_tx() local
1581 ring = &rtlpci->tx_ring[hw_queue]; in rtl_pci_tx()
1584 idx = ring->cur_tx_wp; in rtl_pci_tx()
1586 idx = (ring->idx + skb_queue_len(&ring->queue)) % in rtl_pci_tx()
1587 ring->entries; in rtl_pci_tx()
1592 pdesc = &ring->desc[idx]; in rtl_pci_tx()
1594 ptx_bd_desc = &ring->buffer_desc[idx]; in rtl_pci_tx()
1602 hw_queue, ring->idx, idx, in rtl_pci_tx()
1603 skb_queue_len(&ring->queue)); in rtl_pci_tx()
1625 __skb_queue_tail(&ring->queue, skb); in rtl_pci_tx()
1635 if ((ring->entries - skb_queue_len(&ring->queue)) < 2 && in rtl_pci_tx()
1639 hw_queue, ring->idx, idx, in rtl_pci_tx()
1640 skb_queue_len(&ring->queue)); in rtl_pci_tx()
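
The rtl_pci_tx() matches (1549-1640) show how the slot to fill is chosen: newer TRX-flow chips use the explicit write pointer ring->cur_tx_wp (line 1584), older ones derive it from the read index plus the number of queued frames (lines 1586-1587); after queueing, the mac80211 queue is stopped when fewer than two descriptors remain (line 1635). A standalone sketch of those two decisions; the descriptor fill and the BEACON_QUEUE exemption around line 1635 are left out.

#include <stdbool.h>

struct toy_tx_ring {
	unsigned int idx;	/* oldest descriptor still owned by hardware */
	unsigned int cur_tx_wp;	/* explicit write pointer, new TRX flow only */
	unsigned int entries;
	unsigned int queued;	/* stand-in for skb_queue_len(&ring->queue) */
};

/* Which descriptor slot the next frame should be written into. */
static unsigned int pick_tx_slot(const struct toy_tx_ring *ring,
				 bool new_trx_flow)
{
	if (new_trx_flow)
		return ring->cur_tx_wp;				/* line 1584 */
	return (ring->idx + ring->queued) % ring->entries;	/* lines 1586-1587 */
}

/* After queueing a frame: stop the queue if almost no room is left. */
static bool must_stop_queue(const struct toy_tx_ring *ring)
{
	return ring->entries - ring->queued < 2;		/* line 1635 */
}
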
1660 struct rtl8192_tx_ring *ring; in rtl_pci_flush() local
1672 ring = &pcipriv->dev.tx_ring[queue_id]; in rtl_pci_flush()
1673 queue_len = skb_queue_len(&ring->queue); in rtl_pci_flush()
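
Finally, rtl_pci_flush() (1660-1673) only reads skb_queue_len() for each ring in this listing, which suggests the flush waits for the queues to drain. A minimal sketch of such a wait under that assumption; the delay primitive, the poll budget, and any early-exit conditions of the real function are not visible here and are placeholders.

/* Poll a queue-length getter until the ring drains or the budget runs out. */
static int wait_for_tx_drain(unsigned int (*queue_len)(void),
			     unsigned int max_polls, void (*delay_once)(void))
{
	while (queue_len() && max_polls--)
		delay_once();

	return queue_len() ? -1 : 0;	/* -1: frames still pending */
}
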