Lines matching refs:gq — cross-reference hits for the queue pointer gq (struct rswitch_gwca_queue *) in the Renesas Ethernet Switch driver, drivers/net/ethernet/renesas/rswitch.c. Each hit carries its source line number and the enclosing function; source lines that do not mention gq are omitted from the listing.

202 static int rswitch_next_queue_index(struct rswitch_gwca_queue *gq, bool cur, int num)  in rswitch_next_queue_index()  argument
204 int index = cur ? gq->cur : gq->dirty; in rswitch_next_queue_index()
206 if (index + num >= gq->ring_size) in rswitch_next_queue_index()
207 index = (index + num) % gq->ring_size; in rswitch_next_queue_index()
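
These first hits are the ring-index helper. Only the lines containing gq appear above; below is a minimal standalone model of the whole helper, with a stripped-down ring_state struct standing in for struct rswitch_gwca_queue and the non-wrapping branch (which contains no gq reference, hence is elided from the listing) filled in from context:

    #include <assert.h>
    #include <stdbool.h>

    /* Stripped-down stand-in for struct rswitch_gwca_queue. */
    struct ring_state {
        int cur;        /* next slot software will use */
        int dirty;      /* oldest slot hardware still owns */
        int ring_size;
    };

    /* Advance cur (cur == true) or dirty by num slots, wrapping at ring_size. */
    static int next_queue_index(const struct ring_state *q, bool cur, int num)
    {
        int index = cur ? q->cur : q->dirty;

        if (index + num >= q->ring_size)
            index = (index + num) % q->ring_size;
        else
            index += num;

        return index;
    }

    int main(void)
    {
        struct ring_state q = { .cur = 6, .dirty = 2, .ring_size = 8 };

        assert(next_queue_index(&q, true, 1) == 7);    /* no wrap */
        assert(next_queue_index(&q, true, 3) == 1);    /* 6 + 3 wraps to 1 */
        return 0;
    }
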
214 static int rswitch_get_num_cur_queues(struct rswitch_gwca_queue *gq) in rswitch_get_num_cur_queues() argument
216 if (gq->cur >= gq->dirty) in rswitch_get_num_cur_queues()
217 return gq->cur - gq->dirty; in rswitch_get_num_cur_queues()
219 return gq->ring_size - gq->dirty + gq->cur; in rswitch_get_num_cur_queues()
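
The companion helper reports how many slots sit between the consumer (dirty) and the producer (cur). Reusing ring_state from the sketch above:

    /* Slots currently in flight between dirty and cur, wrap-aware; mirrors
     * rswitch_get_num_cur_queues(). With cur == dirty the ring reads as
     * empty, which is why the transmit path keeps one slot unused (see the
     * final hits in rswitch_start_xmit() below). */
    static int num_in_flight(const struct ring_state *q)
    {
        if (q->cur >= q->dirty)
            return q->cur - q->dirty;

        return q->ring_size - q->dirty + q->cur;
    }
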
222 static bool rswitch_is_queue_rxed(struct rswitch_gwca_queue *gq) in rswitch_is_queue_rxed() argument
224 struct rswitch_ext_ts_desc *desc = &gq->rx_ring[gq->dirty]; in rswitch_is_queue_rxed()
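
Only the descriptor fetch of rswitch_is_queue_rxed() matches gq; the test itself carries no gq reference. In the mainline driver it checks the descriptor type byte: a slot still owned by the DMA engine reads as "frame empty". A hedged reconstruction (the field name die_dt matches the driver; the numeric encodings below are assumptions for illustration, and stdbool.h comes from the first sketch):

    #define DT_MASK   0xf0    /* assumed: descriptor type in the high nibble */
    #define DT_FEMPTY 0x40    /* assumed encoding of "frame empty" */

    /* Minimal descriptor stand-in; the real struct also carries sizes,
     * a wider buffer pointer, and timestamp words. */
    struct desc_model {
        unsigned char die_dt;
        unsigned long long dptr;
    };

    /* The dirty slot has been received once hardware has replaced the
     * FEMPTY type with a frame-data type. */
    static bool queue_rxed(const struct desc_model *d)
    {
        return (d->die_dt & DT_MASK) != DT_FEMPTY;
    }
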
232 static int rswitch_gwca_queue_alloc_skb(struct rswitch_gwca_queue *gq, in rswitch_gwca_queue_alloc_skb() argument
238 index = (i + start_index) % gq->ring_size; in rswitch_gwca_queue_alloc_skb()
239 if (gq->skbs[index]) in rswitch_gwca_queue_alloc_skb()
241 gq->skbs[index] = netdev_alloc_skb_ip_align(gq->ndev, in rswitch_gwca_queue_alloc_skb()
243 if (!gq->skbs[index]) in rswitch_gwca_queue_alloc_skb()
251 index = (i + start_index) % gq->ring_size; in rswitch_gwca_queue_alloc_skb()
252 dev_kfree_skb(gq->skbs[index]); in rswitch_gwca_queue_alloc_skb()
253 gq->skbs[index] = NULL; in rswitch_gwca_queue_alloc_skb()
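
rswitch_gwca_queue_alloc_skb() fills num ring slots starting at start_index with fresh skbs and rolls the whole batch back if any allocation fails. A runnable model with malloc() standing in for netdev_alloc_skb_ip_align():

    #include <stdlib.h>

    /* Populate num slots starting at start_index (mod ring_size), skipping
     * slots that still hold a buffer; on failure, release the batch. */
    static int alloc_slots(void **slots, int ring_size, int start_index, int num)
    {
        int i, index;

        for (i = 0; i < num; i++) {
            index = (i + start_index) % ring_size;
            if (slots[index])
                continue;
            slots[index] = malloc(2048);    /* ~ netdev_alloc_skb_ip_align() */
            if (!slots[index])
                goto err;
        }
        return 0;

    err:
        /* Mirrors the driver: the unwind walks every slot in 0..i-1, so it
         * also drops any buffer that was already present in that range. */
        for (i--; i >= 0; i--) {
            index = (i + start_index) % ring_size;
            free(slots[index]);    /* ~ dev_kfree_skb() */
            slots[index] = NULL;
        }
        return -1;    /* the driver returns -ENOMEM */
    }
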
260 struct rswitch_gwca_queue *gq) in rswitch_gwca_queue_free() argument
264 if (!gq->dir_tx) { in rswitch_gwca_queue_free()
267 (gq->ring_size + 1), gq->rx_ring, gq->ring_dma); in rswitch_gwca_queue_free()
268 gq->rx_ring = NULL; in rswitch_gwca_queue_free()
270 for (i = 0; i < gq->ring_size; i++) in rswitch_gwca_queue_free()
271 dev_kfree_skb(gq->skbs[i]); in rswitch_gwca_queue_free()
275 (gq->ring_size + 1), gq->tx_ring, gq->ring_dma); in rswitch_gwca_queue_free()
276 gq->tx_ring = NULL; in rswitch_gwca_queue_free()
279 kfree(gq->skbs); in rswitch_gwca_queue_free()
280 gq->skbs = NULL; in rswitch_gwca_queue_free()
285 struct rswitch_gwca_queue *gq = &priv->gwca.ts_queue; in rswitch_gwca_ts_queue_free() local
288 sizeof(struct rswitch_ts_desc) * (gq->ring_size + 1), in rswitch_gwca_ts_queue_free()
289 gq->ts_ring, gq->ring_dma); in rswitch_gwca_ts_queue_free()
290 gq->ts_ring = NULL; in rswitch_gwca_ts_queue_free()
295 struct rswitch_gwca_queue *gq, in rswitch_gwca_queue_alloc() argument
300 gq->dir_tx = dir_tx; in rswitch_gwca_queue_alloc()
301 gq->ring_size = ring_size; in rswitch_gwca_queue_alloc()
302 gq->ndev = ndev; in rswitch_gwca_queue_alloc()
304 gq->skbs = kcalloc(gq->ring_size, sizeof(*gq->skbs), GFP_KERNEL); in rswitch_gwca_queue_alloc()
305 if (!gq->skbs) in rswitch_gwca_queue_alloc()
309 rswitch_gwca_queue_alloc_skb(gq, 0, gq->ring_size); in rswitch_gwca_queue_alloc()
311 gq->rx_ring = dma_alloc_coherent(ndev->dev.parent, in rswitch_gwca_queue_alloc()
313 (gq->ring_size + 1), &gq->ring_dma, GFP_KERNEL); in rswitch_gwca_queue_alloc()
315 gq->tx_ring = dma_alloc_coherent(ndev->dev.parent, in rswitch_gwca_queue_alloc()
317 (gq->ring_size + 1), &gq->ring_dma, GFP_KERNEL); in rswitch_gwca_queue_alloc()
320 if (!gq->rx_ring && !gq->tx_ring) in rswitch_gwca_queue_alloc()
323 i = gq->index / 32; in rswitch_gwca_queue_alloc()
324 bit = BIT(gq->index % 32); in rswitch_gwca_queue_alloc()
333 rswitch_gwca_queue_free(ndev, gq); in rswitch_gwca_queue_alloc()
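
Throughout the allocation paths the descriptor count is ring_size + 1: one descriptor per data slot plus a terminal slot that later becomes the link descriptor closing the ring (see the ts_ring sketch further down). The "!gq->rx_ring && !gq->tx_ring" test works because exactly one of the two rings is allocated per queue depending on dir_tx, so both being NULL means the one attempted allocation failed. A model of the sizing, reusing desc_model:

    /* One descriptor per slot plus the terminal link slot; this is the
     * "+ 1" in every dma_alloc_coherent() size in this file. */
    static struct desc_model *ring_alloc(int ring_size)
    {
        return calloc(ring_size + 1, sizeof(struct desc_model));
    }

Keeping the link slot outside ring_size means the software modulo arithmetic in the index helpers never lands on it; only the hardware follows it.
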
351 struct rswitch_gwca_queue *gq) in rswitch_gwca_queue_format() argument
353 int ring_size = sizeof(struct rswitch_ext_desc) * gq->ring_size; in rswitch_gwca_queue_format()
359 memset(gq->tx_ring, 0, ring_size); in rswitch_gwca_queue_format()
360 for (i = 0, desc = gq->tx_ring; i < gq->ring_size; i++, desc++) { in rswitch_gwca_queue_format()
361 if (!gq->dir_tx) { in rswitch_gwca_queue_format()
363 gq->skbs[i]->data, PKT_BUF_SZ, in rswitch_gwca_queue_format()
375 rswitch_desc_set_dptr(&desc->desc, gq->ring_dma); in rswitch_gwca_queue_format()
378 linkfix = &priv->gwca.linkfix_table[gq->index]; in rswitch_gwca_queue_format()
380 rswitch_desc_set_dptr(linkfix, gq->ring_dma); in rswitch_gwca_queue_format()
382 iowrite32(GWDCC_BALR | (gq->dir_tx ? GWDCC_DCP(GWCA_IPV_NUM) | GWDCC_DQT : 0) | GWDCC_EDE, in rswitch_gwca_queue_format()
383 priv->addr + GWDCC_OFFS(gq->index)); in rswitch_gwca_queue_format()
388 if (!gq->dir_tx) { in rswitch_gwca_queue_format()
389 for (i--, desc = gq->tx_ring; i >= 0; i--, desc++) { in rswitch_gwca_queue_format()
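
The unwind loop at the end of rswitch_gwca_queue_format() looks wrong at first glance because i counts down while desc walks up. It is correct: at the point of failure, i equals the number of RX buffers already DMA-mapped, and the loop uses it purely as a countdown while desc visits slots 0..i-1 from the ring head, unmapping exactly the mapped ones. A model:

    /* Unmap the first 'mapped' slots; the countdown variable is only a
     * counter while the descriptor pointer walks forward from the head. */
    static void unwind_mappings(struct desc_model *ring, int mapped)
    {
        struct desc_model *d;
        int i;

        for (i = mapped - 1, d = ring; i >= 0; i--, d++)
            d->dptr = 0;    /* ~ dma_unmap_single() on the slot's buffer */
    }
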
402 struct rswitch_gwca_queue *gq = &priv->gwca.ts_queue; in rswitch_gwca_ts_queue_fill() local
407 index = (i + start_index) % gq->ring_size; in rswitch_gwca_ts_queue_fill()
408 desc = &gq->ts_ring[index]; in rswitch_gwca_ts_queue_fill()
414 struct rswitch_gwca_queue *gq, in rswitch_gwca_queue_ext_ts_fill() argument
423 index = (i + start_index) % gq->ring_size; in rswitch_gwca_queue_ext_ts_fill()
424 desc = &gq->rx_ring[index]; in rswitch_gwca_queue_ext_ts_fill()
425 if (!gq->dir_tx) { in rswitch_gwca_queue_ext_ts_fill()
427 gq->skbs[index]->data, PKT_BUF_SZ, in rswitch_gwca_queue_ext_ts_fill()
445 if (!gq->dir_tx) { in rswitch_gwca_queue_ext_ts_fill()
447 index = (i + start_index) % gq->ring_size; in rswitch_gwca_queue_ext_ts_fill()
448 desc = &gq->rx_ring[index]; in rswitch_gwca_queue_ext_ts_fill()
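
When the RX refill in rswitch_gwca_queue_ext_ts_fill() hands a slot back to hardware, ordering matters: the buffer address must be visible before the type byte flips to FEMPTY, which is why the driver issues dma_wmb() between the two stores (that line contains no gq and is elided above). A C11 model of the same hand-off using a release store, reusing the DT_FEMPTY stand-in defined earlier:

    #include <stdatomic.h>

    /* Descriptor hand-off: publish the payload pointer first, ownership last. */
    struct pub_desc {
        unsigned long long dptr;
        _Atomic unsigned char die_dt;
    };

    static void publish_rx_slot(struct pub_desc *d, unsigned long long dma_addr)
    {
        d->dptr = dma_addr;                       /* address first */
        atomic_store_explicit(&d->die_dt, DT_FEMPTY,
                              memory_order_release);  /* ownership last */
    }
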
460 struct rswitch_gwca_queue *gq) in rswitch_gwca_queue_ext_ts_format() argument
462 int ring_size = sizeof(struct rswitch_ext_ts_desc) * gq->ring_size; in rswitch_gwca_queue_ext_ts_format()
467 memset(gq->rx_ring, 0, ring_size); in rswitch_gwca_queue_ext_ts_format()
468 err = rswitch_gwca_queue_ext_ts_fill(ndev, gq, 0, gq->ring_size); in rswitch_gwca_queue_ext_ts_format()
472 desc = &gq->rx_ring[gq->ring_size]; /* Last */ in rswitch_gwca_queue_ext_ts_format()
473 rswitch_desc_set_dptr(&desc->desc, gq->ring_dma); in rswitch_gwca_queue_ext_ts_format()
476 linkfix = &priv->gwca.linkfix_table[gq->index]; in rswitch_gwca_queue_ext_ts_format()
478 rswitch_desc_set_dptr(linkfix, gq->ring_dma); in rswitch_gwca_queue_ext_ts_format()
480 iowrite32(GWDCC_BALR | (gq->dir_tx ? GWDCC_DCP(GWCA_IPV_NUM) | GWDCC_DQT : 0) | in rswitch_gwca_queue_ext_ts_format()
482 priv->addr + GWDCC_OFFS(gq->index)); in rswitch_gwca_queue_ext_ts_format()
516 struct rswitch_gwca_queue *gq = &priv->gwca.ts_queue; in rswitch_gwca_ts_queue_alloc() local
519 gq->ring_size = TS_RING_SIZE; in rswitch_gwca_ts_queue_alloc()
520 gq->ts_ring = dma_alloc_coherent(&priv->pdev->dev, in rswitch_gwca_ts_queue_alloc()
522 (gq->ring_size + 1), &gq->ring_dma, GFP_KERNEL); in rswitch_gwca_ts_queue_alloc()
524 if (!gq->ts_ring) in rswitch_gwca_ts_queue_alloc()
528 desc = &gq->ts_ring[gq->ring_size]; in rswitch_gwca_ts_queue_alloc()
530 rswitch_desc_set_dptr(&desc->desc, gq->ring_dma); in rswitch_gwca_ts_queue_alloc()
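
Here the extra (+1) slot allocated for the ts_ring becomes the terminator: its dptr is pointed back at the ring's own DMA base, so the hardware follows a closed loop and never needs the software modulo. The die_dt store sits between these two hits, since it does not mention gq. A model, again reusing desc_model (the encoding is an assumption):

    #define DT_LINKFIX 0x00    /* assumed encoding of the link descriptor type */

    /* Close the ring: the terminal slot links back to the ring head. */
    static void close_ring(struct desc_model *ring, int ring_size,
                           unsigned long long ring_dma)
    {
        ring[ring_size].dptr = ring_dma;
        ring[ring_size].die_dt = DT_LINKFIX;
    }
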
538 struct rswitch_gwca_queue *gq; in rswitch_gwca_get() local
545 gq = &priv->gwca.queues[index]; in rswitch_gwca_get()
546 memset(gq, 0, sizeof(*gq)); in rswitch_gwca_get()
547 gq->index = index; in rswitch_gwca_get()
549 return gq; in rswitch_gwca_get()
553 struct rswitch_gwca_queue *gq) in rswitch_gwca_put() argument
555 clear_bit(gq->index, priv->gwca.used); in rswitch_gwca_put()
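
rswitch_gwca_get() and rswitch_gwca_put() form a small bitmap allocator over the GWCA's hardware queues; the find/set half (find_first_zero_bit() and set_bit() on priv->gwca.used) carries no gq reference and is elided above. A userspace model of the pair (the queue count of 128 is illustrative, not taken from the listing):

    #include <limits.h>

    #define NUM_QUEUES 128
    #define BITS_PER_WORD (sizeof(unsigned long) * CHAR_BIT)

    static unsigned long used[NUM_QUEUES / BITS_PER_WORD];

    /* Claim the lowest free queue index, or -1 if all are busy;
     * models find_first_zero_bit() + set_bit(). */
    static int gwca_get(void)
    {
        int i;

        for (i = 0; i < NUM_QUEUES; i++) {
            unsigned long *w = &used[i / BITS_PER_WORD];
            unsigned long bit = 1UL << (i % BITS_PER_WORD);

            if (!(*w & bit)) {
                *w |= bit;
                return i;
            }
        }
        return -1;
    }

    /* Release a queue index; models clear_bit(). */
    static void gwca_put(int index)
    {
        used[index / BITS_PER_WORD] &= ~(1UL << (index % BITS_PER_WORD));
    }
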
698 struct rswitch_gwca_queue *gq = rdev->rx_queue; in rswitch_rx() local
709 boguscnt = min_t(int, gq->ring_size, *quota); in rswitch_rx()
712 desc = &gq->rx_ring[gq->cur]; in rswitch_rx()
716 skb = gq->skbs[gq->cur]; in rswitch_rx()
717 gq->skbs[gq->cur] = NULL; in rswitch_rx()
737 gq->cur = rswitch_next_queue_index(gq, true, 1); in rswitch_rx()
738 desc = &gq->rx_ring[gq->cur]; in rswitch_rx()
744 num = rswitch_get_num_cur_queues(gq); in rswitch_rx()
745 ret = rswitch_gwca_queue_alloc_skb(gq, gq->dirty, num); in rswitch_rx()
748 ret = rswitch_gwca_queue_ext_ts_fill(ndev, gq, gq->dirty, num); in rswitch_rx()
751 gq->dirty = rswitch_next_queue_index(gq, false, num); in rswitch_rx()
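
The RX poll advances cur over descriptors hardware has completed, then replenishes everything between dirty and cur in one batch: allocate fresh skbs for the consumed slots, re-arm their descriptors, and move dirty up to cur. A compressed model of that bookkeeping, reusing the helpers above (descriptor re-arming as in publish_rx_slot() is omitted):

    /* One poll pass over a model ring: consume up to 'budget' slots the
     * hardware has completed, then batch-refill everything dirty..cur. */
    static void rx_poll_model(struct ring_state *q, struct desc_model *ring,
                              void **slots, int budget)
    {
        int num;

        while (budget-- > 0 && queue_rxed(&ring[q->cur])) {
            slots[q->cur] = NULL;    /* skb handed to the network stack */
            q->cur = next_queue_index(q, true, 1);
        }

        num = num_in_flight(q);    /* consumed slots now owed a buffer */
        if (alloc_slots(slots, q->ring_size, q->dirty, num) == 0)
            q->dirty = next_queue_index(q, false, num);
    }
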
766 struct rswitch_gwca_queue *gq = rdev->tx_queue; in rswitch_tx_free() local
773 for (; rswitch_get_num_cur_queues(gq) > 0; in rswitch_tx_free()
774 gq->dirty = rswitch_next_queue_index(gq, false, 1)) { in rswitch_tx_free()
775 desc = &gq->tx_ring[gq->dirty]; in rswitch_tx_free()
781 skb = gq->skbs[gq->dirty]; in rswitch_tx_free()
786 dev_kfree_skb_any(gq->skbs[gq->dirty]); in rswitch_tx_free()
787 gq->skbs[gq->dirty] = NULL; in rswitch_tx_free()
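
TX completion is the mirror image: walk from dirty while slots remain in flight, stop at the first descriptor hardware has not finished (that check, on desc->desc.die_dt, contains no gq), free the skb, and advance dirty one slot at a time. A model; the "sent slots read back as FEMPTY" convention is taken from this driver and hedged accordingly:

    /* Assumed predicate: hardware marks a transmitted slot back to FEMPTY. */
    static bool tx_slot_done(const struct desc_model *d)
    {
        return (d->die_dt & DT_MASK) == DT_FEMPTY;
    }

    /* Reclaim transmitted slots: free each completed buffer, advance dirty. */
    static void tx_free_model(struct ring_state *q, struct desc_model *ring,
                              void **slots)
    {
        for (; num_in_flight(q) > 0;
             q->dirty = next_queue_index(q, false, 1)) {
            if (!tx_slot_done(&ring[q->dirty]))
                break;    /* hardware still owns this one */
            free(slots[q->dirty]);    /* ~ dev_kfree_skb_any() */
            slots[q->dirty] = NULL;
        }
    }
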
852 struct rswitch_gwca_queue *gq; in rswitch_data_irq() local
856 gq = &priv->gwca.queues[i]; in rswitch_data_irq()
857 index = gq->index / 32; in rswitch_data_irq()
858 bit = BIT(gq->index % 32); in rswitch_data_irq()
862 rswitch_ack_data_irq(priv, gq->index); in rswitch_data_irq()
863 rswitch_queue_interrupt(gq->ndev); in rswitch_data_irq()
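
The data IRQ handler maps a queue onto the status registers as one bit per queue, 32 queues per 32-bit word: word = index / 32, bit = BIT(index % 32), the same arithmetic used when enabling the queue in rswitch_gwca_queue_alloc() above. A model of the dispatch; the name of the status snapshot is an assumption, and note that the driver keys on each queue's own hardware index, which need not equal the loop counter:

    /* Dispatch one bit per queue, 32 queues per status word. 'dis' models
     * the interrupt-status snapshot passed to rswitch_data_irq(). */
    static void data_irq_model(const unsigned int *dis,
                               const int *queue_index, int num_queues)
    {
        int i;

        for (i = 0; i < num_queues; i++) {
            int word = queue_index[i] / 32;
            unsigned int bit = 1U << (queue_index[i] % 32);

            if (!(dis[word] & bit))
                continue;
            /* here the driver acks the bit (rswitch_ack_data_irq()) and
             * schedules NAPI via rswitch_queue_interrupt(gq->ndev) */
        }
    }
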
915 struct rswitch_gwca_queue *gq = &priv->gwca.ts_queue; in rswitch_ts() local
923 desc = &gq->ts_ring[gq->cur]; in rswitch_ts()
945 gq->cur = rswitch_next_queue_index(gq, true, 1); in rswitch_ts()
946 desc = &gq->ts_ring[gq->cur]; in rswitch_ts()
949 num = rswitch_get_num_cur_queues(gq); in rswitch_ts()
950 rswitch_gwca_ts_queue_fill(priv, gq->dirty, num); in rswitch_ts()
951 gq->dirty = rswitch_next_queue_index(gq, false, num); in rswitch_ts()
1503 struct rswitch_gwca_queue *gq = rdev->tx_queue; in rswitch_start_xmit() local
1508 if (rswitch_get_num_cur_queues(gq) >= gq->ring_size - 1) { in rswitch_start_xmit()
1520 gq->skbs[gq->cur] = skb; in rswitch_start_xmit()
1521 desc = &gq->tx_ring[gq->cur]; in rswitch_start_xmit()
1551 gq->cur = rswitch_next_queue_index(gq, true, 1); in rswitch_start_xmit()
1552 rswitch_modify(rdev->addr, GWTRC(gq->index), 0, BIT(gq->index % 32)); in rswitch_start_xmit()
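
The transmit path refuses new frames once ring_size - 1 entries are in flight: since cur == dirty means empty, a completely full ring would be indistinguishable from an empty one, so one slot is sacrificed. After filling the descriptor the driver advances cur and kicks the hardware by setting the queue's bit in the GWTRC trigger register. A model of the admission check, reusing the earlier helpers:

    /* True if another frame may be queued; one slot stays unused so that
     * cur == dirty always means "empty", never "full". */
    static bool tx_can_queue(const struct ring_state *q)
    {
        return num_in_flight(q) < q->ring_size - 1;
    }
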