Lines matching refs: dev
(each entry: source line number, the matching line, and the enclosing function; annotations mark whether dev is an argument, local, or member)

49 	struct net_device *dev;  member
53 wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wed_m32() argument
55 regmap_update_bits(dev->hw->regs, reg, mask | val, val); in wed_m32()
59 wed_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_set() argument
61 return wed_m32(dev, reg, 0, mask); in wed_set()
65 wed_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_clr() argument
67 return wed_m32(dev, reg, mask, 0); in wed_clr()
71 wdma_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wdma_m32() argument
73 wdma_w32(dev, reg, (wdma_r32(dev, reg) & ~mask) | val); in wdma_m32()
77 wdma_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_set() argument
79 wdma_m32(dev, reg, 0, mask); in wdma_set()
83 wdma_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_clr() argument
85 wdma_m32(dev, reg, mask, 0); in wdma_clr()
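
The two helper families above are the same read-modify-write idiom over different register spaces: wed_m32() routes through a regmap (dev->hw->regs), while wdma_m32() open-codes the update over plain MMIO via wdma_r32()/wdma_w32(). A minimal sketch of how the set/clr wrappers expand; illustrative only, not verbatim driver code:

	/* wed_set(dev, reg, mask) -> wed_m32(dev, reg, 0, mask)
	 *   -> regmap_update_bits(dev->hw->regs, reg, mask, mask);  set bits
	 * wed_clr(dev, reg, mask) -> wed_m32(dev, reg, mask, 0)
	 *   -> regmap_update_bits(dev->hw->regs, reg, mask, 0);     clear bits
	 * The WDMA variant computes the new register value by hand: */
	static inline u32 rmw32(u32 old, u32 mask, u32 val)
	{
		return (old & ~mask) | val;	/* clear mask bits, then OR in val */
	}
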
89 wifi_r32(struct mtk_wed_device *dev, u32 reg) in wifi_r32() argument
91 return readl(dev->wlan.base + reg); in wifi_r32()
95 wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val) in wifi_w32() argument
97 writel(val, dev->wlan.base + reg); in wifi_w32()
101 mtk_wed_read_reset(struct mtk_wed_device *dev) in mtk_wed_read_reset() argument
103 return wed_r32(dev, MTK_WED_RESET); in mtk_wed_read_reset()
107 mtk_wdma_read_reset(struct mtk_wed_device *dev) in mtk_wdma_read_reset() argument
109 return wdma_r32(dev, MTK_WDMA_GLO_CFG); in mtk_wdma_read_reset()
113 mtk_wdma_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_rx_reset() argument
118 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN); in mtk_wdma_rx_reset()
119 ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_rx_reset()
122 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_rx_reset()
124 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX); in mtk_wdma_rx_reset()
125 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_rx_reset()
127 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) { in mtk_wdma_rx_reset()
128 if (dev->rx_wdma[i].desc) in mtk_wdma_rx_reset()
131 wdma_w32(dev, in mtk_wdma_rx_reset()
139 mtk_wdma_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_tx_reset() argument
144 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wdma_tx_reset()
145 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_tx_reset()
147 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_tx_reset()
149 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX); in mtk_wdma_tx_reset()
150 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_tx_reset()
152 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++) in mtk_wdma_tx_reset()
153 wdma_w32(dev, in mtk_wdma_tx_reset()
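
Both WDMA reset paths wait for the engine to drain with readx_poll_timeout() from <linux/iopoll.h> before bouncing MTK_WDMA_RESET_IDX. A hedged sketch of that wait; the busy-bit name and the timeout budget are assumptions from context:

	/* Sketch, not verbatim: re-read MTK_WDMA_GLO_CFG through
	 * mtk_wdma_read_reset() until the RX engine reports idle. */
	static int wdma_rx_wait_idle(struct mtk_wed_device *dev)
	{
		u32 status;

		return readx_poll_timeout(mtk_wdma_read_reset, dev, status,
					  !(status & MTK_WDMA_GLO_CFG_RX_DMA_BUSY),
					  0, 10000);	/* 10ms budget, assumed */
	}
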
158 mtk_wed_reset(struct mtk_wed_device *dev, u32 mask) in mtk_wed_reset() argument
162 wed_w32(dev, MTK_WED_RESET, mask); in mtk_wed_reset()
163 if (readx_poll_timeout(mtk_wed_read_reset, dev, status, in mtk_wed_reset()
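
mtk_wed_reset() relies on the MTK_WED_RESET bits being self-clearing: software writes the domain mask and the hardware drops it once the reset completes. A sketch of the full body; the timeout value is an assumption:

	static void demo_wed_reset(struct mtk_wed_device *dev, u32 mask)
	{
		u32 status;

		wed_w32(dev, MTK_WED_RESET, mask);
		if (readx_poll_timeout(mtk_wed_read_reset, dev, status,
				       !(status & mask), 0, 1000))
			WARN_ON_ONCE(1);	/* reset bit never self-cleared */
	}
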
169 mtk_wed_wo_read_status(struct mtk_wed_device *dev) in mtk_wed_wo_read_status() argument
171 return wed_r32(dev, MTK_WED_SCR0 + 4 * MTK_WED_DUMMY_CR_WO_STATUS); in mtk_wed_wo_read_status()
175 mtk_wed_wo_reset(struct mtk_wed_device *dev) in mtk_wed_wo_reset() argument
177 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_wo_reset()
182 mtk_wdma_tx_reset(dev); in mtk_wed_wo_reset()
183 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_wo_reset()
190 if (readx_poll_timeout(mtk_wed_wo_read_status, dev, val, in mtk_wed_wo_reset()
193 dev_err(dev->hw->dev, "failed to disable wed-wo\n"); in mtk_wed_wo_reset()
198 switch (dev->hw->index) { in mtk_wed_wo_reset()
225 struct mtk_wed_device *dev; in mtk_wed_fe_reset() local
231 dev = hw->wed_dev; in mtk_wed_fe_reset()
232 if (!dev || !dev->wlan.reset) in mtk_wed_fe_reset()
236 err = dev->wlan.reset(dev); in mtk_wed_fe_reset()
238 dev_err(dev->dev, "wlan reset failed: %d\n", err); in mtk_wed_fe_reset()
252 struct mtk_wed_device *dev; in mtk_wed_fe_reset_complete() local
257 dev = hw->wed_dev; in mtk_wed_fe_reset_complete()
258 if (!dev || !dev->wlan.reset_complete) in mtk_wed_fe_reset_complete()
261 dev->wlan.reset_complete(dev); in mtk_wed_fe_reset_complete()
268 mtk_wed_assign(struct mtk_wed_device *dev) in mtk_wed_assign() argument
273 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in mtk_wed_assign()
274 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)]; in mtk_wed_assign()
296 hw->wed_dev = dev; in mtk_wed_assign()
301 mtk_wed_tx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_tx_buffer_alloc() argument
306 int token = dev->wlan.token_start; in mtk_wed_tx_buffer_alloc()
311 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_tx_buffer_alloc()
318 dev->tx_buf_ring.size = ring_size; in mtk_wed_tx_buffer_alloc()
319 dev->tx_buf_ring.pages = page_list; in mtk_wed_tx_buffer_alloc()
321 desc = dma_alloc_coherent(dev->hw->dev, ring_size * sizeof(*desc), in mtk_wed_tx_buffer_alloc()
326 dev->tx_buf_ring.desc = desc; in mtk_wed_tx_buffer_alloc()
327 dev->tx_buf_ring.desc_phys = desc_phys; in mtk_wed_tx_buffer_alloc()
339 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
341 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_tx_buffer_alloc()
347 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
357 txd_size = dev->wlan.init_buf(buf, buf_phys, token++); in mtk_wed_tx_buffer_alloc()
362 if (dev->hw->version == 1) in mtk_wed_tx_buffer_alloc()
380 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
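
mtk_wed_tx_buffer_alloc() uses a two-tier DMA scheme: the descriptor ring lives in coherent memory, while packet buffers come from ordinary pages that are streaming-mapped and carved into MTK_WED_BUF_PER_PAGE slots, each handed to the wlan driver's init_buf() callback. A condensed sketch of one iteration of that shape (error unwinding and the per-buffer loop omitted):

	static int demo_alloc_one(struct mtk_wed_device *dev, int ring_size)
	{
		struct mtk_wdma_desc *desc;
		dma_addr_t desc_phys, page_phys;
		struct page *page;

		/* descriptors: coherent, visible to CPU and device at once */
		desc = dma_alloc_coherent(dev->hw->dev, ring_size * sizeof(*desc),
					  &desc_phys, GFP_KERNEL);
		if (!desc)
			return -ENOMEM;

		/* buffers: plain pages, streaming-mapped for the device */
		page = __dev_alloc_page(GFP_KERNEL);
		if (!page)
			return -ENOMEM;

		page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE,
					 DMA_BIDIRECTIONAL);
		if (dma_mapping_error(dev->hw->dev, page_phys))
			return -ENOMEM;

		return 0;
	}
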
388 mtk_wed_free_tx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_tx_buffer() argument
390 struct mtk_wdma_desc *desc = dev->tx_buf_ring.desc; in mtk_wed_free_tx_buffer()
391 void **page_list = dev->tx_buf_ring.pages; in mtk_wed_free_tx_buffer()
401 for (i = 0, page_idx = 0; i < dev->tx_buf_ring.size; in mtk_wed_free_tx_buffer()
410 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE, in mtk_wed_free_tx_buffer()
415 dma_free_coherent(dev->hw->dev, dev->tx_buf_ring.size * sizeof(*desc), in mtk_wed_free_tx_buffer()
416 desc, dev->tx_buf_ring.desc_phys); in mtk_wed_free_tx_buffer()
423 mtk_wed_rx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_alloc() argument
428 dev->rx_buf_ring.size = dev->wlan.rx_nbuf; in mtk_wed_rx_buffer_alloc()
429 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rx_buffer_alloc()
430 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_rx_buffer_alloc()
435 dev->rx_buf_ring.desc = desc; in mtk_wed_rx_buffer_alloc()
436 dev->rx_buf_ring.desc_phys = desc_phys; in mtk_wed_rx_buffer_alloc()
437 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt); in mtk_wed_rx_buffer_alloc()
443 mtk_wed_free_rx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_rx_buffer() argument
445 struct mtk_rxbm_desc *desc = dev->rx_buf_ring.desc; in mtk_wed_free_rx_buffer()
450 dev->wlan.release_rx_buf(dev); in mtk_wed_free_rx_buffer()
451 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc), in mtk_wed_free_rx_buffer()
452 desc, dev->rx_buf_ring.desc_phys); in mtk_wed_free_rx_buffer()
456 mtk_wed_rx_buffer_hw_init(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_hw_init() argument
458 wed_w32(dev, MTK_WED_RX_BM_RX_DMAD, in mtk_wed_rx_buffer_hw_init()
459 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size)); in mtk_wed_rx_buffer_hw_init()
460 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys); in mtk_wed_rx_buffer_hw_init()
461 wed_w32(dev, MTK_WED_RX_BM_INIT_PTR, MTK_WED_RX_BM_INIT_SW_TAIL | in mtk_wed_rx_buffer_hw_init()
462 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt)); in mtk_wed_rx_buffer_hw_init()
463 wed_w32(dev, MTK_WED_RX_BM_DYN_ALLOC_TH, in mtk_wed_rx_buffer_hw_init()
465 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_buffer_hw_init()
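
The MTK_WED_RX_BM_* writes pack register fields with FIELD_PREP() from <linux/bitfield.h>, which shifts a value into the bit range described by a GENMASK() constant. A generic illustration; the mask below is hypothetical, not a WED register definition:

	#include <linux/bitfield.h>
	#include <linux/bits.h>

	#define DEMO_SDL0	GENMASK(13, 0)	/* hypothetical length field */

	static u32 pack_rx_size(u32 rx_size)
	{
		return FIELD_PREP(DEMO_SDL0, rx_size);	/* value into bits 13:0 */
	}
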
469 mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring) in mtk_wed_free_ring() argument
474 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size, in mtk_wed_free_ring()
479 mtk_wed_free_rx_rings(struct mtk_wed_device *dev) in mtk_wed_free_rx_rings() argument
481 mtk_wed_free_rx_buffer(dev); in mtk_wed_free_rx_rings()
482 mtk_wed_free_ring(dev, &dev->rro.ring); in mtk_wed_free_rx_rings()
486 mtk_wed_free_tx_rings(struct mtk_wed_device *dev) in mtk_wed_free_tx_rings() argument
490 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) in mtk_wed_free_tx_rings()
491 mtk_wed_free_ring(dev, &dev->tx_ring[i]); in mtk_wed_free_tx_rings()
492 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_free_tx_rings()
493 mtk_wed_free_ring(dev, &dev->rx_wdma[i]); in mtk_wed_free_tx_rings()
497 mtk_wed_set_ext_int(struct mtk_wed_device *dev, bool en) in mtk_wed_set_ext_int() argument
501 if (dev->hw->version == 1) in mtk_wed_set_ext_int()
509 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
512 wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0); in mtk_wed_set_ext_int()
513 wed_r32(dev, MTK_WED_EXT_INT_MASK); in mtk_wed_set_ext_int()
517 mtk_wed_set_512_support(struct mtk_wed_device *dev, bool enable) in mtk_wed_set_512_support() argument
520 wed_w32(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
521 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
524 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
526 wed_clr(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
532 mtk_wed_check_wfdma_rx_fill(struct mtk_wed_device *dev, int idx) in mtk_wed_check_wfdma_rx_fill() argument
537 if (!(dev->rx_ring[idx].flags & MTK_WED_RING_CONFIGURED)) in mtk_wed_check_wfdma_rx_fill()
543 cur_idx = wed_r32(dev, in mtk_wed_check_wfdma_rx_fill()
553 dev_err(dev->hw->dev, "rx dma enable failed\n"); in mtk_wed_check_wfdma_rx_fill()
557 val = wifi_r32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base) | in mtk_wed_check_wfdma_rx_fill()
559 wifi_w32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, val); in mtk_wed_check_wfdma_rx_fill()
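
The last two lines convert a physical register address into an accessor offset: dev->wlan.wpdma_rx_glo holds a physical address, while wifi_r32()/wifi_w32() index from the ioremapped dev->wlan.base, so subtracting dev->wlan.phy_base bridges the two. Sketch; DEMO_RX_DMA_EN is a placeholder for the real enable bit:

	u32 off = dev->wlan.wpdma_rx_glo - dev->wlan.phy_base;

	/* OR the WLAN-side RX DMA enable bit into the glo-cfg register;
	 * DEMO_RX_DMA_EN stands in for the driver's actual constant */
	wifi_w32(dev, off, wifi_r32(dev, off) | DEMO_RX_DMA_EN);
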
563 mtk_wed_dma_disable(struct mtk_wed_device *dev) in mtk_wed_dma_disable() argument
565 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
569 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN); in mtk_wed_dma_disable()
571 wed_clr(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_disable()
575 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
580 if (dev->hw->version == 1) { in mtk_wed_dma_disable()
581 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0); in mtk_wed_dma_disable()
582 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
585 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
589 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_disable()
591 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_disable()
595 mtk_wed_set_512_support(dev, false); in mtk_wed_dma_disable()
599 mtk_wed_stop(struct mtk_wed_device *dev) in mtk_wed_stop() argument
601 mtk_wed_dma_disable(dev); in mtk_wed_stop()
602 mtk_wed_set_ext_int(dev, false); in mtk_wed_stop()
604 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, 0); in mtk_wed_stop()
605 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, 0); in mtk_wed_stop()
606 wdma_w32(dev, MTK_WDMA_INT_MASK, 0); in mtk_wed_stop()
607 wdma_w32(dev, MTK_WDMA_INT_GRP2, 0); in mtk_wed_stop()
609 if (dev->hw->version == 1) in mtk_wed_stop()
612 wed_w32(dev, MTK_WED_EXT_INT_MASK1, 0); in mtk_wed_stop()
613 wed_w32(dev, MTK_WED_EXT_INT_MASK2, 0); in mtk_wed_stop()
617 mtk_wed_deinit(struct mtk_wed_device *dev) in mtk_wed_deinit() argument
619 mtk_wed_stop(dev); in mtk_wed_deinit()
621 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
627 if (dev->hw->version == 1) in mtk_wed_deinit()
630 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
637 __mtk_wed_detach(struct mtk_wed_device *dev) in __mtk_wed_detach() argument
639 struct mtk_wed_hw *hw = dev->hw; in __mtk_wed_detach()
641 mtk_wed_deinit(dev); in __mtk_wed_detach()
643 mtk_wdma_rx_reset(dev); in __mtk_wed_detach()
644 mtk_wed_reset(dev, MTK_WED_RESET_WED); in __mtk_wed_detach()
645 mtk_wed_free_tx_buffer(dev); in __mtk_wed_detach()
646 mtk_wed_free_tx_rings(dev); in __mtk_wed_detach()
648 if (mtk_wed_get_rx_capa(dev)) { in __mtk_wed_detach()
650 mtk_wed_wo_reset(dev); in __mtk_wed_detach()
651 mtk_wed_free_rx_rings(dev); in __mtk_wed_detach()
656 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in __mtk_wed_detach()
659 wlan_node = dev->wlan.pci_dev->dev.of_node; in __mtk_wed_detach()
666 hw->eth->dma_dev != hw->eth->dev) in __mtk_wed_detach()
667 mtk_eth_set_dma_device(hw->eth, hw->eth->dev); in __mtk_wed_detach()
669 memset(dev, 0, sizeof(*dev)); in __mtk_wed_detach()
676 mtk_wed_detach(struct mtk_wed_device *dev) in mtk_wed_detach() argument
679 __mtk_wed_detach(dev); in mtk_wed_detach()
685 mtk_wed_bus_init(struct mtk_wed_device *dev) in mtk_wed_bus_init() argument
687 switch (dev->wlan.bus_type) { in mtk_wed_bus_init()
689 struct device_node *np = dev->hw->eth->dev->of_node; in mtk_wed_bus_init()
699 wed_w32(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
703 wed_set(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
706 wed_r32(dev, MTK_WED_PCIE_INT_CTRL); in mtk_wed_bus_init()
708 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, PCIE_BASE_ADDR0 | 0x180); in mtk_wed_bus_init()
709 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, PCIE_BASE_ADDR0 | 0x184); in mtk_wed_bus_init()
712 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(24)); in mtk_wed_bus_init()
713 wed_r32(dev, MTK_WED_PCIE_INT_TRIGGER); in mtk_wed_bus_init()
716 wed_set(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
721 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_bus_init()
731 mtk_wed_set_wpdma(struct mtk_wed_device *dev) in mtk_wed_set_wpdma() argument
733 if (dev->hw->version == 1) { in mtk_wed_set_wpdma()
734 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys); in mtk_wed_set_wpdma()
736 mtk_wed_bus_init(dev); in mtk_wed_set_wpdma()
738 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int); in mtk_wed_set_wpdma()
739 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask); in mtk_wed_set_wpdma()
740 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx); in mtk_wed_set_wpdma()
741 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree); in mtk_wed_set_wpdma()
742 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo); in mtk_wed_set_wpdma()
743 wed_w32(dev, MTK_WED_WPDMA_RX_RING, dev->wlan.wpdma_rx); in mtk_wed_set_wpdma()
748 mtk_wed_hw_init_early(struct mtk_wed_device *dev) in mtk_wed_hw_init_early() argument
752 mtk_wed_deinit(dev); in mtk_wed_hw_init_early()
753 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_hw_init_early()
754 mtk_wed_set_wpdma(dev); in mtk_wed_hw_init_early()
762 wed_m32(dev, MTK_WED_WDMA_GLO_CFG, mask, set); in mtk_wed_hw_init_early()
764 if (dev->hw->version == 1) { in mtk_wed_hw_init_early()
765 u32 offset = dev->hw->index ? 0x04000400 : 0; in mtk_wed_hw_init_early()
767 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_hw_init_early()
772 wed_w32(dev, MTK_WED_WDMA_OFFSET0, 0x2a042a20 + offset); in mtk_wed_hw_init_early()
773 wed_w32(dev, MTK_WED_WDMA_OFFSET1, 0x29002800 + offset); in mtk_wed_hw_init_early()
774 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_hw_init_early()
775 MTK_PCIE_BASE(dev->hw->index)); in mtk_wed_hw_init_early()
777 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy); in mtk_wed_hw_init_early()
778 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_ETH_DMAD_FMT); in mtk_wed_hw_init_early()
779 wed_w32(dev, MTK_WED_WDMA_OFFSET0, in mtk_wed_hw_init_early()
785 wed_w32(dev, MTK_WED_WDMA_OFFSET1, in mtk_wed_hw_init_early()
794 mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_rro_ring_alloc() argument
797 ring->desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rro_ring_alloc()
811 mtk_wed_rro_alloc(struct mtk_wed_device *dev) in mtk_wed_rro_alloc() argument
817 index = of_property_match_string(dev->hw->node, "memory-region-names", in mtk_wed_rro_alloc()
822 np = of_parse_phandle(dev->hw->node, "memory-region", index); in mtk_wed_rro_alloc()
832 dev->rro.miod_phys = rmem->base; in mtk_wed_rro_alloc()
833 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys; in mtk_wed_rro_alloc()
835 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring, in mtk_wed_rro_alloc()
840 mtk_wed_rro_cfg(struct mtk_wed_device *dev) in mtk_wed_rro_cfg() argument
842 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rro_cfg()
871 mtk_wed_rro_hw_init(struct mtk_wed_device *dev) in mtk_wed_rro_hw_init() argument
873 wed_w32(dev, MTK_WED_RROQM_MIOD_CFG, in mtk_wed_rro_hw_init()
879 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys); in mtk_wed_rro_hw_init()
880 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL1, in mtk_wed_rro_hw_init()
882 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys); in mtk_wed_rro_hw_init()
883 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL1, in mtk_wed_rro_hw_init()
885 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL2, 0); in mtk_wed_rro_hw_init()
886 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys); in mtk_wed_rro_hw_init()
888 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rro_hw_init()
892 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rro_hw_init()
893 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1); in mtk_wed_rro_hw_init()
894 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rro_hw_init()
898 mtk_wed_route_qm_hw_init(struct mtk_wed_device *dev) in mtk_wed_route_qm_hw_init() argument
900 wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_route_qm_hw_init()
904 if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM)) in mtk_wed_route_qm_hw_init()
909 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
910 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_TXDMAD_FPORT); in mtk_wed_route_qm_hw_init()
911 wed_set(dev, MTK_WED_RTQM_GLO_CFG, in mtk_wed_route_qm_hw_init()
912 FIELD_PREP(MTK_WED_RTQM_TXDMAD_FPORT, 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
913 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
915 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_route_qm_hw_init()
919 mtk_wed_hw_init(struct mtk_wed_device *dev) in mtk_wed_hw_init() argument
921 if (dev->init_done) in mtk_wed_hw_init()
924 dev->init_done = true; in mtk_wed_hw_init()
925 mtk_wed_set_ext_int(dev, false); in mtk_wed_hw_init()
926 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
929 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
933 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys); in mtk_wed_hw_init()
935 wed_w32(dev, MTK_WED_TX_BM_BUF_LEN, MTK_WED_PKT_SIZE); in mtk_wed_hw_init()
937 if (dev->hw->version == 1) { in mtk_wed_hw_init()
938 wed_w32(dev, MTK_WED_TX_BM_TKID, in mtk_wed_hw_init()
940 dev->wlan.token_start) | in mtk_wed_hw_init()
942 dev->wlan.token_start + in mtk_wed_hw_init()
943 dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
944 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
948 wed_w32(dev, MTK_WED_TX_BM_TKID_V2, in mtk_wed_hw_init()
950 dev->wlan.token_start) | in mtk_wed_hw_init()
952 dev->wlan.token_start + in mtk_wed_hw_init()
953 dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
954 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
957 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
960 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
962 dev->tx_buf_ring.size / 128)); in mtk_wed_hw_init()
963 wed_w32(dev, MTK_WED_TX_TKID_DYN_THR, in mtk_wed_hw_init()
968 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_hw_init()
970 if (dev->hw->version == 1) { in mtk_wed_hw_init()
971 wed_set(dev, MTK_WED_CTRL, in mtk_wed_hw_init()
975 wed_clr(dev, MTK_WED_TX_TKID_CTRL, MTK_WED_TX_TKID_CTRL_PAUSE); in mtk_wed_hw_init()
977 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_hw_init()
980 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_hw_init()
982 mtk_wed_rx_buffer_hw_init(dev); in mtk_wed_hw_init()
983 mtk_wed_rro_hw_init(dev); in mtk_wed_hw_init()
984 mtk_wed_route_qm_hw_init(dev); in mtk_wed_hw_init()
987 wed_clr(dev, MTK_WED_TX_BM_CTRL, MTK_WED_TX_BM_CTRL_PAUSE); in mtk_wed_hw_init()
1011 mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_check_busy() argument
1013 return !!(wed_r32(dev, reg) & mask); in mtk_wed_check_busy()
1017 mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_poll_busy() argument
1024 timeout, false, dev, reg, mask); in mtk_wed_poll_busy()
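
mtk_wed_poll_busy() wraps read_poll_timeout() from <linux/iopoll.h>, whose argument order is op, val, cond, sleep_us, timeout_us, sleep_before_read, then the op's own arguments; the trailing "timeout, false, dev, reg, mask" above is exactly that tail. A sketch with the sleep/timeout values assumed:

	static int demo_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask)
	{
		int sleep = 15000;		/* us between reads, assumed */
		int timeout = 100 * sleep;	/* overall budget, assumed */
		u32 val;

		return read_poll_timeout(mtk_wed_check_busy, val, !val, sleep,
					 timeout, false, dev, reg, mask);
	}
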
1028 mtk_wed_rx_reset(struct mtk_wed_device *dev) in mtk_wed_rx_reset() argument
1030 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rx_reset()
1040 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN); in mtk_wed_rx_reset()
1041 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1044 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_rx_reset()
1045 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV); in mtk_wed_rx_reset()
1047 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1051 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1054 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1058 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_rx_reset()
1062 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rx_reset()
1063 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1066 mtk_wed_reset(dev, MTK_WED_RESET_RX_RRO_QM); in mtk_wed_rx_reset()
1068 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rx_reset()
1071 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rx_reset()
1075 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_rx_reset()
1076 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1079 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1081 wed_set(dev, MTK_WED_RTQM_GLO_CFG, in mtk_wed_rx_reset()
1085 mtk_wdma_tx_reset(dev); in mtk_wed_rx_reset()
1088 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN); in mtk_wed_rx_reset()
1089 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1091 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV); in mtk_wed_rx_reset()
1094 ret = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_rx_reset()
1096 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_RX_DMA_EN); in mtk_wed_rx_reset()
1098 mtk_wed_reset(dev, MTK_WED_RESET_WED_RX_DMA); in mtk_wed_rx_reset()
1100 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_rx_reset()
1103 wed_set(dev, MTK_WED_RESET_IDX, in mtk_wed_rx_reset()
1106 wed_set(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_IDX_RX); in mtk_wed_rx_reset()
1107 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_rx_reset()
1111 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_reset()
1112 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1114 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM); in mtk_wed_rx_reset()
1125 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) { in mtk_wed_rx_reset()
1126 if (!dev->rx_ring[i].desc) in mtk_wed_rx_reset()
1129 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE, in mtk_wed_rx_reset()
1132 mtk_wed_free_rx_buffer(dev); in mtk_wed_rx_reset()
1138 mtk_wed_reset_dma(struct mtk_wed_device *dev) in mtk_wed_reset_dma() argument
1144 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) { in mtk_wed_reset_dma()
1145 if (!dev->tx_ring[i].desc) in mtk_wed_reset_dma()
1148 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE, in mtk_wed_reset_dma()
1153 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_TX_DMA_EN); in mtk_wed_reset_dma()
1154 busy = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_reset_dma()
1157 mtk_wed_reset(dev, MTK_WED_RESET_WED_TX_DMA); in mtk_wed_reset_dma()
1159 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_IDX_TX); in mtk_wed_reset_dma()
1160 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1164 busy = !!mtk_wdma_rx_reset(dev); in mtk_wed_reset_dma()
1165 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN); in mtk_wed_reset_dma()
1167 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1171 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT); in mtk_wed_reset_dma()
1172 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV); in mtk_wed_reset_dma()
1174 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1176 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1178 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1181 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1186 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN); in mtk_wed_reset_dma()
1189 val = wed_r32(dev, MTK_WED_TX_BM_INTF); in mtk_wed_reset_dma()
1194 mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT); in mtk_wed_reset_dma()
1195 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_BM_EN); in mtk_wed_reset_dma()
1196 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_reset_dma()
1199 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1201 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1205 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1209 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_reset_dma()
1210 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV); in mtk_wed_reset_dma()
1211 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV); in mtk_wed_reset_dma()
1213 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, in mtk_wed_reset_dma()
1216 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1219 dev->init_done = false; in mtk_wed_reset_dma()
1220 if (dev->hw->version == 1) in mtk_wed_reset_dma()
1224 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_WPDMA_IDX_RX); in mtk_wed_reset_dma()
1225 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1228 mtk_wed_rx_reset(dev); in mtk_wed_reset_dma()
1232 mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_ring_alloc() argument
1235 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size, in mtk_wed_ring_alloc()
1248 mtk_wed_wdma_rx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_rx_ring_setup() argument
1251 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version; in mtk_wed_wdma_rx_ring_setup()
1254 if (idx >= ARRAY_SIZE(dev->rx_wdma)) in mtk_wed_wdma_rx_ring_setup()
1257 wdma = &dev->rx_wdma[idx]; in mtk_wed_wdma_rx_ring_setup()
1258 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_rx_ring_setup()
1262 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1264 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1266 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_rx_ring_setup()
1268 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1270 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
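
Both WDMA ring setup paths program the same per-ring register block: a descriptor base, an entry count, and a CPU index (plus a DMA index on the TX side), each at a fixed offset from the ring's base register. Sketch of the shared idiom, using names from the references above:

	static void demo_program_rx_ring(struct mtk_wed_device *dev, int idx,
					 struct mtk_wed_ring *wdma, int size)
	{
		wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE,
			 wdma->desc_phys);
		wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT,
			 size);
		wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX,
			 0);	/* reset the software index */
	}
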
1277 mtk_wed_wdma_tx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_tx_ring_setup() argument
1280 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version; in mtk_wed_wdma_tx_ring_setup()
1283 if (idx >= ARRAY_SIZE(dev->tx_wdma)) in mtk_wed_wdma_tx_ring_setup()
1286 wdma = &dev->tx_wdma[idx]; in mtk_wed_wdma_tx_ring_setup()
1287 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_tx_ring_setup()
1291 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1293 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1295 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1296 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1302 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1304 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1306 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_CPU_IDX, in mtk_wed_wdma_tx_ring_setup()
1308 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_DMA_IDX, in mtk_wed_wdma_tx_ring_setup()
1316 mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb, in mtk_wed_ppe_check() argument
1319 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_ppe_check()
1331 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash); in mtk_wed_ppe_check()
1335 mtk_wed_configure_irq(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_configure_irq() argument
1340 wed_set(dev, MTK_WED_CTRL, in mtk_wed_configure_irq()
1346 if (dev->hw->version == 1) { in mtk_wed_configure_irq()
1347 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, in mtk_wed_configure_irq()
1350 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, in mtk_wed_configure_irq()
1354 wed_clr(dev, MTK_WED_WDMA_INT_CTRL, wdma_mask); in mtk_wed_configure_irq()
1359 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX, in mtk_wed_configure_irq()
1365 dev->wlan.tx_tbit[0]) | in mtk_wed_configure_irq()
1367 dev->wlan.tx_tbit[1])); in mtk_wed_configure_irq()
1370 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX_FREE, in mtk_wed_configure_irq()
1374 dev->wlan.txfree_tbit)); in mtk_wed_configure_irq()
1376 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RX, in mtk_wed_configure_irq()
1382 dev->wlan.rx_tbit[0]) | in mtk_wed_configure_irq()
1384 dev->wlan.rx_tbit[1])); in mtk_wed_configure_irq()
1386 wed_w32(dev, MTK_WED_WDMA_INT_CLR, wdma_mask); in mtk_wed_configure_irq()
1387 wed_set(dev, MTK_WED_WDMA_INT_CTRL, in mtk_wed_configure_irq()
1389 dev->wdma_idx)); in mtk_wed_configure_irq()
1392 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, wdma_mask); in mtk_wed_configure_irq()
1394 wdma_w32(dev, MTK_WDMA_INT_MASK, wdma_mask); in mtk_wed_configure_irq()
1395 wdma_w32(dev, MTK_WDMA_INT_GRP2, wdma_mask); in mtk_wed_configure_irq()
1396 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_configure_irq()
1397 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_configure_irq()
1401 mtk_wed_dma_enable(struct mtk_wed_device *dev) in mtk_wed_dma_enable() argument
1403 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, MTK_WED_WPDMA_INT_CTRL_SUBRT_ADV); in mtk_wed_dma_enable()
1405 wed_set(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_enable()
1408 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
1411 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1414 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1419 if (dev->hw->version == 1) { in mtk_wed_dma_enable()
1420 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1425 wed_set(dev, MTK_WED_WPDMA_CTRL, in mtk_wed_dma_enable()
1428 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
1432 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
1436 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
1440 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_enable()
1447 mtk_wed_check_wfdma_rx_fill(dev, i); in mtk_wed_dma_enable()
1452 mtk_wed_start(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_start() argument
1456 if (mtk_wed_get_rx_capa(dev) && mtk_wed_rx_buffer_alloc(dev)) in mtk_wed_start()
1459 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_start()
1460 if (!dev->rx_wdma[i].desc) in mtk_wed_start()
1461 mtk_wed_wdma_rx_ring_setup(dev, i, 16, false); in mtk_wed_start()
1463 mtk_wed_hw_init(dev); in mtk_wed_start()
1464 mtk_wed_configure_irq(dev, irq_mask); in mtk_wed_start()
1466 mtk_wed_set_ext_int(dev, true); in mtk_wed_start()
1468 if (dev->hw->version == 1) { in mtk_wed_start()
1469 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN | in mtk_wed_start()
1471 dev->hw->index); in mtk_wed_start()
1473 val |= BIT(0) | (BIT(1) * !!dev->hw->index); in mtk_wed_start()
1474 regmap_write(dev->hw->mirror, dev->hw->index * 4, val); in mtk_wed_start()
1477 wed_w32(dev, MTK_WED_EXT_INT_MASK1, in mtk_wed_start()
1479 wed_w32(dev, MTK_WED_EXT_INT_MASK2, in mtk_wed_start()
1482 wed_r32(dev, MTK_WED_EXT_INT_MASK1); in mtk_wed_start()
1483 wed_r32(dev, MTK_WED_EXT_INT_MASK2); in mtk_wed_start()
1485 if (mtk_wed_rro_cfg(dev)) in mtk_wed_start()
1490 mtk_wed_set_512_support(dev, dev->wlan.wcid_512); in mtk_wed_start()
1492 mtk_wed_dma_enable(dev); in mtk_wed_start()
1493 dev->running = true; in mtk_wed_start()
1497 mtk_wed_attach(struct mtk_wed_device *dev) in mtk_wed_attach() argument
1507 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE && in mtk_wed_attach()
1508 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) || in mtk_wed_attach()
1519 hw = mtk_wed_assign(dev); in mtk_wed_attach()
1526 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE in mtk_wed_attach()
1527 ? &dev->wlan.pci_dev->dev in mtk_wed_attach()
1528 : &dev->wlan.platform_dev->dev; in mtk_wed_attach()
1532 dev->hw = hw; in mtk_wed_attach()
1533 dev->dev = hw->dev; in mtk_wed_attach()
1534 dev->irq = hw->irq; in mtk_wed_attach()
1535 dev->wdma_idx = hw->index; in mtk_wed_attach()
1536 dev->version = hw->version; in mtk_wed_attach()
1538 if (hw->eth->dma_dev == hw->eth->dev && in mtk_wed_attach()
1539 of_dma_is_coherent(hw->eth->dev->of_node)) in mtk_wed_attach()
1540 mtk_eth_set_dma_device(hw->eth, hw->dev); in mtk_wed_attach()
1542 ret = mtk_wed_tx_buffer_alloc(dev); in mtk_wed_attach()
1546 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_attach()
1547 ret = mtk_wed_rro_alloc(dev); in mtk_wed_attach()
1552 mtk_wed_hw_init_early(dev); in mtk_wed_attach()
1557 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID); in mtk_wed_attach()
1562 dev_err(dev->hw->dev, "failed to attach wed device\n"); in mtk_wed_attach()
1563 __mtk_wed_detach(dev); in mtk_wed_attach()
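
From the WLAN driver's side, mtk_wed_attach() is reached through the mtk_wed_device_attach() wrapper after the dev->wlan descriptor has been filled in. A hedged usage sketch; the values are placeholders and a real consumer (mt76, for instance) populates considerably more, including the init_buf() callback:

	static int demo_attach(struct mtk_wed_device *wed, struct pci_dev *pdev,
			       void __iomem *base)
	{
		wed->wlan.bus_type = MTK_WED_BUS_PCIE;
		wed->wlan.pci_dev = pdev;
		wed->wlan.base = base;
		wed->wlan.token_start = 0;	/* placeholder */
		wed->wlan.nbuf = 16384;		/* placeholder */

		return mtk_wed_device_attach(wed);
	}
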
1572 mtk_wed_tx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_tx_ring_setup() argument
1575 struct mtk_wed_ring *ring = &dev->tx_ring[idx]; in mtk_wed_tx_ring_setup()
1589 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring))) in mtk_wed_tx_ring_setup()
1592 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE, in mtk_wed_tx_ring_setup()
1596 if (mtk_wed_wdma_rx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_tx_ring_setup()
1604 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_tx_ring_setup()
1605 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_TX_RING_SIZE); in mtk_wed_tx_ring_setup()
1606 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
1608 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_tx_ring_setup()
1610 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_tx_ring_setup()
1612 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
1618 mtk_wed_txfree_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_txfree_ring_setup() argument
1620 struct mtk_wed_ring *ring = &dev->txfree_ring; in mtk_wed_txfree_ring_setup()
1621 int i, index = dev->hw->version == 1; in mtk_wed_txfree_ring_setup()
1634 wed_w32(dev, MTK_WED_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
1635 wed_w32(dev, MTK_WED_WPDMA_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
1642 mtk_wed_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_rx_ring_setup() argument
1645 struct mtk_wed_ring *ring = &dev->rx_ring[idx]; in mtk_wed_rx_ring_setup()
1647 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring))) in mtk_wed_rx_ring_setup()
1650 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE, in mtk_wed_rx_ring_setup()
1654 if (mtk_wed_wdma_tx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_rx_ring_setup()
1663 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_rx_ring_setup()
1664 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_RX_RING_SIZE); in mtk_wed_rx_ring_setup()
1666 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rx_ring_setup()
1668 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rx_ring_setup()
1675 mtk_wed_irq_get(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_get() argument
1679 if (dev->hw->version == 1) in mtk_wed_irq_get()
1687 val = wed_r32(dev, MTK_WED_EXT_INT_STATUS); in mtk_wed_irq_get()
1688 wed_w32(dev, MTK_WED_EXT_INT_STATUS, val); in mtk_wed_irq_get()
1690 if (!dev->hw->num_flows) in mtk_wed_irq_get()
1693 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val); in mtk_wed_irq_get()
1695 val = wed_r32(dev, MTK_WED_INT_STATUS); in mtk_wed_irq_get()
1697 wed_w32(dev, MTK_WED_INT_STATUS, val); /* ACK */ in mtk_wed_irq_get()
1703 mtk_wed_irq_set_mask(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_set_mask() argument
1705 mtk_wed_set_ext_int(dev, !!mask); in mtk_wed_irq_set_mask()
1706 wed_w32(dev, MTK_WED_INT_MASK, mask); in mtk_wed_irq_set_mask()
1767 if (!priv || !tc_can_offload(priv->dev)) in mtk_wed_setup_tc_block_cb()
1778 mtk_wed_setup_tc_block(struct mtk_wed_hw *hw, struct net_device *dev, in mtk_wed_setup_tc_block() argument
1798 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
1809 priv->dev = dev; in mtk_wed_setup_tc_block()
1810 block_cb = flow_block_cb_alloc(cb, dev, priv, NULL); in mtk_wed_setup_tc_block()
1821 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
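
mtk_wed_setup_tc_block() follows the standard flow_block_cb life cycle: look up an existing callback bound to this dev and take a reference, or allocate a new one carrying the driver's private state and add it to the offload request. A condensed sketch of the BIND half; the callback and priv pointer are stand-ins for the driver's own:

	static int demo_setup_block(struct net_device *dev,
				    struct flow_block_offload *f,
				    flow_setup_cb_t *cb, void *cb_priv)
	{
		struct flow_block_cb *block_cb;

		if (f->command != FLOW_BLOCK_BIND)
			return -EOPNOTSUPP;

		block_cb = flow_block_cb_lookup(f->block, cb, dev);
		if (block_cb) {
			flow_block_cb_incref(block_cb);	/* already bound */
			return 0;
		}

		block_cb = flow_block_cb_alloc(cb, dev, cb_priv, NULL);
		if (IS_ERR(block_cb))
			return PTR_ERR(block_cb);

		flow_block_cb_add(block_cb, f);
		return 0;
	}
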
1838 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev, in mtk_wed_setup_tc() argument
1849 return mtk_wed_setup_tc_block(hw, dev, type_data); in mtk_wed_setup_tc()
1876 struct device_node *eth_np = eth->dev->of_node; in mtk_wed_add_hw()
1889 get_device(&pdev->dev); in mtk_wed_add_hw()
1912 hw->dev = &pdev->dev; in mtk_wed_add_hw()
1946 put_device(&pdev->dev); in mtk_wed_add_hw()
1968 put_device(hw->dev); in mtk_wed_exit()