Lines matching "wo-dlm" in drivers/net/ethernet/mediatek/mtk_wed.c
1 // SPDX-License-Identifier: GPL-2.0-only
9 #include <linux/dma-mapping.h>
55 regmap_update_bits(dev->hw->regs, reg, mask | val, val); in wed_m32()
91 return readl(dev->wlan.base + reg); in wifi_r32()
97 writel(val, dev->wlan.base + reg); in wifi_w32()
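For orientation, the three accessors matched above are thin wrappers: WED registers are touched through a regmap, while the WLAN side is plain MMIO behind an ioremapped base. A minimal sketch of that pattern, with the signatures assumed from the surrounding fragments:

    static void wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val)
    {
            /* bits in @mask or @val are updated: bits in @val are set,
             * bits only in @mask are cleared, everything else is untouched
             */
            regmap_update_bits(dev->hw->regs, reg, mask | val, val);
    }

    static u32 wifi_r32(struct mtk_wed_device *dev, u32 reg)
    {
            /* WLAN registers are read directly through the ioremapped base */
            return readl(dev->wlan.base + reg);
    }

    static void wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val)
    {
            writel(val, dev->wlan.base + reg);
    }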
122 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_rx_reset()
127 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) { in mtk_wdma_rx_reset()
128 if (dev->rx_wdma[i].desc) in mtk_wdma_rx_reset()
147 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_tx_reset()
152 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++) in mtk_wdma_tx_reset()
177 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_wo_reset() local
185 if (mtk_wed_mcu_send_msg(wo, MTK_WED_MODULE_ID_WO, in mtk_wed_wo_reset()
193 dev_err(dev->hw->dev, "failed to disable wed-wo\n"); in mtk_wed_wo_reset()
198 switch (dev->hw->index) { in mtk_wed_wo_reset()
231 dev = hw->wed_dev; in mtk_wed_fe_reset()
232 if (!dev || !dev->wlan.reset) in mtk_wed_fe_reset()
236 err = dev->wlan.reset(dev); in mtk_wed_fe_reset()
238 dev_err(dev->dev, "wlan reset failed: %d\n", err); in mtk_wed_fe_reset()
257 dev = hw->wed_dev; in mtk_wed_fe_reset_complete()
258 if (!dev || !dev->wlan.reset_complete) in mtk_wed_fe_reset_complete()
261 dev->wlan.reset_complete(dev); in mtk_wed_fe_reset_complete()
273 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in mtk_wed_assign()
274 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)]; in mtk_wed_assign()
278 if (!hw->wed_dev) in mtk_wed_assign()
281 if (hw->version == 1) in mtk_wed_assign()
289 if (hw && !hw->wed_dev) in mtk_wed_assign()
296 hw->wed_dev = dev; in mtk_wed_assign()
306 int token = dev->wlan.token_start; in mtk_wed_tx_buffer_alloc()
311 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_tx_buffer_alloc()
316 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
318 dev->tx_buf_ring.size = ring_size; in mtk_wed_tx_buffer_alloc()
319 dev->tx_buf_ring.pages = page_list; in mtk_wed_tx_buffer_alloc()
321 desc = dma_alloc_coherent(dev->hw->dev, ring_size * sizeof(*desc), in mtk_wed_tx_buffer_alloc()
324 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
326 dev->tx_buf_ring.desc = desc; in mtk_wed_tx_buffer_alloc()
327 dev->tx_buf_ring.desc_phys = desc_phys; in mtk_wed_tx_buffer_alloc()
337 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
339 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
341 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_tx_buffer_alloc()
343 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
347 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
357 txd_size = dev->wlan.init_buf(buf, buf_phys, token++); in mtk_wed_tx_buffer_alloc()
359 desc->buf0 = cpu_to_le32(buf_phys); in mtk_wed_tx_buffer_alloc()
360 desc->buf1 = cpu_to_le32(buf_phys + txd_size); in mtk_wed_tx_buffer_alloc()
362 if (dev->hw->version == 1) in mtk_wed_tx_buffer_alloc()
365 MTK_WED_BUF_SIZE - txd_size) | in mtk_wed_tx_buffer_alloc()
370 MTK_WED_BUF_SIZE - txd_size) | in mtk_wed_tx_buffer_alloc()
372 desc->ctrl = cpu_to_le32(ctrl); in mtk_wed_tx_buffer_alloc()
373 desc->info = 0; in mtk_wed_tx_buffer_alloc()
380 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
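The mtk_wed_tx_buffer_alloc() fragments above follow the standard streaming-DMA sequence for a per-page buffer pool: map the page, check for a mapping error, sync it for CPU access while the buffers and descriptors are written, then sync it back for the device. A condensed sketch of just that sequence, with error unwinding and the WED-specific descriptor fields omitted and the allocation helper assumed:

    struct page *page = __dev_alloc_page(GFP_KERNEL);
    dma_addr_t page_phys;

    if (!page)
            return -ENOMEM;

    page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE,
                             DMA_BIDIRECTIONAL);
    if (dma_mapping_error(dev->hw->dev, page_phys)) {
            __free_page(page);
            return -ENOMEM;
    }

    /* CPU is about to write the TXD templates into the page */
    dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE,
                            DMA_BIDIRECTIONAL);

    /* ... dev->wlan.init_buf() for each MTK_WED_BUF_SIZE slot,
     *     then fill desc->buf0/buf1/ctrl as in the fragments above ...
     */

    /* hand ownership back to the hardware before DMA is enabled */
    dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE,
                               DMA_BIDIRECTIONAL);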
390 struct mtk_wdma_desc *desc = dev->tx_buf_ring.desc; in mtk_wed_free_tx_buffer()
391 void **page_list = dev->tx_buf_ring.pages; in mtk_wed_free_tx_buffer()
401 for (i = 0, page_idx = 0; i < dev->tx_buf_ring.size; in mtk_wed_free_tx_buffer()
410 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE, in mtk_wed_free_tx_buffer()
415 dma_free_coherent(dev->hw->dev, dev->tx_buf_ring.size * sizeof(*desc), in mtk_wed_free_tx_buffer()
416 desc, dev->tx_buf_ring.desc_phys); in mtk_wed_free_tx_buffer()
428 dev->rx_buf_ring.size = dev->wlan.rx_nbuf; in mtk_wed_rx_buffer_alloc()
429 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rx_buffer_alloc()
430 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_rx_buffer_alloc()
433 return -ENOMEM; in mtk_wed_rx_buffer_alloc()
435 dev->rx_buf_ring.desc = desc; in mtk_wed_rx_buffer_alloc()
436 dev->rx_buf_ring.desc_phys = desc_phys; in mtk_wed_rx_buffer_alloc()
437 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt); in mtk_wed_rx_buffer_alloc()
445 struct mtk_rxbm_desc *desc = dev->rx_buf_ring.desc; in mtk_wed_free_rx_buffer()
450 dev->wlan.release_rx_buf(dev); in mtk_wed_free_rx_buffer()
451 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc), in mtk_wed_free_rx_buffer()
452 desc, dev->rx_buf_ring.desc_phys); in mtk_wed_free_rx_buffer()
459 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size)); in mtk_wed_rx_buffer_hw_init()
460 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys); in mtk_wed_rx_buffer_hw_init()
462 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt)); in mtk_wed_rx_buffer_hw_init()
471 if (!ring->desc) in mtk_wed_free_ring()
474 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size, in mtk_wed_free_ring()
475 ring->desc, ring->desc_phys); in mtk_wed_free_ring()
482 mtk_wed_free_ring(dev, &dev->rro.ring); in mtk_wed_free_rx_rings()
490 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) in mtk_wed_free_tx_rings()
491 mtk_wed_free_ring(dev, &dev->tx_ring[i]); in mtk_wed_free_tx_rings()
492 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_free_tx_rings()
493 mtk_wed_free_ring(dev, &dev->rx_wdma[i]); in mtk_wed_free_tx_rings()
501 if (dev->hw->version == 1) in mtk_wed_set_ext_int()
509 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
537 if (!(dev->rx_ring[idx].flags & MTK_WED_RING_CONFIGURED)) in mtk_wed_check_wfdma_rx_fill()
546 if (cur_idx == MTK_WED_RX_RING_SIZE - 1) in mtk_wed_check_wfdma_rx_fill()
553 dev_err(dev->hw->dev, "rx dma enable failed\n"); in mtk_wed_check_wfdma_rx_fill()
557 val = wifi_r32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base) | in mtk_wed_check_wfdma_rx_fill()
559 wifi_w32(dev, dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, val); in mtk_wed_check_wfdma_rx_fill()
580 if (dev->hw->version == 1) { in mtk_wed_dma_disable()
581 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0); in mtk_wed_dma_disable()
609 if (dev->hw->version == 1) in mtk_wed_stop()
627 if (dev->hw->version == 1) in mtk_wed_deinit()
639 struct mtk_wed_hw *hw = dev->hw; in __mtk_wed_detach()
649 if (hw->wed_wo) in __mtk_wed_detach()
652 if (hw->wed_wo) in __mtk_wed_detach()
656 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in __mtk_wed_detach()
659 wlan_node = dev->wlan.pci_dev->dev.of_node; in __mtk_wed_detach()
660 if (of_dma_is_coherent(wlan_node) && hw->hifsys) in __mtk_wed_detach()
661 regmap_update_bits(hw->hifsys, HIFSYS_DMA_AG_MAP, in __mtk_wed_detach()
662 BIT(hw->index), BIT(hw->index)); in __mtk_wed_detach()
665 if ((!hw_list[!hw->index] || !hw_list[!hw->index]->wed_dev) && in __mtk_wed_detach()
666 hw->eth->dma_dev != hw->eth->dev) in __mtk_wed_detach()
667 mtk_eth_set_dma_device(hw->eth, hw->eth->dev); in __mtk_wed_detach()
672 hw->wed_dev = NULL; in __mtk_wed_detach()
687 switch (dev->wlan.bus_type) { in mtk_wed_bus_init()
689 struct device_node *np = dev->hw->eth->dev->of_node; in mtk_wed_bus_init()
693 "mediatek,wed-pcie"); in mtk_wed_bus_init()
733 if (dev->hw->version == 1) { in mtk_wed_set_wpdma()
734 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys); in mtk_wed_set_wpdma()
738 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int); in mtk_wed_set_wpdma()
739 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask); in mtk_wed_set_wpdma()
740 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx); in mtk_wed_set_wpdma()
741 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree); in mtk_wed_set_wpdma()
742 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo); in mtk_wed_set_wpdma()
743 wed_w32(dev, MTK_WED_WPDMA_RX_RING, dev->wlan.wpdma_rx); in mtk_wed_set_wpdma()
764 if (dev->hw->version == 1) { in mtk_wed_hw_init_early()
765 u32 offset = dev->hw->index ? 0x04000400 : 0; in mtk_wed_hw_init_early()
775 MTK_PCIE_BASE(dev->hw->index)); in mtk_wed_hw_init_early()
777 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy); in mtk_wed_hw_init_early()
797 ring->desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rro_ring_alloc()
798 size * sizeof(*ring->desc), in mtk_wed_rro_ring_alloc()
799 &ring->desc_phys, GFP_KERNEL); in mtk_wed_rro_ring_alloc()
800 if (!ring->desc) in mtk_wed_rro_ring_alloc()
801 return -ENOMEM; in mtk_wed_rro_ring_alloc()
803 ring->desc_size = sizeof(*ring->desc); in mtk_wed_rro_ring_alloc()
804 ring->size = size; in mtk_wed_rro_ring_alloc()
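Both mtk_wed_rro_ring_alloc() here and mtk_wed_ring_alloc() further down use the same coherent-DMA ring pattern, and mtk_wed_free_ring() undoes it using the recorded size and descriptor stride. A small sketch of that pairing, assuming a ring struct with the fields visible in these fragments (desc, desc_phys, desc_size, size); wed_ring_alloc()/wed_ring_free() are hypothetical names:

    static int wed_ring_alloc(struct device *dma_dev, struct mtk_wed_ring *ring,
                              int size, int desc_size)
    {
            ring->desc = dma_alloc_coherent(dma_dev, size * desc_size,
                                            &ring->desc_phys, GFP_KERNEL);
            if (!ring->desc)
                    return -ENOMEM;

            /* record size and stride so the free path can compute the length */
            ring->desc_size = desc_size;
            ring->size = size;
            return 0;
    }

    static void wed_ring_free(struct device *dma_dev, struct mtk_wed_ring *ring)
    {
            if (!ring->desc)
                    return;

            dma_free_coherent(dma_dev, ring->size * ring->desc_size,
                              ring->desc, ring->desc_phys);
    }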
817 index = of_property_match_string(dev->hw->node, "memory-region-names", in mtk_wed_rro_alloc()
818 "wo-dlm"); in mtk_wed_rro_alloc()
822 np = of_parse_phandle(dev->hw->node, "memory-region", index); in mtk_wed_rro_alloc()
824 return -ENODEV; in mtk_wed_rro_alloc()
830 return -ENODEV; in mtk_wed_rro_alloc()
832 dev->rro.miod_phys = rmem->base; in mtk_wed_rro_alloc()
833 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys; in mtk_wed_rro_alloc()
835 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring, in mtk_wed_rro_alloc()
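mtk_wed_rro_alloc() is where the searched-for "wo-dlm" string appears: the WO DLM reserved-memory region is resolved by name from the WED node, and its base address seeds the RRO MIOD and feedback queues. A hedged sketch of that reserved-memory lookup (the property names come from the fragments above, the rest is assumed):

    struct device_node *np, *node = dev->hw->node;
    struct reserved_mem *rmem;
    int index;

    /* find which "memory-region" phandle is labelled "wo-dlm" */
    index = of_property_match_string(node, "memory-region-names", "wo-dlm");
    if (index < 0)
            return index;

    np = of_parse_phandle(node, "memory-region", index);
    if (!np)
            return -ENODEV;

    rmem = of_reserved_mem_lookup(np);
    of_node_put(np);
    if (!rmem)
            return -ENODEV;

    /* MIOD entries sit at the start of the region, feedback queue after them */
    dev->rro.miod_phys = rmem->base;
    dev->rro.fdbk_phys = dev->rro.miod_phys + MTK_WED_MIOD_COUNT;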
842 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rro_cfg() local
865 return mtk_wed_mcu_send_msg(wo, MTK_WED_MODULE_ID_WO, in mtk_wed_rro_cfg()
879 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys); in mtk_wed_rro_hw_init()
882 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys); in mtk_wed_rro_hw_init()
886 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys); in mtk_wed_rro_hw_init()
893 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1); in mtk_wed_rro_hw_init()
912 FIELD_PREP(MTK_WED_RTQM_TXDMAD_FPORT, 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
921 if (dev->init_done) in mtk_wed_hw_init()
924 dev->init_done = true; in mtk_wed_hw_init()
929 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
933 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys); in mtk_wed_hw_init()
937 if (dev->hw->version == 1) { in mtk_wed_hw_init()
940 dev->wlan.token_start) | in mtk_wed_hw_init()
942 dev->wlan.token_start + in mtk_wed_hw_init()
943 dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
950 dev->wlan.token_start) | in mtk_wed_hw_init()
952 dev->wlan.token_start + in mtk_wed_hw_init()
953 dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
960 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
962 dev->tx_buf_ring.size / 128)); in mtk_wed_hw_init()
970 if (dev->hw->version == 1) { in mtk_wed_hw_init()
993 void *head = (void *)ring->desc; in mtk_wed_ring_reset()
999 desc = (struct mtk_wdma_desc *)(head + i * ring->desc_size); in mtk_wed_ring_reset()
1000 desc->buf0 = 0; in mtk_wed_ring_reset()
1002 desc->ctrl = cpu_to_le32(MTK_WDMA_DESC_CTRL_DMA_DONE); in mtk_wed_ring_reset()
1004 desc->ctrl = cpu_to_le32(MTK_WFDMA_DESC_CTRL_TO_HOST); in mtk_wed_ring_reset()
1005 desc->buf1 = 0; in mtk_wed_ring_reset()
1006 desc->info = 0; in mtk_wed_ring_reset()
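The mtk_wed_ring_reset() fragments above clear every descriptor and pre-set its ctrl word: TX rings are marked DMA_DONE so the hardware sees them as free, RX rings are marked TO_HOST. Roughly, assuming the loop bounds implied by the callers:

    void *head = (void *)ring->desc;
    int i;

    for (i = 0; i < size; i++) {
            struct mtk_wdma_desc *desc;

            /* desc_size may differ per HW version, so step by bytes */
            desc = (struct mtk_wdma_desc *)(head + i * ring->desc_size);
            desc->buf0 = 0;
            if (tx)
                    desc->ctrl = cpu_to_le32(MTK_WDMA_DESC_CTRL_DMA_DONE);
            else
                    desc->ctrl = cpu_to_le32(MTK_WFDMA_DESC_CTRL_TO_HOST);
            desc->buf1 = 0;
            desc->info = 0;
    }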
1030 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rx_reset() local
1034 ret = mtk_wed_mcu_send_msg(wo, MTK_WED_MODULE_ID_WO, in mtk_wed_rx_reset()
1100 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_rx_reset()
1116 /* wo change to enable state */ in mtk_wed_rx_reset()
1118 ret = mtk_wed_mcu_send_msg(wo, MTK_WED_MODULE_ID_WO, in mtk_wed_rx_reset()
1125 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) { in mtk_wed_rx_reset()
1126 if (!dev->rx_ring[i].desc) in mtk_wed_rx_reset()
1129 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE, in mtk_wed_rx_reset()
1144 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) { in mtk_wed_reset_dma()
1145 if (!dev->tx_ring[i].desc) in mtk_wed_reset_dma()
1148 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE, in mtk_wed_reset_dma()
1219 dev->init_done = false; in mtk_wed_reset_dma()
1220 if (dev->hw->version == 1) in mtk_wed_reset_dma()
1235 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size, in mtk_wed_ring_alloc()
1236 &ring->desc_phys, GFP_KERNEL); in mtk_wed_ring_alloc()
1237 if (!ring->desc) in mtk_wed_ring_alloc()
1238 return -ENOMEM; in mtk_wed_ring_alloc()
1240 ring->desc_size = desc_size; in mtk_wed_ring_alloc()
1241 ring->size = size; in mtk_wed_ring_alloc()
1251 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version; in mtk_wed_wdma_rx_ring_setup()
1254 if (idx >= ARRAY_SIZE(dev->rx_wdma)) in mtk_wed_wdma_rx_ring_setup()
1255 return -EINVAL; in mtk_wed_wdma_rx_ring_setup()
1257 wdma = &dev->rx_wdma[idx]; in mtk_wed_wdma_rx_ring_setup()
1260 return -ENOMEM; in mtk_wed_wdma_rx_ring_setup()
1263 wdma->desc_phys); in mtk_wed_wdma_rx_ring_setup()
1269 wdma->desc_phys); in mtk_wed_wdma_rx_ring_setup()
1280 u32 desc_size = sizeof(struct mtk_wdma_desc) * dev->hw->version; in mtk_wed_wdma_tx_ring_setup()
1283 if (idx >= ARRAY_SIZE(dev->tx_wdma)) in mtk_wed_wdma_tx_ring_setup()
1284 return -EINVAL; in mtk_wed_wdma_tx_ring_setup()
1286 wdma = &dev->tx_wdma[idx]; in mtk_wed_wdma_tx_ring_setup()
1289 return -ENOMEM; in mtk_wed_wdma_tx_ring_setup()
1292 wdma->desc_phys); in mtk_wed_wdma_tx_ring_setup()
1303 wdma->desc_phys); in mtk_wed_wdma_tx_ring_setup()
1319 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_ppe_check()
1330 skb->protocol = eh->h_proto; in mtk_wed_ppe_check()
1331 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash); in mtk_wed_ppe_check()
1346 if (dev->hw->version == 1) { in mtk_wed_configure_irq()
1365 dev->wlan.tx_tbit[0]) | in mtk_wed_configure_irq()
1367 dev->wlan.tx_tbit[1])); in mtk_wed_configure_irq()
1374 dev->wlan.txfree_tbit)); in mtk_wed_configure_irq()
1382 dev->wlan.rx_tbit[0]) | in mtk_wed_configure_irq()
1384 dev->wlan.rx_tbit[1])); in mtk_wed_configure_irq()
1389 dev->wdma_idx)); in mtk_wed_configure_irq()
1419 if (dev->hw->version == 1) { in mtk_wed_dma_enable()
1459 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_start()
1460 if (!dev->rx_wdma[i].desc) in mtk_wed_start()
1468 if (dev->hw->version == 1) { in mtk_wed_start()
1469 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN | in mtk_wed_start()
1471 dev->hw->index); in mtk_wed_start()
1473 val |= BIT(0) | (BIT(1) * !!dev->hw->index); in mtk_wed_start()
1474 regmap_write(dev->hw->mirror, dev->hw->index * 4, val); in mtk_wed_start()
1490 mtk_wed_set_512_support(dev, dev->wlan.wcid_512); in mtk_wed_start()
1493 dev->running = true; in mtk_wed_start()
1507 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE && in mtk_wed_attach()
1508 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) || in mtk_wed_attach()
1510 ret = -ENODEV; in mtk_wed_attach()
1522 ret = -ENODEV; in mtk_wed_attach()
1526 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE in mtk_wed_attach()
1527 ? &dev->wlan.pci_dev->dev in mtk_wed_attach()
1528 : &dev->wlan.platform_dev->dev; in mtk_wed_attach()
1530 hw->index, hw->version); in mtk_wed_attach()
1532 dev->hw = hw; in mtk_wed_attach()
1533 dev->dev = hw->dev; in mtk_wed_attach()
1534 dev->irq = hw->irq; in mtk_wed_attach()
1535 dev->wdma_idx = hw->index; in mtk_wed_attach()
1536 dev->version = hw->version; in mtk_wed_attach()
1538 if (hw->eth->dma_dev == hw->eth->dev && in mtk_wed_attach()
1539 of_dma_is_coherent(hw->eth->dev->of_node)) in mtk_wed_attach()
1540 mtk_eth_set_dma_device(hw->eth, hw->dev); in mtk_wed_attach()
1553 if (hw->version == 1) { in mtk_wed_attach()
1554 regmap_update_bits(hw->hifsys, HIFSYS_DMA_AG_MAP, in mtk_wed_attach()
1555 BIT(hw->index), 0); in mtk_wed_attach()
1557 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID); in mtk_wed_attach()
1562 dev_err(dev->hw->dev, "failed to attach wed device\n"); in mtk_wed_attach()
1575 struct mtk_wed_ring *ring = &dev->tx_ring[idx]; in mtk_wed_tx_ring_setup()
1589 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring))) in mtk_wed_tx_ring_setup()
1590 return -EINVAL; in mtk_wed_tx_ring_setup()
1593 sizeof(*ring->desc), true)) in mtk_wed_tx_ring_setup()
1594 return -ENOMEM; in mtk_wed_tx_ring_setup()
1598 return -ENOMEM; in mtk_wed_tx_ring_setup()
1600 ring->reg_base = MTK_WED_RING_TX(idx); in mtk_wed_tx_ring_setup()
1601 ring->wpdma = regs; in mtk_wed_tx_ring_setup()
1603 /* WED -> WPDMA */ in mtk_wed_tx_ring_setup()
1604 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_tx_ring_setup()
1609 ring->desc_phys); in mtk_wed_tx_ring_setup()
1620 struct mtk_wed_ring *ring = &dev->txfree_ring; in mtk_wed_txfree_ring_setup()
1621 int i, index = dev->hw->version == 1; in mtk_wed_txfree_ring_setup()
1628 ring->reg_base = MTK_WED_RING_RX(index); in mtk_wed_txfree_ring_setup()
1629 ring->wpdma = regs; in mtk_wed_txfree_ring_setup()
1645 struct mtk_wed_ring *ring = &dev->rx_ring[idx]; in mtk_wed_rx_ring_setup()
1647 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring))) in mtk_wed_rx_ring_setup()
1648 return -EINVAL; in mtk_wed_rx_ring_setup()
1651 sizeof(*ring->desc), false)) in mtk_wed_rx_ring_setup()
1652 return -ENOMEM; in mtk_wed_rx_ring_setup()
1656 return -ENOMEM; in mtk_wed_rx_ring_setup()
1658 ring->reg_base = MTK_WED_RING_RX_DATA(idx); in mtk_wed_rx_ring_setup()
1659 ring->wpdma = regs; in mtk_wed_rx_ring_setup()
1660 ring->flags |= MTK_WED_RING_CONFIGURED; in mtk_wed_rx_ring_setup()
1662 /* WPDMA -> WED */ in mtk_wed_rx_ring_setup()
1663 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_rx_ring_setup()
1667 ring->desc_phys); in mtk_wed_rx_ring_setup()
1679 if (dev->hw->version == 1) in mtk_wed_irq_get()
1690 if (!dev->hw->num_flows) in mtk_wed_irq_get()
1693 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val); in mtk_wed_irq_get()
1714 if (!hw || !hw->wed_dev) in mtk_wed_flow_add()
1715 return -ENODEV; in mtk_wed_flow_add()
1717 if (hw->num_flows) { in mtk_wed_flow_add()
1718 hw->num_flows++; in mtk_wed_flow_add()
1723 if (!hw->wed_dev) { in mtk_wed_flow_add()
1724 ret = -ENODEV; in mtk_wed_flow_add()
1728 ret = hw->wed_dev->wlan.offload_enable(hw->wed_dev); in mtk_wed_flow_add()
1730 hw->num_flows++; in mtk_wed_flow_add()
1731 mtk_wed_set_ext_int(hw->wed_dev, true); in mtk_wed_flow_add()
1746 if (--hw->num_flows) in mtk_wed_flow_remove()
1750 if (!hw->wed_dev) in mtk_wed_flow_remove()
1753 hw->wed_dev->wlan.offload_disable(hw->wed_dev); in mtk_wed_flow_remove()
1754 mtk_wed_set_ext_int(hw->wed_dev, true); in mtk_wed_flow_remove()
1767 if (!priv || !tc_can_offload(priv->dev)) in mtk_wed_setup_tc_block_cb()
1768 return -EOPNOTSUPP; in mtk_wed_setup_tc_block_cb()
1771 return -EOPNOTSUPP; in mtk_wed_setup_tc_block_cb()
1773 hw = priv->hw; in mtk_wed_setup_tc_block_cb()
1774 return mtk_flow_offload_cmd(hw->eth, cls, hw->index); in mtk_wed_setup_tc_block_cb()
1784 struct mtk_eth *eth = hw->eth; in mtk_wed_setup_tc_block()
1787 if (!eth->soc->offload_version) in mtk_wed_setup_tc_block()
1788 return -EOPNOTSUPP; in mtk_wed_setup_tc_block()
1790 if (f->binder_type != FLOW_BLOCK_BINDER_TYPE_CLSACT_INGRESS) in mtk_wed_setup_tc_block()
1791 return -EOPNOTSUPP; in mtk_wed_setup_tc_block()
1794 f->driver_block_list = &block_cb_list; in mtk_wed_setup_tc_block()
1796 switch (f->command) { in mtk_wed_setup_tc_block()
1798 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
1806 return -ENOMEM; in mtk_wed_setup_tc_block()
1808 priv->hw = hw; in mtk_wed_setup_tc_block()
1809 priv->dev = dev; in mtk_wed_setup_tc_block()
1818 list_add_tail(&block_cb->driver_list, &block_cb_list); in mtk_wed_setup_tc_block()
1821 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
1823 return -ENOENT; in mtk_wed_setup_tc_block()
1827 list_del(&block_cb->driver_list); in mtk_wed_setup_tc_block()
1828 kfree(block_cb->cb_priv); in mtk_wed_setup_tc_block()
1829 block_cb->cb_priv = NULL; in mtk_wed_setup_tc_block()
1833 return -EOPNOTSUPP; in mtk_wed_setup_tc_block()
1841 struct mtk_wed_hw *hw = wed->hw; in mtk_wed_setup_tc()
1843 if (hw->version < 2) in mtk_wed_setup_tc()
1844 return -EOPNOTSUPP; in mtk_wed_setup_tc()
1851 return -EOPNOTSUPP; in mtk_wed_setup_tc()
1876 struct device_node *eth_np = eth->dev->of_node; in mtk_wed_add_hw()
1889 get_device(&pdev->dev); in mtk_wed_add_hw()
1909 hw->node = np; in mtk_wed_add_hw()
1910 hw->regs = regs; in mtk_wed_add_hw()
1911 hw->eth = eth; in mtk_wed_add_hw()
1912 hw->dev = &pdev->dev; in mtk_wed_add_hw()
1913 hw->wdma_phy = wdma_phy; in mtk_wed_add_hw()
1914 hw->wdma = wdma; in mtk_wed_add_hw()
1915 hw->index = index; in mtk_wed_add_hw()
1916 hw->irq = irq; in mtk_wed_add_hw()
1917 hw->version = mtk_is_netsys_v1(eth) ? 1 : 2; in mtk_wed_add_hw()
1919 if (hw->version == 1) { in mtk_wed_add_hw()
1920 hw->mirror = syscon_regmap_lookup_by_phandle(eth_np, in mtk_wed_add_hw()
1921 "mediatek,pcie-mirror"); in mtk_wed_add_hw()
1922 hw->hifsys = syscon_regmap_lookup_by_phandle(eth_np, in mtk_wed_add_hw()
1924 if (IS_ERR(hw->mirror) || IS_ERR(hw->hifsys)) { in mtk_wed_add_hw()
1930 regmap_write(hw->mirror, 0, 0); in mtk_wed_add_hw()
1931 regmap_write(hw->mirror, 4, 0); in mtk_wed_add_hw()
1946 put_device(&pdev->dev); in mtk_wed_add_hw()
1967 debugfs_remove(hw->debugfs_dir); in mtk_wed_exit()
1968 put_device(hw->dev); in mtk_wed_exit()
1969 of_node_put(hw->node); in mtk_wed_exit()