// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "mt7915.h"
#include "../dma.h"
#include "mac.h"

/*
 * Allocate one hardware TX data ring for @phy and alias every per-AC
 * software queue (up to MT_TXQ_PSD) to it: a single descriptor ring
 * serves all access categories for the band.
 */
int mt7915_init_tx_queues(struct mt7915_phy *phy, int idx, int n_desc, int ring_base)
{
	int i, err;

	err = mt76_init_tx_queue(phy->mt76, 0, idx, n_desc, ring_base);
	if (err < 0)
		return err;

	/* all AC queues share the single hardware ring created above */
	for (i = 0; i <= MT_TXQ_PSD; i++)
		phy->mt76->q_tx[i] = phy->mt76->q_tx[0];

	return 0;
}

/* Reclaim completed descriptors on both MCU command queues (WM and WA). */
static void
mt7915_tx_cleanup(struct mt7915_dev *dev)
{
	mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[MT_MCUQ_WM], false);
	mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[MT_MCUQ_WA], false);
}

/*
 * NAPI poll handler for MCU TX completions.  It always reports zero work
 * and completes immediately, re-arming the MCU TX-done interrupt once
 * NAPI accepts the completion.
 */
static int mt7915_poll_tx(struct napi_struct *napi, int budget)
{
	struct mt7915_dev *dev;

	dev = container_of(napi, struct mt7915_dev, mt76.tx_napi);

	mt7915_tx_cleanup(dev);

	if (napi_complete_done(napi, 0))
		mt7915_irq_enable(dev, MT_INT_TX_DONE_MCU);

	return 0;
}

/*
 * Record, for every software queue, which WFDMA engine it lives on,
 * which interrupt bit signals its completion and which hardware queue id
 * it maps to.  mt7915 splits queues across WFDMA0/WFDMA1; later chips
 * (mt7916 branch below) put everything on WFDMA0.
 */
static void mt7915_dma_config(struct mt7915_dev *dev)
{
#define Q_CONFIG(q, wfdma, int, id) do {		\
		if (wfdma)				\
			dev->wfdma_mask |= (1 << (q));	\
		dev->q_int_mask[(q)] = int;		\
		dev->q_id[(q)] = id;			\
	} while (0)

#define MCUQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(q, (wfdma), (int), (id))
#define RXQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(__RXQ(q), (wfdma), (int), (id))
#define TXQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(__TXQ(q), (wfdma), (int), (id))

	if (is_mt7915(&dev->mt76)) {
		RXQ_CONFIG(MT_RXQ_MAIN, WFDMA0, MT_INT_RX_DONE_BAND0, MT7915_RXQ_BAND0);
		RXQ_CONFIG(MT_RXQ_MCU, WFDMA1, MT_INT_RX_DONE_WM, MT7915_RXQ_MCU_WM);
		RXQ_CONFIG(MT_RXQ_MCU_WA, WFDMA1, MT_INT_RX_DONE_WA, MT7915_RXQ_MCU_WA);
		RXQ_CONFIG(MT_RXQ_EXT, WFDMA0, MT_INT_RX_DONE_BAND1, MT7915_RXQ_BAND1);
		RXQ_CONFIG(MT_RXQ_EXT_WA, WFDMA1, MT_INT_RX_DONE_WA_EXT, MT7915_RXQ_MCU_WA_EXT);
		RXQ_CONFIG(MT_RXQ_MAIN_WA, WFDMA1, MT_INT_RX_DONE_WA_MAIN, MT7915_RXQ_MCU_WA);
		TXQ_CONFIG(0, WFDMA1, MT_INT_TX_DONE_BAND0, MT7915_TXQ_BAND0);
		TXQ_CONFIG(1, WFDMA1, MT_INT_TX_DONE_BAND1, MT7915_TXQ_BAND1);
		MCUQ_CONFIG(MT_MCUQ_WM, WFDMA1, MT_INT_TX_DONE_MCU_WM, MT7915_TXQ_MCU_WM);
		MCUQ_CONFIG(MT_MCUQ_WA, WFDMA1, MT_INT_TX_DONE_MCU_WA, MT7915_TXQ_MCU_WA);
		MCUQ_CONFIG(MT_MCUQ_FWDL, WFDMA1, MT_INT_TX_DONE_FWDL, MT7915_TXQ_FWDL);
	} else {
		RXQ_CONFIG(MT_RXQ_MAIN, WFDMA0, MT_INT_RX_DONE_BAND0_MT7916, MT7916_RXQ_BAND0);
		RXQ_CONFIG(MT_RXQ_MCU, WFDMA0, MT_INT_RX_DONE_WM, MT7916_RXQ_MCU_WM);
		RXQ_CONFIG(MT_RXQ_MCU_WA, WFDMA0, MT_INT_RX_DONE_WA, MT7916_RXQ_MCU_WA);
		RXQ_CONFIG(MT_RXQ_EXT, WFDMA0, MT_INT_RX_DONE_BAND1_MT7916, MT7916_RXQ_BAND1);
		RXQ_CONFIG(MT_RXQ_EXT_WA, WFDMA0, MT_INT_RX_DONE_WA_EXT_MT7916, MT7916_RXQ_MCU_WA_EXT);
		RXQ_CONFIG(MT_RXQ_MAIN_WA, WFDMA0, MT_INT_RX_DONE_WA_MAIN_MT7916, MT7916_RXQ_MCU_WA_MAIN);
		TXQ_CONFIG(0, WFDMA0, MT_INT_TX_DONE_BAND0, MT7915_TXQ_BAND0);
		TXQ_CONFIG(1, WFDMA0, MT_INT_TX_DONE_BAND1, MT7915_TXQ_BAND1);
		MCUQ_CONFIG(MT_MCUQ_WM, WFDMA0, MT_INT_TX_DONE_MCU_WM, MT7915_TXQ_MCU_WM);
		MCUQ_CONFIG(MT_MCUQ_WA, WFDMA0, MT_INT_TX_DONE_MCU_WA_MT7916, MT7915_TXQ_MCU_WA);
		MCUQ_CONFIG(MT_MCUQ_FWDL, WFDMA0, MT_INT_TX_DONE_FWDL, MT7915_TXQ_FWDL);
	}
}

/*
 * Program the per-ring descriptor-prefetch windows (base/depth pairs in
 * SRAM) at register offset @ofs (0 for the primary HIF, PCIE1 offset for
 * the second one).  RX bases shift by 0x40 on non-mt7915 chips because
 * they have one extra RX ring (MT_RXQ_MAIN_WA).
 */
static void __mt7915_dma_prefetch(struct mt7915_dev *dev, u32 ofs)
{
#define PREFETCH(_base, _depth)	((_base) << 16 | (_depth))
	u32 base = 0;

	/* prefetch SRAM wrapping boundary for tx/rx ring. */
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_FWDL) + ofs, PREFETCH(0x0, 0x4));
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WM) + ofs, PREFETCH(0x40, 0x4));
	mt76_wr(dev, MT_TXQ_EXT_CTRL(0) + ofs, PREFETCH(0x80, 0x4));
	mt76_wr(dev, MT_TXQ_EXT_CTRL(1) + ofs, PREFETCH(0xc0, 0x4));
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x100, 0x4));

	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MCU) + ofs, PREFETCH(0x140, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MCU_WA) + ofs, PREFETCH(0x180, 0x4));
	if (!is_mt7915(&dev->mt76)) {
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MAIN_WA) + ofs, PREFETCH(0x1c0, 0x4));
		base = 0x40;
	}
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT_WA) + ofs, PREFETCH(0x1c0 + base, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MAIN) + ofs, PREFETCH(0x200 + base, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT) + ofs, PREFETCH(0x240 + base, 0x4));

	/* for mt7915, the ring next to the last used ring must also be
	 * initialized (zero-depth entries written at ofs + 0x4).
	 */
	if (is_mt7915(&dev->mt76)) {
		ofs += 0x4;
		mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x140, 0x0));
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT_WA) + ofs, PREFETCH(0x200 + base, 0x0));
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT) + ofs, PREFETCH(0x280 + base, 0x0));
	}
}

/* Apply prefetch settings to the primary HIF and, if present, the second one. */
void mt7915_dma_prefetch(struct mt7915_dev *dev)
{
	__mt7915_dma_prefetch(dev, 0);
	if (dev->hif2)
		__mt7915_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0));
}

/*
 * Quiesce the WFDMA engines by clearing the TX/RX enable and OMIT_*_INFO
 * bits.  If @rst is set, first pulse (clear then set) the DMASHDL/logic
 * reset bits.  WFDMA1 is only touched on mt7915; the same sequence is
 * mirrored at hif1_ofs when a second PCIe HIF exists.
 */
static void mt7915_dma_disable(struct mt7915_dev *dev, bool rst)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	/* reset */
	if (rst) {
		mt76_clear(dev, MT_WFDMA0_RST,
			   MT_WFDMA0_RST_DMASHDL_ALL_RST |
			   MT_WFDMA0_RST_LOGIC_RST);

		mt76_set(dev, MT_WFDMA0_RST,
			 MT_WFDMA0_RST_DMASHDL_ALL_RST |
			 MT_WFDMA0_RST_LOGIC_RST);

		if (is_mt7915(mdev)) {
			mt76_clear(dev, MT_WFDMA1_RST,
				   MT_WFDMA1_RST_DMASHDL_ALL_RST |
				   MT_WFDMA1_RST_LOGIC_RST);

			mt76_set(dev, MT_WFDMA1_RST,
				 MT_WFDMA1_RST_DMASHDL_ALL_RST |
				 MT_WFDMA1_RST_LOGIC_RST);
		}

		if (dev->hif2) {
			mt76_clear(dev, MT_WFDMA0_RST + hif1_ofs,
				   MT_WFDMA0_RST_DMASHDL_ALL_RST |
				   MT_WFDMA0_RST_LOGIC_RST);

			mt76_set(dev, MT_WFDMA0_RST + hif1_ofs,
				 MT_WFDMA0_RST_DMASHDL_ALL_RST |
				 MT_WFDMA0_RST_LOGIC_RST);

			if (is_mt7915(mdev)) {
				mt76_clear(dev, MT_WFDMA1_RST + hif1_ofs,
					   MT_WFDMA1_RST_DMASHDL_ALL_RST |
					   MT_WFDMA1_RST_LOGIC_RST);

				mt76_set(dev, MT_WFDMA1_RST + hif1_ofs,
					 MT_WFDMA1_RST_DMASHDL_ALL_RST |
					 MT_WFDMA1_RST_LOGIC_RST);
			}
		}
	}

	/* disable */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (is_mt7915(mdev))
		mt76_clear(dev, MT_WFDMA1_GLO_CFG,
			   MT_WFDMA1_GLO_CFG_TX_DMA_EN |
			   MT_WFDMA1_GLO_CFG_RX_DMA_EN |
			   MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
			   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO |
			   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (dev->hif2) {
		mt76_clear(dev, MT_WFDMA0_GLO_CFG + hif1_ofs,
			   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
			   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
			   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
			   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
			   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

		if (is_mt7915(mdev))
			mt76_clear(dev, MT_WFDMA1_GLO_CFG + hif1_ofs,
				   MT_WFDMA1_GLO_CFG_TX_DMA_EN |
				   MT_WFDMA1_GLO_CFG_RX_DMA_EN |
				   MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
				   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO |
				   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO_PFET2);
	}
}

/*
 * Bring the WFDMA engines up: reset descriptor indices, disable delayed
 * interrupts, program prefetch, wait for the HIF to go idle, enable
 * TX/RX DMA and finally unmask the interrupt sources the driver uses.
 * Always returns 0; the mt76_poll() timeout is intentionally not treated
 * as fatal.
 */
static int mt7915_dma_enable(struct mt7915_dev *dev)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;
	u32 irq_mask;

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	/* reset dma idx */
	mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);
	if (is_mt7915(mdev))
		mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR, ~0);
	if (dev->hif2) {
		mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR + hif1_ofs, ~0);
		if (is_mt7915(mdev))
			mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR + hif1_ofs, ~0);
	}

	/* configure delay interrupt off */
	mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);
	if (is_mt7915(mdev)) {
		mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0, 0);
	} else {
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1, 0);
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2, 0);
	}

	if (dev->hif2) {
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0 + hif1_ofs, 0);
		if (is_mt7915(mdev)) {
			mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0 +
				hif1_ofs, 0);
		} else {
			mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1 +
				hif1_ofs, 0);
			mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2 +
				hif1_ofs, 0);
		}
	}

	/* configure prefetch settings */
	mt7915_dma_prefetch(dev);

	/* hif wait WFDMA idle */
	mt76_set(dev, MT_WFDMA0_BUSY_ENA,
		 MT_WFDMA0_BUSY_ENA_TX_FIFO0 |
		 MT_WFDMA0_BUSY_ENA_TX_FIFO1 |
		 MT_WFDMA0_BUSY_ENA_RX_FIFO);

	if (is_mt7915(mdev))
		mt76_set(dev, MT_WFDMA1_BUSY_ENA,
			 MT_WFDMA1_BUSY_ENA_TX_FIFO0 |
			 MT_WFDMA1_BUSY_ENA_TX_FIFO1 |
			 MT_WFDMA1_BUSY_ENA_RX_FIFO);

	if (dev->hif2) {
		mt76_set(dev, MT_WFDMA0_BUSY_ENA + hif1_ofs,
			 MT_WFDMA0_PCIE1_BUSY_ENA_TX_FIFO0 |
			 MT_WFDMA0_PCIE1_BUSY_ENA_TX_FIFO1 |
			 MT_WFDMA0_PCIE1_BUSY_ENA_RX_FIFO);

		if (is_mt7915(mdev))
			mt76_set(dev, MT_WFDMA1_BUSY_ENA + hif1_ofs,
				 MT_WFDMA1_PCIE1_BUSY_ENA_TX_FIFO0 |
				 MT_WFDMA1_PCIE1_BUSY_ENA_TX_FIFO1 |
				 MT_WFDMA1_PCIE1_BUSY_ENA_RX_FIFO);
	}

	/* NOTE(review): poll result ignored — best-effort wait for HIF idle */
	mt76_poll(dev, MT_WFDMA_EXT_CSR_HIF_MISC,
		  MT_WFDMA_EXT_CSR_HIF_MISC_BUSY, 0, 1000);

	/* set WFDMA Tx/Rx */
	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		 MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (is_mt7915(mdev))
		mt76_set(dev, MT_WFDMA1_GLO_CFG,
			 MT_WFDMA1_GLO_CFG_TX_DMA_EN |
			 MT_WFDMA1_GLO_CFG_RX_DMA_EN |
			 MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
			 MT_WFDMA1_GLO_CFG_OMIT_RX_INFO);

	if (dev->hif2) {
		mt76_set(dev, MT_WFDMA0_GLO_CFG + hif1_ofs,
			 MT_WFDMA0_GLO_CFG_TX_DMA_EN |
			 MT_WFDMA0_GLO_CFG_RX_DMA_EN |
			 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
			 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

		if (is_mt7915(mdev))
			mt76_set(dev, MT_WFDMA1_GLO_CFG + hif1_ofs,
				 MT_WFDMA1_GLO_CFG_TX_DMA_EN |
				 MT_WFDMA1_GLO_CFG_RX_DMA_EN |
				 MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
				 MT_WFDMA1_GLO_CFG_OMIT_RX_INFO);

		mt76_set(dev, MT_WFDMA_HOST_CONFIG,
			 MT_WFDMA_HOST_CONFIG_PDMA_BAND);
	}

	/* enable interrupts for TX/RX rings */
	irq_mask = MT_INT_RX_DONE_MCU |
		   MT_INT_TX_DONE_MCU |
		   MT_INT_MCU_CMD;

	if (!dev->phy.band_idx)
		irq_mask |= MT_INT_BAND0_RX_DONE;

	if (dev->dbdc_support || dev->phy.band_idx)
		irq_mask |= MT_INT_BAND1_RX_DONE;

	mt7915_irq_enable(dev, irq_mask);

	return 0;
}

/*
 * One-time DMA bring-up: configure the queue map, attach the mt76 DMA
 * layer, reset the engines, allocate all TX/MCU/RX rings, register the
 * RX NAPI pollers and the MCU TX NAPI, then enable DMA and interrupts.
 * Returns 0 on success or a negative errno from ring allocation.
 */
int mt7915_dma_init(struct mt7915_dev *dev)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;
	int ret;

	mt7915_dma_config(dev);

	mt76_dma_attach(&dev->mt76);

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	mt7915_dma_disable(dev, true);

	/* init tx queue */
	ret = mt7915_init_tx_queues(&dev->phy,
				    MT_TXQ_ID(dev->phy.band_idx),
				    MT7915_TX_RING_SIZE,
				    MT_TXQ_RING_BASE(0));
	if (ret)
		return ret;

	/* command to WM */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM,
				  MT_MCUQ_ID(MT_MCUQ_WM),
				  MT7915_TX_MCU_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_WM));
	if (ret)
		return ret;

	/* command to WA */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA,
				  MT_MCUQ_ID(MT_MCUQ_WA),
				  MT7915_TX_MCU_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_WA));
	if (ret)
		return ret;

	/* firmware download */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL,
				  MT_MCUQ_ID(MT_MCUQ_FWDL),
				  MT7915_TX_FWDL_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_FWDL));
	if (ret)
		return ret;

	/* event from WM */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
			       MT_RXQ_ID(MT_RXQ_MCU),
			       MT7915_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE,
			       MT_RXQ_RING_BASE(MT_RXQ_MCU));
	if (ret)
		return ret;

	/* event from WA */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
			       MT_RXQ_ID(MT_RXQ_MCU_WA),
			       MT7915_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE,
			       MT_RXQ_RING_BASE(MT_RXQ_MCU_WA));
	if (ret)
		return ret;

	/* rx data queue for band0 */
	if (!dev->phy.band_idx) {
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
				       MT_RXQ_ID(MT_RXQ_MAIN),
				       MT7915_RX_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_MAIN));
		if (ret)
			return ret;
	}

	/* tx free notify event from WA for band0 */
	if (!is_mt7915(mdev)) {
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA],
				       MT_RXQ_ID(MT_RXQ_MAIN_WA),
				       MT7915_RX_MCU_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_MAIN_WA));
		if (ret)
			return ret;
	}

	if (dev->dbdc_support || dev->phy.band_idx) {
		/* rx data queue for band1 */
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_EXT],
				       MT_RXQ_ID(MT_RXQ_EXT),
				       MT7915_RX_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_EXT) + hif1_ofs);
		if (ret)
			return ret;

		/* tx free notify event from WA for band1 */
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_EXT_WA],
				       MT_RXQ_ID(MT_RXQ_EXT_WA),
				       MT7915_RX_MCU_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_EXT_WA) + hif1_ofs);
		if (ret)
			return ret;
	}

	ret = mt76_init_queues(dev, mt76_dma_rx_poll);
	if (ret < 0)
		return ret;

	netif_tx_napi_add(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
			  mt7915_poll_tx, NAPI_POLL_WEIGHT);
	napi_enable(&dev->mt76.tx_napi);

	/* mt7915_dma_enable() always returns 0, so the result is not checked */
	mt7915_dma_enable(dev);

	return 0;
}

/* Tear down DMA: disable/reset the engines, then free all mt76 rings. */
void mt7915_dma_cleanup(struct mt7915_dev *dev)
{
	mt7915_dma_disable(dev, true);

	mt76_dma_cleanup(&dev->mt76);
}