Lines matching refs: dev
414 static u32 mt7915_reg_map_l1(struct mt7915_dev *dev, u32 addr) in mt7915_reg_map_l1() argument
420 if (is_mt798x(&dev->mt76)) in mt7915_reg_map_l1()
423 l1_remap = is_mt7915(&dev->mt76) ? in mt7915_reg_map_l1()
426 dev->bus_ops->rmw(&dev->mt76, l1_remap, in mt7915_reg_map_l1()
430 dev->bus_ops->rr(&dev->mt76, l1_remap); in mt7915_reg_map_l1()
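The L1 map above programs the high bits of the target address into a remap register (the rmw), flushes the posted write with a read-back, and returns an address inside a fixed CPU-visible window. A standalone sketch of that base/offset split; the 16/16 field layout and window base are illustrative assumptions, not the real MT_HIF_REMAP_L1_* values from regs.h:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative field layout only; the real MT_HIF_REMAP_L1_BASE and
     * MT_HIF_REMAP_L1_OFFSET masks are chip-specific. */
    #define L1_OFFSET_MASK  0x0000ffffu  /* low bits: offset in window */
    #define L1_BASE_SHIFT   16           /* high bits: window base     */
    #define L1_WINDOW       0x000e0000u  /* hypothetical mapped window */

    static uint32_t reg_map_l1(uint32_t addr, volatile uint32_t *remap_reg)
    {
            uint32_t offset = addr & L1_OFFSET_MASK;
            uint32_t base = addr >> L1_BASE_SHIFT;

            *remap_reg = base;   /* program the window...              */
            (void)*remap_reg;    /* ...and read back to push the write */

            return L1_WINDOW + offset;
    }

    int main(void)
    {
            volatile uint32_t remap_reg = 0;

            printf("mapped: 0x%x (remap=0x%x)\n",
                   reg_map_l1(0x7c021034, &remap_reg), remap_reg);
            return 0;
    }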
435 static u32 mt7915_reg_map_l2(struct mt7915_dev *dev, u32 addr) in mt7915_reg_map_l2() argument
439 if (is_mt7915(&dev->mt76)) { in mt7915_reg_map_l2()
443 dev->bus_ops->rmw(&dev->mt76, MT_HIF_REMAP_L2, in mt7915_reg_map_l2()
448 dev->bus_ops->rr(&dev->mt76, MT_HIF_REMAP_L2); in mt7915_reg_map_l2()
450 u32 ofs = is_mt798x(&dev->mt76) ? 0x400000 : 0; in mt7915_reg_map_l2()
455 dev->bus_ops->rmw(&dev->mt76, MT_HIF_REMAP_L2_MT7916 + ofs, in mt7915_reg_map_l2()
460 dev->bus_ops->rr(&dev->mt76, MT_HIF_REMAP_L2_MT7916 + ofs); in mt7915_reg_map_l2()
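The L2 path follows the same program-then-read-back discipline, with the MT7916-style remap register shifted by 0x400000 on MT798x. The read-back matters because MMIO writes can be posted; a hedged kernel-style helper showing just that idiom (remap_window_set is a made-up name, not a driver function):

    #include <linux/io.h>

    /* Hypothetical helper: program a remap window and force the posted
     * write to complete before the window is dereferenced. */
    static void remap_window_set(void __iomem *remap_reg, u32 base)
    {
            writel(base, remap_reg);
            (void)readl(remap_reg);  /* "use read to push write" */
    }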
468 static u32 __mt7915_reg_addr(struct mt7915_dev *dev, u32 addr) in __mt7915_reg_addr() argument
475 if (!dev->reg.map) { in __mt7915_reg_addr()
476 dev_err(dev->mt76.dev, "err: reg_map is null\n"); in __mt7915_reg_addr()
480 for (i = 0; i < dev->reg.map_size; i++) { in __mt7915_reg_addr()
483 if (addr < dev->reg.map[i].phys) in __mt7915_reg_addr()
486 ofs = addr - dev->reg.map[i].phys; in __mt7915_reg_addr()
487 if (ofs > dev->reg.map[i].size) in __mt7915_reg_addr()
490 return dev->reg.map[i].maps + ofs; in __mt7915_reg_addr()
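__mt7915_reg_addr() walks a fixed per-chip table of pre-mapped ranges and translates by offset, returning 0 on a miss so the caller can fall back to runtime remapping. The same lookup in standalone form; the table entries below are made up:

    #include <stdint.h>
    #include <stdio.h>

    /* Mirrors the dev->reg.map lookup: a fixed table of bus-address
     * ranges and their pre-mapped CPU-side bases. */
    struct reg_map {
            uint32_t phys;  /* bus address of the range */
            uint32_t maps;  /* remapped base            */
            uint32_t size;  /* length of the range      */
    };

    static const struct reg_map demo_map[] = {
            { 0x54000000, 0x02000, 0x1000 },  /* example entry */
            { 0x55000000, 0x03000, 0x1000 },  /* example entry */
    };

    /* Return the translated address, or 0 when addr is outside the
     * table (the driver then takes the remap-window slow path). */
    static uint32_t reg_addr(uint32_t addr)
    {
            for (size_t i = 0; i < sizeof(demo_map) / sizeof(demo_map[0]); i++) {
                    uint32_t ofs;

                    if (addr < demo_map[i].phys)
                            continue;
                    ofs = addr - demo_map[i].phys;
                    if (ofs > demo_map[i].size)
                            continue;
                    return demo_map[i].maps + ofs;
            }
            return 0;
    }

    int main(void)
    {
            printf("0x%x\n", reg_addr(0x54000010));  /* -> 0x2010 */
            return 0;
    }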
496 static u32 __mt7915_reg_remap_addr(struct mt7915_dev *dev, u32 addr) in __mt7915_reg_remap_addr() argument
501 return mt7915_reg_map_l1(dev, addr); in __mt7915_reg_remap_addr()
503 if (dev_is_pci(dev->mt76.dev) && in __mt7915_reg_remap_addr()
506 return mt7915_reg_map_l1(dev, addr); in __mt7915_reg_remap_addr()
511 return mt7915_reg_map_l1(dev, addr); in __mt7915_reg_remap_addr()
514 return mt7915_reg_map_l2(dev, addr); in __mt7915_reg_remap_addr()
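The dispatcher above picks a remap layer by address range: a handful of known windows go through the L1 map, everything else falls back to L2. A hypothetical table-driven equivalent of that if/else ladder, sketched in the driver's context:

    /* remap_rule and remap_dispatch are illustrative names only. */
    struct remap_rule {
            u32 start, end;  /* inclusive bus-address range */
            u32 (*map)(struct mt7915_dev *dev, u32 addr);
    };

    static u32 remap_dispatch(struct mt7915_dev *dev, u32 addr,
                              const struct remap_rule *rules, int n)
    {
            int i;

            for (i = 0; i < n; i++)
                    if (addr >= rules[i].start && addr <= rules[i].end)
                            return rules[i].map(dev, addr);

            return mt7915_reg_map_l2(dev, addr);  /* default layer */
    }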
517 void mt7915_memcpy_fromio(struct mt7915_dev *dev, void *buf, u32 offset, in mt7915_memcpy_fromio() argument
520 u32 addr = __mt7915_reg_addr(dev, offset); in mt7915_memcpy_fromio()
523 memcpy_fromio(buf, dev->mt76.mmio.regs + addr, len); in mt7915_memcpy_fromio()
527 spin_lock_bh(&dev->reg_lock); in mt7915_memcpy_fromio()
528 memcpy_fromio(buf, dev->mt76.mmio.regs + in mt7915_memcpy_fromio()
529 __mt7915_reg_remap_addr(dev, offset), len); in mt7915_memcpy_fromio()
530 spin_unlock_bh(&dev->reg_lock); in mt7915_memcpy_fromio()
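The reg_lock around the slow path exists because the remap window is a single shared register: "program window, then access" must be atomic against concurrent accessors. A userspace model of the same rule, with a mutex standing in for spin_lock_bh(&dev->reg_lock); all names and sizes below are illustrative:

    #include <pthread.h>
    #include <stdint.h>
    #include <string.h>

    static pthread_mutex_t reg_lock = PTHREAD_MUTEX_INITIALIZER;
    static volatile uint32_t window_base;  /* models the remap register */
    static uint8_t io_space[0x100];        /* models the mapped window  */

    static void memcpy_from_window(void *buf, uint32_t addr, size_t len)
    {
            pthread_mutex_lock(&reg_lock);
            window_base = addr & ~0xffu;                  /* program... */
            memcpy(buf, io_space + (addr & 0xffu), len);  /* ...copy    */
            pthread_mutex_unlock(&reg_lock);
    }

    int main(void)
    {
            uint8_t buf[4];

            memcpy_from_window(buf, 0x1234, sizeof(buf));
            return 0;
    }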
535 struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76); in mt7915_rr() local
536 u32 addr = __mt7915_reg_addr(dev, offset), val; in mt7915_rr()
539 return dev->bus_ops->rr(mdev, addr); in mt7915_rr()
541 spin_lock_bh(&dev->reg_lock); in mt7915_rr()
542 val = dev->bus_ops->rr(mdev, __mt7915_reg_remap_addr(dev, offset)); in mt7915_rr()
543 spin_unlock_bh(&dev->reg_lock); in mt7915_rr()
550 struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76); in mt7915_wr() local
551 u32 addr = __mt7915_reg_addr(dev, offset); in mt7915_wr()
554 dev->bus_ops->wr(mdev, addr, val); in mt7915_wr()
558 spin_lock_bh(&dev->reg_lock); in mt7915_wr()
559 dev->bus_ops->wr(mdev, __mt7915_reg_remap_addr(dev, offset), val); in mt7915_wr()
560 spin_unlock_bh(&dev->reg_lock); in mt7915_wr()
565 struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76); in mt7915_rmw() local
566 u32 addr = __mt7915_reg_addr(dev, offset); in mt7915_rmw()
569 return dev->bus_ops->rmw(mdev, addr, mask, val); in mt7915_rmw()
571 spin_lock_bh(&dev->reg_lock); in mt7915_rmw()
572 val = dev->bus_ops->rmw(mdev, __mt7915_reg_remap_addr(dev, offset), mask, val); in mt7915_rmw()
573 spin_unlock_bh(&dev->reg_lock); in mt7915_rmw()
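All three accessors (mt7915_rr, mt7915_wr, mt7915_rmw) share one shape: translate the logical offset, take the lock-free fast path on a static-map hit, otherwise serialize the remap-window dance. Condensed shape of the read variant, reconstructed from the fragments above; the `if (addr)` guard is the fast-path test the listing elides:

    static u32 accessor_shape(struct mt7915_dev *dev, u32 offset)
    {
            struct mt76_dev *mdev = &dev->mt76;
            u32 addr = __mt7915_reg_addr(dev, offset), val;

            if (addr)                      /* static map hit: no locking */
                    return dev->bus_ops->rr(mdev, addr);

            spin_lock_bh(&dev->reg_lock);  /* remap window is shared */
            val = dev->bus_ops->rr(mdev, __mt7915_reg_remap_addr(dev, offset));
            spin_unlock_bh(&dev->reg_lock);

            return val;
    }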
581 struct mt7915_dev *dev; in mt7915_mmio_wed_offload_enable() local
583 dev = container_of(wed, struct mt7915_dev, mt76.mmio.wed); in mt7915_mmio_wed_offload_enable()
585 spin_lock_bh(&dev->mt76.token_lock); in mt7915_mmio_wed_offload_enable()
586 dev->mt76.token_size = wed->wlan.token_start; in mt7915_mmio_wed_offload_enable()
587 spin_unlock_bh(&dev->mt76.token_lock); in mt7915_mmio_wed_offload_enable()
589 return !wait_event_timeout(dev->mt76.tx_wait, in mt7915_mmio_wed_offload_enable()
590 !dev->mt76.wed_token_count, HZ); in mt7915_mmio_wed_offload_enable()
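The enable callback shrinks the token space under token_lock, then blocks until the in-flight WED token count drains; wait_event_timeout() returns 0 only on timeout, so the leading `!` turns "tokens never drained within a second" into a nonzero error. The same pattern as a hypothetical standalone helper (shrink_and_drain is not a driver function):

    static int shrink_and_drain(struct mt76_dev *mdev, u16 new_size)
    {
            spin_lock_bh(&mdev->token_lock);
            mdev->token_size = new_size;   /* publish the new limit */
            spin_unlock_bh(&mdev->token_lock);

            /* wait up to one second for outstanding tokens to drain */
            if (!wait_event_timeout(mdev->tx_wait, !mdev->wed_token_count, HZ))
                    return -ETIMEDOUT;
            return 0;
    }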
595 struct mt7915_dev *dev; in mt7915_mmio_wed_offload_disable() local
597 dev = container_of(wed, struct mt7915_dev, mt76.mmio.wed); in mt7915_mmio_wed_offload_disable()
599 spin_lock_bh(&dev->mt76.token_lock); in mt7915_mmio_wed_offload_disable()
600 dev->mt76.token_size = MT7915_TOKEN_SIZE; in mt7915_mmio_wed_offload_disable()
601 spin_unlock_bh(&dev->mt76.token_lock); in mt7915_mmio_wed_offload_disable()
606 struct mt7915_dev *dev; in mt7915_mmio_wed_release_rx_buf() local
609 dev = container_of(wed, struct mt7915_dev, mt76.mmio.wed); in mt7915_mmio_wed_release_rx_buf()
610 for (i = 0; i < dev->mt76.rx_token_size; i++) { in mt7915_mmio_wed_release_rx_buf()
613 t = mt76_rx_token_release(&dev->mt76, i); in mt7915_mmio_wed_release_rx_buf()
620 mt76_put_rxwi(&dev->mt76, t); in mt7915_mmio_wed_release_rx_buf()
623 mt76_free_pending_rxwi(&dev->mt76); in mt7915_mmio_wed_release_rx_buf()
630 struct mt7915_dev *dev; in mt7915_mmio_wed_init_rx_buf() local
634 dev = container_of(wed, struct mt7915_dev, mt76.mmio.wed); in mt7915_mmio_wed_init_rx_buf()
635 q = &dev->mt76.q_rx[MT_RXQ_MAIN]; in mt7915_mmio_wed_init_rx_buf()
645 t = mt76_get_rxwi(&dev->mt76); in mt7915_mmio_wed_init_rx_buf()
655 dma_sync_single_for_device(dev->mt76.dma_dev, addr, len, dir); in mt7915_mmio_wed_init_rx_buf()
658 token = mt76_rx_token_consume(&dev->mt76, buf, t, addr); in mt7915_mmio_wed_init_rx_buf()
673 mt76_put_rxwi(&dev->mt76, t); in mt7915_mmio_wed_init_rx_buf()
682 struct mt7915_dev *dev; in mt7915_mmio_wed_update_rx_stats() local
685 dev = container_of(wed, struct mt7915_dev, mt76.mmio.wed); in mt7915_mmio_wed_update_rx_stats()
687 if (idx >= mt7915_wtbl_size(dev)) in mt7915_mmio_wed_update_rx_stats()
692 wcid = rcu_dereference(dev->mt76.wcid[idx]); in mt7915_mmio_wed_update_rx_stats()
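The rx-stats callback bounds-checks the station index against mt7915_wtbl_size() before the lookup, and the wcid table is RCU-protected, so the dereference and any use must sit inside a read-side critical section. A hypothetical helper showing that discipline (wcid_rx_account is a made-up name; the stats field is assumed from the wed rx-stats path):

    static void wcid_rx_account(struct mt76_dev *mdev, u16 idx,
                                u16 table_size, u32 bytes)
    {
            struct mt76_wcid *wcid;

            if (idx >= table_size)  /* reject out-of-range index */
                    return;

            rcu_read_lock();
            wcid = rcu_dereference(mdev->wcid[idx]);
            if (wcid)
                    wcid->stats.rx_bytes += bytes;  /* use under RCU only */
            rcu_read_unlock();
    }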
706 struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76); in mt7915_mmio_wed_reset() local
707 struct mt76_phy *mphy = &dev->mphy; in mt7915_mmio_wed_reset()
715 ret = mt7915_mcu_set_ser(dev, SER_RECOVER, SER_SET_RECOVER_L1, in mt7915_mmio_wed_reset()
722 dev_err(mdev->dev, "wed reset timeout\n"); in mt7915_mmio_wed_reset()
734 struct mt76_dev *dev = container_of(wed, struct mt76_dev, mmio.wed); in mt7915_mmio_wed_reset_complete() local
736 complete(&dev->mmio.wed_reset_complete); in mt7915_mmio_wed_reset_complete()
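The reset/reset_complete pair above is a standard completion handshake: mt7915_mmio_wed_reset kicks off SER L1 recovery via the MCU and arms a timed wait, while the notification callback fires complete() on the same object. A condensed, hypothetical form of the waiting side (wed_reset_wait is not a driver function; the 3 * HZ budget is taken from the "wed reset timeout" path above):

    static int wed_reset_wait(struct mt76_dev *mdev)
    {
            if (!wait_for_completion_timeout(&mdev->mmio.wed_reset_complete,
                                             3 * HZ))
                    return -ETIMEDOUT;  /* reset_complete never fired */
            return 0;
    }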
740 int mt7915_mmio_wed_init(struct mt7915_dev *dev, void *pdev_ptr, in mt7915_mmio_wed_init() argument
744 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7915_mmio_wed_init()
755 wed->wlan.base = devm_ioremap(dev->mt76.dev, in mt7915_mmio_wed_init()
783 wed->wlan.base = devm_ioremap(dev->mt76.dev, res->start, in mt7915_mmio_wed_init()
794 wed->wlan.tx_tbit[0] = is_mt7915(&dev->mt76) ? 4 : 30; in mt7915_mmio_wed_init()
795 wed->wlan.tx_tbit[1] = is_mt7915(&dev->mt76) ? 5 : 31; in mt7915_mmio_wed_init()
796 wed->wlan.txfree_tbit = is_mt798x(&dev->mt76) ? 2 : 1; in mt7915_mmio_wed_init()
798 wed->wlan.wcid_512 = !is_mt7915(&dev->mt76); in mt7915_mmio_wed_init()
803 if (is_mt7915(&dev->mt76)) { in mt7915_mmio_wed_init()
806 } else if (is_mt798x(&dev->mt76)) { in mt7915_mmio_wed_init()
823 dev->mt76.rx_token_size = wed->wlan.rx_npkt; in mt7915_mmio_wed_init()
829 dev->mt76.dma_dev = wed->dev; in mt7915_mmio_wed_init()
831 ret = dma_set_mask(wed->dev, DMA_BIT_MASK(32)); in mt7915_mmio_wed_init()
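The WED path settles for 32-bit DMA addressing here. For contrast, the common kernel idiom when wider masks are preferred is try-wide-then-fall-back; a minimal sketch of that idiom, not what this driver does:

    #include <linux/dma-mapping.h>

    /* Illustrative helper: prefer 64-bit DMA, fall back to 32-bit. */
    static int set_dma_caps(struct device *dev)
    {
            if (!dma_set_mask_and_coherent(dev, DMA_BIT_MASK(64)))
                    return 0;  /* 64-bit DMA available */
            return dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
    }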
846 struct mt7915_dev *dev; in mt7915_mmio_init() local
848 dev = container_of(mdev, struct mt7915_dev, mt76); in mt7915_mmio_init()
849 mt76_mmio_init(&dev->mt76, mem_base); in mt7915_mmio_init()
850 spin_lock_init(&dev->reg_lock); in mt7915_mmio_init()
854 dev->reg.reg_rev = mt7915_reg; in mt7915_mmio_init()
855 dev->reg.offs_rev = mt7915_offs; in mt7915_mmio_init()
856 dev->reg.map = mt7915_reg_map; in mt7915_mmio_init()
857 dev->reg.map_size = ARRAY_SIZE(mt7915_reg_map); in mt7915_mmio_init()
860 dev->reg.reg_rev = mt7916_reg; in mt7915_mmio_init()
861 dev->reg.offs_rev = mt7916_offs; in mt7915_mmio_init()
862 dev->reg.map = mt7916_reg_map; in mt7915_mmio_init()
863 dev->reg.map_size = ARRAY_SIZE(mt7916_reg_map); in mt7915_mmio_init()
867 dev->reg.reg_rev = mt7986_reg; in mt7915_mmio_init()
868 dev->reg.offs_rev = mt7916_offs; in mt7915_mmio_init()
869 dev->reg.map = mt7986_reg_map; in mt7915_mmio_init()
870 dev->reg.map_size = ARRAY_SIZE(mt7986_reg_map); in mt7915_mmio_init()
876 dev->bus_ops = dev->mt76.bus; in mt7915_mmio_init()
877 bus_ops = devm_kmemdup(dev->mt76.dev, dev->bus_ops, sizeof(*bus_ops), in mt7915_mmio_init()
885 dev->mt76.bus = bus_ops; in mt7915_mmio_init()
888 (mt76_rr(dev, MT_HW_REV) & 0xff); in mt7915_mmio_init()
889 dev_dbg(mdev->dev, "ASIC revision: %04x\n", mdev->rev); in mt7915_mmio_init()
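The mmio-init fragment shows how the accessor hooks are installed: the original bus ops are saved as dev->bus_ops (the raw path the wrappers delegate to), duplicated with device-managed memory, and spliced back in. The three hook assignments are elided by the listing and inferred from the wrapper functions above:

    dev->bus_ops = dev->mt76.bus;  /* keep raw accessors for delegation */
    bus_ops = devm_kmemdup(dev->mt76.dev, dev->bus_ops, sizeof(*bus_ops),
                           GFP_KERNEL);
    if (!bus_ops)
            return -ENOMEM;

    bus_ops->rr = mt7915_rr;    /* inferred: install translating */
    bus_ops->wr = mt7915_wr;    /* wrappers over the duplicated  */
    bus_ops->rmw = mt7915_rmw;  /* ops table                     */
    dev->mt76.bus = bus_ops;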
894 void mt7915_dual_hif_set_irq_mask(struct mt7915_dev *dev, in mt7915_dual_hif_set_irq_mask() argument
898 struct mt76_dev *mdev = &dev->mt76; in mt7915_dual_hif_set_irq_mask()
911 mt76_wr(dev, MT_INT_MASK_CSR, mdev->mmio.irqmask); in mt7915_dual_hif_set_irq_mask()
912 mt76_wr(dev, MT_INT1_MASK_CSR, mdev->mmio.irqmask); in mt7915_dual_hif_set_irq_mask()
921 struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76); in mt7915_rx_poll_complete() local
923 mt7915_irq_enable(dev, MT_INT_RX(q)); in mt7915_rx_poll_complete()
929 struct mt7915_dev *dev = from_tasklet(dev, t, mt76.irq_tasklet); in mt7915_irq_tasklet() local
930 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7915_irq_tasklet()
935 if (dev->hif2) in mt7915_irq_tasklet()
936 mt76_wr(dev, MT_INT1_MASK_CSR, 0); in mt7915_irq_tasklet()
937 intr = mtk_wed_device_irq_get(wed, dev->mt76.mmio.irqmask); in mt7915_irq_tasklet()
939 mt76_wr(dev, MT_INT_MASK_CSR, 0); in mt7915_irq_tasklet()
940 if (dev->hif2) in mt7915_irq_tasklet()
941 mt76_wr(dev, MT_INT1_MASK_CSR, 0); in mt7915_irq_tasklet()
943 intr = mt76_rr(dev, MT_INT_SOURCE_CSR); in mt7915_irq_tasklet()
944 intr &= dev->mt76.mmio.irqmask; in mt7915_irq_tasklet()
945 mt76_wr(dev, MT_INT_SOURCE_CSR, intr); in mt7915_irq_tasklet()
948 if (dev->hif2) { in mt7915_irq_tasklet()
949 intr1 = mt76_rr(dev, MT_INT1_SOURCE_CSR); in mt7915_irq_tasklet()
950 intr1 &= dev->mt76.mmio.irqmask; in mt7915_irq_tasklet()
951 mt76_wr(dev, MT_INT1_SOURCE_CSR, intr1); in mt7915_irq_tasklet()
956 trace_dev_irq(&dev->mt76, intr, dev->mt76.mmio.irqmask); in mt7915_irq_tasklet()
962 mt7915_irq_disable(dev, mask); in mt7915_irq_tasklet()
965 napi_schedule(&dev->mt76.tx_napi); in mt7915_irq_tasklet()
968 napi_schedule(&dev->mt76.napi[MT_RXQ_MAIN]); in mt7915_irq_tasklet()
971 napi_schedule(&dev->mt76.napi[MT_RXQ_BAND1]); in mt7915_irq_tasklet()
974 napi_schedule(&dev->mt76.napi[MT_RXQ_MCU]); in mt7915_irq_tasklet()
977 napi_schedule(&dev->mt76.napi[MT_RXQ_MCU_WA]); in mt7915_irq_tasklet()
979 if (!is_mt7915(&dev->mt76) && in mt7915_irq_tasklet()
981 napi_schedule(&dev->mt76.napi[MT_RXQ_MAIN_WA]); in mt7915_irq_tasklet()
984 napi_schedule(&dev->mt76.napi[MT_RXQ_BAND1_WA]); in mt7915_irq_tasklet()
987 u32 val = mt76_rr(dev, MT_MCU_CMD); in mt7915_irq_tasklet()
989 mt76_wr(dev, MT_MCU_CMD, val); in mt7915_irq_tasklet()
991 dev->recovery.state = val; in mt7915_irq_tasklet()
992 mt7915_reset(dev); in mt7915_irq_tasklet()
999 struct mt7915_dev *dev = dev_instance; in mt7915_irq_handler() local
1000 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7915_irq_handler()
1005 mt76_wr(dev, MT_INT_MASK_CSR, 0); in mt7915_irq_handler()
1007 if (dev->hif2) in mt7915_irq_handler()
1008 mt76_wr(dev, MT_INT1_MASK_CSR, 0); in mt7915_irq_handler()
1010 if (!test_bit(MT76_STATE_INITIALIZED, &dev->mphy.state)) in mt7915_irq_handler()
1013 tasklet_schedule(&dev->mt76.irq_tasklet); in mt7915_irq_handler()
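The two functions above form a classic top/bottom-half split: the hard handler only masks the chip's interrupts and defers, while the tasklet reads and acks MT_INT_SOURCE_CSR, disables the handled bits, and schedules the per-ring NAPI contexts. A condensed sketch of the hard-handler side; the WED forwarding, second-HIF masking, and state check are cut:

    #include <linux/interrupt.h>

    static irqreturn_t hard_irq(int irq, void *dev_instance)
    {
            struct mt7915_dev *dev = dev_instance;

            mt76_wr(dev, MT_INT_MASK_CSR, 0);          /* silence the chip */
            tasklet_schedule(&dev->mt76.irq_tasklet);  /* real work in BH  */

            return IRQ_HANDLED;
    }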
1039 struct mt7915_dev *dev; in mt7915_mmio_probe() local
1043 mdev = mt76_alloc_device(pdev, sizeof(*dev), &mt7915_ops, &drv_ops); in mt7915_mmio_probe()
1047 dev = container_of(mdev, struct mt7915_dev, mt76); in mt7915_mmio_probe()
1055 return dev; in mt7915_mmio_probe()
1058 mt76_free_device(&dev->mt76); in mt7915_mmio_probe()
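The probe path, like most functions in this file, recovers the wrapping mt7915_dev from the embedded struct mt76_dev via container_of(). A plain-C equivalent of that macro for illustration, runnable in userspace:

    #include <stddef.h>
    #include <stdio.h>

    /* Same pointer arithmetic the kernel macro performs: subtract the
     * member's offset to get back to the enclosing structure. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct inner { int x; };
    struct outer { int tag; struct inner in; };

    int main(void)
    {
            struct outer o = { .tag = 7 };
            struct inner *ip = &o.in;
            struct outer *op = container_of(ip, struct outer, in);

            printf("%d\n", op->tag);  /* prints 7 */
            return 0;
    }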