1b458a033SSergey Matyukevich // SPDX-License-Identifier: GPL-2.0+ 2b458a033SSergey Matyukevich /* Copyright (c) 2018 Quantenna Communications */ 32ef0ecd7SIgor Mitsyanko 42ef0ecd7SIgor Mitsyanko #include <linux/kernel.h> 52ef0ecd7SIgor Mitsyanko #include <linux/firmware.h> 62ef0ecd7SIgor Mitsyanko #include <linux/pci.h> 72ef0ecd7SIgor Mitsyanko #include <linux/vmalloc.h> 82ef0ecd7SIgor Mitsyanko #include <linux/delay.h> 92ef0ecd7SIgor Mitsyanko #include <linux/interrupt.h> 102ef0ecd7SIgor Mitsyanko #include <linux/sched.h> 112ef0ecd7SIgor Mitsyanko #include <linux/completion.h> 122ef0ecd7SIgor Mitsyanko #include <linux/crc32.h> 132ef0ecd7SIgor Mitsyanko #include <linux/spinlock.h> 142ef0ecd7SIgor Mitsyanko #include <linux/circ_buf.h> 152ef0ecd7SIgor Mitsyanko #include <linux/log2.h> 162ef0ecd7SIgor Mitsyanko 17c9ff6c91SIgor Mitsyanko #include "pcie_priv.h" 1891dcececSIgor Mitsyanko #include "pearl_pcie_regs.h" 1991dcececSIgor Mitsyanko #include "pearl_pcie_ipc.h" 202ef0ecd7SIgor Mitsyanko #include "qtn_hw_ids.h" 212ef0ecd7SIgor Mitsyanko #include "core.h" 222ef0ecd7SIgor Mitsyanko #include "bus.h" 2391dcececSIgor Mitsyanko #include "shm_ipc.h" 242ef0ecd7SIgor Mitsyanko #include "debug.h" 252ef0ecd7SIgor Mitsyanko 26b7da53cdSIgor Mitsyanko #define PEARL_TX_BD_SIZE_DEFAULT 32 2797aef03cSSergey Matyukevich #define PEARL_RX_BD_SIZE_DEFAULT 256 282ef0ecd7SIgor Mitsyanko 29d0b95bfaSIgor Mitsyanko struct qtnf_pearl_bda { 30d0b95bfaSIgor Mitsyanko __le16 bda_len; 31d0b95bfaSIgor Mitsyanko __le16 bda_version; 32d0b95bfaSIgor Mitsyanko __le32 bda_pci_endian; 33d0b95bfaSIgor Mitsyanko __le32 bda_ep_state; 34d0b95bfaSIgor Mitsyanko __le32 bda_rc_state; 35d0b95bfaSIgor Mitsyanko __le32 bda_dma_mask; 36d0b95bfaSIgor Mitsyanko __le32 bda_msi_addr; 37d0b95bfaSIgor Mitsyanko __le32 bda_flashsz; 38d0b95bfaSIgor Mitsyanko u8 bda_boardname[PCIE_BDA_NAMELEN]; 39d0b95bfaSIgor Mitsyanko __le32 bda_rc_msi_enabled; 40d0b95bfaSIgor Mitsyanko u8 bda_hhbm_list[PCIE_HHBM_MAX_SIZE]; 41d0b95bfaSIgor Mitsyanko __le32 bda_dsbw_start_index; 42d0b95bfaSIgor Mitsyanko __le32 bda_dsbw_end_index; 43d0b95bfaSIgor Mitsyanko __le32 bda_dsbw_total_bytes; 44d0b95bfaSIgor Mitsyanko __le32 bda_rc_tx_bd_base; 45d0b95bfaSIgor Mitsyanko __le32 bda_rc_tx_bd_num; 46d0b95bfaSIgor Mitsyanko u8 bda_pcie_mac[QTN_ENET_ADDR_LENGTH]; 47d0b95bfaSIgor Mitsyanko struct qtnf_shm_ipc_region bda_shm_reg1 __aligned(4096); /* host TX */ 48d0b95bfaSIgor Mitsyanko struct qtnf_shm_ipc_region bda_shm_reg2 __aligned(4096); /* host RX */ 49d0b95bfaSIgor Mitsyanko } __packed; 50d0b95bfaSIgor Mitsyanko 51d0b95bfaSIgor Mitsyanko struct qtnf_pearl_tx_bd { 52d0b95bfaSIgor Mitsyanko __le32 addr; 53d0b95bfaSIgor Mitsyanko __le32 addr_h; 54d0b95bfaSIgor Mitsyanko __le32 info; 55d0b95bfaSIgor Mitsyanko __le32 info_h; 56d0b95bfaSIgor Mitsyanko } __packed; 57d0b95bfaSIgor Mitsyanko 58d0b95bfaSIgor Mitsyanko struct qtnf_pearl_rx_bd { 59d0b95bfaSIgor Mitsyanko __le32 addr; 60d0b95bfaSIgor Mitsyanko __le32 addr_h; 61d0b95bfaSIgor Mitsyanko __le32 info; 62d0b95bfaSIgor Mitsyanko __le32 info_h; 63d0b95bfaSIgor Mitsyanko __le32 next_ptr; 64d0b95bfaSIgor Mitsyanko __le32 next_ptr_h; 65d0b95bfaSIgor Mitsyanko } __packed; 66d0b95bfaSIgor Mitsyanko 67d0b95bfaSIgor Mitsyanko struct qtnf_pearl_fw_hdr { 68d0b95bfaSIgor Mitsyanko u8 boardflg[8]; 69d0b95bfaSIgor Mitsyanko __le32 fwsize; 70d0b95bfaSIgor Mitsyanko __le32 seqnum; 71d0b95bfaSIgor Mitsyanko __le32 type; 72d0b95bfaSIgor Mitsyanko __le32 pktlen; 73d0b95bfaSIgor Mitsyanko __le32 crc; 74d0b95bfaSIgor Mitsyanko } __packed; 75d0b95bfaSIgor Mitsyanko 7691dcececSIgor Mitsyanko struct qtnf_pcie_pearl_state { 77c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv base; 7891dcececSIgor Mitsyanko 7991dcececSIgor Mitsyanko /* lock for irq configuration changes */ 8091dcececSIgor Mitsyanko spinlock_t irq_lock; 8191dcececSIgor Mitsyanko 82d0b95bfaSIgor Mitsyanko struct qtnf_pearl_bda __iomem *bda; 8391dcececSIgor Mitsyanko void __iomem *pcie_reg_base; 8491dcececSIgor Mitsyanko 85d0b95bfaSIgor Mitsyanko struct qtnf_pearl_tx_bd *tx_bd_vbase; 8691dcececSIgor Mitsyanko dma_addr_t tx_bd_pbase; 8791dcececSIgor Mitsyanko 88d0b95bfaSIgor Mitsyanko struct qtnf_pearl_rx_bd *rx_bd_vbase; 8991dcececSIgor Mitsyanko dma_addr_t rx_bd_pbase; 9091dcececSIgor Mitsyanko 9191dcececSIgor Mitsyanko dma_addr_t bd_table_paddr; 9291dcececSIgor Mitsyanko void *bd_table_vaddr; 9391dcececSIgor Mitsyanko u32 bd_table_len; 9491dcececSIgor Mitsyanko u32 pcie_irq_mask; 9591dcececSIgor Mitsyanko u32 pcie_irq_rx_count; 9691dcececSIgor Mitsyanko u32 pcie_irq_tx_count; 9791dcececSIgor Mitsyanko u32 pcie_irq_uf_count; 9891dcececSIgor Mitsyanko }; 9991dcececSIgor Mitsyanko 100c9ff6c91SIgor Mitsyanko static inline void qtnf_init_hdp_irqs(struct qtnf_pcie_pearl_state *ps) 1012ef0ecd7SIgor Mitsyanko { 1022ef0ecd7SIgor Mitsyanko unsigned long flags; 1032ef0ecd7SIgor Mitsyanko 104c9ff6c91SIgor Mitsyanko spin_lock_irqsave(&ps->irq_lock, flags); 105c9ff6c91SIgor Mitsyanko ps->pcie_irq_mask = (PCIE_HDP_INT_RX_BITS | PCIE_HDP_INT_TX_BITS); 106c9ff6c91SIgor Mitsyanko spin_unlock_irqrestore(&ps->irq_lock, flags); 1072ef0ecd7SIgor Mitsyanko } 1082ef0ecd7SIgor Mitsyanko 109c9ff6c91SIgor Mitsyanko static inline void qtnf_enable_hdp_irqs(struct qtnf_pcie_pearl_state *ps) 1102ef0ecd7SIgor Mitsyanko { 1112ef0ecd7SIgor Mitsyanko unsigned long flags; 1122ef0ecd7SIgor Mitsyanko 113c9ff6c91SIgor Mitsyanko spin_lock_irqsave(&ps->irq_lock, flags); 114c9ff6c91SIgor Mitsyanko writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); 115c9ff6c91SIgor Mitsyanko spin_unlock_irqrestore(&ps->irq_lock, flags); 1162ef0ecd7SIgor Mitsyanko } 1172ef0ecd7SIgor Mitsyanko 118c9ff6c91SIgor Mitsyanko static inline void qtnf_disable_hdp_irqs(struct qtnf_pcie_pearl_state *ps) 1192ef0ecd7SIgor Mitsyanko { 1202ef0ecd7SIgor Mitsyanko unsigned long flags; 1212ef0ecd7SIgor Mitsyanko 122c9ff6c91SIgor Mitsyanko spin_lock_irqsave(&ps->irq_lock, flags); 123c9ff6c91SIgor Mitsyanko writel(0x0, PCIE_HDP_INT_EN(ps->pcie_reg_base)); 124c9ff6c91SIgor Mitsyanko spin_unlock_irqrestore(&ps->irq_lock, flags); 1252ef0ecd7SIgor Mitsyanko } 1262ef0ecd7SIgor Mitsyanko 127c9ff6c91SIgor Mitsyanko static inline void qtnf_en_rxdone_irq(struct qtnf_pcie_pearl_state *ps) 1282ef0ecd7SIgor Mitsyanko { 1292ef0ecd7SIgor Mitsyanko unsigned long flags; 1302ef0ecd7SIgor Mitsyanko 131c9ff6c91SIgor Mitsyanko spin_lock_irqsave(&ps->irq_lock, flags); 132c9ff6c91SIgor Mitsyanko ps->pcie_irq_mask |= PCIE_HDP_INT_RX_BITS; 133c9ff6c91SIgor Mitsyanko writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); 134c9ff6c91SIgor Mitsyanko spin_unlock_irqrestore(&ps->irq_lock, flags); 1352ef0ecd7SIgor Mitsyanko } 1362ef0ecd7SIgor Mitsyanko 137c9ff6c91SIgor Mitsyanko static inline void qtnf_dis_rxdone_irq(struct qtnf_pcie_pearl_state *ps) 1382ef0ecd7SIgor Mitsyanko { 1392ef0ecd7SIgor Mitsyanko unsigned long flags; 1402ef0ecd7SIgor Mitsyanko 141c9ff6c91SIgor Mitsyanko spin_lock_irqsave(&ps->irq_lock, flags); 142c9ff6c91SIgor Mitsyanko ps->pcie_irq_mask &= ~PCIE_HDP_INT_RX_BITS; 143c9ff6c91SIgor Mitsyanko writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); 144c9ff6c91SIgor Mitsyanko spin_unlock_irqrestore(&ps->irq_lock, flags); 1452ef0ecd7SIgor Mitsyanko } 1462ef0ecd7SIgor Mitsyanko 147c9ff6c91SIgor Mitsyanko static inline void qtnf_en_txdone_irq(struct qtnf_pcie_pearl_state *ps) 1482ef0ecd7SIgor Mitsyanko { 1492ef0ecd7SIgor Mitsyanko unsigned long flags; 1502ef0ecd7SIgor Mitsyanko 151c9ff6c91SIgor Mitsyanko spin_lock_irqsave(&ps->irq_lock, flags); 152c9ff6c91SIgor Mitsyanko ps->pcie_irq_mask |= PCIE_HDP_INT_TX_BITS; 153c9ff6c91SIgor Mitsyanko writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); 154c9ff6c91SIgor Mitsyanko spin_unlock_irqrestore(&ps->irq_lock, flags); 1552ef0ecd7SIgor Mitsyanko } 1562ef0ecd7SIgor Mitsyanko 157c9ff6c91SIgor Mitsyanko static inline void qtnf_dis_txdone_irq(struct qtnf_pcie_pearl_state *ps) 1582ef0ecd7SIgor Mitsyanko { 1592ef0ecd7SIgor Mitsyanko unsigned long flags; 1602ef0ecd7SIgor Mitsyanko 161c9ff6c91SIgor Mitsyanko spin_lock_irqsave(&ps->irq_lock, flags); 162c9ff6c91SIgor Mitsyanko ps->pcie_irq_mask &= ~PCIE_HDP_INT_TX_BITS; 163c9ff6c91SIgor Mitsyanko writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); 164c9ff6c91SIgor Mitsyanko spin_unlock_irqrestore(&ps->irq_lock, flags); 1652ef0ecd7SIgor Mitsyanko } 1662ef0ecd7SIgor Mitsyanko 167c9ff6c91SIgor Mitsyanko static void qtnf_deassert_intx(struct qtnf_pcie_pearl_state *ps) 1682ef0ecd7SIgor Mitsyanko { 169c9ff6c91SIgor Mitsyanko void __iomem *reg = ps->base.sysctl_bar + PEARL_PCIE_CFG0_OFFSET; 1702ef0ecd7SIgor Mitsyanko u32 cfg; 1712ef0ecd7SIgor Mitsyanko 1722ef0ecd7SIgor Mitsyanko cfg = readl(reg); 1732ef0ecd7SIgor Mitsyanko cfg &= ~PEARL_ASSERT_INTX; 1742ef0ecd7SIgor Mitsyanko qtnf_non_posted_write(cfg, reg); 1752ef0ecd7SIgor Mitsyanko } 1762ef0ecd7SIgor Mitsyanko 177789763b6SIgor Mitsyanko static void qtnf_pearl_reset_ep(struct qtnf_pcie_pearl_state *ps) 1782ef0ecd7SIgor Mitsyanko { 1792ef0ecd7SIgor Mitsyanko const u32 data = QTN_PEARL_IPC_IRQ_WORD(QTN_PEARL_LHOST_EP_RESET); 180c9ff6c91SIgor Mitsyanko void __iomem *reg = ps->base.sysctl_bar + 1812ef0ecd7SIgor Mitsyanko QTN_PEARL_SYSCTL_LHOST_IRQ_OFFSET; 1822ef0ecd7SIgor Mitsyanko 1832ef0ecd7SIgor Mitsyanko qtnf_non_posted_write(data, reg); 1842ef0ecd7SIgor Mitsyanko msleep(QTN_EP_RESET_WAIT_MS); 185c9ff6c91SIgor Mitsyanko pci_restore_state(ps->base.pdev); 1862ef0ecd7SIgor Mitsyanko } 1872ef0ecd7SIgor Mitsyanko 188789763b6SIgor Mitsyanko static void qtnf_pcie_pearl_ipc_gen_ep_int(void *arg) 1892ef0ecd7SIgor Mitsyanko { 190c9ff6c91SIgor Mitsyanko const struct qtnf_pcie_pearl_state *ps = arg; 1912ef0ecd7SIgor Mitsyanko const u32 data = QTN_PEARL_IPC_IRQ_WORD(QTN_PEARL_LHOST_IPC_IRQ); 192c9ff6c91SIgor Mitsyanko void __iomem *reg = ps->base.sysctl_bar + 1932ef0ecd7SIgor Mitsyanko QTN_PEARL_SYSCTL_LHOST_IRQ_OFFSET; 1942ef0ecd7SIgor Mitsyanko 1952ef0ecd7SIgor Mitsyanko qtnf_non_posted_write(data, reg); 1962ef0ecd7SIgor Mitsyanko } 1972ef0ecd7SIgor Mitsyanko 1982ef0ecd7SIgor Mitsyanko static int qtnf_is_state(__le32 __iomem *reg, u32 state) 1992ef0ecd7SIgor Mitsyanko { 2002ef0ecd7SIgor Mitsyanko u32 s = readl(reg); 2012ef0ecd7SIgor Mitsyanko 2022ef0ecd7SIgor Mitsyanko return s & state; 2032ef0ecd7SIgor Mitsyanko } 2042ef0ecd7SIgor Mitsyanko 2052ef0ecd7SIgor Mitsyanko static void qtnf_set_state(__le32 __iomem *reg, u32 state) 2062ef0ecd7SIgor Mitsyanko { 2072ef0ecd7SIgor Mitsyanko u32 s = readl(reg); 2082ef0ecd7SIgor Mitsyanko 2092ef0ecd7SIgor Mitsyanko qtnf_non_posted_write(state | s, reg); 2102ef0ecd7SIgor Mitsyanko } 2112ef0ecd7SIgor Mitsyanko 2122ef0ecd7SIgor Mitsyanko static void qtnf_clear_state(__le32 __iomem *reg, u32 state) 2132ef0ecd7SIgor Mitsyanko { 2142ef0ecd7SIgor Mitsyanko u32 s = readl(reg); 2152ef0ecd7SIgor Mitsyanko 2162ef0ecd7SIgor Mitsyanko qtnf_non_posted_write(s & ~state, reg); 2172ef0ecd7SIgor Mitsyanko } 2182ef0ecd7SIgor Mitsyanko 2192ef0ecd7SIgor Mitsyanko static int qtnf_poll_state(__le32 __iomem *reg, u32 state, u32 delay_in_ms) 2202ef0ecd7SIgor Mitsyanko { 2212ef0ecd7SIgor Mitsyanko u32 timeout = 0; 2222ef0ecd7SIgor Mitsyanko 2232ef0ecd7SIgor Mitsyanko while ((qtnf_is_state(reg, state) == 0)) { 2242ef0ecd7SIgor Mitsyanko usleep_range(1000, 1200); 2252ef0ecd7SIgor Mitsyanko if (++timeout > delay_in_ms) 2262ef0ecd7SIgor Mitsyanko return -1; 2272ef0ecd7SIgor Mitsyanko } 2282ef0ecd7SIgor Mitsyanko 2292ef0ecd7SIgor Mitsyanko return 0; 2302ef0ecd7SIgor Mitsyanko } 2312ef0ecd7SIgor Mitsyanko 232789763b6SIgor Mitsyanko static int pearl_alloc_bd_table(struct qtnf_pcie_pearl_state *ps) 2332ef0ecd7SIgor Mitsyanko { 234c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 2352ef0ecd7SIgor Mitsyanko dma_addr_t paddr; 2362ef0ecd7SIgor Mitsyanko void *vaddr; 2372ef0ecd7SIgor Mitsyanko int len; 2382ef0ecd7SIgor Mitsyanko 239d0b95bfaSIgor Mitsyanko len = priv->tx_bd_num * sizeof(struct qtnf_pearl_tx_bd) + 240d0b95bfaSIgor Mitsyanko priv->rx_bd_num * sizeof(struct qtnf_pearl_rx_bd); 2412ef0ecd7SIgor Mitsyanko 2422ef0ecd7SIgor Mitsyanko vaddr = dmam_alloc_coherent(&priv->pdev->dev, len, &paddr, GFP_KERNEL); 2432ef0ecd7SIgor Mitsyanko if (!vaddr) 2442ef0ecd7SIgor Mitsyanko return -ENOMEM; 2452ef0ecd7SIgor Mitsyanko 2462ef0ecd7SIgor Mitsyanko /* tx bd */ 2472ef0ecd7SIgor Mitsyanko 248c9ff6c91SIgor Mitsyanko ps->bd_table_vaddr = vaddr; 249c9ff6c91SIgor Mitsyanko ps->bd_table_paddr = paddr; 250c9ff6c91SIgor Mitsyanko ps->bd_table_len = len; 2512ef0ecd7SIgor Mitsyanko 252c9ff6c91SIgor Mitsyanko ps->tx_bd_vbase = vaddr; 253c9ff6c91SIgor Mitsyanko ps->tx_bd_pbase = paddr; 2542ef0ecd7SIgor Mitsyanko 2552ef0ecd7SIgor Mitsyanko pr_debug("TX descriptor table: vaddr=0x%p paddr=%pad\n", vaddr, &paddr); 2562ef0ecd7SIgor Mitsyanko 2572ef0ecd7SIgor Mitsyanko priv->tx_bd_r_index = 0; 2582ef0ecd7SIgor Mitsyanko priv->tx_bd_w_index = 0; 2592ef0ecd7SIgor Mitsyanko 2602ef0ecd7SIgor Mitsyanko /* rx bd */ 2612ef0ecd7SIgor Mitsyanko 262d0b95bfaSIgor Mitsyanko vaddr = ((struct qtnf_pearl_tx_bd *)vaddr) + priv->tx_bd_num; 263d0b95bfaSIgor Mitsyanko paddr += priv->tx_bd_num * sizeof(struct qtnf_pearl_tx_bd); 2642ef0ecd7SIgor Mitsyanko 265c9ff6c91SIgor Mitsyanko ps->rx_bd_vbase = vaddr; 266c9ff6c91SIgor Mitsyanko ps->rx_bd_pbase = paddr; 2672ef0ecd7SIgor Mitsyanko 2682ef0ecd7SIgor Mitsyanko #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT 2692ef0ecd7SIgor Mitsyanko writel(QTN_HOST_HI32(paddr), 270c9ff6c91SIgor Mitsyanko PCIE_HDP_TX_HOST_Q_BASE_H(ps->pcie_reg_base)); 2712ef0ecd7SIgor Mitsyanko #endif 2722ef0ecd7SIgor Mitsyanko writel(QTN_HOST_LO32(paddr), 273c9ff6c91SIgor Mitsyanko PCIE_HDP_TX_HOST_Q_BASE_L(ps->pcie_reg_base)); 274d0b95bfaSIgor Mitsyanko writel(priv->rx_bd_num | (sizeof(struct qtnf_pearl_rx_bd)) << 16, 275c9ff6c91SIgor Mitsyanko PCIE_HDP_TX_HOST_Q_SZ_CTRL(ps->pcie_reg_base)); 2762ef0ecd7SIgor Mitsyanko 2772ef0ecd7SIgor Mitsyanko pr_debug("RX descriptor table: vaddr=0x%p paddr=%pad\n", vaddr, &paddr); 2782ef0ecd7SIgor Mitsyanko 2792ef0ecd7SIgor Mitsyanko return 0; 2802ef0ecd7SIgor Mitsyanko } 2812ef0ecd7SIgor Mitsyanko 282789763b6SIgor Mitsyanko static int pearl_skb2rbd_attach(struct qtnf_pcie_pearl_state *ps, u16 index) 2832ef0ecd7SIgor Mitsyanko { 284c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 285d0b95bfaSIgor Mitsyanko struct qtnf_pearl_rx_bd *rxbd; 2862ef0ecd7SIgor Mitsyanko struct sk_buff *skb; 2872ef0ecd7SIgor Mitsyanko dma_addr_t paddr; 2882ef0ecd7SIgor Mitsyanko 289c960e2b3SChristophe JAILLET skb = netdev_alloc_skb_ip_align(NULL, SKB_BUF_SIZE); 2902ef0ecd7SIgor Mitsyanko if (!skb) { 2912ef0ecd7SIgor Mitsyanko priv->rx_skb[index] = NULL; 2922ef0ecd7SIgor Mitsyanko return -ENOMEM; 2932ef0ecd7SIgor Mitsyanko } 2942ef0ecd7SIgor Mitsyanko 2952ef0ecd7SIgor Mitsyanko priv->rx_skb[index] = skb; 296c9ff6c91SIgor Mitsyanko rxbd = &ps->rx_bd_vbase[index]; 2972ef0ecd7SIgor Mitsyanko 298*06e1359cSChristophe JAILLET paddr = dma_map_single(&priv->pdev->dev, skb->data, SKB_BUF_SIZE, 299*06e1359cSChristophe JAILLET DMA_FROM_DEVICE); 300*06e1359cSChristophe JAILLET if (dma_mapping_error(&priv->pdev->dev, paddr)) { 3012ef0ecd7SIgor Mitsyanko pr_err("skb DMA mapping error: %pad\n", &paddr); 3022ef0ecd7SIgor Mitsyanko return -ENOMEM; 3032ef0ecd7SIgor Mitsyanko } 3042ef0ecd7SIgor Mitsyanko 3052ef0ecd7SIgor Mitsyanko /* keep rx skb paddrs in rx buffer descriptors for cleanup purposes */ 3062ef0ecd7SIgor Mitsyanko rxbd->addr = cpu_to_le32(QTN_HOST_LO32(paddr)); 3072ef0ecd7SIgor Mitsyanko rxbd->addr_h = cpu_to_le32(QTN_HOST_HI32(paddr)); 3082ef0ecd7SIgor Mitsyanko rxbd->info = 0x0; 3092ef0ecd7SIgor Mitsyanko 3102ef0ecd7SIgor Mitsyanko priv->rx_bd_w_index = index; 3112ef0ecd7SIgor Mitsyanko 3122ef0ecd7SIgor Mitsyanko /* sync up all descriptor updates */ 3132ef0ecd7SIgor Mitsyanko wmb(); 3142ef0ecd7SIgor Mitsyanko 3152ef0ecd7SIgor Mitsyanko #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT 3162ef0ecd7SIgor Mitsyanko writel(QTN_HOST_HI32(paddr), 317c9ff6c91SIgor Mitsyanko PCIE_HDP_HHBM_BUF_PTR_H(ps->pcie_reg_base)); 3182ef0ecd7SIgor Mitsyanko #endif 3192ef0ecd7SIgor Mitsyanko writel(QTN_HOST_LO32(paddr), 320c9ff6c91SIgor Mitsyanko PCIE_HDP_HHBM_BUF_PTR(ps->pcie_reg_base)); 3212ef0ecd7SIgor Mitsyanko 322c9ff6c91SIgor Mitsyanko writel(index, PCIE_HDP_TX_HOST_Q_WR_PTR(ps->pcie_reg_base)); 3232ef0ecd7SIgor Mitsyanko return 0; 3242ef0ecd7SIgor Mitsyanko } 3252ef0ecd7SIgor Mitsyanko 326789763b6SIgor Mitsyanko static int pearl_alloc_rx_buffers(struct qtnf_pcie_pearl_state *ps) 3272ef0ecd7SIgor Mitsyanko { 3282ef0ecd7SIgor Mitsyanko u16 i; 3292ef0ecd7SIgor Mitsyanko int ret = 0; 3302ef0ecd7SIgor Mitsyanko 331c9ff6c91SIgor Mitsyanko memset(ps->rx_bd_vbase, 0x0, 332c9ff6c91SIgor Mitsyanko ps->base.rx_bd_num * sizeof(struct qtnf_pearl_rx_bd)); 3332ef0ecd7SIgor Mitsyanko 334c9ff6c91SIgor Mitsyanko for (i = 0; i < ps->base.rx_bd_num; i++) { 335789763b6SIgor Mitsyanko ret = pearl_skb2rbd_attach(ps, i); 3362ef0ecd7SIgor Mitsyanko if (ret) 3372ef0ecd7SIgor Mitsyanko break; 3382ef0ecd7SIgor Mitsyanko } 3392ef0ecd7SIgor Mitsyanko 3402ef0ecd7SIgor Mitsyanko return ret; 3412ef0ecd7SIgor Mitsyanko } 3422ef0ecd7SIgor Mitsyanko 3432ef0ecd7SIgor Mitsyanko /* all rx/tx activity should have ceased before calling this function */ 344789763b6SIgor Mitsyanko static void qtnf_pearl_free_xfer_buffers(struct qtnf_pcie_pearl_state *ps) 3452ef0ecd7SIgor Mitsyanko { 346c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 347d0b95bfaSIgor Mitsyanko struct qtnf_pearl_tx_bd *txbd; 348d0b95bfaSIgor Mitsyanko struct qtnf_pearl_rx_bd *rxbd; 3492ef0ecd7SIgor Mitsyanko struct sk_buff *skb; 3502ef0ecd7SIgor Mitsyanko dma_addr_t paddr; 3512ef0ecd7SIgor Mitsyanko int i; 3522ef0ecd7SIgor Mitsyanko 3532ef0ecd7SIgor Mitsyanko /* free rx buffers */ 3542ef0ecd7SIgor Mitsyanko for (i = 0; i < priv->rx_bd_num; i++) { 3552ef0ecd7SIgor Mitsyanko if (priv->rx_skb && priv->rx_skb[i]) { 356c9ff6c91SIgor Mitsyanko rxbd = &ps->rx_bd_vbase[i]; 3572ef0ecd7SIgor Mitsyanko skb = priv->rx_skb[i]; 3582ef0ecd7SIgor Mitsyanko paddr = QTN_HOST_ADDR(le32_to_cpu(rxbd->addr_h), 3592ef0ecd7SIgor Mitsyanko le32_to_cpu(rxbd->addr)); 360*06e1359cSChristophe JAILLET dma_unmap_single(&priv->pdev->dev, paddr, 361*06e1359cSChristophe JAILLET SKB_BUF_SIZE, DMA_FROM_DEVICE); 3622ef0ecd7SIgor Mitsyanko dev_kfree_skb_any(skb); 3632ef0ecd7SIgor Mitsyanko priv->rx_skb[i] = NULL; 3642ef0ecd7SIgor Mitsyanko } 3652ef0ecd7SIgor Mitsyanko } 3662ef0ecd7SIgor Mitsyanko 3672ef0ecd7SIgor Mitsyanko /* free tx buffers */ 3682ef0ecd7SIgor Mitsyanko for (i = 0; i < priv->tx_bd_num; i++) { 3692ef0ecd7SIgor Mitsyanko if (priv->tx_skb && priv->tx_skb[i]) { 370c9ff6c91SIgor Mitsyanko txbd = &ps->tx_bd_vbase[i]; 3712ef0ecd7SIgor Mitsyanko skb = priv->tx_skb[i]; 3722ef0ecd7SIgor Mitsyanko paddr = QTN_HOST_ADDR(le32_to_cpu(txbd->addr_h), 3732ef0ecd7SIgor Mitsyanko le32_to_cpu(txbd->addr)); 374*06e1359cSChristophe JAILLET dma_unmap_single(&priv->pdev->dev, paddr, skb->len, 375*06e1359cSChristophe JAILLET DMA_TO_DEVICE); 3762ef0ecd7SIgor Mitsyanko dev_kfree_skb_any(skb); 3772ef0ecd7SIgor Mitsyanko priv->tx_skb[i] = NULL; 3782ef0ecd7SIgor Mitsyanko } 3792ef0ecd7SIgor Mitsyanko } 3802ef0ecd7SIgor Mitsyanko } 3812ef0ecd7SIgor Mitsyanko 382789763b6SIgor Mitsyanko static int pearl_hhbm_init(struct qtnf_pcie_pearl_state *ps) 3832ef0ecd7SIgor Mitsyanko { 3842ef0ecd7SIgor Mitsyanko u32 val; 3852ef0ecd7SIgor Mitsyanko 386c9ff6c91SIgor Mitsyanko val = readl(PCIE_HHBM_CONFIG(ps->pcie_reg_base)); 3872ef0ecd7SIgor Mitsyanko val |= HHBM_CONFIG_SOFT_RESET; 388c9ff6c91SIgor Mitsyanko writel(val, PCIE_HHBM_CONFIG(ps->pcie_reg_base)); 3892ef0ecd7SIgor Mitsyanko usleep_range(50, 100); 3902ef0ecd7SIgor Mitsyanko val &= ~HHBM_CONFIG_SOFT_RESET; 3912ef0ecd7SIgor Mitsyanko #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT 3922ef0ecd7SIgor Mitsyanko val |= HHBM_64BIT; 3932ef0ecd7SIgor Mitsyanko #endif 394c9ff6c91SIgor Mitsyanko writel(val, PCIE_HHBM_CONFIG(ps->pcie_reg_base)); 395c9ff6c91SIgor Mitsyanko writel(ps->base.rx_bd_num, PCIE_HHBM_Q_LIMIT_REG(ps->pcie_reg_base)); 3962ef0ecd7SIgor Mitsyanko 3972ef0ecd7SIgor Mitsyanko return 0; 3982ef0ecd7SIgor Mitsyanko } 3992ef0ecd7SIgor Mitsyanko 400b7da53cdSIgor Mitsyanko static int qtnf_pcie_pearl_init_xfer(struct qtnf_pcie_pearl_state *ps, 40197aef03cSSergey Matyukevich unsigned int tx_bd_size, 40297aef03cSSergey Matyukevich unsigned int rx_bd_size) 4032ef0ecd7SIgor Mitsyanko { 404c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 4052ef0ecd7SIgor Mitsyanko int ret; 4062ef0ecd7SIgor Mitsyanko u32 val; 4072ef0ecd7SIgor Mitsyanko 408b7da53cdSIgor Mitsyanko if (tx_bd_size == 0) 409b7da53cdSIgor Mitsyanko tx_bd_size = PEARL_TX_BD_SIZE_DEFAULT; 410b7da53cdSIgor Mitsyanko 411b7da53cdSIgor Mitsyanko val = tx_bd_size * sizeof(struct qtnf_pearl_tx_bd); 412b7da53cdSIgor Mitsyanko 413b7da53cdSIgor Mitsyanko if (!is_power_of_2(tx_bd_size) || val > PCIE_HHBM_MAX_SIZE) { 41497aef03cSSergey Matyukevich pr_warn("invalid tx_bd_size value %u, use default %u\n", 41597aef03cSSergey Matyukevich tx_bd_size, PEARL_TX_BD_SIZE_DEFAULT); 416b7da53cdSIgor Mitsyanko priv->tx_bd_num = PEARL_TX_BD_SIZE_DEFAULT; 417b7da53cdSIgor Mitsyanko } else { 418b7da53cdSIgor Mitsyanko priv->tx_bd_num = tx_bd_size; 419b7da53cdSIgor Mitsyanko } 420b7da53cdSIgor Mitsyanko 42197aef03cSSergey Matyukevich if (rx_bd_size == 0) 42297aef03cSSergey Matyukevich rx_bd_size = PEARL_RX_BD_SIZE_DEFAULT; 42397aef03cSSergey Matyukevich 42497aef03cSSergey Matyukevich val = rx_bd_size * sizeof(dma_addr_t); 42597aef03cSSergey Matyukevich 42697aef03cSSergey Matyukevich if (!is_power_of_2(rx_bd_size) || val > PCIE_HHBM_MAX_SIZE) { 42797aef03cSSergey Matyukevich pr_warn("invalid rx_bd_size value %u, use default %u\n", 42897aef03cSSergey Matyukevich rx_bd_size, PEARL_RX_BD_SIZE_DEFAULT); 42997aef03cSSergey Matyukevich priv->rx_bd_num = PEARL_RX_BD_SIZE_DEFAULT; 43097aef03cSSergey Matyukevich } else { 43197aef03cSSergey Matyukevich priv->rx_bd_num = rx_bd_size; 43297aef03cSSergey Matyukevich } 43397aef03cSSergey Matyukevich 4342ef0ecd7SIgor Mitsyanko priv->rx_bd_w_index = 0; 4352ef0ecd7SIgor Mitsyanko priv->rx_bd_r_index = 0; 4362ef0ecd7SIgor Mitsyanko 437789763b6SIgor Mitsyanko ret = pearl_hhbm_init(ps); 4382ef0ecd7SIgor Mitsyanko if (ret) { 4392ef0ecd7SIgor Mitsyanko pr_err("failed to init h/w queues\n"); 4402ef0ecd7SIgor Mitsyanko return ret; 4412ef0ecd7SIgor Mitsyanko } 4422ef0ecd7SIgor Mitsyanko 443addc7540SIgor Mitsyanko ret = qtnf_pcie_alloc_skb_array(priv); 4442ef0ecd7SIgor Mitsyanko if (ret) { 4452ef0ecd7SIgor Mitsyanko pr_err("failed to allocate skb array\n"); 4462ef0ecd7SIgor Mitsyanko return ret; 4472ef0ecd7SIgor Mitsyanko } 4482ef0ecd7SIgor Mitsyanko 449789763b6SIgor Mitsyanko ret = pearl_alloc_bd_table(ps); 4502ef0ecd7SIgor Mitsyanko if (ret) { 4512ef0ecd7SIgor Mitsyanko pr_err("failed to allocate bd table\n"); 4522ef0ecd7SIgor Mitsyanko return ret; 4532ef0ecd7SIgor Mitsyanko } 4542ef0ecd7SIgor Mitsyanko 455789763b6SIgor Mitsyanko ret = pearl_alloc_rx_buffers(ps); 4562ef0ecd7SIgor Mitsyanko if (ret) { 4572ef0ecd7SIgor Mitsyanko pr_err("failed to allocate rx buffers\n"); 4582ef0ecd7SIgor Mitsyanko return ret; 4592ef0ecd7SIgor Mitsyanko } 4602ef0ecd7SIgor Mitsyanko 4612ef0ecd7SIgor Mitsyanko return ret; 4622ef0ecd7SIgor Mitsyanko } 4632ef0ecd7SIgor Mitsyanko 464789763b6SIgor Mitsyanko static void qtnf_pearl_data_tx_reclaim(struct qtnf_pcie_pearl_state *ps) 4652ef0ecd7SIgor Mitsyanko { 466c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 467d0b95bfaSIgor Mitsyanko struct qtnf_pearl_tx_bd *txbd; 4682ef0ecd7SIgor Mitsyanko struct sk_buff *skb; 4692ef0ecd7SIgor Mitsyanko unsigned long flags; 4702ef0ecd7SIgor Mitsyanko dma_addr_t paddr; 4712ef0ecd7SIgor Mitsyanko u32 tx_done_index; 4722ef0ecd7SIgor Mitsyanko int count = 0; 4732ef0ecd7SIgor Mitsyanko int i; 4742ef0ecd7SIgor Mitsyanko 4752ef0ecd7SIgor Mitsyanko spin_lock_irqsave(&priv->tx_reclaim_lock, flags); 4762ef0ecd7SIgor Mitsyanko 477c9ff6c91SIgor Mitsyanko tx_done_index = readl(PCIE_HDP_RX0DMA_CNT(ps->pcie_reg_base)) 4782ef0ecd7SIgor Mitsyanko & (priv->tx_bd_num - 1); 4792ef0ecd7SIgor Mitsyanko 4802ef0ecd7SIgor Mitsyanko i = priv->tx_bd_r_index; 4812ef0ecd7SIgor Mitsyanko 4822ef0ecd7SIgor Mitsyanko while (CIRC_CNT(tx_done_index, i, priv->tx_bd_num)) { 4832ef0ecd7SIgor Mitsyanko skb = priv->tx_skb[i]; 4842ef0ecd7SIgor Mitsyanko if (likely(skb)) { 485c9ff6c91SIgor Mitsyanko txbd = &ps->tx_bd_vbase[i]; 4862ef0ecd7SIgor Mitsyanko paddr = QTN_HOST_ADDR(le32_to_cpu(txbd->addr_h), 4872ef0ecd7SIgor Mitsyanko le32_to_cpu(txbd->addr)); 488*06e1359cSChristophe JAILLET dma_unmap_single(&priv->pdev->dev, paddr, skb->len, 489*06e1359cSChristophe JAILLET DMA_TO_DEVICE); 4902ef0ecd7SIgor Mitsyanko 4912ef0ecd7SIgor Mitsyanko if (skb->dev) { 49245fc3fd4SHeiner Kallweit dev_sw_netstats_tx_add(skb->dev, 1, skb->len); 4932ef0ecd7SIgor Mitsyanko if (unlikely(priv->tx_stopped)) { 4942ef0ecd7SIgor Mitsyanko qtnf_wake_all_queues(skb->dev); 4952ef0ecd7SIgor Mitsyanko priv->tx_stopped = 0; 4962ef0ecd7SIgor Mitsyanko } 4972ef0ecd7SIgor Mitsyanko } 4982ef0ecd7SIgor Mitsyanko 4992ef0ecd7SIgor Mitsyanko dev_kfree_skb_any(skb); 5002ef0ecd7SIgor Mitsyanko } 5012ef0ecd7SIgor Mitsyanko 5022ef0ecd7SIgor Mitsyanko priv->tx_skb[i] = NULL; 5032ef0ecd7SIgor Mitsyanko count++; 5042ef0ecd7SIgor Mitsyanko 5052ef0ecd7SIgor Mitsyanko if (++i >= priv->tx_bd_num) 5062ef0ecd7SIgor Mitsyanko i = 0; 5072ef0ecd7SIgor Mitsyanko } 5082ef0ecd7SIgor Mitsyanko 5092ef0ecd7SIgor Mitsyanko priv->tx_reclaim_done += count; 5102ef0ecd7SIgor Mitsyanko priv->tx_reclaim_req++; 5112ef0ecd7SIgor Mitsyanko priv->tx_bd_r_index = i; 5122ef0ecd7SIgor Mitsyanko 5132ef0ecd7SIgor Mitsyanko spin_unlock_irqrestore(&priv->tx_reclaim_lock, flags); 5142ef0ecd7SIgor Mitsyanko } 5152ef0ecd7SIgor Mitsyanko 516c9ff6c91SIgor Mitsyanko static int qtnf_tx_queue_ready(struct qtnf_pcie_pearl_state *ps) 5172ef0ecd7SIgor Mitsyanko { 518c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 519c9ff6c91SIgor Mitsyanko 5202ef0ecd7SIgor Mitsyanko if (!CIRC_SPACE(priv->tx_bd_w_index, priv->tx_bd_r_index, 5212ef0ecd7SIgor Mitsyanko priv->tx_bd_num)) { 522789763b6SIgor Mitsyanko qtnf_pearl_data_tx_reclaim(ps); 5232ef0ecd7SIgor Mitsyanko 5242ef0ecd7SIgor Mitsyanko if (!CIRC_SPACE(priv->tx_bd_w_index, priv->tx_bd_r_index, 5252ef0ecd7SIgor Mitsyanko priv->tx_bd_num)) { 5262ef0ecd7SIgor Mitsyanko pr_warn_ratelimited("reclaim full Tx queue\n"); 5272ef0ecd7SIgor Mitsyanko priv->tx_full_count++; 5282ef0ecd7SIgor Mitsyanko return 0; 5292ef0ecd7SIgor Mitsyanko } 5302ef0ecd7SIgor Mitsyanko } 5312ef0ecd7SIgor Mitsyanko 5322ef0ecd7SIgor Mitsyanko return 1; 5332ef0ecd7SIgor Mitsyanko } 5342ef0ecd7SIgor Mitsyanko 535904628d3SIgor Mitsyanko static int qtnf_pcie_skb_send(struct qtnf_bus *bus, struct sk_buff *skb) 5362ef0ecd7SIgor Mitsyanko { 537addc7540SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); 538c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 5392ef0ecd7SIgor Mitsyanko dma_addr_t txbd_paddr, skb_paddr; 540d0b95bfaSIgor Mitsyanko struct qtnf_pearl_tx_bd *txbd; 5412ef0ecd7SIgor Mitsyanko unsigned long flags; 5422ef0ecd7SIgor Mitsyanko int len, i; 5432ef0ecd7SIgor Mitsyanko u32 info; 5442ef0ecd7SIgor Mitsyanko int ret = 0; 5452ef0ecd7SIgor Mitsyanko 54621077d09SIgor Mitsyanko spin_lock_irqsave(&priv->tx_lock, flags); 5472ef0ecd7SIgor Mitsyanko 548c9ff6c91SIgor Mitsyanko if (!qtnf_tx_queue_ready(ps)) { 5492ef0ecd7SIgor Mitsyanko if (skb->dev) { 5502ef0ecd7SIgor Mitsyanko netif_tx_stop_all_queues(skb->dev); 5512ef0ecd7SIgor Mitsyanko priv->tx_stopped = 1; 5522ef0ecd7SIgor Mitsyanko } 5532ef0ecd7SIgor Mitsyanko 55421077d09SIgor Mitsyanko spin_unlock_irqrestore(&priv->tx_lock, flags); 5552ef0ecd7SIgor Mitsyanko return NETDEV_TX_BUSY; 5562ef0ecd7SIgor Mitsyanko } 5572ef0ecd7SIgor Mitsyanko 5582ef0ecd7SIgor Mitsyanko i = priv->tx_bd_w_index; 5592ef0ecd7SIgor Mitsyanko priv->tx_skb[i] = skb; 5602ef0ecd7SIgor Mitsyanko len = skb->len; 5612ef0ecd7SIgor Mitsyanko 562*06e1359cSChristophe JAILLET skb_paddr = dma_map_single(&priv->pdev->dev, skb->data, skb->len, 563*06e1359cSChristophe JAILLET DMA_TO_DEVICE); 564*06e1359cSChristophe JAILLET if (dma_mapping_error(&priv->pdev->dev, skb_paddr)) { 5652ef0ecd7SIgor Mitsyanko pr_err("skb DMA mapping error: %pad\n", &skb_paddr); 5662ef0ecd7SIgor Mitsyanko ret = -ENOMEM; 5672ef0ecd7SIgor Mitsyanko goto tx_done; 5682ef0ecd7SIgor Mitsyanko } 5692ef0ecd7SIgor Mitsyanko 570c9ff6c91SIgor Mitsyanko txbd = &ps->tx_bd_vbase[i]; 5712ef0ecd7SIgor Mitsyanko txbd->addr = cpu_to_le32(QTN_HOST_LO32(skb_paddr)); 5722ef0ecd7SIgor Mitsyanko txbd->addr_h = cpu_to_le32(QTN_HOST_HI32(skb_paddr)); 5732ef0ecd7SIgor Mitsyanko 5742ef0ecd7SIgor Mitsyanko info = (len & QTN_PCIE_TX_DESC_LEN_MASK) << QTN_PCIE_TX_DESC_LEN_SHIFT; 5752ef0ecd7SIgor Mitsyanko txbd->info = cpu_to_le32(info); 5762ef0ecd7SIgor Mitsyanko 5772ef0ecd7SIgor Mitsyanko /* sync up all descriptor updates before passing them to EP */ 5782ef0ecd7SIgor Mitsyanko dma_wmb(); 5792ef0ecd7SIgor Mitsyanko 5802ef0ecd7SIgor Mitsyanko /* write new TX descriptor to PCIE_RX_FIFO on EP */ 581c9ff6c91SIgor Mitsyanko txbd_paddr = ps->tx_bd_pbase + i * sizeof(struct qtnf_pearl_tx_bd); 5822ef0ecd7SIgor Mitsyanko 5832ef0ecd7SIgor Mitsyanko #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT 5842ef0ecd7SIgor Mitsyanko writel(QTN_HOST_HI32(txbd_paddr), 585c9ff6c91SIgor Mitsyanko PCIE_HDP_HOST_WR_DESC0_H(ps->pcie_reg_base)); 5862ef0ecd7SIgor Mitsyanko #endif 5872ef0ecd7SIgor Mitsyanko writel(QTN_HOST_LO32(txbd_paddr), 588c9ff6c91SIgor Mitsyanko PCIE_HDP_HOST_WR_DESC0(ps->pcie_reg_base)); 5892ef0ecd7SIgor Mitsyanko 5902ef0ecd7SIgor Mitsyanko if (++i >= priv->tx_bd_num) 5912ef0ecd7SIgor Mitsyanko i = 0; 5922ef0ecd7SIgor Mitsyanko 5932ef0ecd7SIgor Mitsyanko priv->tx_bd_w_index = i; 5942ef0ecd7SIgor Mitsyanko 5952ef0ecd7SIgor Mitsyanko tx_done: 596946d077aSSergey Matyukevich if (ret) { 5972ef0ecd7SIgor Mitsyanko pr_err_ratelimited("drop skb\n"); 5982ef0ecd7SIgor Mitsyanko if (skb->dev) 5992ef0ecd7SIgor Mitsyanko skb->dev->stats.tx_dropped++; 6002ef0ecd7SIgor Mitsyanko dev_kfree_skb_any(skb); 6012ef0ecd7SIgor Mitsyanko } 6022ef0ecd7SIgor Mitsyanko 6032ef0ecd7SIgor Mitsyanko priv->tx_done_count++; 60421077d09SIgor Mitsyanko spin_unlock_irqrestore(&priv->tx_lock, flags); 6052ef0ecd7SIgor Mitsyanko 606789763b6SIgor Mitsyanko qtnf_pearl_data_tx_reclaim(ps); 6072ef0ecd7SIgor Mitsyanko 6082ef0ecd7SIgor Mitsyanko return NETDEV_TX_OK; 6092ef0ecd7SIgor Mitsyanko } 6102ef0ecd7SIgor Mitsyanko 611904628d3SIgor Mitsyanko static int qtnf_pcie_data_tx(struct qtnf_bus *bus, struct sk_buff *skb, 612904628d3SIgor Mitsyanko unsigned int macid, unsigned int vifid) 613904628d3SIgor Mitsyanko { 614904628d3SIgor Mitsyanko return qtnf_pcie_skb_send(bus, skb); 615904628d3SIgor Mitsyanko } 616904628d3SIgor Mitsyanko 617904628d3SIgor Mitsyanko static int qtnf_pcie_data_tx_meta(struct qtnf_bus *bus, struct sk_buff *skb, 618904628d3SIgor Mitsyanko unsigned int macid, unsigned int vifid) 619904628d3SIgor Mitsyanko { 620904628d3SIgor Mitsyanko struct qtnf_frame_meta_info *meta; 621904628d3SIgor Mitsyanko int tail_need = sizeof(*meta) - skb_tailroom(skb); 622904628d3SIgor Mitsyanko int ret; 623904628d3SIgor Mitsyanko 624904628d3SIgor Mitsyanko if (tail_need > 0 && pskb_expand_head(skb, 0, tail_need, GFP_ATOMIC)) { 625904628d3SIgor Mitsyanko skb->dev->stats.tx_dropped++; 626904628d3SIgor Mitsyanko dev_kfree_skb_any(skb); 627904628d3SIgor Mitsyanko return NETDEV_TX_OK; 628904628d3SIgor Mitsyanko } 629904628d3SIgor Mitsyanko 630904628d3SIgor Mitsyanko meta = skb_put(skb, sizeof(*meta)); 631904628d3SIgor Mitsyanko meta->magic_s = HBM_FRAME_META_MAGIC_PATTERN_S; 632904628d3SIgor Mitsyanko meta->magic_e = HBM_FRAME_META_MAGIC_PATTERN_E; 633904628d3SIgor Mitsyanko meta->macid = macid; 634904628d3SIgor Mitsyanko meta->ifidx = vifid; 635904628d3SIgor Mitsyanko 636904628d3SIgor Mitsyanko ret = qtnf_pcie_skb_send(bus, skb); 637904628d3SIgor Mitsyanko if (unlikely(ret == NETDEV_TX_BUSY)) 638904628d3SIgor Mitsyanko __skb_trim(skb, skb->len - sizeof(*meta)); 639904628d3SIgor Mitsyanko 640904628d3SIgor Mitsyanko return ret; 641904628d3SIgor Mitsyanko } 642904628d3SIgor Mitsyanko 643789763b6SIgor Mitsyanko static irqreturn_t qtnf_pcie_pearl_interrupt(int irq, void *data) 6442ef0ecd7SIgor Mitsyanko { 6452ef0ecd7SIgor Mitsyanko struct qtnf_bus *bus = (struct qtnf_bus *)data; 646addc7540SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); 647c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 6482ef0ecd7SIgor Mitsyanko u32 status; 6492ef0ecd7SIgor Mitsyanko 6502ef0ecd7SIgor Mitsyanko priv->pcie_irq_count++; 651c9ff6c91SIgor Mitsyanko status = readl(PCIE_HDP_INT_STATUS(ps->pcie_reg_base)); 6522ef0ecd7SIgor Mitsyanko 6532ef0ecd7SIgor Mitsyanko qtnf_shm_ipc_irq_handler(&priv->shm_ipc_ep_in); 6542ef0ecd7SIgor Mitsyanko qtnf_shm_ipc_irq_handler(&priv->shm_ipc_ep_out); 6552ef0ecd7SIgor Mitsyanko 656c9ff6c91SIgor Mitsyanko if (!(status & ps->pcie_irq_mask)) 6572ef0ecd7SIgor Mitsyanko goto irq_done; 6582ef0ecd7SIgor Mitsyanko 6592ef0ecd7SIgor Mitsyanko if (status & PCIE_HDP_INT_RX_BITS) 660c9ff6c91SIgor Mitsyanko ps->pcie_irq_rx_count++; 6612ef0ecd7SIgor Mitsyanko 6622ef0ecd7SIgor Mitsyanko if (status & PCIE_HDP_INT_TX_BITS) 663c9ff6c91SIgor Mitsyanko ps->pcie_irq_tx_count++; 6642ef0ecd7SIgor Mitsyanko 6652ef0ecd7SIgor Mitsyanko if (status & PCIE_HDP_INT_HHBM_UF) 666c9ff6c91SIgor Mitsyanko ps->pcie_irq_uf_count++; 6672ef0ecd7SIgor Mitsyanko 6682ef0ecd7SIgor Mitsyanko if (status & PCIE_HDP_INT_RX_BITS) { 669c9ff6c91SIgor Mitsyanko qtnf_dis_rxdone_irq(ps); 6702ef0ecd7SIgor Mitsyanko napi_schedule(&bus->mux_napi); 6712ef0ecd7SIgor Mitsyanko } 6722ef0ecd7SIgor Mitsyanko 6732ef0ecd7SIgor Mitsyanko if (status & PCIE_HDP_INT_TX_BITS) { 674c9ff6c91SIgor Mitsyanko qtnf_dis_txdone_irq(ps); 6752ef0ecd7SIgor Mitsyanko tasklet_hi_schedule(&priv->reclaim_tq); 6762ef0ecd7SIgor Mitsyanko } 6772ef0ecd7SIgor Mitsyanko 6782ef0ecd7SIgor Mitsyanko irq_done: 6792ef0ecd7SIgor Mitsyanko /* H/W workaround: clean all bits, not only enabled */ 680c9ff6c91SIgor Mitsyanko qtnf_non_posted_write(~0U, PCIE_HDP_INT_STATUS(ps->pcie_reg_base)); 6812ef0ecd7SIgor Mitsyanko 6822ef0ecd7SIgor Mitsyanko if (!priv->msi_enabled) 683c9ff6c91SIgor Mitsyanko qtnf_deassert_intx(ps); 6842ef0ecd7SIgor Mitsyanko 6852ef0ecd7SIgor Mitsyanko return IRQ_HANDLED; 6862ef0ecd7SIgor Mitsyanko } 6872ef0ecd7SIgor Mitsyanko 688c9ff6c91SIgor Mitsyanko static int qtnf_rx_data_ready(struct qtnf_pcie_pearl_state *ps) 6892ef0ecd7SIgor Mitsyanko { 690c9ff6c91SIgor Mitsyanko u16 index = ps->base.rx_bd_r_index; 691d0b95bfaSIgor Mitsyanko struct qtnf_pearl_rx_bd *rxbd; 6922ef0ecd7SIgor Mitsyanko u32 descw; 6932ef0ecd7SIgor Mitsyanko 694c9ff6c91SIgor Mitsyanko rxbd = &ps->rx_bd_vbase[index]; 6952ef0ecd7SIgor Mitsyanko descw = le32_to_cpu(rxbd->info); 6962ef0ecd7SIgor Mitsyanko 6972ef0ecd7SIgor Mitsyanko if (descw & QTN_TXDONE_MASK) 6982ef0ecd7SIgor Mitsyanko return 1; 6992ef0ecd7SIgor Mitsyanko 7002ef0ecd7SIgor Mitsyanko return 0; 7012ef0ecd7SIgor Mitsyanko } 7022ef0ecd7SIgor Mitsyanko 703789763b6SIgor Mitsyanko static int qtnf_pcie_pearl_rx_poll(struct napi_struct *napi, int budget) 7042ef0ecd7SIgor Mitsyanko { 7052ef0ecd7SIgor Mitsyanko struct qtnf_bus *bus = container_of(napi, struct qtnf_bus, mux_napi); 706addc7540SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); 707c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 7082ef0ecd7SIgor Mitsyanko struct net_device *ndev = NULL; 7092ef0ecd7SIgor Mitsyanko struct sk_buff *skb = NULL; 7102ef0ecd7SIgor Mitsyanko int processed = 0; 711d0b95bfaSIgor Mitsyanko struct qtnf_pearl_rx_bd *rxbd; 7122ef0ecd7SIgor Mitsyanko dma_addr_t skb_paddr; 7132ef0ecd7SIgor Mitsyanko int consume; 7142ef0ecd7SIgor Mitsyanko u32 descw; 7152ef0ecd7SIgor Mitsyanko u32 psize; 7162ef0ecd7SIgor Mitsyanko u16 r_idx; 7172ef0ecd7SIgor Mitsyanko u16 w_idx; 7182ef0ecd7SIgor Mitsyanko int ret; 7192ef0ecd7SIgor Mitsyanko 7202ef0ecd7SIgor Mitsyanko while (processed < budget) { 721c9ff6c91SIgor Mitsyanko if (!qtnf_rx_data_ready(ps)) 7222ef0ecd7SIgor Mitsyanko goto rx_out; 7232ef0ecd7SIgor Mitsyanko 7242ef0ecd7SIgor Mitsyanko r_idx = priv->rx_bd_r_index; 725c9ff6c91SIgor Mitsyanko rxbd = &ps->rx_bd_vbase[r_idx]; 7262ef0ecd7SIgor Mitsyanko descw = le32_to_cpu(rxbd->info); 7272ef0ecd7SIgor Mitsyanko 7282ef0ecd7SIgor Mitsyanko skb = priv->rx_skb[r_idx]; 7292ef0ecd7SIgor Mitsyanko psize = QTN_GET_LEN(descw); 7302ef0ecd7SIgor Mitsyanko consume = 1; 7312ef0ecd7SIgor Mitsyanko 7322ef0ecd7SIgor Mitsyanko if (!(descw & QTN_TXDONE_MASK)) { 7332ef0ecd7SIgor Mitsyanko pr_warn("skip invalid rxbd[%d]\n", r_idx); 7342ef0ecd7SIgor Mitsyanko consume = 0; 7352ef0ecd7SIgor Mitsyanko } 7362ef0ecd7SIgor Mitsyanko 7372ef0ecd7SIgor Mitsyanko if (!skb) { 7382ef0ecd7SIgor Mitsyanko pr_warn("skip missing rx_skb[%d]\n", r_idx); 7392ef0ecd7SIgor Mitsyanko consume = 0; 7402ef0ecd7SIgor Mitsyanko } 7412ef0ecd7SIgor Mitsyanko 7422ef0ecd7SIgor Mitsyanko if (skb && (skb_tailroom(skb) < psize)) { 7432ef0ecd7SIgor Mitsyanko pr_err("skip packet with invalid length: %u > %u\n", 7442ef0ecd7SIgor Mitsyanko psize, skb_tailroom(skb)); 7452ef0ecd7SIgor Mitsyanko consume = 0; 7462ef0ecd7SIgor Mitsyanko } 7472ef0ecd7SIgor Mitsyanko 7482ef0ecd7SIgor Mitsyanko if (skb) { 7492ef0ecd7SIgor Mitsyanko skb_paddr = QTN_HOST_ADDR(le32_to_cpu(rxbd->addr_h), 7502ef0ecd7SIgor Mitsyanko le32_to_cpu(rxbd->addr)); 751*06e1359cSChristophe JAILLET dma_unmap_single(&priv->pdev->dev, skb_paddr, 752*06e1359cSChristophe JAILLET SKB_BUF_SIZE, DMA_FROM_DEVICE); 7532ef0ecd7SIgor Mitsyanko } 7542ef0ecd7SIgor Mitsyanko 7552ef0ecd7SIgor Mitsyanko if (consume) { 7562ef0ecd7SIgor Mitsyanko skb_put(skb, psize); 7572ef0ecd7SIgor Mitsyanko ndev = qtnf_classify_skb(bus, skb); 7582ef0ecd7SIgor Mitsyanko if (likely(ndev)) { 75945fc3fd4SHeiner Kallweit dev_sw_netstats_rx_add(ndev, skb->len); 7602ef0ecd7SIgor Mitsyanko skb->protocol = eth_type_trans(skb, ndev); 7612ef0ecd7SIgor Mitsyanko napi_gro_receive(napi, skb); 7622ef0ecd7SIgor Mitsyanko } else { 7632ef0ecd7SIgor Mitsyanko pr_debug("drop untagged skb\n"); 7642ef0ecd7SIgor Mitsyanko bus->mux_dev.stats.rx_dropped++; 7652ef0ecd7SIgor Mitsyanko dev_kfree_skb_any(skb); 7662ef0ecd7SIgor Mitsyanko } 7672ef0ecd7SIgor Mitsyanko } else { 7682ef0ecd7SIgor Mitsyanko if (skb) { 7692ef0ecd7SIgor Mitsyanko bus->mux_dev.stats.rx_dropped++; 7702ef0ecd7SIgor Mitsyanko dev_kfree_skb_any(skb); 7712ef0ecd7SIgor Mitsyanko } 7722ef0ecd7SIgor Mitsyanko } 7732ef0ecd7SIgor Mitsyanko 7742ef0ecd7SIgor Mitsyanko priv->rx_skb[r_idx] = NULL; 7752ef0ecd7SIgor Mitsyanko if (++r_idx >= priv->rx_bd_num) 7762ef0ecd7SIgor Mitsyanko r_idx = 0; 7772ef0ecd7SIgor Mitsyanko 7782ef0ecd7SIgor Mitsyanko priv->rx_bd_r_index = r_idx; 7792ef0ecd7SIgor Mitsyanko 7802ef0ecd7SIgor Mitsyanko /* repalce processed buffer by a new one */ 7812ef0ecd7SIgor Mitsyanko w_idx = priv->rx_bd_w_index; 7822ef0ecd7SIgor Mitsyanko while (CIRC_SPACE(priv->rx_bd_w_index, priv->rx_bd_r_index, 7832ef0ecd7SIgor Mitsyanko priv->rx_bd_num) > 0) { 7842ef0ecd7SIgor Mitsyanko if (++w_idx >= priv->rx_bd_num) 7852ef0ecd7SIgor Mitsyanko w_idx = 0; 7862ef0ecd7SIgor Mitsyanko 787789763b6SIgor Mitsyanko ret = pearl_skb2rbd_attach(ps, w_idx); 7882ef0ecd7SIgor Mitsyanko if (ret) { 7892ef0ecd7SIgor Mitsyanko pr_err("failed to allocate new rx_skb[%d]\n", 7902ef0ecd7SIgor Mitsyanko w_idx); 7912ef0ecd7SIgor Mitsyanko break; 7922ef0ecd7SIgor Mitsyanko } 7932ef0ecd7SIgor Mitsyanko } 7942ef0ecd7SIgor Mitsyanko 7952ef0ecd7SIgor Mitsyanko processed++; 7962ef0ecd7SIgor Mitsyanko } 7972ef0ecd7SIgor Mitsyanko 7982ef0ecd7SIgor Mitsyanko rx_out: 7992ef0ecd7SIgor Mitsyanko if (processed < budget) { 8002ef0ecd7SIgor Mitsyanko napi_complete(napi); 801c9ff6c91SIgor Mitsyanko qtnf_en_rxdone_irq(ps); 8022ef0ecd7SIgor Mitsyanko } 8032ef0ecd7SIgor Mitsyanko 8042ef0ecd7SIgor Mitsyanko return processed; 8052ef0ecd7SIgor Mitsyanko } 8062ef0ecd7SIgor Mitsyanko 8072ef0ecd7SIgor Mitsyanko static void 8082ef0ecd7SIgor Mitsyanko qtnf_pcie_data_tx_timeout(struct qtnf_bus *bus, struct net_device *ndev) 8092ef0ecd7SIgor Mitsyanko { 810c9ff6c91SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); 8112ef0ecd7SIgor Mitsyanko 812c9ff6c91SIgor Mitsyanko tasklet_hi_schedule(&ps->base.reclaim_tq); 8132ef0ecd7SIgor Mitsyanko } 8142ef0ecd7SIgor Mitsyanko 8152ef0ecd7SIgor Mitsyanko static void qtnf_pcie_data_rx_start(struct qtnf_bus *bus) 8162ef0ecd7SIgor Mitsyanko { 817c9ff6c91SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); 8182ef0ecd7SIgor Mitsyanko 819c9ff6c91SIgor Mitsyanko qtnf_enable_hdp_irqs(ps); 8202ef0ecd7SIgor Mitsyanko napi_enable(&bus->mux_napi); 8212ef0ecd7SIgor Mitsyanko } 8222ef0ecd7SIgor Mitsyanko 8232ef0ecd7SIgor Mitsyanko static void qtnf_pcie_data_rx_stop(struct qtnf_bus *bus) 8242ef0ecd7SIgor Mitsyanko { 825c9ff6c91SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); 8262ef0ecd7SIgor Mitsyanko 8272ef0ecd7SIgor Mitsyanko napi_disable(&bus->mux_napi); 828c9ff6c91SIgor Mitsyanko qtnf_disable_hdp_irqs(ps); 8292ef0ecd7SIgor Mitsyanko } 8302ef0ecd7SIgor Mitsyanko 831904628d3SIgor Mitsyanko static void qtnf_pearl_tx_use_meta_info_set(struct qtnf_bus *bus, bool use_meta) 832904628d3SIgor Mitsyanko { 833904628d3SIgor Mitsyanko if (use_meta) 834904628d3SIgor Mitsyanko bus->bus_ops->data_tx = qtnf_pcie_data_tx_meta; 835904628d3SIgor Mitsyanko else 836904628d3SIgor Mitsyanko bus->bus_ops->data_tx = qtnf_pcie_data_tx; 837904628d3SIgor Mitsyanko } 838904628d3SIgor Mitsyanko 839904628d3SIgor Mitsyanko static struct qtnf_bus_ops qtnf_pcie_pearl_bus_ops = { 8402ef0ecd7SIgor Mitsyanko /* control path methods */ 8412ef0ecd7SIgor Mitsyanko .control_tx = qtnf_pcie_control_tx, 8422ef0ecd7SIgor Mitsyanko 8432ef0ecd7SIgor Mitsyanko /* data path methods */ 8442ef0ecd7SIgor Mitsyanko .data_tx = qtnf_pcie_data_tx, 8452ef0ecd7SIgor Mitsyanko .data_tx_timeout = qtnf_pcie_data_tx_timeout, 846904628d3SIgor Mitsyanko .data_tx_use_meta_set = qtnf_pearl_tx_use_meta_info_set, 8472ef0ecd7SIgor Mitsyanko .data_rx_start = qtnf_pcie_data_rx_start, 8482ef0ecd7SIgor Mitsyanko .data_rx_stop = qtnf_pcie_data_rx_stop, 8492ef0ecd7SIgor Mitsyanko }; 8502ef0ecd7SIgor Mitsyanko 8512ef0ecd7SIgor Mitsyanko static int qtnf_dbg_irq_stats(struct seq_file *s, void *data) 8522ef0ecd7SIgor Mitsyanko { 8532ef0ecd7SIgor Mitsyanko struct qtnf_bus *bus = dev_get_drvdata(s->private); 854c9ff6c91SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); 855c9ff6c91SIgor Mitsyanko u32 reg = readl(PCIE_HDP_INT_EN(ps->pcie_reg_base)); 8562ef0ecd7SIgor Mitsyanko u32 status; 8572ef0ecd7SIgor Mitsyanko 858c9ff6c91SIgor Mitsyanko seq_printf(s, "pcie_irq_count(%u)\n", ps->base.pcie_irq_count); 859c9ff6c91SIgor Mitsyanko seq_printf(s, "pcie_irq_tx_count(%u)\n", ps->pcie_irq_tx_count); 8602ef0ecd7SIgor Mitsyanko status = reg & PCIE_HDP_INT_TX_BITS; 8612ef0ecd7SIgor Mitsyanko seq_printf(s, "pcie_irq_tx_status(%s)\n", 8622ef0ecd7SIgor Mitsyanko (status == PCIE_HDP_INT_TX_BITS) ? "EN" : "DIS"); 863c9ff6c91SIgor Mitsyanko seq_printf(s, "pcie_irq_rx_count(%u)\n", ps->pcie_irq_rx_count); 8642ef0ecd7SIgor Mitsyanko status = reg & PCIE_HDP_INT_RX_BITS; 8652ef0ecd7SIgor Mitsyanko seq_printf(s, "pcie_irq_rx_status(%s)\n", 8662ef0ecd7SIgor Mitsyanko (status == PCIE_HDP_INT_RX_BITS) ? "EN" : "DIS"); 867c9ff6c91SIgor Mitsyanko seq_printf(s, "pcie_irq_uf_count(%u)\n", ps->pcie_irq_uf_count); 8682ef0ecd7SIgor Mitsyanko status = reg & PCIE_HDP_INT_HHBM_UF; 8692ef0ecd7SIgor Mitsyanko seq_printf(s, "pcie_irq_hhbm_uf_status(%s)\n", 8702ef0ecd7SIgor Mitsyanko (status == PCIE_HDP_INT_HHBM_UF) ? "EN" : "DIS"); 8712ef0ecd7SIgor Mitsyanko 8722ef0ecd7SIgor Mitsyanko return 0; 8732ef0ecd7SIgor Mitsyanko } 8742ef0ecd7SIgor Mitsyanko 8752ef0ecd7SIgor Mitsyanko static int qtnf_dbg_hdp_stats(struct seq_file *s, void *data) 8762ef0ecd7SIgor Mitsyanko { 8772ef0ecd7SIgor Mitsyanko struct qtnf_bus *bus = dev_get_drvdata(s->private); 878c9ff6c91SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); 879c9ff6c91SIgor Mitsyanko struct qtnf_pcie_bus_priv *priv = &ps->base; 8802ef0ecd7SIgor Mitsyanko 8812ef0ecd7SIgor Mitsyanko seq_printf(s, "tx_full_count(%u)\n", priv->tx_full_count); 8822ef0ecd7SIgor Mitsyanko seq_printf(s, "tx_done_count(%u)\n", priv->tx_done_count); 8832ef0ecd7SIgor Mitsyanko seq_printf(s, "tx_reclaim_done(%u)\n", priv->tx_reclaim_done); 8842ef0ecd7SIgor Mitsyanko seq_printf(s, "tx_reclaim_req(%u)\n", priv->tx_reclaim_req); 8852ef0ecd7SIgor Mitsyanko 8862ef0ecd7SIgor Mitsyanko seq_printf(s, "tx_bd_r_index(%u)\n", priv->tx_bd_r_index); 8872ef0ecd7SIgor Mitsyanko seq_printf(s, "tx_bd_p_index(%u)\n", 888c9ff6c91SIgor Mitsyanko readl(PCIE_HDP_RX0DMA_CNT(ps->pcie_reg_base)) 8892ef0ecd7SIgor Mitsyanko & (priv->tx_bd_num - 1)); 8902ef0ecd7SIgor Mitsyanko seq_printf(s, "tx_bd_w_index(%u)\n", priv->tx_bd_w_index); 8912ef0ecd7SIgor Mitsyanko seq_printf(s, "tx queue len(%u)\n", 8922ef0ecd7SIgor Mitsyanko CIRC_CNT(priv->tx_bd_w_index, priv->tx_bd_r_index, 8932ef0ecd7SIgor Mitsyanko priv->tx_bd_num)); 8942ef0ecd7SIgor Mitsyanko 8952ef0ecd7SIgor Mitsyanko seq_printf(s, "rx_bd_r_index(%u)\n", priv->rx_bd_r_index); 8962ef0ecd7SIgor Mitsyanko seq_printf(s, "rx_bd_p_index(%u)\n", 897c9ff6c91SIgor Mitsyanko readl(PCIE_HDP_TX0DMA_CNT(ps->pcie_reg_base)) 8982ef0ecd7SIgor Mitsyanko & (priv->rx_bd_num - 1)); 8992ef0ecd7SIgor Mitsyanko seq_printf(s, "rx_bd_w_index(%u)\n", priv->rx_bd_w_index); 9002ef0ecd7SIgor Mitsyanko seq_printf(s, "rx alloc queue len(%u)\n", 9012ef0ecd7SIgor Mitsyanko CIRC_SPACE(priv->rx_bd_w_index, priv->rx_bd_r_index, 9022ef0ecd7SIgor Mitsyanko priv->rx_bd_num)); 9032ef0ecd7SIgor Mitsyanko 9042ef0ecd7SIgor Mitsyanko return 0; 9052ef0ecd7SIgor Mitsyanko } 9062ef0ecd7SIgor Mitsyanko 907addc7540SIgor Mitsyanko static int qtnf_ep_fw_send(struct pci_dev *pdev, uint32_t size, 9082ef0ecd7SIgor Mitsyanko int blk, const u8 *pblk, const u8 *fw) 9092ef0ecd7SIgor Mitsyanko { 9102ef0ecd7SIgor Mitsyanko struct qtnf_bus *bus = pci_get_drvdata(pdev); 9112ef0ecd7SIgor Mitsyanko 912d0b95bfaSIgor Mitsyanko struct qtnf_pearl_fw_hdr *hdr; 9132ef0ecd7SIgor Mitsyanko u8 *pdata; 9142ef0ecd7SIgor Mitsyanko 9152ef0ecd7SIgor Mitsyanko int hds = sizeof(*hdr); 9162ef0ecd7SIgor Mitsyanko struct sk_buff *skb = NULL; 9172ef0ecd7SIgor Mitsyanko int len = 0; 9182ef0ecd7SIgor Mitsyanko int ret; 9192ef0ecd7SIgor Mitsyanko 9202ef0ecd7SIgor Mitsyanko skb = __dev_alloc_skb(QTN_PCIE_FW_BUFSZ, GFP_KERNEL); 9212ef0ecd7SIgor Mitsyanko if (!skb) 9222ef0ecd7SIgor Mitsyanko return -ENOMEM; 9232ef0ecd7SIgor Mitsyanko 9242ef0ecd7SIgor Mitsyanko skb->len = QTN_PCIE_FW_BUFSZ; 9252ef0ecd7SIgor Mitsyanko skb->dev = NULL; 9262ef0ecd7SIgor Mitsyanko 927d0b95bfaSIgor Mitsyanko hdr = (struct qtnf_pearl_fw_hdr *)skb->data; 9282ef0ecd7SIgor Mitsyanko memcpy(hdr->boardflg, QTN_PCIE_BOARDFLG, strlen(QTN_PCIE_BOARDFLG)); 9292ef0ecd7SIgor Mitsyanko hdr->fwsize = cpu_to_le32(size); 9302ef0ecd7SIgor Mitsyanko hdr->seqnum = cpu_to_le32(blk); 9312ef0ecd7SIgor Mitsyanko 9322ef0ecd7SIgor Mitsyanko if (blk) 9332ef0ecd7SIgor Mitsyanko hdr->type = cpu_to_le32(QTN_FW_DSUB); 9342ef0ecd7SIgor Mitsyanko else 9352ef0ecd7SIgor Mitsyanko hdr->type = cpu_to_le32(QTN_FW_DBEGIN); 9362ef0ecd7SIgor Mitsyanko 9372ef0ecd7SIgor Mitsyanko pdata = skb->data + hds; 9382ef0ecd7SIgor Mitsyanko 9392ef0ecd7SIgor Mitsyanko len = QTN_PCIE_FW_BUFSZ - hds; 9402ef0ecd7SIgor Mitsyanko if (pblk >= (fw + size - len)) { 9412ef0ecd7SIgor Mitsyanko len = fw + size - pblk; 9422ef0ecd7SIgor Mitsyanko hdr->type = cpu_to_le32(QTN_FW_DEND); 9432ef0ecd7SIgor Mitsyanko } 9442ef0ecd7SIgor Mitsyanko 9452ef0ecd7SIgor Mitsyanko hdr->pktlen = cpu_to_le32(len); 9462ef0ecd7SIgor Mitsyanko memcpy(pdata, pblk, len); 9472ef0ecd7SIgor Mitsyanko hdr->crc = cpu_to_le32(~crc32(0, pdata, len)); 9482ef0ecd7SIgor Mitsyanko 949904628d3SIgor Mitsyanko ret = qtnf_pcie_skb_send(bus, skb); 9502ef0ecd7SIgor Mitsyanko 9512ef0ecd7SIgor Mitsyanko return (ret == NETDEV_TX_OK) ? len : 0; 9522ef0ecd7SIgor Mitsyanko } 9532ef0ecd7SIgor Mitsyanko 9542ef0ecd7SIgor Mitsyanko static int 955c9ff6c91SIgor Mitsyanko qtnf_ep_fw_load(struct qtnf_pcie_pearl_state *ps, const u8 *fw, u32 fw_size) 9562ef0ecd7SIgor Mitsyanko { 957d0b95bfaSIgor Mitsyanko int blk_size = QTN_PCIE_FW_BUFSZ - sizeof(struct qtnf_pearl_fw_hdr); 9582ef0ecd7SIgor Mitsyanko int blk_count = fw_size / blk_size + ((fw_size % blk_size) ? 1 : 0); 9592ef0ecd7SIgor Mitsyanko const u8 *pblk = fw; 9602ef0ecd7SIgor Mitsyanko int threshold = 0; 9612ef0ecd7SIgor Mitsyanko int blk = 0; 9622ef0ecd7SIgor Mitsyanko int len; 9632ef0ecd7SIgor Mitsyanko 9642ef0ecd7SIgor Mitsyanko pr_debug("FW upload started: fw_addr=0x%p size=%d\n", fw, fw_size); 9652ef0ecd7SIgor Mitsyanko 9662ef0ecd7SIgor Mitsyanko while (blk < blk_count) { 9672ef0ecd7SIgor Mitsyanko if (++threshold > 10000) { 9682ef0ecd7SIgor Mitsyanko pr_err("FW upload failed: too many retries\n"); 9692ef0ecd7SIgor Mitsyanko return -ETIMEDOUT; 9702ef0ecd7SIgor Mitsyanko } 9712ef0ecd7SIgor Mitsyanko 972addc7540SIgor Mitsyanko len = qtnf_ep_fw_send(ps->base.pdev, fw_size, blk, pblk, fw); 9732ef0ecd7SIgor Mitsyanko if (len <= 0) 9742ef0ecd7SIgor Mitsyanko continue; 9752ef0ecd7SIgor Mitsyanko 9762ef0ecd7SIgor Mitsyanko if (!((blk + 1) & QTN_PCIE_FW_DLMASK) || 9772ef0ecd7SIgor Mitsyanko (blk == (blk_count - 1))) { 978c9ff6c91SIgor Mitsyanko qtnf_set_state(&ps->bda->bda_rc_state, 9792ef0ecd7SIgor Mitsyanko QTN_RC_FW_SYNC); 980c9ff6c91SIgor Mitsyanko if (qtnf_poll_state(&ps->bda->bda_ep_state, 9812ef0ecd7SIgor Mitsyanko QTN_EP_FW_SYNC, 9822ef0ecd7SIgor Mitsyanko QTN_FW_DL_TIMEOUT_MS)) { 9832ef0ecd7SIgor Mitsyanko pr_err("FW upload failed: SYNC timed out\n"); 9842ef0ecd7SIgor Mitsyanko return -ETIMEDOUT; 9852ef0ecd7SIgor Mitsyanko } 9862ef0ecd7SIgor Mitsyanko 987c9ff6c91SIgor Mitsyanko qtnf_clear_state(&ps->bda->bda_ep_state, 9882ef0ecd7SIgor Mitsyanko QTN_EP_FW_SYNC); 9892ef0ecd7SIgor Mitsyanko 990c9ff6c91SIgor Mitsyanko if (qtnf_is_state(&ps->bda->bda_ep_state, 9912ef0ecd7SIgor Mitsyanko QTN_EP_FW_RETRY)) { 9922ef0ecd7SIgor Mitsyanko if (blk == (blk_count - 1)) { 9932ef0ecd7SIgor Mitsyanko int last_round = 9942ef0ecd7SIgor Mitsyanko blk_count & QTN_PCIE_FW_DLMASK; 9952ef0ecd7SIgor Mitsyanko blk -= last_round; 9962ef0ecd7SIgor Mitsyanko pblk -= ((last_round - 1) * 9972ef0ecd7SIgor Mitsyanko blk_size + len); 9982ef0ecd7SIgor Mitsyanko } else { 9992ef0ecd7SIgor Mitsyanko blk -= QTN_PCIE_FW_DLMASK; 10002ef0ecd7SIgor Mitsyanko pblk -= QTN_PCIE_FW_DLMASK * blk_size; 10012ef0ecd7SIgor Mitsyanko } 10022ef0ecd7SIgor Mitsyanko 1003c9ff6c91SIgor Mitsyanko qtnf_clear_state(&ps->bda->bda_ep_state, 10042ef0ecd7SIgor Mitsyanko QTN_EP_FW_RETRY); 10052ef0ecd7SIgor Mitsyanko 10062ef0ecd7SIgor Mitsyanko pr_warn("FW upload retry: block #%d\n", blk); 10072ef0ecd7SIgor Mitsyanko continue; 10082ef0ecd7SIgor Mitsyanko } 10092ef0ecd7SIgor Mitsyanko 1010789763b6SIgor Mitsyanko qtnf_pearl_data_tx_reclaim(ps); 10112ef0ecd7SIgor Mitsyanko } 10122ef0ecd7SIgor Mitsyanko 10132ef0ecd7SIgor Mitsyanko pblk += len; 10142ef0ecd7SIgor Mitsyanko blk++; 10152ef0ecd7SIgor Mitsyanko } 10162ef0ecd7SIgor Mitsyanko 10172ef0ecd7SIgor Mitsyanko pr_debug("FW upload completed: totally sent %d blocks\n", blk); 10182ef0ecd7SIgor Mitsyanko return 0; 10192ef0ecd7SIgor Mitsyanko } 10202ef0ecd7SIgor Mitsyanko 1021789763b6SIgor Mitsyanko static void qtnf_pearl_fw_work_handler(struct work_struct *work) 10222ef0ecd7SIgor Mitsyanko { 10232ef0ecd7SIgor Mitsyanko struct qtnf_bus *bus = container_of(work, struct qtnf_bus, fw_work); 1024c9ff6c91SIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); 1025ae1946beSSergey Matyukevich u32 state = QTN_RC_FW_LOADRDY | QTN_RC_FW_QLINK; 1026ae1946beSSergey Matyukevich const char *fwname = QTN_PCI_PEARL_FW_NAME; 1027c9ff6c91SIgor Mitsyanko struct pci_dev *pdev = ps->base.pdev; 10282ef0ecd7SIgor Mitsyanko const struct firmware *fw; 10292ef0ecd7SIgor Mitsyanko int ret; 10302ef0ecd7SIgor Mitsyanko 1031b7da53cdSIgor Mitsyanko if (ps->base.flashboot) { 10322ef0ecd7SIgor Mitsyanko state |= QTN_RC_FW_FLASHBOOT; 10332ef0ecd7SIgor Mitsyanko } else { 10342ef0ecd7SIgor Mitsyanko ret = request_firmware(&fw, fwname, &pdev->dev); 10352ef0ecd7SIgor Mitsyanko if (ret < 0) { 10362ef0ecd7SIgor Mitsyanko pr_err("failed to get firmware %s\n", fwname); 1037addc7540SIgor Mitsyanko goto fw_load_exit; 10382ef0ecd7SIgor Mitsyanko } 10392ef0ecd7SIgor Mitsyanko } 10402ef0ecd7SIgor Mitsyanko 1041c9ff6c91SIgor Mitsyanko qtnf_set_state(&ps->bda->bda_rc_state, state); 10422ef0ecd7SIgor Mitsyanko 1043c9ff6c91SIgor Mitsyanko if (qtnf_poll_state(&ps->bda->bda_ep_state, QTN_EP_FW_LOADRDY, 10442ef0ecd7SIgor Mitsyanko QTN_FW_DL_TIMEOUT_MS)) { 10452ef0ecd7SIgor Mitsyanko pr_err("card is not ready\n"); 10462ef0ecd7SIgor Mitsyanko 1047b7da53cdSIgor Mitsyanko if (!ps->base.flashboot) 10482ef0ecd7SIgor Mitsyanko release_firmware(fw); 10492ef0ecd7SIgor Mitsyanko 1050addc7540SIgor Mitsyanko goto fw_load_exit; 10512ef0ecd7SIgor Mitsyanko } 10522ef0ecd7SIgor Mitsyanko 1053c9ff6c91SIgor Mitsyanko qtnf_clear_state(&ps->bda->bda_ep_state, QTN_EP_FW_LOADRDY); 10542ef0ecd7SIgor Mitsyanko 1055b7da53cdSIgor Mitsyanko if (ps->base.flashboot) { 10562ef0ecd7SIgor Mitsyanko pr_info("booting firmware from flash\n"); 1057addc7540SIgor Mitsyanko 10582ef0ecd7SIgor Mitsyanko } else { 10592ef0ecd7SIgor Mitsyanko pr_info("starting firmware upload: %s\n", fwname); 10602ef0ecd7SIgor Mitsyanko 1061c9ff6c91SIgor Mitsyanko ret = qtnf_ep_fw_load(ps, fw->data, fw->size); 10622ef0ecd7SIgor Mitsyanko release_firmware(fw); 10632ef0ecd7SIgor Mitsyanko if (ret) { 10642ef0ecd7SIgor Mitsyanko pr_err("firmware upload error\n"); 1065addc7540SIgor Mitsyanko goto fw_load_exit; 10662ef0ecd7SIgor Mitsyanko } 10672ef0ecd7SIgor Mitsyanko } 10682ef0ecd7SIgor Mitsyanko 1069c9ff6c91SIgor Mitsyanko if (qtnf_poll_state(&ps->bda->bda_ep_state, QTN_EP_FW_DONE, 10702ef0ecd7SIgor Mitsyanko QTN_FW_DL_TIMEOUT_MS)) { 10712ef0ecd7SIgor Mitsyanko pr_err("firmware bringup timed out\n"); 1072addc7540SIgor Mitsyanko goto fw_load_exit; 10732ef0ecd7SIgor Mitsyanko } 10742ef0ecd7SIgor Mitsyanko 1075c9ff6c91SIgor Mitsyanko if (qtnf_poll_state(&ps->bda->bda_ep_state, 10762ef0ecd7SIgor Mitsyanko QTN_EP_FW_QLINK_DONE, QTN_FW_QLINK_TIMEOUT_MS)) { 10772ef0ecd7SIgor Mitsyanko pr_err("firmware runtime failure\n"); 10782ef0ecd7SIgor Mitsyanko goto fw_load_exit; 1079addc7540SIgor Mitsyanko } 10802ef0ecd7SIgor Mitsyanko 1081ae1946beSSergey Matyukevich pr_info("firmware is up and running\n"); 10822ef0ecd7SIgor Mitsyanko 1083ae1946beSSergey Matyukevich ret = qtnf_pcie_fw_boot_done(bus); 1084ae1946beSSergey Matyukevich if (ret) 1085ae1946beSSergey Matyukevich goto fw_load_exit; 1086addc7540SIgor Mitsyanko 1087addc7540SIgor Mitsyanko qtnf_debugfs_add_entry(bus, "hdp_stats", qtnf_dbg_hdp_stats); 1088addc7540SIgor Mitsyanko qtnf_debugfs_add_entry(bus, "irq_stats", qtnf_dbg_irq_stats); 1089ae1946beSSergey Matyukevich 1090ae1946beSSergey Matyukevich fw_load_exit: 1091ae1946beSSergey Matyukevich put_device(&pdev->dev); 10922ef0ecd7SIgor Mitsyanko } 10932ef0ecd7SIgor Mitsyanko 1094aff8e8d0SAllen Pais static void qtnf_pearl_reclaim_tasklet_fn(struct tasklet_struct *t) 10952ef0ecd7SIgor Mitsyanko { 1096aff8e8d0SAllen Pais struct qtnf_pcie_pearl_state *ps = from_tasklet(ps, t, base.reclaim_tq); 10972ef0ecd7SIgor Mitsyanko 1098789763b6SIgor Mitsyanko qtnf_pearl_data_tx_reclaim(ps); 1099c9ff6c91SIgor Mitsyanko qtnf_en_txdone_irq(ps); 11002ef0ecd7SIgor Mitsyanko } 11012ef0ecd7SIgor Mitsyanko 1102b7da53cdSIgor Mitsyanko static u64 qtnf_pearl_dma_mask_get(void) 1103033a7599SIgor Mitsyanko { 1104b7da53cdSIgor Mitsyanko #ifdef CONFIG_ARCH_DMA_ADDR_T_64BIT 1105b7da53cdSIgor Mitsyanko return DMA_BIT_MASK(64); 1106b7da53cdSIgor Mitsyanko #else 1107b7da53cdSIgor Mitsyanko return DMA_BIT_MASK(32); 1108b7da53cdSIgor Mitsyanko #endif 1109033a7599SIgor Mitsyanko } 1110033a7599SIgor Mitsyanko 111197aef03cSSergey Matyukevich static int qtnf_pcie_pearl_probe(struct qtnf_bus *bus, unsigned int tx_bd_size, 111297aef03cSSergey Matyukevich unsigned int rx_bd_size) 11132ef0ecd7SIgor Mitsyanko { 1114addc7540SIgor Mitsyanko struct qtnf_shm_ipc_int ipc_int; 1115b7da53cdSIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); 1116b7da53cdSIgor Mitsyanko struct pci_dev *pdev = ps->base.pdev; 11172ef0ecd7SIgor Mitsyanko int ret; 11182ef0ecd7SIgor Mitsyanko 1119b7da53cdSIgor Mitsyanko bus->bus_ops = &qtnf_pcie_pearl_bus_ops; 1120c9ff6c91SIgor Mitsyanko spin_lock_init(&ps->irq_lock); 1121addc7540SIgor Mitsyanko INIT_WORK(&bus->fw_work, qtnf_pearl_fw_work_handler); 11222ef0ecd7SIgor Mitsyanko 1123addc7540SIgor Mitsyanko ps->pcie_reg_base = ps->base.dmareg_bar; 1124addc7540SIgor Mitsyanko ps->bda = ps->base.epmem_bar; 1125addc7540SIgor Mitsyanko writel(ps->base.msi_enabled, &ps->bda->bda_rc_msi_enabled); 11262ef0ecd7SIgor Mitsyanko 112797aef03cSSergey Matyukevich ret = qtnf_pcie_pearl_init_xfer(ps, tx_bd_size, rx_bd_size); 11282ef0ecd7SIgor Mitsyanko if (ret) { 11292ef0ecd7SIgor Mitsyanko pr_err("PCIE xfer init failed\n"); 1130b7da53cdSIgor Mitsyanko return ret; 11312ef0ecd7SIgor Mitsyanko } 11322ef0ecd7SIgor Mitsyanko 11332ef0ecd7SIgor Mitsyanko /* init default irq settings */ 1134c9ff6c91SIgor Mitsyanko qtnf_init_hdp_irqs(ps); 11352ef0ecd7SIgor Mitsyanko 11362ef0ecd7SIgor Mitsyanko /* start with disabled irqs */ 1137c9ff6c91SIgor Mitsyanko qtnf_disable_hdp_irqs(ps); 11382ef0ecd7SIgor Mitsyanko 1139789763b6SIgor Mitsyanko ret = devm_request_irq(&pdev->dev, pdev->irq, 1140789763b6SIgor Mitsyanko &qtnf_pcie_pearl_interrupt, 0, 1141b7da53cdSIgor Mitsyanko "qtnf_pearl_irq", (void *)bus); 11422ef0ecd7SIgor Mitsyanko if (ret) { 11432ef0ecd7SIgor Mitsyanko pr_err("failed to request pcie irq %d\n", pdev->irq); 1144789763b6SIgor Mitsyanko qtnf_pearl_free_xfer_buffers(ps); 11452ef0ecd7SIgor Mitsyanko return ret; 11462ef0ecd7SIgor Mitsyanko } 11472ef0ecd7SIgor Mitsyanko 1148aff8e8d0SAllen Pais tasklet_setup(&ps->base.reclaim_tq, qtnf_pearl_reclaim_tasklet_fn); 1149b7da53cdSIgor Mitsyanko netif_napi_add(&bus->mux_dev, &bus->mux_napi, 1150b7da53cdSIgor Mitsyanko qtnf_pcie_pearl_rx_poll, 10); 1151b7da53cdSIgor Mitsyanko 1152b7da53cdSIgor Mitsyanko ipc_int.fn = qtnf_pcie_pearl_ipc_gen_ep_int; 1153b7da53cdSIgor Mitsyanko ipc_int.arg = ps; 1154b7da53cdSIgor Mitsyanko qtnf_pcie_init_shm_ipc(&ps->base, &ps->bda->bda_shm_reg1, 1155b7da53cdSIgor Mitsyanko &ps->bda->bda_shm_reg2, &ipc_int); 1156b7da53cdSIgor Mitsyanko 1157b7da53cdSIgor Mitsyanko return 0; 1158b7da53cdSIgor Mitsyanko } 1159b7da53cdSIgor Mitsyanko 1160b7da53cdSIgor Mitsyanko static void qtnf_pcie_pearl_remove(struct qtnf_bus *bus) 11612ef0ecd7SIgor Mitsyanko { 1162b7da53cdSIgor Mitsyanko struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); 11632ef0ecd7SIgor Mitsyanko 1164addc7540SIgor Mitsyanko qtnf_pearl_reset_ep(ps); 1165789763b6SIgor Mitsyanko qtnf_pearl_free_xfer_buffers(ps); 11662ef0ecd7SIgor Mitsyanko } 11672ef0ecd7SIgor Mitsyanko 11682ef0ecd7SIgor Mitsyanko #ifdef CONFIG_PM_SLEEP 1169b7da53cdSIgor Mitsyanko static int qtnf_pcie_pearl_suspend(struct qtnf_bus *bus) 11702ef0ecd7SIgor Mitsyanko { 11712ef0ecd7SIgor Mitsyanko return -EOPNOTSUPP; 11722ef0ecd7SIgor Mitsyanko } 11732ef0ecd7SIgor Mitsyanko 1174b7da53cdSIgor Mitsyanko static int qtnf_pcie_pearl_resume(struct qtnf_bus *bus) 11752ef0ecd7SIgor Mitsyanko { 11762ef0ecd7SIgor Mitsyanko return 0; 11772ef0ecd7SIgor Mitsyanko } 11782ef0ecd7SIgor Mitsyanko #endif 11792ef0ecd7SIgor Mitsyanko 1180b7da53cdSIgor Mitsyanko struct qtnf_bus *qtnf_pcie_pearl_alloc(struct pci_dev *pdev) 11812ef0ecd7SIgor Mitsyanko { 1182b7da53cdSIgor Mitsyanko struct qtnf_bus *bus; 1183b7da53cdSIgor Mitsyanko struct qtnf_pcie_pearl_state *ps; 11842ef0ecd7SIgor Mitsyanko 1185b7da53cdSIgor Mitsyanko bus = devm_kzalloc(&pdev->dev, sizeof(*bus) + sizeof(*ps), GFP_KERNEL); 1186b7da53cdSIgor Mitsyanko if (!bus) 1187b7da53cdSIgor Mitsyanko return NULL; 11882ef0ecd7SIgor Mitsyanko 1189b7da53cdSIgor Mitsyanko ps = get_bus_priv(bus); 1190b7da53cdSIgor Mitsyanko ps->base.probe_cb = qtnf_pcie_pearl_probe; 1191b7da53cdSIgor Mitsyanko ps->base.remove_cb = qtnf_pcie_pearl_remove; 1192b7da53cdSIgor Mitsyanko ps->base.dma_mask_get_cb = qtnf_pearl_dma_mask_get; 11932ef0ecd7SIgor Mitsyanko #ifdef CONFIG_PM_SLEEP 1194b7da53cdSIgor Mitsyanko ps->base.resume_cb = qtnf_pcie_pearl_resume; 1195b7da53cdSIgor Mitsyanko ps->base.suspend_cb = qtnf_pcie_pearl_suspend; 11962ef0ecd7SIgor Mitsyanko #endif 11972ef0ecd7SIgor Mitsyanko 1198b7da53cdSIgor Mitsyanko return bus; 11992ef0ecd7SIgor Mitsyanko } 1200