Lines Matching full:lan966x in drivers/net/ethernet/microchip/lan966x/lan966x_fdma.c

9 static int lan966x_fdma_channel_active(struct lan966x *lan966x)  in lan966x_fdma_channel_active()  argument
11 return lan_rd(lan966x, FDMA_CH_ACTIVE); in lan966x_fdma_channel_active()
71 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_alloc_page_pool() local
77 .dev = lan966x->dev, in lan966x_fdma_rx_alloc_page_pool()
84 if (lan966x_xdp_present(lan966x)) in lan966x_fdma_rx_alloc_page_pool()
89 for (int i = 0; i < lan966x->num_phys_ports; ++i) { in lan966x_fdma_rx_alloc_page_pool()
92 if (!lan966x->ports[i]) in lan966x_fdma_rx_alloc_page_pool()
95 port = lan966x->ports[i]; in lan966x_fdma_rx_alloc_page_pool()
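
The hits above are from lan966x_fdma_rx_alloc_page_pool(), which builds a page_pool to recycle RX buffers and walks the ports to decide whether XDP is in use. Below is a minimal sketch of that kind of setup; the helper name, field values and the xdp flag are illustrative assumptions, not the driver's real configuration (the header is <net/page_pool/helpers.h> on recent kernels, <net/page_pool.h> on older ones).

#include <linux/dma-mapping.h>
#include <linux/numa.h>
#include <net/page_pool/helpers.h>      /* <net/page_pool.h> before v6.6 */

/* Hypothetical helper; values are illustrative only. */
static struct page_pool *example_rx_page_pool(struct device *dev,
                                              unsigned int nbufs,
                                              unsigned int order,
                                              bool xdp_enabled)
{
        struct page_pool_params pp_params = {
                .order     = order,
                .flags     = PP_FLAG_DMA_MAP | PP_FLAG_DMA_SYNC_DEV,
                .pool_size = nbufs,
                .nid       = NUMA_NO_NODE,
                .dev       = dev,                /* lan966x->dev in the listing */
                .max_len   = PAGE_SIZE << order,
                /* With an XDP program attached the CPU may rewrite frames,
                 * so the mapping must be bidirectional. */
                .dma_dir   = xdp_enabled ? DMA_BIDIRECTIONAL : DMA_FROM_DEVICE,
        };

        return page_pool_create(&pp_params);     /* ERR_PTR() on failure */
}

Callers would then pull buffers with page_pool_dev_alloc_pages() and read back the mapping with page_pool_get_dma_addr() when filling descriptors.
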
106 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_alloc() local
120 rx->dcbs = dma_alloc_coherent(lan966x->dev, size, &rx->dma, GFP_KERNEL); in lan966x_fdma_rx_alloc()
158 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_free() local
164 dma_free_coherent(lan966x->dev, size, rx->dcbs, rx->dma); in lan966x_fdma_rx_free()
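
lan966x_fdma_rx_alloc() and lan966x_fdma_rx_free() above allocate and release the RX DCB ring out of coherent DMA memory. A hedged sketch of that pattern follows; the descriptor layout and sizing are made up for illustration.

#include <linux/dma-mapping.h>
#include <linux/kernel.h>

/* Hypothetical DCB layout; the real FDMA descriptor differs. */
struct example_dcb {
        u64 nextptr;
        u64 info;
};

static void *example_alloc_dcb_ring(struct device *dev, unsigned int ndcbs,
                                    dma_addr_t *dma)
{
        size_t size = ALIGN(sizeof(struct example_dcb) * ndcbs, PAGE_SIZE);

        /* Coherent memory: CPU writes are visible to the FDMA without
         * explicit dma_sync_*() calls, which suits a descriptor ring. */
        return dma_alloc_coherent(dev, size, dma, GFP_KERNEL);
}

static void example_free_dcb_ring(struct device *dev, unsigned int ndcbs,
                                  void *dcbs, dma_addr_t dma)
{
        size_t size = ALIGN(sizeof(struct example_dcb) * ndcbs, PAGE_SIZE);

        dma_free_coherent(dev, size, dcbs, dma);
}
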
169 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_start() local
175 lan_wr(lower_32_bits((u64)rx->dma), lan966x, in lan966x_fdma_rx_start()
177 lan_wr(upper_32_bits((u64)rx->dma), lan966x, in lan966x_fdma_rx_start()
184 lan966x, FDMA_CH_CFG(rx->channel_id)); in lan966x_fdma_rx_start()
189 lan966x, FDMA_PORT_CTRL(0)); in lan966x_fdma_rx_start()
192 mask = lan_rd(lan966x, FDMA_INTR_DB_ENA); in lan966x_fdma_rx_start()
197 lan966x, FDMA_INTR_DB_ENA); in lan966x_fdma_rx_start()
202 lan966x, FDMA_CH_ACTIVATE); in lan966x_fdma_rx_start()
207 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_disable() local
213 lan966x, FDMA_CH_DISABLE); in lan966x_fdma_rx_disable()
215 readx_poll_timeout_atomic(lan966x_fdma_channel_active, lan966x, in lan966x_fdma_rx_disable()
221 lan966x, FDMA_CH_DB_DISCARD); in lan966x_fdma_rx_disable()
226 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_reload() local
230 lan966x, FDMA_CH_RELOAD); in lan966x_fdma_rx_reload()
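
lan966x_fdma_rx_disable() asks the hardware to stop the channel and then polls the channel-active status with readx_poll_timeout_atomic() (using lan966x_fdma_channel_active() from the first hits as the read op) until the channel bit drops. A self-contained sketch of the same disable-then-poll idiom over plain MMIO, with hypothetical register offsets and timeouts:

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/iopoll.h>

/* Hypothetical offsets standing in for FDMA_CH_DISABLE/FDMA_CH_ACTIVE. */
#define EX_CH_ACTIVE   0x00
#define EX_CH_DISABLE  0x08

static int example_channel_disable(void __iomem *base, unsigned int ch)
{
        u32 val;

        /* Request the channel to stop... */
        writel(BIT(ch), base + EX_CH_DISABLE);

        /* ...then busy-wait (safe in atomic context) until the hardware
         * reports it idle; 10 us between reads, 100 ms total budget. */
        return readl_poll_timeout_atomic(base + EX_CH_ACTIVE, val,
                                         !(val & BIT(ch)), 10, 100000);
}

The driver goes through its lan_rd()-based accessor instead of a raw readl(), which is why the listing shows the readx_ variant with lan966x_fdma_channel_active as the first argument.
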
242 struct lan966x *lan966x = tx->lan966x; in lan966x_fdma_tx_alloc() local
256 tx->dcbs = dma_alloc_coherent(lan966x->dev, size, &tx->dma, GFP_KERNEL); in lan966x_fdma_tx_alloc()
282 struct lan966x *lan966x = tx->lan966x; in lan966x_fdma_tx_free() local
289 dma_free_coherent(lan966x->dev, size, tx->dcbs, tx->dma); in lan966x_fdma_tx_free()
294 struct lan966x *lan966x = tx->lan966x; in lan966x_fdma_tx_activate() local
300 lan_wr(lower_32_bits((u64)tx->dma), lan966x, in lan966x_fdma_tx_activate()
302 lan_wr(upper_32_bits((u64)tx->dma), lan966x, in lan966x_fdma_tx_activate()
309 lan966x, FDMA_CH_CFG(tx->channel_id)); in lan966x_fdma_tx_activate()
314 lan966x, FDMA_PORT_CTRL(0)); in lan966x_fdma_tx_activate()
317 mask = lan_rd(lan966x, FDMA_INTR_DB_ENA); in lan966x_fdma_tx_activate()
322 lan966x, FDMA_INTR_DB_ENA); in lan966x_fdma_tx_activate()
327 lan966x, FDMA_CH_ACTIVATE); in lan966x_fdma_tx_activate()
332 struct lan966x *lan966x = tx->lan966x; in lan966x_fdma_tx_disable() local
338 lan966x, FDMA_CH_DISABLE); in lan966x_fdma_tx_disable()
340 readx_poll_timeout_atomic(lan966x_fdma_channel_active, lan966x, in lan966x_fdma_tx_disable()
346 lan966x, FDMA_CH_DB_DISCARD); in lan966x_fdma_tx_disable()
354 struct lan966x *lan966x = tx->lan966x; in lan966x_fdma_tx_reload() local
359 lan966x, FDMA_CH_RELOAD); in lan966x_fdma_tx_reload()
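
The TX hits mirror the RX side: lan966x_fdma_tx_activate() programs the DCB ring base by splitting the 64-bit DMA address across two 32-bit register writes (the lower_32_bits()/upper_32_bits() pair above). A small sketch of that split, again with hypothetical register offsets:

#include <linux/io.h>
#include <linux/kernel.h>       /* lower_32_bits()/upper_32_bits() */

/* Hypothetical offsets standing in for the DCB list-pointer registers. */
#define EX_DCB_LLP(ch)  (0x10 + (ch) * 8)
#define EX_DCB_LLP1(ch) (0x14 + (ch) * 8)

static void example_program_dcb_base(void __iomem *base, unsigned int ch,
                                     dma_addr_t dma)
{
        writel(lower_32_bits((u64)dma), base + EX_DCB_LLP(ch));
        writel(upper_32_bits((u64)dma), base + EX_DCB_LLP1(ch));
}
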
362 static void lan966x_fdma_wakeup_netdev(struct lan966x *lan966x) in lan966x_fdma_wakeup_netdev() argument
367 for (i = 0; i < lan966x->num_phys_ports; ++i) { in lan966x_fdma_wakeup_netdev()
368 port = lan966x->ports[i]; in lan966x_fdma_wakeup_netdev()
377 static void lan966x_fdma_stop_netdev(struct lan966x *lan966x) in lan966x_fdma_stop_netdev() argument
382 for (i = 0; i < lan966x->num_phys_ports; ++i) { in lan966x_fdma_stop_netdev()
383 port = lan966x->ports[i]; in lan966x_fdma_stop_netdev()
391 static void lan966x_fdma_tx_clear_buf(struct lan966x *lan966x, int weight) in lan966x_fdma_tx_clear_buf() argument
393 struct lan966x_tx *tx = &lan966x->tx; in lan966x_fdma_tx_clear_buf()
394 struct lan966x_rx *rx = &lan966x->rx; in lan966x_fdma_tx_clear_buf()
404 spin_lock_irqsave(&lan966x->tx_lock, flags); in lan966x_fdma_tx_clear_buf()
420 dma_unmap_single(lan966x->dev, in lan966x_fdma_tx_clear_buf()
429 dma_unmap_single(lan966x->dev, in lan966x_fdma_tx_clear_buf()
447 lan966x_fdma_wakeup_netdev(lan966x); in lan966x_fdma_tx_clear_buf()
449 spin_unlock_irqrestore(&lan966x->tx_lock, flags); in lan966x_fdma_tx_clear_buf()
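
lan966x_fdma_tx_clear_buf() reclaims completed TX descriptors under tx_lock, unmaps their streaming DMA buffers and then wakes the netdev queues. A hedged sketch of one reclaim step; the per-slot bookkeeping structure is invented for illustration.

#include <linux/dma-mapping.h>
#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/spinlock.h>

/* Hypothetical per-slot TX bookkeeping, standing in for the driver's state. */
struct example_tx_buf {
        struct sk_buff *skb;
        dma_addr_t dma_addr;
        bool used;
};

static void example_tx_clean_one(struct device *dev, spinlock_t *tx_lock,
                                 struct example_tx_buf *buf,
                                 struct net_device *ndev)
{
        unsigned long flags;

        /* The ring is shared with the xmit path, hence the irqsave lock. */
        spin_lock_irqsave(tx_lock, flags);

        if (buf->used && buf->skb) {
                dma_unmap_single(dev, buf->dma_addr, buf->skb->len,
                                 DMA_TO_DEVICE);
                dev_consume_skb_any(buf->skb);
                buf->skb = NULL;
                buf->used = false;
        }

        /* Mirrors lan966x_fdma_wakeup_netdev(): restart a stopped queue. */
        if (netif_queue_stopped(ndev))
                netif_wake_queue(ndev);

        spin_unlock_irqrestore(tx_lock, flags);
}
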
466 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_check_frame() local
476 dma_sync_single_for_cpu(lan966x->dev, in lan966x_fdma_rx_check_frame()
483 if (WARN_ON(*src_port >= lan966x->num_phys_ports)) in lan966x_fdma_rx_check_frame()
486 port = lan966x->ports[*src_port]; in lan966x_fdma_rx_check_frame()
496 struct lan966x *lan966x = rx->lan966x; in lan966x_fdma_rx_get_frame() local
517 skb->dev = lan966x->ports[src_port]->dev; in lan966x_fdma_rx_get_frame()
523 lan966x_ptp_rxtstamp(lan966x, skb, src_port, timestamp); in lan966x_fdma_rx_get_frame()
526 if (lan966x->bridge_mask & BIT(src_port)) { in lan966x_fdma_rx_get_frame()
530 if (!lan966x_hw_offload(lan966x, src_port, skb)) in lan966x_fdma_rx_get_frame()
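
lan966x_fdma_rx_check_frame() and lan966x_fdma_rx_get_frame() sync the received page for the CPU, validate the source port, and wrap the buffer in an skb aimed at that port's net_device. A rough sketch of building such an skb from a page_pool buffer; headroom and shared-info accounting are deliberately simplified.

#include <linux/dma-mapping.h>
#include <linux/etherdevice.h>
#include <linux/mm.h>
#include <linux/skbuff.h>
#include <net/page_pool/helpers.h>

static struct sk_buff *example_rx_build_skb(struct device *dev,
                                            struct page *page,
                                            unsigned int len,
                                            struct net_device *ndev)
{
        struct sk_buff *skb;

        /* Make the DMA'd payload visible to the CPU before parsing it. */
        dma_sync_single_for_cpu(dev, page_pool_get_dma_addr(page),
                                len, DMA_FROM_DEVICE);

        /* Real code reserves headroom and accounts for skb_shared_info. */
        skb = build_skb(page_address(page), PAGE_SIZE);
        if (!skb)
                return NULL;

        skb_mark_for_recycle(skb);      /* return the page to the pool on free */
        skb_put(skb, len);
        skb->dev = ndev;                /* lan966x->ports[src_port]->dev in the listing */
        skb->protocol = eth_type_trans(skb, skb->dev);

        return skb;
}
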
547 struct lan966x *lan966x = container_of(napi, struct lan966x, napi); in lan966x_fdma_napi_poll() local
548 struct lan966x_rx *rx = &lan966x->rx; in lan966x_fdma_napi_poll()
559 lan966x_fdma_tx_clear_buf(lan966x, weight); in lan966x_fdma_napi_poll()
592 napi_gro_receive(&lan966x->napi, skb); in lan966x_fdma_napi_poll()
618 lan_wr(0xff, lan966x, FDMA_INTR_DB_ENA); in lan966x_fdma_napi_poll()
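
The hits from lan966x_fdma_napi_poll() show the overall poll shape: reclaim TX first, pull RX frames into napi_gro_receive(), and re-enable the doorbell interrupt (the 0xff write to FDMA_INTR_DB_ENA) only after completing NAPI. A skeleton of that flow; the rx/tx helpers are hypothetical stand-ins for the driver's internals.

#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Hypothetical helpers standing in for TX reclaim, RX dequeue and
 * doorbell re-enable. */
void example_tx_clean(struct net_device *dev);
struct sk_buff *example_rx_poll_one(struct net_device *dev);
void example_enable_db_irq(struct net_device *dev);

static int example_napi_poll(struct napi_struct *napi, int budget)
{
        int work_done = 0;

        /* TX buffers are reclaimed at the top of the poll cycle. */
        example_tx_clean(napi->dev);

        while (work_done < budget) {
                struct sk_buff *skb = example_rx_poll_one(napi->dev);

                if (!skb)
                        break;

                napi_gro_receive(napi, skb);
                work_done++;
        }

        /* Only when the budget was not exhausted do we complete NAPI and
         * let the doorbell interrupt fire again. */
        if (work_done < budget && napi_complete_done(napi, work_done))
                example_enable_db_irq(napi->dev);

        return work_done;
}
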
625 struct lan966x *lan966x = args; in lan966x_fdma_irq_handler() local
628 db = lan_rd(lan966x, FDMA_INTR_DB); in lan966x_fdma_irq_handler()
629 err = lan_rd(lan966x, FDMA_INTR_ERR); in lan966x_fdma_irq_handler()
632 lan_wr(0, lan966x, FDMA_INTR_DB_ENA); in lan966x_fdma_irq_handler()
633 lan_wr(db, lan966x, FDMA_INTR_DB); in lan966x_fdma_irq_handler()
635 napi_schedule(&lan966x->napi); in lan966x_fdma_irq_handler()
639 err_type = lan_rd(lan966x, FDMA_ERRORS); in lan966x_fdma_irq_handler()
643 lan_wr(err, lan966x, FDMA_INTR_ERR); in lan966x_fdma_irq_handler()
644 lan_wr(err_type, lan966x, FDMA_ERRORS); in lan966x_fdma_irq_handler()
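
lan966x_fdma_irq_handler() reads the doorbell and error status, masks further doorbell interrupts, acknowledges what fired and defers the real work to NAPI. A self-contained sketch of that top half with hypothetical registers; the error branch is omitted.

#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/netdevice.h>

/* Hypothetical offsets standing in for FDMA_INTR_DB/FDMA_INTR_DB_ENA. */
#define EX_INTR_DB      0x20
#define EX_INTR_DB_ENA  0x24

struct example_priv {
        void __iomem *base;
        struct napi_struct napi;
};

static irqreturn_t example_fdma_irq(int irq, void *args)
{
        struct example_priv *priv = args;
        u32 db = readl(priv->base + EX_INTR_DB);

        if (db) {
                writel(0, priv->base + EX_INTR_DB_ENA); /* mask doorbells */
                writel(db, priv->base + EX_INTR_DB);    /* ack, assuming write-1-to-clear */
                napi_schedule(&priv->napi);
        }

        return IRQ_HANDLED;
}
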
685 struct lan966x *lan966x = tx->lan966x; in lan966x_fdma_tx_start() local
688 if (likely(lan966x->tx.activated)) { in lan966x_fdma_tx_start()
697 lan966x->tx.activated = true; in lan966x_fdma_tx_start()
707 struct lan966x *lan966x = port->lan966x; in lan966x_fdma_xmit_xdpf() local
709 struct lan966x_tx *tx = &lan966x->tx; in lan966x_fdma_xmit_xdpf()
717 spin_lock(&lan966x->tx_lock); in lan966x_fdma_xmit_xdpf()
744 dma_addr = dma_map_single(lan966x->dev, in lan966x_fdma_xmit_xdpf()
748 if (dma_mapping_error(lan966x->dev, dma_addr)) { in lan966x_fdma_xmit_xdpf()
769 dma_sync_single_for_device(lan966x->dev, in lan966x_fdma_xmit_xdpf()
795 spin_unlock(&lan966x->tx_lock); in lan966x_fdma_xmit_xdpf()
803 struct lan966x *lan966x = port->lan966x; in lan966x_fdma_xmit() local
805 struct lan966x_tx *tx = &lan966x->tx; in lan966x_fdma_xmit()
842 dma_addr = dma_map_single(lan966x->dev, skb->data, skb->len, in lan966x_fdma_xmit()
844 if (dma_mapping_error(lan966x->dev, dma_addr)) { in lan966x_fdma_xmit()
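
Both transmit paths (lan966x_fdma_xmit_xdpf() and lan966x_fdma_xmit()) map the frame with dma_map_single() and must check the result with dma_mapping_error() before touching a DCB. A minimal sketch of that step for the skb case:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/skbuff.h>

static int example_map_skb_for_tx(struct device *dev, struct sk_buff *skb,
                                  dma_addr_t *dma_addr)
{
        *dma_addr = dma_map_single(dev, skb->data, skb->len, DMA_TO_DEVICE);
        if (dma_mapping_error(dev, *dma_addr))
                return -ENOMEM; /* caller drops the skb and counts the error */

        /* The caller then fills a DCB with *dma_addr and skb->len and kicks
         * the channel: FDMA_CH_ACTIVATE the first time, FDMA_CH_RELOAD once
         * tx.activated is set, as in lan966x_fdma_tx_start(). */
        return 0;
}
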
882 static int lan966x_fdma_get_max_mtu(struct lan966x *lan966x) in lan966x_fdma_get_max_mtu() argument
887 for (i = 0; i < lan966x->num_phys_ports; ++i) { in lan966x_fdma_get_max_mtu()
891 port = lan966x->ports[i]; in lan966x_fdma_get_max_mtu()
895 mtu = lan_rd(lan966x, DEV_MAC_MAXLEN_CFG(port->chip_port)); in lan966x_fdma_get_max_mtu()
903 static int lan966x_qsys_sw_status(struct lan966x *lan966x) in lan966x_qsys_sw_status() argument
905 return lan_rd(lan966x, QSYS_SW_STATUS(CPU_PORT)); in lan966x_qsys_sw_status()
908 static int lan966x_fdma_reload(struct lan966x *lan966x, int new_mtu) in lan966x_fdma_reload() argument
917 rx_dma = lan966x->rx.dma; in lan966x_fdma_reload()
918 rx_dcbs = lan966x->rx.dcbs; in lan966x_fdma_reload()
919 page_pool = lan966x->rx.page_pool; in lan966x_fdma_reload()
921 napi_synchronize(&lan966x->napi); in lan966x_fdma_reload()
922 napi_disable(&lan966x->napi); in lan966x_fdma_reload()
923 lan966x_fdma_stop_netdev(lan966x); in lan966x_fdma_reload()
925 lan966x_fdma_rx_disable(&lan966x->rx); in lan966x_fdma_reload()
926 lan966x_fdma_rx_free_pages(&lan966x->rx); in lan966x_fdma_reload()
927 lan966x->rx.page_order = round_up(new_mtu, PAGE_SIZE) / PAGE_SIZE - 1; in lan966x_fdma_reload()
928 lan966x->rx.max_mtu = new_mtu; in lan966x_fdma_reload()
929 err = lan966x_fdma_rx_alloc(&lan966x->rx); in lan966x_fdma_reload()
932 lan966x_fdma_rx_start(&lan966x->rx); in lan966x_fdma_reload()
936 dma_free_coherent(lan966x->dev, size, rx_dcbs, rx_dma); in lan966x_fdma_reload()
940 lan966x_fdma_wakeup_netdev(lan966x); in lan966x_fdma_reload()
941 napi_enable(&lan966x->napi); in lan966x_fdma_reload()
945 lan966x->rx.page_pool = page_pool; in lan966x_fdma_reload()
946 lan966x->rx.dma = rx_dma; in lan966x_fdma_reload()
947 lan966x->rx.dcbs = rx_dcbs; in lan966x_fdma_reload()
948 lan966x_fdma_rx_start(&lan966x->rx); in lan966x_fdma_reload()
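
lan966x_fdma_reload() re-creates the RX ring and page_pool for a new MTU but keeps the old resources aside so a failed allocation can fall back to them. A hedged sketch of that swap-and-fallback pattern, with a hypothetical RX state structure and helpers standing in for the lan966x_fdma_rx_* calls:

#include <linux/dma-mapping.h>
#include <net/page_pool/helpers.h>

/* Hypothetical RX state, standing in for struct lan966x_rx. */
struct example_rx {
        struct device *dev;
        struct page_pool *page_pool;
        dma_addr_t dma;
        void *dcbs;
        size_t dcbs_size;
        int max_mtu;
};

/* Hypothetical helpers, standing in for the lan966x_fdma_rx_* functions. */
void example_rx_disable(struct example_rx *rx);
void example_rx_free_pages(struct example_rx *rx);
int example_rx_alloc(struct example_rx *rx);
void example_rx_start(struct example_rx *rx);

static int example_rx_reload(struct example_rx *rx, int new_mtu)
{
        struct page_pool *old_pool = rx->page_pool;
        dma_addr_t old_dma = rx->dma;
        void *old_dcbs = rx->dcbs;
        size_t old_size = rx->dcbs_size;
        int err;

        example_rx_disable(rx);
        example_rx_free_pages(rx);

        rx->max_mtu = new_mtu;
        err = example_rx_alloc(rx);     /* new page_pool + DCB ring */
        if (err) {
                /* Allocation failed: restore the previous resources. */
                rx->page_pool = old_pool;
                rx->dma = old_dma;
                rx->dcbs = old_dcbs;
                example_rx_start(rx);
                return err;
        }

        example_rx_start(rx);

        /* Only now is it safe to drop the old ring and pool. */
        dma_free_coherent(rx->dev, old_size, old_dcbs, old_dma);
        page_pool_destroy(old_pool);

        return 0;
}
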
953 static int lan966x_fdma_get_max_frame(struct lan966x *lan966x) in lan966x_fdma_get_max_frame() argument
955 return lan966x_fdma_get_max_mtu(lan966x) + in lan966x_fdma_get_max_frame()
962 static int __lan966x_fdma_reload(struct lan966x *lan966x, int max_mtu) in __lan966x_fdma_reload() argument
970 lan966x, QSYS_SW_PORT_MODE(CPU_PORT)); in __lan966x_fdma_reload()
973 readx_poll_timeout(lan966x_qsys_sw_status, lan966x, in __lan966x_fdma_reload()
982 err = lan966x_fdma_reload(lan966x, max_mtu); in __lan966x_fdma_reload()
987 lan966x, QSYS_SW_PORT_MODE(CPU_PORT)); in __lan966x_fdma_reload()
992 int lan966x_fdma_change_mtu(struct lan966x *lan966x) in lan966x_fdma_change_mtu() argument
996 max_mtu = lan966x_fdma_get_max_frame(lan966x); in lan966x_fdma_change_mtu()
997 if (max_mtu == lan966x->rx.max_mtu) in lan966x_fdma_change_mtu()
1000 return __lan966x_fdma_reload(lan966x, max_mtu); in lan966x_fdma_change_mtu()
1003 int lan966x_fdma_reload_page_pool(struct lan966x *lan966x) in lan966x_fdma_reload_page_pool() argument
1007 max_mtu = lan966x_fdma_get_max_frame(lan966x); in lan966x_fdma_reload_page_pool()
1008 return __lan966x_fdma_reload(lan966x, max_mtu); in lan966x_fdma_reload_page_pool()
1011 void lan966x_fdma_netdev_init(struct lan966x *lan966x, struct net_device *dev) in lan966x_fdma_netdev_init() argument
1013 if (lan966x->fdma_ndev) in lan966x_fdma_netdev_init()
1016 lan966x->fdma_ndev = dev; in lan966x_fdma_netdev_init()
1017 netif_napi_add(dev, &lan966x->napi, lan966x_fdma_napi_poll); in lan966x_fdma_netdev_init()
1018 napi_enable(&lan966x->napi); in lan966x_fdma_netdev_init()
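
lan966x_fdma_netdev_init() registers a single NAPI context for the whole FDMA and attaches it to the first net_device that comes along (guarded by the fdma_ndev check above). A tiny sketch of that registration, using the 3-argument netif_napi_add() shown in the listing:

#include <linux/netdevice.h>

static void example_napi_attach(struct net_device *dev,
                                struct napi_struct *napi,
                                int (*poll)(struct napi_struct *, int))
{
        /* One NAPI instance services both the RX and TX FDMA channels. */
        netif_napi_add(dev, napi, poll);
        napi_enable(napi);
}
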
1021 void lan966x_fdma_netdev_deinit(struct lan966x *lan966x, struct net_device *dev) in lan966x_fdma_netdev_deinit() argument
1023 if (lan966x->fdma_ndev == dev) { in lan966x_fdma_netdev_deinit()
1024 netif_napi_del(&lan966x->napi); in lan966x_fdma_netdev_deinit()
1025 lan966x->fdma_ndev = NULL; in lan966x_fdma_netdev_deinit()
1029 int lan966x_fdma_init(struct lan966x *lan966x) in lan966x_fdma_init() argument
1033 if (!lan966x->fdma) in lan966x_fdma_init()
1036 lan966x->rx.lan966x = lan966x; in lan966x_fdma_init()
1037 lan966x->rx.channel_id = FDMA_XTR_CHANNEL; in lan966x_fdma_init()
1038 lan966x->rx.max_mtu = lan966x_fdma_get_max_frame(lan966x); in lan966x_fdma_init()
1039 lan966x->tx.lan966x = lan966x; in lan966x_fdma_init()
1040 lan966x->tx.channel_id = FDMA_INJ_CHANNEL; in lan966x_fdma_init()
1041 lan966x->tx.last_in_use = -1; in lan966x_fdma_init()
1043 err = lan966x_fdma_rx_alloc(&lan966x->rx); in lan966x_fdma_init()
1047 err = lan966x_fdma_tx_alloc(&lan966x->tx); in lan966x_fdma_init()
1049 lan966x_fdma_rx_free(&lan966x->rx); in lan966x_fdma_init()
1053 lan966x_fdma_rx_start(&lan966x->rx); in lan966x_fdma_init()
1058 void lan966x_fdma_deinit(struct lan966x *lan966x) in lan966x_fdma_deinit() argument
1060 if (!lan966x->fdma) in lan966x_fdma_deinit()
1063 lan966x_fdma_rx_disable(&lan966x->rx); in lan966x_fdma_deinit()
1064 lan966x_fdma_tx_disable(&lan966x->tx); in lan966x_fdma_deinit()
1066 napi_synchronize(&lan966x->napi); in lan966x_fdma_deinit()
1067 napi_disable(&lan966x->napi); in lan966x_fdma_deinit()
1069 lan966x_fdma_rx_free_pages(&lan966x->rx); in lan966x_fdma_deinit()
1070 lan966x_fdma_rx_free(&lan966x->rx); in lan966x_fdma_deinit()
1071 page_pool_destroy(lan966x->rx.page_pool); in lan966x_fdma_deinit()
1072 lan966x_fdma_tx_free(&lan966x->tx); in lan966x_fdma_deinit()