Lines matching full:lp

125  * @lp:		Pointer to axienet local structure
132 static inline u32 axienet_dma_in32(struct axienet_local *lp, off_t reg) in axienet_dma_in32() argument
134 return ioread32(lp->dma_regs + reg); in axienet_dma_in32()
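
The hits above show only the body of the read helper. For orientation, a minimal sketch of the register-access pair; the write counterpart axienet_dma_out32() appears throughout the hits below, and its body here is an assumption mirroring the read side:

static inline u32 axienet_dma_in32(struct axienet_local *lp, off_t reg)
{
	/* Memory-mapped AXI DMA register read; reg is an offset from
	 * the ioremapped DMA register base.
	 */
	return ioread32(lp->dma_regs + reg);
}

/* Assumed write counterpart, mirroring the read helper above. */
static inline void axienet_dma_out32(struct axienet_local *lp,
				     off_t reg, u32 value)
{
	iowrite32(value, lp->dma_regs + reg);
}
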
137 static void desc_set_phys_addr(struct axienet_local *lp, dma_addr_t addr, in desc_set_phys_addr() argument
141 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_set_phys_addr()
145 static dma_addr_t desc_get_phys_addr(struct axienet_local *lp, in desc_get_phys_addr() argument
150 if (lp->features & XAE_FEATURE_DMA_64BIT) in desc_get_phys_addr()
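
These two helpers split and merge a dma_addr_t across the descriptor's 32-bit phys and phys_msb words when the DMA core is synthesized with more than 32 address bits. A sketch reconstructed from the fragments (the double 16-bit shift is an assumption that avoids an undefined shift-by-32 when dma_addr_t is only 32 bits wide):

static void desc_set_phys_addr(struct axienet_local *lp, dma_addr_t addr,
			       struct axidma_bd *desc)
{
	desc->phys = lower_32_bits(addr);
	if (lp->features & XAE_FEATURE_DMA_64BIT)
		desc->phys_msb = upper_32_bits(addr);
}

static dma_addr_t desc_get_phys_addr(struct axienet_local *lp,
				     struct axidma_bd *desc)
{
	dma_addr_t ret = desc->phys;

	if (lp->features & XAE_FEATURE_DMA_64BIT)
		ret |= ((dma_addr_t)desc->phys_msb << 16) << 16;

	return ret;
}
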
167 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_release() local
170 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
171 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_release()
172 lp->tx_bd_v, in axienet_dma_bd_release()
173 lp->tx_bd_p); in axienet_dma_bd_release()
175 if (!lp->rx_bd_v) in axienet_dma_bd_release()
178 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_release()
184 if (!lp->rx_bd_v[i].skb) in axienet_dma_bd_release()
187 dev_kfree_skb(lp->rx_bd_v[i].skb); in axienet_dma_bd_release()
193 if (lp->rx_bd_v[i].cntrl) { in axienet_dma_bd_release()
194 phys = desc_get_phys_addr(lp, &lp->rx_bd_v[i]); in axienet_dma_bd_release()
195 dma_unmap_single(lp->dev, phys, in axienet_dma_bd_release()
196 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_release()
200 dma_free_coherent(lp->dev, in axienet_dma_bd_release()
201 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_release()
202 lp->rx_bd_v, in axienet_dma_bd_release()
203 lp->rx_bd_p); in axienet_dma_bd_release()
208 * @lp: Pointer to the axienet_local structure
211 static u32 axienet_usec_to_timer(struct axienet_local *lp, u32 coalesce_usec) in axienet_usec_to_timer() argument
216 if (lp->axi_clk) in axienet_usec_to_timer()
217 clk_rate = clk_get_rate(lp->axi_clk); in axienet_usec_to_timer()
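
axienet_usec_to_timer() converts a microsecond coalesce setting into DMA delay-timer ticks. One timeout interval is 125 SG-clock periods, so ticks come out to roughly usec * clk_rate / 125 MHz, clamped to the 16-bit timer field. A sketch; the 125 MHz fallback used when no clock is registered is an assumption:

static u32 axienet_usec_to_timer(struct axienet_local *lp, u32 coalesce_usec)
{
	u32 result;
	u64 clk_rate = 125000000; /* assumed fallback if no clock is set */

	if (lp->axi_clk)
		clk_rate = clk_get_rate(lp->axi_clk);

	/* 1 timeout interval = 125 * (SG clock period) */
	result = DIV64_U64_ROUND_CLOSEST((u64)coalesce_usec * clk_rate,
					 (u64)125000000);
	if (result > 0xffff)
		result = 0xffff;

	return result;
}
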
230 * @lp: Pointer to the axienet_local structure
232 static void axienet_dma_start(struct axienet_local *lp) in axienet_dma_start() argument
235 lp->rx_dma_cr = (lp->coalesce_count_rx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
240 if (lp->coalesce_count_rx > 1) in axienet_dma_start()
241 lp->rx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_rx) in axienet_dma_start()
244 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
247 lp->tx_dma_cr = (lp->coalesce_count_tx << XAXIDMA_COALESCE_SHIFT) | in axienet_dma_start()
252 if (lp->coalesce_count_tx > 1) in axienet_dma_start()
253 lp->tx_dma_cr |= (axienet_usec_to_timer(lp, lp->coalesce_usec_tx) in axienet_dma_start()
256 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
261 axienet_dma_out_addr(lp, XAXIDMA_RX_CDESC_OFFSET, lp->rx_bd_p); in axienet_dma_start()
262 lp->rx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
263 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_dma_start()
264 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, lp->rx_bd_p + in axienet_dma_start()
265 (sizeof(*lp->rx_bd_v) * (lp->rx_bd_num - 1))); in axienet_dma_start()
271 axienet_dma_out_addr(lp, XAXIDMA_TX_CDESC_OFFSET, lp->tx_bd_p); in axienet_dma_start()
272 lp->tx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK; in axienet_dma_start()
273 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_dma_start()
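
The hits above show the channel start sequence: compose the control word (coalesce count, optional delay timer), write it, program the current-descriptor pointer, set RUNSTOP, then write the tail pointer; the tail-pointer write is what actually starts the engine fetching descriptors. The RX side, condensed from the lines above with the ordering annotated:

/* 1. Point the channel at the first buffer descriptor. */
axienet_dma_out_addr(lp, XAXIDMA_RX_CDESC_OFFSET, lp->rx_bd_p);
/* 2. Set RUNSTOP so the channel leaves the halted state. */
lp->rx_dma_cr |= XAXIDMA_CR_RUNSTOP_MASK;
axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr);
/* 3. Writing the tail pointer (last BD in the ring) kicks the DMA. */
axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, lp->rx_bd_p +
		     (sizeof(*lp->rx_bd_v) * (lp->rx_bd_num - 1)));
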
290 struct axienet_local *lp = netdev_priv(ndev); in axienet_dma_bd_init() local
293 lp->tx_bd_ci = 0; in axienet_dma_bd_init()
294 lp->tx_bd_tail = 0; in axienet_dma_bd_init()
295 lp->rx_bd_ci = 0; in axienet_dma_bd_init()
298 lp->tx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
299 sizeof(*lp->tx_bd_v) * lp->tx_bd_num, in axienet_dma_bd_init()
300 &lp->tx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
301 if (!lp->tx_bd_v) in axienet_dma_bd_init()
304 lp->rx_bd_v = dma_alloc_coherent(lp->dev, in axienet_dma_bd_init()
305 sizeof(*lp->rx_bd_v) * lp->rx_bd_num, in axienet_dma_bd_init()
306 &lp->rx_bd_p, GFP_KERNEL); in axienet_dma_bd_init()
307 if (!lp->rx_bd_v) in axienet_dma_bd_init()
310 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_bd_init()
311 dma_addr_t addr = lp->tx_bd_p + in axienet_dma_bd_init()
312 sizeof(*lp->tx_bd_v) * in axienet_dma_bd_init()
313 ((i + 1) % lp->tx_bd_num); in axienet_dma_bd_init()
315 lp->tx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
316 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
317 lp->tx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
320 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_bd_init()
323 addr = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * in axienet_dma_bd_init()
324 ((i + 1) % lp->rx_bd_num); in axienet_dma_bd_init()
325 lp->rx_bd_v[i].next = lower_32_bits(addr); in axienet_dma_bd_init()
326 if (lp->features & XAE_FEATURE_DMA_64BIT) in axienet_dma_bd_init()
327 lp->rx_bd_v[i].next_msb = upper_32_bits(addr); in axienet_dma_bd_init()
329 skb = netdev_alloc_skb_ip_align(ndev, lp->max_frm_size); in axienet_dma_bd_init()
333 lp->rx_bd_v[i].skb = skb; in axienet_dma_bd_init()
334 addr = dma_map_single(lp->dev, skb->data, in axienet_dma_bd_init()
335 lp->max_frm_size, DMA_FROM_DEVICE); in axienet_dma_bd_init()
336 if (dma_mapping_error(lp->dev, addr)) { in axienet_dma_bd_init()
340 desc_set_phys_addr(lp, addr, &lp->rx_bd_v[i]); in axienet_dma_bd_init()
342 lp->rx_bd_v[i].cntrl = lp->max_frm_size; in axienet_dma_bd_init()
345 axienet_dma_start(lp); in axienet_dma_bd_init()
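
The next-pointer arithmetic in axienet_dma_bd_init() links the descriptors into a circular ring: each BD's next field holds the physical address of BD (i + 1) mod ring size, so the last descriptor wraps back to the first. Isolated, for the TX ring:

/* Link BD i to BD (i + 1) % tx_bd_num; the modulo makes the final
 * descriptor point back at the first, forming a circular ring.
 */
dma_addr_t addr = lp->tx_bd_p +
		  sizeof(*lp->tx_bd_v) * ((i + 1) % lp->tx_bd_num);

lp->tx_bd_v[i].next = lower_32_bits(addr);
if (lp->features & XAE_FEATURE_DMA_64BIT)
	lp->tx_bd_v[i].next_msb = upper_32_bits(addr);
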
364 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_mac_address() local
372 axienet_iow(lp, XAE_UAW0_OFFSET, in axienet_set_mac_address()
377 axienet_iow(lp, XAE_UAW1_OFFSET, in axienet_set_mac_address()
378 (((axienet_ior(lp, XAE_UAW1_OFFSET)) & in axienet_set_mac_address()
417 struct axienet_local *lp = netdev_priv(ndev); in axienet_set_multicast_list() local
426 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
428 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
433 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
435 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
449 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
452 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
453 axienet_iow(lp, XAE_AF0_OFFSET, af0reg); in axienet_set_multicast_list()
454 axienet_iow(lp, XAE_AF1_OFFSET, af1reg); in axienet_set_multicast_list()
455 axienet_iow(lp, XAE_FFE_OFFSET, 1); in axienet_set_multicast_list()
459 reg = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_set_multicast_list()
462 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
467 reg = axienet_ior(lp, XAE_FMI_OFFSET) & 0xFFFFFF00; in axienet_set_multicast_list()
469 axienet_iow(lp, XAE_FMI_OFFSET, reg); in axienet_set_multicast_list()
470 axienet_iow(lp, XAE_FFE_OFFSET, 0); in axienet_set_multicast_list()
488 struct axienet_local *lp = netdev_priv(ndev); in axienet_setoptions() local
492 reg = ((axienet_ior(lp, tp->reg)) & ~(tp->m_or)); in axienet_setoptions()
495 axienet_iow(lp, tp->reg, reg); in axienet_setoptions()
499 lp->options |= options; in axienet_setoptions()
502 static int __axienet_device_reset(struct axienet_local *lp) in __axienet_device_reset() argument
514 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, XAXIDMA_CR_RESET_MASK); in __axienet_device_reset()
517 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
520 dev_err(lp->dev, "%s: DMA reset timeout!\n", __func__); in __axienet_device_reset()
527 DELAY_OF_ONE_MILLISEC, 50000, false, lp, in __axienet_device_reset()
530 dev_err(lp->dev, "%s: timeout waiting for PhyRstCmplt\n", __func__); in __axienet_device_reset()
539 * @lp: Pointer to the axienet_local structure
541 static void axienet_dma_stop(struct axienet_local *lp) in axienet_dma_stop() argument
546 cr = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_dma_stop()
548 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_dma_stop()
549 synchronize_irq(lp->rx_irq); in axienet_dma_stop()
551 cr = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_dma_stop()
553 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_dma_stop()
554 synchronize_irq(lp->tx_irq); in axienet_dma_stop()
557 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
560 sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_dma_stop()
563 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
566 sr = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_dma_stop()
570 axienet_lock_mii(lp); in axienet_dma_stop()
571 __axienet_device_reset(lp); in axienet_dma_stop()
572 axienet_unlock_mii(lp); in axienet_dma_stop()
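
Reading the axienet_dma_stop() hits in order: clear RUNSTOP and the interrupt enables in each channel's CR, synchronize_irq() so no handler is still in flight, poll each status register until the channel reports halted, then reset as a backstop. The halt-poll loop, reconstructed; the 5-iteration/20 ms bounds are assumptions:

/* Give the RX channel a chance to halt gracefully before resetting. */
sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);
for (count = 0; !(sr & XAXIDMA_SR_HALT_MASK) && count < 5; ++count) {
	msleep(20);
	sr = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET);
}
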
590 struct axienet_local *lp = netdev_priv(ndev); in axienet_device_reset() local
593 ret = __axienet_device_reset(lp); in axienet_device_reset()
597 lp->max_frm_size = XAE_MAX_VLAN_FRAME_SIZE; in axienet_device_reset()
598 lp->options |= XAE_OPTION_VLAN; in axienet_device_reset()
599 lp->options &= (~XAE_OPTION_JUMBO); in axienet_device_reset()
603 lp->max_frm_size = ndev->mtu + VLAN_ETH_HLEN + in axienet_device_reset()
606 if (lp->max_frm_size <= lp->rxmem) in axienet_device_reset()
607 lp->options |= XAE_OPTION_JUMBO; in axienet_device_reset()
617 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_device_reset()
619 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_device_reset()
621 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_device_reset()
623 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_device_reset()
624 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_device_reset()
627 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_device_reset()
632 axienet_setoptions(ndev, lp->options & in axienet_device_reset()
636 axienet_setoptions(ndev, lp->options); in axienet_device_reset()
645 * @lp: Pointer to the axienet_local structure
657 static int axienet_free_tx_chain(struct axienet_local *lp, u32 first_bd, in axienet_free_tx_chain() argument
666 cur_p = &lp->tx_bd_v[(first_bd + i) % lp->tx_bd_num]; in axienet_free_tx_chain()
677 phys = desc_get_phys_addr(lp, cur_p); in axienet_free_tx_chain()
678 dma_unmap_single(lp->dev, phys, in axienet_free_tx_chain()
702 lp->tx_bd_ci += i; in axienet_free_tx_chain()
703 if (lp->tx_bd_ci >= lp->tx_bd_num) in axienet_free_tx_chain()
704 lp->tx_bd_ci %= lp->tx_bd_num; in axienet_free_tx_chain()
712 * @lp: Pointer to the axienet_local structure
723 static inline int axienet_check_tx_bd_space(struct axienet_local *lp, in axienet_check_tx_bd_space() argument
730 cur_p = &lp->tx_bd_v[(READ_ONCE(lp->tx_bd_tail) + num_frag) % in axienet_check_tx_bd_space()
731 lp->tx_bd_num]; in axienet_check_tx_bd_space()
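
axienet_check_tx_bd_space() peeks num_frag descriptors ahead of the tail: if that BD's cntrl word is still non-zero, it has not yet been completed and reclaimed, so the ring lacks room for a new frame. Reconstructed from the fragments (the rmb() pairing with the completion path is an assumption):

static inline int axienet_check_tx_bd_space(struct axienet_local *lp,
					    int num_frag)
{
	struct axidma_bd *cur_p;

	/* Make sure we see the cntrl clears done when reclaiming BDs. */
	rmb();
	cur_p = &lp->tx_bd_v[(READ_ONCE(lp->tx_bd_tail) + num_frag) %
			     lp->tx_bd_num];
	if (cur_p->cntrl)
		return NETDEV_TX_BUSY;
	return 0;
}
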
753 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_tx); in axienet_tx_poll() local
754 struct net_device *ndev = lp->ndev; in axienet_tx_poll()
758 packets = axienet_free_tx_chain(lp, lp->tx_bd_ci, lp->tx_bd_num, false, in axienet_tx_poll()
762 u64_stats_update_begin(&lp->tx_stat_sync); in axienet_tx_poll()
763 u64_stats_add(&lp->tx_packets, packets); in axienet_tx_poll()
764 u64_stats_add(&lp->tx_bytes, size); in axienet_tx_poll()
765 u64_stats_update_end(&lp->tx_stat_sync); in axienet_tx_poll()
770 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_tx_poll()
779 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, lp->tx_dma_cr); in axienet_tx_poll()
807 struct axienet_local *lp = netdev_priv(ndev); in axienet_start_xmit() local
810 orig_tail_ptr = lp->tx_bd_tail; in axienet_start_xmit()
814 cur_p = &lp->tx_bd_v[orig_tail_ptr]; in axienet_start_xmit()
816 if (axienet_check_tx_bd_space(lp, num_frag + 1)) { in axienet_start_xmit()
828 if (lp->features & XAE_FEATURE_FULL_TX_CSUM) { in axienet_start_xmit()
831 } else if (lp->features & XAE_FEATURE_PARTIAL_TX_CSUM) { in axienet_start_xmit()
842 phys = dma_map_single(lp->dev, skb->data, in axienet_start_xmit()
844 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
851 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
855 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
857 cur_p = &lp->tx_bd_v[new_tail_ptr]; in axienet_start_xmit()
859 phys = dma_map_single(lp->dev, in axienet_start_xmit()
863 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_start_xmit()
867 axienet_free_tx_chain(lp, orig_tail_ptr, ii + 1, in axienet_start_xmit()
872 desc_set_phys_addr(lp, phys, cur_p); in axienet_start_xmit()
879 tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * new_tail_ptr; in axienet_start_xmit()
880 if (++new_tail_ptr >= lp->tx_bd_num) in axienet_start_xmit()
882 WRITE_ONCE(lp->tx_bd_tail, new_tail_ptr); in axienet_start_xmit()
885 axienet_dma_out_addr(lp, XAXIDMA_TX_TDESC_OFFSET, tail_p); in axienet_start_xmit()
888 if (axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) { in axienet_start_xmit()
895 if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) in axienet_start_xmit()
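
The last hits in axienet_start_xmit() are the standard race-free queue-stop idiom: stop the queue when a maximally fragmented skb no longer fits, then re-check, because the completion path may have freed descriptors in the meantime. Expanded with the barrier the pattern relies on (the smp_mb() is an assumption; the check/stop/recheck structure is visible in the hits):

if (axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1)) {
	netif_stop_queue(ndev);

	/* Pairs with a barrier in the TX completion path; without it
	 * the completion could miss the stopped queue and never wake it.
	 */
	smp_mb();

	/* Space may have just been freed - recheck and undo the stop. */
	if (!axienet_check_tx_bd_space(lp, MAX_SKB_FRAGS + 1))
		netif_wake_queue(ndev);
}
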
918 struct axienet_local *lp = container_of(napi, struct axienet_local, napi_rx); in axienet_rx_poll() local
920 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
939 phys = desc_get_phys_addr(lp, cur_p); in axienet_rx_poll()
940 dma_unmap_single(lp->dev, phys, lp->max_frm_size, in axienet_rx_poll()
944 skb->protocol = eth_type_trans(skb, lp->ndev); in axienet_rx_poll()
949 if (lp->features & XAE_FEATURE_FULL_RX_CSUM) { in axienet_rx_poll()
956 } else if ((lp->features & XAE_FEATURE_PARTIAL_RX_CSUM) != 0 && in axienet_rx_poll()
969 new_skb = napi_alloc_skb(napi, lp->max_frm_size); in axienet_rx_poll()
973 phys = dma_map_single(lp->dev, new_skb->data, in axienet_rx_poll()
974 lp->max_frm_size, in axienet_rx_poll()
976 if (unlikely(dma_mapping_error(lp->dev, phys))) { in axienet_rx_poll()
978 netdev_err(lp->ndev, "RX DMA mapping error\n"); in axienet_rx_poll()
982 desc_set_phys_addr(lp, phys, cur_p); in axienet_rx_poll()
984 cur_p->cntrl = lp->max_frm_size; in axienet_rx_poll()
991 tail_p = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * lp->rx_bd_ci; in axienet_rx_poll()
993 if (++lp->rx_bd_ci >= lp->rx_bd_num) in axienet_rx_poll()
994 lp->rx_bd_ci = 0; in axienet_rx_poll()
995 cur_p = &lp->rx_bd_v[lp->rx_bd_ci]; in axienet_rx_poll()
998 u64_stats_update_begin(&lp->rx_stat_sync); in axienet_rx_poll()
999 u64_stats_add(&lp->rx_packets, packets); in axienet_rx_poll()
1000 u64_stats_add(&lp->rx_bytes, size); in axienet_rx_poll()
1001 u64_stats_update_end(&lp->rx_stat_sync); in axienet_rx_poll()
1004 axienet_dma_out_addr(lp, XAXIDMA_RX_TDESC_OFFSET, tail_p); in axienet_rx_poll()
1011 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr); in axienet_rx_poll()
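
The final axienet_rx_poll() hit re-enables RX interrupts by rewriting the cached control word, but only once NAPI polling is done. In context (the budget comparison and the napi_complete_done() call are assumptions from the usual NAPI contract):

if (packets < budget && napi_complete_done(napi, packets)) {
	/* Re-enable RX completion interrupts; if frames arrived while
	 * they were masked, this raises an interrupt immediately.
	 */
	axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, lp->rx_dma_cr);
}
return packets;
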
1030 struct axienet_local *lp = netdev_priv(ndev); in axienet_tx_irq() local
1032 status = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_tx_irq()
1037 axienet_dma_out32(lp, XAXIDMA_TX_SR_OFFSET, status); in axienet_tx_irq()
1042 (lp->tx_bd_v[lp->tx_bd_ci]).phys_msb, in axienet_tx_irq()
1043 (lp->tx_bd_v[lp->tx_bd_ci]).phys); in axienet_tx_irq()
1044 schedule_work(&lp->dma_err_task); in axienet_tx_irq()
1049 u32 cr = lp->tx_dma_cr; in axienet_tx_irq()
1052 if (napi_schedule_prep(&lp->napi_tx)) { in axienet_tx_irq()
1053 axienet_dma_out32(lp, XAXIDMA_TX_CR_OFFSET, cr); in axienet_tx_irq()
1054 __napi_schedule(&lp->napi_tx); in axienet_tx_irq()
1075 struct axienet_local *lp = netdev_priv(ndev); in axienet_rx_irq() local
1077 status = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_rx_irq()
1082 axienet_dma_out32(lp, XAXIDMA_RX_SR_OFFSET, status); in axienet_rx_irq()
1087 (lp->rx_bd_v[lp->rx_bd_ci]).phys_msb, in axienet_rx_irq()
1088 (lp->rx_bd_v[lp->rx_bd_ci]).phys); in axienet_rx_irq()
1089 schedule_work(&lp->dma_err_task); in axienet_rx_irq()
1094 u32 cr = lp->rx_dma_cr; in axienet_rx_irq()
1097 if (napi_schedule_prep(&lp->napi_rx)) { in axienet_rx_irq()
1098 axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr); in axienet_rx_irq()
1099 __napi_schedule(&lp->napi_rx); in axienet_rx_irq()
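
Both DMA interrupt handlers use the same handoff: acknowledge the status, and on normal completion mask the IOC and delay sources in a local copy of the cached CR before scheduling NAPI, so no further completion interrupts fire until the poll routine rewrites the full CR. For the RX side (the mask line is an assumption; the scheduling sequence matches the hits):

u32 cr = lp->rx_dma_cr;

/* Mask completion/delay interrupts until axienet_rx_poll() re-enables. */
cr &= ~(XAXIDMA_IRQ_IOC_MASK | XAXIDMA_IRQ_DELAY_MASK);
if (napi_schedule_prep(&lp->napi_rx)) {
	axienet_dma_out32(lp, XAXIDMA_RX_CR_OFFSET, cr);
	__napi_schedule(&lp->napi_rx);
}
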
1118 struct axienet_local *lp = netdev_priv(ndev); in axienet_eth_irq() local
1121 pending = axienet_ior(lp, XAE_IP_OFFSET); in axienet_eth_irq()
1131 axienet_iow(lp, XAE_IS_OFFSET, pending); in axienet_eth_irq()
1153 struct axienet_local *lp = netdev_priv(ndev); in axienet_open() local
1161 axienet_lock_mii(lp); in axienet_open()
1163 axienet_unlock_mii(lp); in axienet_open()
1165 ret = phylink_of_phy_connect(lp->phylink, lp->dev->of_node, 0); in axienet_open()
1167 dev_err(lp->dev, "phylink_of_phy_connect() failed: %d\n", ret); in axienet_open()
1171 phylink_start(lp->phylink); in axienet_open()
1174 lp->stopping = false; in axienet_open()
1175 INIT_WORK(&lp->dma_err_task, axienet_dma_err_handler); in axienet_open()
1177 napi_enable(&lp->napi_rx); in axienet_open()
1178 napi_enable(&lp->napi_tx); in axienet_open()
1181 ret = request_irq(lp->tx_irq, axienet_tx_irq, IRQF_SHARED, in axienet_open()
1186 ret = request_irq(lp->rx_irq, axienet_rx_irq, IRQF_SHARED, in axienet_open()
1191 if (lp->eth_irq > 0) { in axienet_open()
1192 ret = request_irq(lp->eth_irq, axienet_eth_irq, IRQF_SHARED, in axienet_open()
1201 free_irq(lp->rx_irq, ndev); in axienet_open()
1203 free_irq(lp->tx_irq, ndev); in axienet_open()
1205 napi_disable(&lp->napi_tx); in axienet_open()
1206 napi_disable(&lp->napi_rx); in axienet_open()
1207 phylink_stop(lp->phylink); in axienet_open()
1208 phylink_disconnect_phy(lp->phylink); in axienet_open()
1209 cancel_work_sync(&lp->dma_err_task); in axienet_open()
1210 dev_err(lp->dev, "request_irq() failed\n"); in axienet_open()
1226 struct axienet_local *lp = netdev_priv(ndev); in axienet_stop() local
1230 WRITE_ONCE(lp->stopping, true); in axienet_stop()
1231 flush_work(&lp->dma_err_task); in axienet_stop()
1233 napi_disable(&lp->napi_tx); in axienet_stop()
1234 napi_disable(&lp->napi_rx); in axienet_stop()
1236 phylink_stop(lp->phylink); in axienet_stop()
1237 phylink_disconnect_phy(lp->phylink); in axienet_stop()
1239 axienet_setoptions(ndev, lp->options & in axienet_stop()
1242 axienet_dma_stop(lp); in axienet_stop()
1244 axienet_iow(lp, XAE_IE_OFFSET, 0); in axienet_stop()
1246 cancel_work_sync(&lp->dma_err_task); in axienet_stop()
1248 if (lp->eth_irq > 0) in axienet_stop()
1249 free_irq(lp->eth_irq, ndev); in axienet_stop()
1250 free_irq(lp->tx_irq, ndev); in axienet_stop()
1251 free_irq(lp->rx_irq, ndev); in axienet_stop()
1270 struct axienet_local *lp = netdev_priv(ndev); in axienet_change_mtu() local
1276 XAE_TRL_SIZE) > lp->rxmem) in axienet_change_mtu()
1294 struct axienet_local *lp = netdev_priv(ndev); in axienet_poll_controller() local
1295 disable_irq(lp->tx_irq); in axienet_poll_controller()
1296 disable_irq(lp->rx_irq); in axienet_poll_controller()
1297 axienet_rx_irq(lp->tx_irq, ndev); in axienet_poll_controller()
1298 axienet_tx_irq(lp->rx_irq, ndev); in axienet_poll_controller()
1299 enable_irq(lp->tx_irq); in axienet_poll_controller()
1300 enable_irq(lp->rx_irq); in axienet_poll_controller()
1306 struct axienet_local *lp = netdev_priv(dev); in axienet_ioctl() local
1311 return phylink_mii_ioctl(lp->phylink, rq, cmd); in axienet_ioctl()
1317 struct axienet_local *lp = netdev_priv(dev); in axienet_get_stats64() local
1323 start = u64_stats_fetch_begin(&lp->rx_stat_sync); in axienet_get_stats64()
1324 stats->rx_packets = u64_stats_read(&lp->rx_packets); in axienet_get_stats64()
1325 stats->rx_bytes = u64_stats_read(&lp->rx_bytes); in axienet_get_stats64()
1326 } while (u64_stats_fetch_retry(&lp->rx_stat_sync, start)); in axienet_get_stats64()
1329 start = u64_stats_fetch_begin(&lp->tx_stat_sync); in axienet_get_stats64()
1330 stats->tx_packets = u64_stats_read(&lp->tx_packets); in axienet_get_stats64()
1331 stats->tx_bytes = u64_stats_read(&lp->tx_bytes); in axienet_get_stats64()
1332 } while (u64_stats_fetch_retry(&lp->tx_stat_sync, start)); in axienet_get_stats64()
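
axienet_get_stats64() reads the 64-bit counters with the u64_stats seqcount pattern: retry the read until no writer (the NAPI poll paths bracketed by u64_stats_update_begin/end above) raced with it. The RX half, reconstructed:

unsigned int start;

do {
	start = u64_stats_fetch_begin(&lp->rx_stat_sync);
	stats->rx_packets = u64_stats_read(&lp->rx_packets);
	stats->rx_bytes = u64_stats_read(&lp->rx_bytes);
} while (u64_stats_fetch_retry(&lp->rx_stat_sync, start));
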
1395 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_regs() local
1401 data[0] = axienet_ior(lp, XAE_RAF_OFFSET); in axienet_ethtools_get_regs()
1402 data[1] = axienet_ior(lp, XAE_TPF_OFFSET); in axienet_ethtools_get_regs()
1403 data[2] = axienet_ior(lp, XAE_IFGP_OFFSET); in axienet_ethtools_get_regs()
1404 data[3] = axienet_ior(lp, XAE_IS_OFFSET); in axienet_ethtools_get_regs()
1405 data[4] = axienet_ior(lp, XAE_IP_OFFSET); in axienet_ethtools_get_regs()
1406 data[5] = axienet_ior(lp, XAE_IE_OFFSET); in axienet_ethtools_get_regs()
1407 data[6] = axienet_ior(lp, XAE_TTAG_OFFSET); in axienet_ethtools_get_regs()
1408 data[7] = axienet_ior(lp, XAE_RTAG_OFFSET); in axienet_ethtools_get_regs()
1409 data[8] = axienet_ior(lp, XAE_UAWL_OFFSET); in axienet_ethtools_get_regs()
1410 data[9] = axienet_ior(lp, XAE_UAWU_OFFSET); in axienet_ethtools_get_regs()
1411 data[10] = axienet_ior(lp, XAE_TPID0_OFFSET); in axienet_ethtools_get_regs()
1412 data[11] = axienet_ior(lp, XAE_TPID1_OFFSET); in axienet_ethtools_get_regs()
1413 data[12] = axienet_ior(lp, XAE_PPST_OFFSET); in axienet_ethtools_get_regs()
1414 data[13] = axienet_ior(lp, XAE_RCW0_OFFSET); in axienet_ethtools_get_regs()
1415 data[14] = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_ethtools_get_regs()
1416 data[15] = axienet_ior(lp, XAE_TC_OFFSET); in axienet_ethtools_get_regs()
1417 data[16] = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_ethtools_get_regs()
1418 data[17] = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_ethtools_get_regs()
1419 data[18] = axienet_ior(lp, XAE_PHYC_OFFSET); in axienet_ethtools_get_regs()
1420 data[19] = axienet_ior(lp, XAE_MDIO_MC_OFFSET); in axienet_ethtools_get_regs()
1421 data[20] = axienet_ior(lp, XAE_MDIO_MCR_OFFSET); in axienet_ethtools_get_regs()
1422 data[21] = axienet_ior(lp, XAE_MDIO_MWD_OFFSET); in axienet_ethtools_get_regs()
1423 data[22] = axienet_ior(lp, XAE_MDIO_MRD_OFFSET); in axienet_ethtools_get_regs()
1424 data[27] = axienet_ior(lp, XAE_UAW0_OFFSET); in axienet_ethtools_get_regs()
1425 data[28] = axienet_ior(lp, XAE_UAW1_OFFSET); in axienet_ethtools_get_regs()
1426 data[29] = axienet_ior(lp, XAE_FMI_OFFSET); in axienet_ethtools_get_regs()
1427 data[30] = axienet_ior(lp, XAE_AF0_OFFSET); in axienet_ethtools_get_regs()
1428 data[31] = axienet_ior(lp, XAE_AF1_OFFSET); in axienet_ethtools_get_regs()
1429 data[32] = axienet_dma_in32(lp, XAXIDMA_TX_CR_OFFSET); in axienet_ethtools_get_regs()
1430 data[33] = axienet_dma_in32(lp, XAXIDMA_TX_SR_OFFSET); in axienet_ethtools_get_regs()
1431 data[34] = axienet_dma_in32(lp, XAXIDMA_TX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1432 data[35] = axienet_dma_in32(lp, XAXIDMA_TX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1433 data[36] = axienet_dma_in32(lp, XAXIDMA_RX_CR_OFFSET); in axienet_ethtools_get_regs()
1434 data[37] = axienet_dma_in32(lp, XAXIDMA_RX_SR_OFFSET); in axienet_ethtools_get_regs()
1435 data[38] = axienet_dma_in32(lp, XAXIDMA_RX_CDESC_OFFSET); in axienet_ethtools_get_regs()
1436 data[39] = axienet_dma_in32(lp, XAXIDMA_RX_TDESC_OFFSET); in axienet_ethtools_get_regs()
1445 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_ringparam() local
1451 ering->rx_pending = lp->rx_bd_num; in axienet_ethtools_get_ringparam()
1454 ering->tx_pending = lp->tx_bd_num; in axienet_ethtools_get_ringparam()
1463 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_ringparam() local
1475 lp->rx_bd_num = ering->rx_pending; in axienet_ethtools_set_ringparam()
1476 lp->tx_bd_num = ering->tx_pending; in axienet_ethtools_set_ringparam()
1493 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_pauseparam() local
1495 phylink_ethtool_get_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_get_pauseparam()
1514 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_pauseparam() local
1516 return phylink_ethtool_set_pauseparam(lp->phylink, epauseparm); in axienet_ethtools_set_pauseparam()
1538 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_coalesce() local
1540 ecoalesce->rx_max_coalesced_frames = lp->coalesce_count_rx; in axienet_ethtools_get_coalesce()
1541 ecoalesce->rx_coalesce_usecs = lp->coalesce_usec_rx; in axienet_ethtools_get_coalesce()
1542 ecoalesce->tx_max_coalesced_frames = lp->coalesce_count_tx; in axienet_ethtools_get_coalesce()
1543 ecoalesce->tx_coalesce_usecs = lp->coalesce_usec_tx; in axienet_ethtools_get_coalesce()
1566 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_coalesce() local
1581 lp->coalesce_count_rx = ecoalesce->rx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1583 lp->coalesce_usec_rx = ecoalesce->rx_coalesce_usecs; in axienet_ethtools_set_coalesce()
1585 lp->coalesce_count_tx = ecoalesce->tx_max_coalesced_frames; in axienet_ethtools_set_coalesce()
1587 lp->coalesce_usec_tx = ecoalesce->tx_coalesce_usecs; in axienet_ethtools_set_coalesce()
1596 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_get_link_ksettings() local
1598 return phylink_ethtool_ksettings_get(lp->phylink, cmd); in axienet_ethtools_get_link_ksettings()
1605 struct axienet_local *lp = netdev_priv(ndev); in axienet_ethtools_set_link_ksettings() local
1607 return phylink_ethtool_ksettings_set(lp->phylink, cmd); in axienet_ethtools_set_link_ksettings()
1612 struct axienet_local *lp = netdev_priv(dev); in axienet_ethtools_nway_reset() local
1614 return phylink_ethtool_nway_reset(lp->phylink); in axienet_ethtools_nway_reset()
1662 struct axienet_local *lp = netdev_priv(ndev); in axienet_pcs_config() local
1665 if (lp->switch_x_sgmii) { in axienet_pcs_config()
1695 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_select_pcs() local
1699 return &lp->pcs; in axienet_mac_select_pcs()
1724 struct axienet_local *lp = netdev_priv(ndev); in axienet_mac_link_up() local
1727 emmc_reg = axienet_ior(lp, XAE_EMMC_OFFSET); in axienet_mac_link_up()
1746 axienet_iow(lp, XAE_EMMC_OFFSET, emmc_reg); in axienet_mac_link_up()
1748 fcc_reg = axienet_ior(lp, XAE_FCC_OFFSET); in axienet_mac_link_up()
1757 axienet_iow(lp, XAE_FCC_OFFSET, fcc_reg); in axienet_mac_link_up()
1779 struct axienet_local *lp = container_of(work, struct axienet_local, in axienet_dma_err_handler() local
1781 struct net_device *ndev = lp->ndev; in axienet_dma_err_handler()
1784 if (READ_ONCE(lp->stopping)) in axienet_dma_err_handler()
1787 napi_disable(&lp->napi_tx); in axienet_dma_err_handler()
1788 napi_disable(&lp->napi_rx); in axienet_dma_err_handler()
1790 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
1793 axienet_dma_stop(lp); in axienet_dma_err_handler()
1795 for (i = 0; i < lp->tx_bd_num; i++) { in axienet_dma_err_handler()
1796 cur_p = &lp->tx_bd_v[i]; in axienet_dma_err_handler()
1798 dma_addr_t addr = desc_get_phys_addr(lp, cur_p); in axienet_dma_err_handler()
1800 dma_unmap_single(lp->dev, addr, in axienet_dma_err_handler()
1819 for (i = 0; i < lp->rx_bd_num; i++) { in axienet_dma_err_handler()
1820 cur_p = &lp->rx_bd_v[i]; in axienet_dma_err_handler()
1829 lp->tx_bd_ci = 0; in axienet_dma_err_handler()
1830 lp->tx_bd_tail = 0; in axienet_dma_err_handler()
1831 lp->rx_bd_ci = 0; in axienet_dma_err_handler()
1833 axienet_dma_start(lp); in axienet_dma_err_handler()
1835 axienet_status = axienet_ior(lp, XAE_RCW1_OFFSET); in axienet_dma_err_handler()
1837 axienet_iow(lp, XAE_RCW1_OFFSET, axienet_status); in axienet_dma_err_handler()
1839 axienet_status = axienet_ior(lp, XAE_IP_OFFSET); in axienet_dma_err_handler()
1841 axienet_iow(lp, XAE_IS_OFFSET, XAE_INT_RXRJECT_MASK); in axienet_dma_err_handler()
1842 axienet_iow(lp, XAE_IE_OFFSET, lp->eth_irq > 0 ? in axienet_dma_err_handler()
1844 axienet_iow(lp, XAE_FCC_OFFSET, XAE_FCC_FCRX_MASK); in axienet_dma_err_handler()
1849 axienet_setoptions(ndev, lp->options & in axienet_dma_err_handler()
1853 napi_enable(&lp->napi_rx); in axienet_dma_err_handler()
1854 napi_enable(&lp->napi_tx); in axienet_dma_err_handler()
1855 axienet_setoptions(ndev, lp->options); in axienet_dma_err_handler()
1874 struct axienet_local *lp; in axienet_probe() local
1881 ndev = alloc_etherdev(sizeof(*lp)); in axienet_probe()
1897 lp = netdev_priv(ndev); in axienet_probe()
1898 lp->ndev = ndev; in axienet_probe()
1899 lp->dev = &pdev->dev; in axienet_probe()
1900 lp->options = XAE_OPTION_DEFAULTS; in axienet_probe()
1901 lp->rx_bd_num = RX_BD_NUM_DEFAULT; in axienet_probe()
1902 lp->tx_bd_num = TX_BD_NUM_DEFAULT; in axienet_probe()
1904 u64_stats_init(&lp->rx_stat_sync); in axienet_probe()
1905 u64_stats_init(&lp->tx_stat_sync); in axienet_probe()
1907 netif_napi_add(ndev, &lp->napi_rx, axienet_rx_poll); in axienet_probe()
1908 netif_napi_add(ndev, &lp->napi_tx, axienet_tx_poll); in axienet_probe()
1910 lp->axi_clk = devm_clk_get_optional(&pdev->dev, "s_axi_lite_clk"); in axienet_probe()
1911 if (!lp->axi_clk) { in axienet_probe()
1915 lp->axi_clk = devm_clk_get_optional(&pdev->dev, NULL); in axienet_probe()
1917 if (IS_ERR(lp->axi_clk)) { in axienet_probe()
1918 ret = PTR_ERR(lp->axi_clk); in axienet_probe()
1921 ret = clk_prepare_enable(lp->axi_clk); in axienet_probe()
1927 lp->misc_clks[0].id = "axis_clk"; in axienet_probe()
1928 lp->misc_clks[1].id = "ref_clk"; in axienet_probe()
1929 lp->misc_clks[2].id = "mgt_clk"; in axienet_probe()
1931 ret = devm_clk_bulk_get_optional(&pdev->dev, XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
1935 ret = clk_bulk_prepare_enable(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
1940 lp->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &ethres); in axienet_probe()
1941 if (IS_ERR(lp->regs)) { in axienet_probe()
1942 ret = PTR_ERR(lp->regs); in axienet_probe()
1945 lp->regs_start = ethres->start; in axienet_probe()
1948 lp->features = 0; in axienet_probe()
1954 lp->csum_offload_on_tx_path = in axienet_probe()
1956 lp->features |= XAE_FEATURE_PARTIAL_TX_CSUM; in axienet_probe()
1961 lp->csum_offload_on_tx_path = in axienet_probe()
1963 lp->features |= XAE_FEATURE_FULL_TX_CSUM; in axienet_probe()
1968 lp->csum_offload_on_tx_path = XAE_NO_CSUM_OFFLOAD; in axienet_probe()
1975 lp->csum_offload_on_rx_path = in axienet_probe()
1977 lp->features |= XAE_FEATURE_PARTIAL_RX_CSUM; in axienet_probe()
1980 lp->csum_offload_on_rx_path = in axienet_probe()
1982 lp->features |= XAE_FEATURE_FULL_RX_CSUM; in axienet_probe()
1985 lp->csum_offload_on_rx_path = XAE_NO_CSUM_OFFLOAD; in axienet_probe()
1994 of_property_read_u32(pdev->dev.of_node, "xlnx,rxmem", &lp->rxmem); in axienet_probe()
1996 lp->switch_x_sgmii = of_property_read_bool(pdev->dev.of_node, in axienet_probe()
2005 lp->phy_mode = PHY_INTERFACE_MODE_MII; in axienet_probe()
2008 lp->phy_mode = PHY_INTERFACE_MODE_GMII; in axienet_probe()
2011 lp->phy_mode = PHY_INTERFACE_MODE_RGMII_ID; in axienet_probe()
2014 lp->phy_mode = PHY_INTERFACE_MODE_SGMII; in axienet_probe()
2017 lp->phy_mode = PHY_INTERFACE_MODE_1000BASEX; in axienet_probe()
2024 ret = of_get_phy_mode(pdev->dev.of_node, &lp->phy_mode); in axienet_probe()
2028 if (lp->switch_x_sgmii && lp->phy_mode != PHY_INTERFACE_MODE_SGMII && in axienet_probe()
2029 lp->phy_mode != PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2047 lp->dma_regs = devm_ioremap_resource(&pdev->dev, in axienet_probe()
2049 lp->rx_irq = irq_of_parse_and_map(np, 1); in axienet_probe()
2050 lp->tx_irq = irq_of_parse_and_map(np, 0); in axienet_probe()
2052 lp->eth_irq = platform_get_irq_optional(pdev, 0); in axienet_probe()
2055 lp->dma_regs = devm_platform_get_and_ioremap_resource(pdev, 1, NULL); in axienet_probe()
2056 lp->rx_irq = platform_get_irq(pdev, 1); in axienet_probe()
2057 lp->tx_irq = platform_get_irq(pdev, 0); in axienet_probe()
2058 lp->eth_irq = platform_get_irq_optional(pdev, 2); in axienet_probe()
2060 if (IS_ERR(lp->dma_regs)) { in axienet_probe()
2062 ret = PTR_ERR(lp->dma_regs); in axienet_probe()
2065 if ((lp->rx_irq <= 0) || (lp->tx_irq <= 0)) { in axienet_probe()
2072 ret = __axienet_device_reset(lp); in axienet_probe()
2084 if ((axienet_ior(lp, XAE_ID_OFFSET) >> 24) >= 0x9) { in axienet_probe()
2085 void __iomem *desc = lp->dma_regs + XAXIDMA_TX_CDESC_OFFSET + 4; in axienet_probe()
2091 lp->features |= XAE_FEATURE_DMA_64BIT; in axienet_probe()
2099 if (!IS_ENABLED(CONFIG_64BIT) && lp->features & XAE_FEATURE_DMA_64BIT) { in axienet_probe()
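
The probe-time 64-bit detection pokes the upper half of the TX current-descriptor register: on cores new enough to support wide addressing (ID >= 0x9), writing 0xffffffff to the MSB word and reading back non-zero means the register has writable upper address bits. A sketch; the read-back checks and the final zeroing are assumptions:

if ((axienet_ior(lp, XAE_ID_OFFSET) >> 24) >= 0x9) {
	void __iomem *desc = lp->dma_regs + XAXIDMA_TX_CDESC_OFFSET + 4;

	iowrite32(0x0, desc);
	if (ioread32(desc) == 0) {	/* sanity check: write sticks */
		iowrite32(0xffffffff, desc);
		if (ioread32(desc) > 0) {
			lp->features |= XAE_FEATURE_DMA_64BIT;
			dev_info(&pdev->dev,
				 "autodetected 64-bit DMA range\n");
		}
		iowrite32(0x0, desc);
	}
}
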
2112 if (lp->eth_irq <= 0) in axienet_probe()
2125 lp->coalesce_count_rx = XAXIDMA_DFT_RX_THRESHOLD; in axienet_probe()
2126 lp->coalesce_usec_rx = XAXIDMA_DFT_RX_USEC; in axienet_probe()
2127 lp->coalesce_count_tx = XAXIDMA_DFT_TX_THRESHOLD; in axienet_probe()
2128 lp->coalesce_usec_tx = XAXIDMA_DFT_TX_USEC; in axienet_probe()
2130 ret = axienet_mdio_setup(lp); in axienet_probe()
2135 if (lp->phy_mode == PHY_INTERFACE_MODE_SGMII || in axienet_probe()
2136 lp->phy_mode == PHY_INTERFACE_MODE_1000BASEX) { in axienet_probe()
2150 lp->pcs_phy = of_mdio_find_device(np); in axienet_probe()
2151 if (!lp->pcs_phy) { in axienet_probe()
2157 lp->pcs.ops = &axienet_pcs_ops; in axienet_probe()
2158 lp->pcs.neg_mode = true; in axienet_probe()
2159 lp->pcs.poll = true; in axienet_probe()
2162 lp->phylink_config.dev = &ndev->dev; in axienet_probe()
2163 lp->phylink_config.type = PHYLINK_NETDEV; in axienet_probe()
2164 lp->phylink_config.mac_managed_pm = true; in axienet_probe()
2165 lp->phylink_config.mac_capabilities = MAC_SYM_PAUSE | MAC_ASYM_PAUSE | in axienet_probe()
2168 __set_bit(lp->phy_mode, lp->phylink_config.supported_interfaces); in axienet_probe()
2169 if (lp->switch_x_sgmii) { in axienet_probe()
2171 lp->phylink_config.supported_interfaces); in axienet_probe()
2173 lp->phylink_config.supported_interfaces); in axienet_probe()
2176 lp->phylink = phylink_create(&lp->phylink_config, pdev->dev.fwnode, in axienet_probe()
2177 lp->phy_mode, in axienet_probe()
2179 if (IS_ERR(lp->phylink)) { in axienet_probe()
2180 ret = PTR_ERR(lp->phylink); in axienet_probe()
2185 ret = register_netdev(lp->ndev); in axienet_probe()
2187 dev_err(lp->dev, "register_netdev() error (%i)\n", ret); in axienet_probe()
2194 phylink_destroy(lp->phylink); in axienet_probe()
2197 if (lp->pcs_phy) in axienet_probe()
2198 put_device(&lp->pcs_phy->dev); in axienet_probe()
2199 if (lp->mii_bus) in axienet_probe()
2200 axienet_mdio_teardown(lp); in axienet_probe()
2202 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_probe()
2203 clk_disable_unprepare(lp->axi_clk); in axienet_probe()
2214 struct axienet_local *lp = netdev_priv(ndev); in axienet_remove() local
2218 if (lp->phylink) in axienet_remove()
2219 phylink_destroy(lp->phylink); in axienet_remove()
2221 if (lp->pcs_phy) in axienet_remove()
2222 put_device(&lp->pcs_phy->dev); in axienet_remove()
2224 axienet_mdio_teardown(lp); in axienet_remove()
2226 clk_bulk_disable_unprepare(XAE_NUM_MISC_CLOCKS, lp->misc_clks); in axienet_remove()
2227 clk_disable_unprepare(lp->axi_clk); in axienet_remove()