Lines matching refs: dev

78 dma_unmap_page(tx_ring->dev, tx_swbd->dma, in enetc_unmap_tx_buff()
82 dma_unmap_single(tx_ring->dev, tx_swbd->dma, in enetc_unmap_tx_buff()
166 dma = dma_map_single(tx_ring->dev, skb->data, len, DMA_TO_DEVICE); in enetc_map_tx_buffs()
167 if (unlikely(dma_mapping_error(tx_ring->dev, dma))) in enetc_map_tx_buffs()
279 dma = skb_frag_dma_map(tx_ring->dev, frag, 0, len, in enetc_map_tx_buffs()
281 if (dma_mapping_error(tx_ring->dev, dma)) in enetc_map_tx_buffs()
326 dev_err(tx_ring->dev, "DMA map error"); in enetc_map_tx_buffs()
403 addr = dma_map_single(tx_ring->dev, data, size, DMA_TO_DEVICE); in enetc_map_tx_tso_data()
404 if (unlikely(dma_mapping_error(tx_ring->dev, addr))) { in enetc_map_tx_tso_data()
574 dev_err(tx_ring->dev, "DMA map error"); in enetc_map_tx_tso_buffs()
794 dma_sync_single_range_for_device(rx_ring->dev, rx_swbd.dma, in enetc_recycle_xdp_tx_buff()
806 dma_unmap_page(rx_ring->dev, rx_swbd.dma, PAGE_SIZE, in enetc_recycle_xdp_tx_buff()
927 addr = dma_map_page(rx_ring->dev, page, 0, PAGE_SIZE, rx_swbd->dir); in enetc_new_page()
928 if (unlikely(dma_mapping_error(rx_ring->dev, addr))) { in enetc_new_page()
1060 dma_sync_single_range_for_cpu(rx_ring->dev, rx_swbd->dma, in enetc_get_rx_buff()
1074 dma_sync_single_range_for_device(rx_ring->dev, rx_swbd->dma, in enetc_put_rx_buff()
1091 dma_unmap_page(rx_ring->dev, rx_swbd->dma, PAGE_SIZE, in enetc_flip_rx_buff()
1321 dma = dma_map_single(tx_ring->dev, data, len, DMA_TO_DEVICE); in enetc_xdp_frame_to_xdp_tx_swbd()
1322 if (unlikely(dma_mapping_error(tx_ring->dev, dma))) { in enetc_xdp_frame_to_xdp_tx_swbd()
1348 dma = dma_map_single(tx_ring->dev, data, len, DMA_TO_DEVICE); in enetc_xdp_frame_to_xdp_tx_swbd()
1349 if (unlikely(dma_mapping_error(tx_ring->dev, dma))) { in enetc_xdp_frame_to_xdp_tx_swbd()
1770 res->bd_base = dma_alloc_coherent(res->dev, bd_base_size, in enetc_dma_alloc_bdr()
1777 dma_free_coherent(res->dev, bd_base_size, res->bd_base, in enetc_dma_alloc_bdr()
1789 dma_free_coherent(res->dev, bd_base_size, res->bd_base, in enetc_dma_free_bdr()
1794 struct device *dev, size_t bd_count) in enetc_alloc_tx_resource() argument
1798 res->dev = dev; in enetc_alloc_tx_resource()
1810 res->tso_headers = dma_alloc_coherent(dev, bd_count * TSO_HEADER_SIZE, in enetc_alloc_tx_resource()
1831 dma_free_coherent(res->dev, res->bd_count * TSO_HEADER_SIZE, in enetc_free_tx_resource()
1850 err = enetc_alloc_tx_resource(&tx_res[i], tx_ring->dev, in enetc_alloc_tx_resources()
1879 struct device *dev, size_t bd_count, in enetc_alloc_rx_resource() argument
1884 res->dev = dev; in enetc_alloc_rx_resource()
1922 err = enetc_alloc_rx_resource(&rx_res[i], rx_ring->dev, in enetc_alloc_rx_resources()
2021 dma_unmap_page(rx_ring->dev, rx_swbd->dma, PAGE_SIZE, in enetc_free_rx_ring()
2332 dev_err(priv->dev, "request_irq() failed!\n"); in enetc_setup_irqs()
2435 err = phylink_of_phy_connect(priv->phylink, priv->dev->of_node, 0); in enetc_phylink_connect()
2437 dev_err(&ndev->dev, "could not attach to PHY\n"); in enetc_phylink_connect()
3039 bdr->dev = priv->dev; in enetc_alloc_msix()
3076 bdr->dev = priv->dev; in enetc_alloc_msix()
3169 return dev_err_probe(&pdev->dev, err, "device enable failed\n"); in enetc_pci_probe()
3172 err = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(64)); in enetc_pci_probe()
3174 dev_err(&pdev->dev, "DMA configuration failed: 0x%x\n", err); in enetc_pci_probe()
3180 dev_err(&pdev->dev, "pci_request_regions failed err=%d\n", err); in enetc_pci_probe()
3212 dev_err(&pdev->dev, "ioremap() failed\n"); in enetc_pci_probe()