Lines Matching refs:nbpf

All hits below reference the identifier `nbpf` in the Linux nbpfaxi DMA engine driver (drivers/dma/nbpfaxi.c). Each entry shows the source line number, the matching line, and either the enclosing function or, for declarations, the kind of hit (struct member, function argument, or local variable).

205 	struct nbpf_device *nbpf;  member
321 static inline u32 nbpf_read(struct nbpf_device *nbpf, in nbpf_read() argument
324 u32 data = ioread32(nbpf->base + offset); in nbpf_read()
325 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_read()
326 __func__, nbpf->base, offset, data); in nbpf_read()
330 static inline void nbpf_write(struct nbpf_device *nbpf, in nbpf_write() argument
333 iowrite32(data, nbpf->base + offset); in nbpf_write()
334 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_write()
335 __func__, nbpf->base, offset, data); in nbpf_write()
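
The two accessors above are the driver's only MMIO touch points: every register read and write goes through them and is traced with dev_dbg(). A minimal reconstruction (the `return data;` line and the exact parameter names are assumptions, since the listing only shows the matched lines; all sketches in this listing assume the driver's usual headers such as <linux/io.h>, <linux/dmaengine.h> and <linux/interrupt.h>):

static inline u32 nbpf_read(struct nbpf_device *nbpf,
			    unsigned int offset)
{
	/* 32-bit read at controller base + register offset */
	u32 data = ioread32(nbpf->base + offset);

	dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
		__func__, nbpf->base, offset, data);
	return data;	/* assumed: not part of the matched lines */
}

static inline void nbpf_write(struct nbpf_device *nbpf,
			      unsigned int offset, u32 data)
{
	iowrite32(data, nbpf->base + offset);
	dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
		__func__, nbpf->base, offset, data);
}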
345 u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END); in nbpf_status_get()
347 return status & BIT(chan - chan->nbpf->chan); in nbpf_status_get()
355 static u32 nbpf_error_get(struct nbpf_device *nbpf) in nbpf_error_get() argument
357 return nbpf_read(nbpf, NBPF_DSTAT_ER); in nbpf_error_get()
360 static struct nbpf_channel *nbpf_error_get_channel(struct nbpf_device *nbpf, u32 error) in nbpf_error_get_channel() argument
362 return nbpf->chan + __ffs(error); in nbpf_error_get_channel()
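
nbpf_status_get() and nbpf_error_get_channel() are the two directions of the same channel-to-bit mapping: `chan - chan->nbpf->chan` is pointer arithmetic yielding the channel's index n in the chan[] array, whose end-of-transfer flag is bit n of NBPF_DSTAT_END, while `__ffs(error)` turns the lowest set bit of NBPF_DSTAT_ER back into an array index. Assembled from the fragments above (a sketch; only the shown lines are verbatim):

static u32 nbpf_status_get(struct nbpf_channel *chan)
{
	u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END);

	/* channel index selects this channel's end-of-transfer bit */
	return status & BIT(chan - chan->nbpf->chan);
}

static u32 nbpf_error_get(struct nbpf_device *nbpf)
{
	return nbpf_read(nbpf, NBPF_DSTAT_ER);
}

static struct nbpf_channel *nbpf_error_get_channel(struct nbpf_device *nbpf,
						   u32 error)
{
	/* lowest set error bit -> first channel that reported an error */
	return nbpf->chan + __ffs(error);
}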
400 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): next 0x%x, cur 0x%x\n", __func__, in nbpf_start()
433 static u32 nbpf_xfer_ds(struct nbpf_device *nbpf, size_t size, in nbpf_xfer_ds() argument
436 int max_burst = nbpf->config->buffer_size * 8; in nbpf_xfer_ds()
438 if (nbpf->max_burst_mem_read || nbpf->max_burst_mem_write) { in nbpf_xfer_ds()
441 max_burst = min_not_zero(nbpf->max_burst_mem_read, in nbpf_xfer_ds()
442 nbpf->max_burst_mem_write); in nbpf_xfer_ds()
445 if (nbpf->max_burst_mem_read) in nbpf_xfer_ds()
446 max_burst = nbpf->max_burst_mem_read; in nbpf_xfer_ds()
449 if (nbpf->max_burst_mem_write) in nbpf_xfer_ds()
450 max_burst = nbpf->max_burst_mem_write; in nbpf_xfer_ds()
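
The nbpf_xfer_ds() fragments are a direction-dependent clamp on the maximum burst: the default ceiling is buffer_size * 8, optionally tightened by the DT-provided memory-side limits. Memory-to-memory transfers honour both limits (the smaller non-zero one wins via min_not_zero()); one-sided transfers honour only the limit for the memory-facing end. A sketch of the full selection (the case labels and the final rounding line are assumptions beyond the matched lines):

static u32 nbpf_xfer_ds(struct nbpf_device *nbpf, size_t size,
			enum dma_transfer_direction direction)
{
	int max_burst = nbpf->config->buffer_size * 8;

	if (nbpf->max_burst_mem_read || nbpf->max_burst_mem_write) {
		switch (direction) {
		case DMA_MEM_TO_MEM:
			/* both ends touch memory: apply both limits */
			max_burst = min_not_zero(nbpf->max_burst_mem_read,
						 nbpf->max_burst_mem_write);
			break;
		case DMA_MEM_TO_DEV:
			if (nbpf->max_burst_mem_read)
				max_burst = nbpf->max_burst_mem_read;
			break;
		case DMA_DEV_TO_MEM:
			if (nbpf->max_burst_mem_write)
				max_burst = nbpf->max_burst_mem_write;
			break;
		default:
			break;
		}
	}

	/* assumed: encode as log2, capped by the burst ceiling */
	return min_t(int, __ffs(size), ilog2(max_burst));
}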
462 static size_t nbpf_xfer_size(struct nbpf_device *nbpf, in nbpf_xfer_size() argument
490 return nbpf_xfer_ds(nbpf, size, DMA_TRANS_NONE); in nbpf_xfer_size()
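
nbpf_xfer_size() converts a (bus width, burst) pair from a slave configuration into a byte count, then reuses nbpf_xfer_ds() with DMA_TRANS_NONE so no direction-specific clamp applies. A plausible body, inferred from its callers in nbpf_config() below (the buswidth switch is an assumption):

static size_t nbpf_xfer_size(struct nbpf_device *nbpf,
			     enum dma_slave_buswidth width, u32 burst)
{
	size_t size;

	if (!burst)
		burst = 1;

	switch (width) {
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		size = 8 * burst;
		break;
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		size = 4 * burst;
		break;
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		size = 2 * burst;
		break;
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	default:
		size = burst;
	}

	return nbpf_xfer_ds(nbpf, size, DMA_TRANS_NONE);
}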
539 mem_xfer = nbpf_xfer_ds(chan->nbpf, size, direction); in nbpf_prep_one()
593 static void nbpf_configure(struct nbpf_device *nbpf) in nbpf_configure() argument
595 nbpf_write(nbpf, NBPF_CTRL, NBPF_CTRL_LVINT); in nbpf_configure()
856 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): force-free desc %p cookie %d\n", in nbpf_chan_idle()
904 chan->slave_dst_width = nbpf_xfer_size(chan->nbpf, in nbpf_config()
906 chan->slave_dst_burst = nbpf_xfer_size(chan->nbpf, in nbpf_config()
910 chan->slave_src_width = nbpf_xfer_size(chan->nbpf, in nbpf_config()
912 chan->slave_src_burst = nbpf_xfer_size(chan->nbpf, in nbpf_config()
1090 struct nbpf_device *nbpf = ofdma->of_dma_data; in nbpf_of_xlate() local
1097 dchan = dma_get_any_slave_channel(&nbpf->dma_dev); in nbpf_of_xlate()
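
nbpf_of_xlate() is the controller's DT translator: it retrieves the nbpf_device from the of_dma_data it registered (line 1443) and grabs any free channel with dma_get_any_slave_channel(). A hedged sketch; the two-cell check and the binding of the specifier cells to the channel are assumptions, not matched lines:

static struct dma_chan *nbpf_of_xlate(struct of_phandle_args *dma_spec,
				      struct of_dma *ofdma)
{
	struct nbpf_device *nbpf = ofdma->of_dma_data;
	struct dma_chan *dchan;

	if (dma_spec->args_count != 2)	/* assumed #dma-cells = <2> */
		return NULL;

	dchan = dma_get_any_slave_channel(&nbpf->dma_dev);
	if (!dchan)
		return NULL;

	/* assumed: record the request terminal and flags from
	 * dma_spec->args[] on the channel before handing it out */
	return dchan;
}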
1225 struct nbpf_device *nbpf = dev; in nbpf_err_irq() local
1226 u32 error = nbpf_error_get(nbpf); in nbpf_err_irq()
1228 dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq); in nbpf_err_irq()
1234 struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error); in nbpf_err_irq()
1238 error = nbpf_error_get(nbpf); in nbpf_err_irq()
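
The error IRQ handler drains NBPF_DSTAT_ER in a loop: warn once, bail out with IRQ_NONE if no error bit is set (the IRQ line may be shared), otherwise clear and recover one channel at a time until a re-read of the status comes back empty. A sketch of that flow (the loop shape and nbpf_error_clear() are assumptions beyond the matched lines):

static irqreturn_t nbpf_err_irq(int irq, void *dev)
{
	struct nbpf_device *nbpf = dev;
	u32 error = nbpf_error_get(nbpf);

	dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq);

	if (!error)
		return IRQ_NONE;	/* shared IRQ, not ours */

	do {
		struct nbpf_channel *chan = nbpf_error_get_channel(nbpf,
								   error);
		/* assumed helper: acknowledge this channel's error */
		nbpf_error_clear(chan);
		error = nbpf_error_get(nbpf);
	} while (error);

	return IRQ_HANDLED;
}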
1244 static int nbpf_chan_probe(struct nbpf_device *nbpf, int n) in nbpf_chan_probe() argument
1246 struct dma_device *dma_dev = &nbpf->dma_dev; in nbpf_chan_probe()
1247 struct nbpf_channel *chan = nbpf->chan + n; in nbpf_chan_probe()
1250 chan->nbpf = nbpf; in nbpf_chan_probe()
1251 chan->base = nbpf->base + NBPF_REG_CHAN_OFFSET + NBPF_REG_CHAN_SIZE * n; in nbpf_chan_probe()
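
nbpf_chan_probe() shows the per-channel register layout: every channel owns a fixed-size window at a fixed offset from the controller base, so channel n's registers start at base + NBPF_REG_CHAN_OFFSET + n * NBPF_REG_CHAN_SIZE. A sketch of the init (everything after the two matched assignments, including the dmaengine list hookup, is an assumption):

static int nbpf_chan_probe(struct nbpf_device *nbpf, int n)
{
	struct dma_device *dma_dev = &nbpf->dma_dev;
	struct nbpf_channel *chan = nbpf->chan + n;

	chan->nbpf = nbpf;
	chan->base = nbpf->base + NBPF_REG_CHAN_OFFSET + NBPF_REG_CHAN_SIZE * n;

	/* assumed: make the channel visible to the dmaengine core */
	chan->dma_chan.device = dma_dev;
	list_add_tail(&chan->dma_chan.device_node, &dma_dev->channels);

	return 0;
}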
1294 struct nbpf_device *nbpf; in nbpf_probe() local
1311 nbpf = devm_kzalloc(dev, struct_size(nbpf, chan, num_channels), in nbpf_probe()
1313 if (!nbpf) in nbpf_probe()
1316 dma_dev = &nbpf->dma_dev; in nbpf_probe()
1319 nbpf->base = devm_platform_ioremap_resource(pdev, 0); in nbpf_probe()
1320 if (IS_ERR(nbpf->base)) in nbpf_probe()
1321 return PTR_ERR(nbpf->base); in nbpf_probe()
1323 nbpf->clk = devm_clk_get(dev, NULL); in nbpf_probe()
1324 if (IS_ERR(nbpf->clk)) in nbpf_probe()
1325 return PTR_ERR(nbpf->clk); in nbpf_probe()
1328 &nbpf->max_burst_mem_read); in nbpf_probe()
1330 &nbpf->max_burst_mem_write); in nbpf_probe()
1332 nbpf->config = cfg; in nbpf_probe()
1355 nbpf->chan[i].irq = irqbuf[0]; in nbpf_probe()
1364 for (i = 0, chan = nbpf->chan; i <= num_channels; in nbpf_probe()
1372 if (chan != nbpf->chan + num_channels) in nbpf_probe()
1382 nbpf->chan[i].irq = irq; in nbpf_probe()
1387 IRQF_SHARED, "dma error", nbpf); in nbpf_probe()
1390 nbpf->eirq = eirq; in nbpf_probe()
1396 ret = nbpf_chan_probe(nbpf, i); in nbpf_probe()
1431 platform_set_drvdata(pdev, nbpf); in nbpf_probe()
1433 ret = clk_prepare_enable(nbpf->clk); in nbpf_probe()
1437 nbpf_configure(nbpf); in nbpf_probe()
1443 ret = of_dma_controller_register(np, nbpf_of_xlate, nbpf); in nbpf_probe()
1452 clk_disable_unprepare(nbpf->clk); in nbpf_probe()
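
The probe fragments end in the bring-up sequence: stash the driver data, enable the functional clock, program the global controller state via nbpf_configure(), and register with the dmaengine core and the DT translator; the clk_disable_unprepare() at line 1452 is the matching error path. A condensed sketch of that tail, excerpted from inside nbpf_probe() and reusing its locals (the label names and the dma_async_device_register() step are assumptions consistent with the unregister call at line 1473):

	platform_set_drvdata(pdev, nbpf);

	ret = clk_prepare_enable(nbpf->clk);
	if (ret < 0)
		return ret;

	nbpf_configure(nbpf);

	ret = dma_async_device_register(dma_dev);
	if (ret < 0)
		goto e_clk_off;

	ret = of_dma_controller_register(np, nbpf_of_xlate, nbpf);
	if (ret)
		goto e_dma_dev_unreg;

	return 0;

e_dma_dev_unreg:
	dma_async_device_unregister(dma_dev);
e_clk_off:
	clk_disable_unprepare(nbpf->clk);
	return ret;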
1459 struct nbpf_device *nbpf = platform_get_drvdata(pdev); in nbpf_remove() local
1462 devm_free_irq(&pdev->dev, nbpf->eirq, nbpf); in nbpf_remove()
1464 for (i = 0; i < nbpf->config->num_channels; i++) { in nbpf_remove()
1465 struct nbpf_channel *chan = nbpf->chan + i; in nbpf_remove()
1473 dma_async_device_unregister(&nbpf->dma_dev); in nbpf_remove()
1474 clk_disable_unprepare(nbpf->clk); in nbpf_remove()
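
Teardown mirrors probe in reverse: free the error IRQ, then each channel's IRQ and tasklet, then drop the DT registration, unregister from the dmaengine core, and gate the clock. A sketch (the return type, tasklet_kill() and of_dma_controller_free() calls are assumptions beyond the matched lines):

static int nbpf_remove(struct platform_device *pdev)
{
	struct nbpf_device *nbpf = platform_get_drvdata(pdev);
	int i;

	devm_free_irq(&pdev->dev, nbpf->eirq, nbpf);

	for (i = 0; i < nbpf->config->num_channels; i++) {
		struct nbpf_channel *chan = nbpf->chan + i;

		devm_free_irq(&pdev->dev, chan->irq, chan);	/* assumed */
		tasklet_kill(&chan->tasklet);			/* assumed */
	}

	of_dma_controller_free(pdev->dev.of_node);		/* assumed */
	dma_async_device_unregister(&nbpf->dma_dev);
	clk_disable_unprepare(nbpf->clk);

	return 0;
}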
1496 struct nbpf_device *nbpf = dev_get_drvdata(dev); in nbpf_runtime_suspend() local
1497 clk_disable_unprepare(nbpf->clk); in nbpf_runtime_suspend()
1503 struct nbpf_device *nbpf = dev_get_drvdata(dev); in nbpf_runtime_resume() local
1504 return clk_prepare_enable(nbpf->clk); in nbpf_runtime_resume()
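
Runtime PM here is pure clock gating: suspend unprepares the functional clock, resume re-enables it. These callbacks are conventionally exposed through a dev_pm_ops table referenced by the platform driver; a sketch of that wiring (the nbpf_pm_ops name is an assumption):

static const struct dev_pm_ops nbpf_pm_ops = {
	SET_RUNTIME_PM_OPS(nbpf_runtime_suspend, nbpf_runtime_resume, NULL)
};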