Lines matching references to `d` (identifier cross-reference listing; the fragments below come from the k3dma DMA engine driver, each prefixed with its source line number and suffixed with the enclosing function and whether `d` is an argument or a local).

149 static void k3_dma_terminate_chan(struct k3_dma_phy *phy, struct k3_dma_dev *d)  in k3_dma_terminate_chan()  argument
156 writel_relaxed(val, d->base + INT_TC1_RAW); in k3_dma_terminate_chan()
157 writel_relaxed(val, d->base + INT_TC2_RAW); in k3_dma_terminate_chan()
158 writel_relaxed(val, d->base + INT_ERR1_RAW); in k3_dma_terminate_chan()
159 writel_relaxed(val, d->base + INT_ERR2_RAW); in k3_dma_terminate_chan()
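The four RAW writes above acknowledge a single channel's latched status. A minimal reconstruction of k3_dma_terminate_chan() from these lines, assuming the driver's local struct k3_dma_phy/struct k3_dma_dev and the CX_CFG, CCFG_EN and INT_*_RAW register macros; the channel-disable step at the top is an assumption, only the four acknowledgements appear in the listing:

static void k3_dma_terminate_chan(struct k3_dma_phy *phy, struct k3_dma_dev *d)
{
    u32 val;

    /* Assumed: stop the physical channel before clearing its status. */
    val = readl_relaxed(phy->base + CX_CFG);
    val &= ~CCFG_EN;
    writel_relaxed(val, phy->base + CX_CFG);

    /* Writing the channel's bit to a *_RAW register clears that latch. */
    val = BIT(phy->idx);
    writel_relaxed(val, d->base + INT_TC1_RAW);
    writel_relaxed(val, d->base + INT_TC2_RAW);
    writel_relaxed(val, d->base + INT_ERR1_RAW);
    writel_relaxed(val, d->base + INT_ERR2_RAW);
}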
171 static u32 k3_dma_get_curr_cnt(struct k3_dma_dev *d, struct k3_dma_phy *phy) in k3_dma_get_curr_cnt() argument
175 cnt = readl_relaxed(d->base + CX_CUR_CNT + phy->idx * 0x10); in k3_dma_get_curr_cnt()
185 static u32 k3_dma_get_chan_stat(struct k3_dma_dev *d) in k3_dma_get_chan_stat() argument
187 return readl_relaxed(d->base + CH_STAT); in k3_dma_get_chan_stat()
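Both read helpers are a single register access. A minimal sketch, assuming the driver's CX_CUR_CNT and CH_STAT macros; any masking the driver applies to the raw counter value is omitted here:

static u32 k3_dma_get_curr_cnt(struct k3_dma_dev *d, struct k3_dma_phy *phy)
{
    /* Per-channel current-count registers sit at a 0x10-byte stride. */
    return readl_relaxed(d->base + CX_CUR_CNT + phy->idx * 0x10);
}

static u32 k3_dma_get_chan_stat(struct k3_dma_dev *d)
{
    /* One bit per physical channel: set while the channel is busy. */
    return readl_relaxed(d->base + CH_STAT);
}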
190 static void k3_dma_enable_dma(struct k3_dma_dev *d, bool on) in k3_dma_enable_dma() argument
194 writel_relaxed(0x0, d->base + CH_PRI); in k3_dma_enable_dma()
197 writel_relaxed(0xffff, d->base + INT_TC1_MASK); in k3_dma_enable_dma()
198 writel_relaxed(0xffff, d->base + INT_TC2_MASK); in k3_dma_enable_dma()
199 writel_relaxed(0xffff, d->base + INT_ERR1_MASK); in k3_dma_enable_dma()
200 writel_relaxed(0xffff, d->base + INT_ERR2_MASK); in k3_dma_enable_dma()
203 writel_relaxed(0x0, d->base + INT_TC1_MASK); in k3_dma_enable_dma()
204 writel_relaxed(0x0, d->base + INT_TC2_MASK); in k3_dma_enable_dma()
205 writel_relaxed(0x0, d->base + INT_ERR1_MASK); in k3_dma_enable_dma()
206 writel_relaxed(0x0, d->base + INT_ERR2_MASK); in k3_dma_enable_dma()
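Taken together, the writes above show the enable/disable split: with on true the helper levels the channel priorities and unmasks the terminal-count and error interrupts for all 16 channels (0xffff); with on false it masks them again. A reconstruction under those assumptions, using the CH_PRI and INT_*_MASK macros:

static void k3_dma_enable_dma(struct k3_dma_dev *d, bool on)
{
    if (on) {
        /* Give every channel the same priority. */
        writel_relaxed(0x0, d->base + CH_PRI);

        /* Unmask terminal-count and error interrupts for all channels. */
        writel_relaxed(0xffff, d->base + INT_TC1_MASK);
        writel_relaxed(0xffff, d->base + INT_TC2_MASK);
        writel_relaxed(0xffff, d->base + INT_ERR1_MASK);
        writel_relaxed(0xffff, d->base + INT_ERR2_MASK);
    } else {
        /* Mask everything before the controller is stopped. */
        writel_relaxed(0x0, d->base + INT_TC1_MASK);
        writel_relaxed(0x0, d->base + INT_TC2_MASK);
        writel_relaxed(0x0, d->base + INT_ERR1_MASK);
        writel_relaxed(0x0, d->base + INT_ERR2_MASK);
    }
}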
212 struct k3_dma_dev *d = (struct k3_dma_dev *)dev_id; in k3_dma_int_handler() local
215 u32 stat = readl_relaxed(d->base + INT_STAT); in k3_dma_int_handler()
216 u32 tc1 = readl_relaxed(d->base + INT_TC1); in k3_dma_int_handler()
217 u32 tc2 = readl_relaxed(d->base + INT_TC2); in k3_dma_int_handler()
218 u32 err1 = readl_relaxed(d->base + INT_ERR1); in k3_dma_int_handler()
219 u32 err2 = readl_relaxed(d->base + INT_ERR2); in k3_dma_int_handler()
227 p = &d->phy[i]; in k3_dma_int_handler()
247 dev_warn(d->slave.dev, "DMA ERR\n"); in k3_dma_int_handler()
250 writel_relaxed(irq_chan, d->base + INT_TC1_RAW); in k3_dma_int_handler()
251 writel_relaxed(irq_chan, d->base + INT_TC2_RAW); in k3_dma_int_handler()
252 writel_relaxed(err1, d->base + INT_ERR1_RAW); in k3_dma_int_handler()
253 writel_relaxed(err2, d->base + INT_ERR2_RAW); in k3_dma_int_handler()
256 tasklet_schedule(&d->task); in k3_dma_int_handler()
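The handler's register traffic above follows a snapshot, dispatch, acknowledge pattern. A condensed sketch of that flow, assuming the driver's local register macros; the per-channel descriptor completion done for each terminal-count bit is elided and only marked by a comment:

static irqreturn_t k3_dma_int_handler(int irq, void *dev_id)
{
    struct k3_dma_dev *d = (struct k3_dma_dev *)dev_id;
    u32 stat = readl_relaxed(d->base + INT_STAT);
    u32 tc1  = readl_relaxed(d->base + INT_TC1);
    u32 tc2  = readl_relaxed(d->base + INT_TC2);
    u32 err1 = readl_relaxed(d->base + INT_ERR1);
    u32 err2 = readl_relaxed(d->base + INT_ERR2);
    u32 irq_chan = 0;
    unsigned int i;

    while (stat) {
        i = __ffs(stat);
        stat &= ~BIT(i);

        if ((tc1 | tc2) & BIT(i)) {
            /* ... complete the descriptor running on d->phy[i] ... */
            irq_chan |= BIT(i);
        }
        if ((err1 | err2) & BIT(i))
            dev_warn(d->slave.dev, "DMA ERR\n");
    }

    /* Acknowledge only the bits that were actually handled. */
    writel_relaxed(irq_chan, d->base + INT_TC1_RAW);
    writel_relaxed(irq_chan, d->base + INT_TC2_RAW);
    writel_relaxed(err1, d->base + INT_ERR1_RAW);
    writel_relaxed(err2, d->base + INT_ERR2_RAW);

    if (irq_chan)
        tasklet_schedule(&d->task);

    return (irq_chan || err1 || err2) ? IRQ_HANDLED : IRQ_NONE;
}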
266 struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device); in k3_dma_start_txd() local
272 if (BIT(c->phy->idx) & k3_dma_get_chan_stat(d)) in k3_dma_start_txd()
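The CH_STAT test above is the guard that keeps a new descriptor from being programmed onto a physical channel that is still running. A minimal sketch of that guard; the -EAGAIN convention and the descriptor programming that follows are assumptions:

static int k3_dma_start_txd(struct k3_dma_chan *c)
{
    struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device);

    if (!c->phy)
        return -EAGAIN;        /* no physical channel attached yet */

    if (BIT(c->phy->idx) & k3_dma_get_chan_stat(d))
        return -EAGAIN;        /* hardware channel still busy */

    /* ... fetch the next issued descriptor and program c->phy ... */
    return 0;
}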
301 struct k3_dma_dev *d = from_tasklet(d, t, task); in k3_dma_tasklet() local
307 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_tasklet()
313 dev_dbg(d->slave.dev, "pchan %u: free\n", p->idx); in k3_dma_tasklet()
323 spin_lock_irq(&d->lock); in k3_dma_tasklet()
324 for (pch = 0; pch < d->dma_channels; pch++) { in k3_dma_tasklet()
325 if (!(d->dma_channel_mask & (1 << pch))) in k3_dma_tasklet()
328 p = &d->phy[pch]; in k3_dma_tasklet()
330 if (p->vchan == NULL && !list_empty(&d->chan_pending)) { in k3_dma_tasklet()
331 c = list_first_entry(&d->chan_pending, in k3_dma_tasklet()
339 dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc); in k3_dma_tasklet()
342 spin_unlock_irq(&d->lock); in k3_dma_tasklet()
344 for (pch = 0; pch < d->dma_channels; pch++) { in k3_dma_tasklet()
345 if (!(d->dma_channel_mask & (1 << pch))) in k3_dma_tasklet()
349 p = &d->phy[pch]; in k3_dma_tasklet()
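The two loops over d->dma_channels are the scheduler: the first, under d->lock, pairs each idle physical channel present in dma_channel_mask with the oldest entry on d->chan_pending; the second, outside the lock, starts the channels that were just paired. A sketch of the pairing pass, written as a hypothetical helper for readability (in the driver this loop lives inside the tasklet itself):

static u32 k3_dma_sched_pending(struct k3_dma_dev *d)
{
    struct k3_dma_phy *p;
    struct k3_dma_chan *c;
    unsigned int pch;
    u32 pch_alloc = 0;

    spin_lock_irq(&d->lock);
    for (pch = 0; pch < d->dma_channels; pch++) {
        if (!(d->dma_channel_mask & (1 << pch)))
            continue;          /* channel not usable on this platform */

        p = &d->phy[pch];
        if (p->vchan == NULL && !list_empty(&d->chan_pending)) {
            c = list_first_entry(&d->chan_pending,
                                 struct k3_dma_chan, node);
            /* Take the vchan off the pending list and pair it. */
            list_del_init(&c->node);
            pch_alloc |= 1 << pch;
            p->vchan = c;
            c->phy = p;
            dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n",
                    pch, &c->vc);
        }
    }
    spin_unlock_irq(&d->lock);

    /* Caller starts the channels recorded in pch_alloc outside the lock. */
    return pch_alloc;
}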
363 struct k3_dma_dev *d = to_k3_dma(chan->device); in k3_dma_free_chan_resources() local
366 spin_lock_irqsave(&d->lock, flags); in k3_dma_free_chan_resources()
368 spin_unlock_irqrestore(&d->lock, flags); in k3_dma_free_chan_resources()
378 struct k3_dma_dev *d = to_k3_dma(chan->device); in k3_dma_tx_status() local
406 bytes = k3_dma_get_curr_cnt(d, p); in k3_dma_tx_status()
425 struct k3_dma_dev *d = to_k3_dma(chan->device); in k3_dma_issue_pending() local
431 spin_lock(&d->lock); in k3_dma_issue_pending()
435 list_add_tail(&c->node, &d->chan_pending); in k3_dma_issue_pending()
437 tasklet_schedule(&d->task); in k3_dma_issue_pending()
438 dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc); in k3_dma_issue_pending()
441 spin_unlock(&d->lock); in k3_dma_issue_pending()
443 dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc); in k3_dma_issue_pending()
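issue_pending only queues work: once the virtual channel actually has issued descriptors and no physical channel yet, it is appended to d->chan_pending and the tasklet is kicked. A condensed sketch, assuming the virt-dma helper vchan_issue_pending() and the driver-local to_k3_chan()/to_k3_dma() casts:

static void k3_dma_issue_pending(struct dma_chan *chan)
{
    struct k3_dma_chan *c = to_k3_chan(chan);
    struct k3_dma_dev *d = to_k3_dma(chan->device);
    unsigned long flags;

    spin_lock_irqsave(&c->vc.lock, flags);
    if (vchan_issue_pending(&c->vc)) {
        spin_lock(&d->lock);
        if (!c->phy && list_empty(&c->node)) {
            /* Not scheduled yet: queue it and let the tasklet pick a pchan. */
            list_add_tail(&c->node, &d->chan_pending);
            tasklet_schedule(&d->task);
            dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc);
        }
        spin_unlock(&d->lock);
    } else {
        dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
    }
    spin_unlock_irqrestore(&c->vc.lock, flags);
}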
466 struct k3_dma_dev *d = to_k3_dma(chan->device); in k3_dma_alloc_desc_resource() local
479 ds->desc_hw = dma_pool_zalloc(d->pool, GFP_NOWAIT, &ds->desc_hw_lli); in k3_dma_alloc_desc_resource()
714 struct k3_dma_dev *d = to_k3_dma(vd->tx.chan->device); in k3_dma_free_desc() local
716 dma_pool_free(d->pool, ds->desc_hw, ds->desc_hw_lli); in k3_dma_free_desc()
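Descriptor memory is paired across these two paths: the hardware link-list block comes from d->pool via dma_pool_zalloc() in k3_dma_alloc_desc_resource() and goes back with dma_pool_free() when virt-dma releases the descriptor. A minimal sketch of the release side, assuming the software descriptor embeds its virt_dma_desc as ->vd; the surrounding bookkeeping (size checks, wrapper allocation) is elided:

static void k3_dma_free_desc(struct virt_dma_desc *vd)
{
    struct k3_dma_desc_sw *ds = container_of(vd, struct k3_dma_desc_sw, vd);
    struct k3_dma_dev *d = to_k3_dma(vd->tx.chan->device);

    /* Return the hardware link-list block to the pool, then the wrapper. */
    dma_pool_free(d->pool, ds->desc_hw, ds->desc_hw_lli);
    kfree(ds);
}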
723 struct k3_dma_dev *d = to_k3_dma(chan->device); in k3_dma_terminate_all() local
728 dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc); in k3_dma_terminate_all()
731 spin_lock(&d->lock); in k3_dma_terminate_all()
733 spin_unlock(&d->lock); in k3_dma_terminate_all()
740 k3_dma_terminate_chan(p, d); in k3_dma_terminate_all()
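terminate_all first takes the channel off the scheduler's pending list under d->lock, then stops the physical channel with k3_dma_terminate_chan() and drops the outstanding descriptors. A condensed sketch; the virt-dma descriptor-reclaim calls follow the usual virt-dma pattern and are assumptions:

static int k3_dma_terminate_all(struct dma_chan *chan)
{
    struct k3_dma_chan *c = to_k3_chan(chan);
    struct k3_dma_dev *d = to_k3_dma(chan->device);
    struct k3_dma_phy *p = c->phy;
    unsigned long flags;
    LIST_HEAD(head);

    dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc);

    /* Make sure the tasklet can no longer schedule this vchan. */
    spin_lock(&d->lock);
    list_del_init(&c->node);
    spin_unlock(&d->lock);

    spin_lock_irqsave(&c->vc.lock, flags);
    vchan_get_all_descriptors(&c->vc, &head);
    if (p) {
        /* Stop the hardware channel and break the pchan/vchan pairing. */
        k3_dma_terminate_chan(p, d);
        c->phy = NULL;
        p->vchan = NULL;
    }
    spin_unlock_irqrestore(&c->vc.lock, flags);
    vchan_dma_desc_free_list(&c->vc, &head);

    return 0;
}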
765 struct k3_dma_dev *d = to_k3_dma(chan->device); in k3_dma_transfer_pause() local
768 dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc); in k3_dma_transfer_pause()
774 spin_lock(&d->lock); in k3_dma_transfer_pause()
776 spin_unlock(&d->lock); in k3_dma_transfer_pause()
786 struct k3_dma_dev *d = to_k3_dma(chan->device); in k3_dma_transfer_resume() local
790 dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc); in k3_dma_transfer_resume()
797 spin_lock(&d->lock); in k3_dma_transfer_resume()
798 list_add_tail(&c->node, &d->chan_pending); in k3_dma_transfer_resume()
799 spin_unlock(&d->lock); in k3_dma_transfer_resume()
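The resume lines visible here cover the case where the paused virtual channel no longer owns a physical channel: it is simply put back on d->chan_pending so the tasklet schedules it again. A condensed sketch; the branch that un-pauses a still-attached hardware channel, and the pause/status bookkeeping, are elided:

static int k3_dma_transfer_resume(struct dma_chan *chan)
{
    struct k3_dma_chan *c = to_k3_chan(chan);
    struct k3_dma_dev *d = to_k3_dma(chan->device);

    dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc);

    /* ... if c->phy is still attached, resume the hardware channel ... */

    /* Otherwise re-queue the vchan for the tasklet to pick up. */
    spin_lock(&d->lock);
    list_add_tail(&c->node, &d->chan_pending);
    spin_unlock(&d->lock);

    return 0;
}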
829 struct k3_dma_dev *d = ofdma->of_dma_data; in k3_of_dma_simple_xlate() local
832 if (request >= d->dma_requests) in k3_of_dma_simple_xlate()
835 return dma_get_slave_channel(&(d->chans[request].vc.chan)); in k3_of_dma_simple_xlate()
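The translate callback maps a one-cell device-tree DMA specifier to a virtual channel: the request number indexes d->chans after a bounds check against d->dma_requests. A minimal sketch, assuming the specifier carries the request number in args[0]:

static struct dma_chan *k3_of_dma_simple_xlate(struct of_phandle_args *dma_spec,
                                               struct of_dma *ofdma)
{
    struct k3_dma_dev *d = ofdma->of_dma_data;
    unsigned int request = dma_spec->args[0];

    if (request >= d->dma_requests)
        return NULL;        /* request line out of range */

    return dma_get_slave_channel(&(d->chans[request].vc.chan));
}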
841 struct k3_dma_dev *d; in k3_dma_probe() local
845 d = devm_kzalloc(&op->dev, sizeof(*d), GFP_KERNEL); in k3_dma_probe()
846 if (!d) in k3_dma_probe()
853 d->base = devm_platform_ioremap_resource(op, 0); in k3_dma_probe()
854 if (IS_ERR(d->base)) in k3_dma_probe()
855 return PTR_ERR(d->base); in k3_dma_probe()
860 "dma-channels", &d->dma_channels); in k3_dma_probe()
862 "dma-requests", &d->dma_requests); in k3_dma_probe()
864 "dma-channel-mask", &d->dma_channel_mask); in k3_dma_probe()
868 d->dma_channel_mask = (u32)~0UL; in k3_dma_probe()
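The mask assignment above is the fallback for an optional device-tree property: when "dma-channel-mask" is absent, every channel is treated as usable. A sketch of that read-with-fallback fragment from probe, assuming the usual of_property_read_u32() pattern:

ret = of_property_read_u32(op->dev.of_node, "dma-channel-mask",
                           &d->dma_channel_mask);
if (ret) {
    /* Property missing: assume all physical channels may be used. */
    d->dma_channel_mask = (u32)~0UL;
}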
873 d->clk = devm_clk_get(&op->dev, NULL); in k3_dma_probe()
874 if (IS_ERR(d->clk)) { in k3_dma_probe()
876 return PTR_ERR(d->clk); in k3_dma_probe()
882 k3_dma_int_handler, 0, DRIVER_NAME, d); in k3_dma_probe()
886 d->irq = irq; in k3_dma_probe()
889 d->pool = dmam_pool_create(DRIVER_NAME, &op->dev, in k3_dma_probe()
891 if (!d->pool) in k3_dma_probe()
895 d->phy = devm_kcalloc(&op->dev, in k3_dma_probe()
896 d->dma_channels, sizeof(struct k3_dma_phy), GFP_KERNEL); in k3_dma_probe()
897 if (d->phy == NULL) in k3_dma_probe()
900 for (i = 0; i < d->dma_channels; i++) { in k3_dma_probe()
903 if (!(d->dma_channel_mask & BIT(i))) in k3_dma_probe()
906 p = &d->phy[i]; in k3_dma_probe()
908 p->base = d->base + i * 0x40; in k3_dma_probe()
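The p->base assignment fixes the register layout: each physical channel owns a 0x40-byte window inside the controller's MMIO region, and channels cleared in dma_channel_mask are never initialised. A sketch of the loop:

for (i = 0; i < d->dma_channels; i++) {
    struct k3_dma_phy *p;

    if (!(d->dma_channel_mask & BIT(i)))
        continue;          /* channel not available on this platform */

    p = &d->phy[i];
    p->idx = i;
    p->base = d->base + i * 0x40;    /* 0x40 bytes of registers per channel */
}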
911 INIT_LIST_HEAD(&d->slave.channels); in k3_dma_probe()
912 dma_cap_set(DMA_SLAVE, d->slave.cap_mask); in k3_dma_probe()
913 dma_cap_set(DMA_MEMCPY, d->slave.cap_mask); in k3_dma_probe()
914 dma_cap_set(DMA_CYCLIC, d->slave.cap_mask); in k3_dma_probe()
915 d->slave.dev = &op->dev; in k3_dma_probe()
916 d->slave.device_free_chan_resources = k3_dma_free_chan_resources; in k3_dma_probe()
917 d->slave.device_tx_status = k3_dma_tx_status; in k3_dma_probe()
918 d->slave.device_prep_dma_memcpy = k3_dma_prep_memcpy; in k3_dma_probe()
919 d->slave.device_prep_slave_sg = k3_dma_prep_slave_sg; in k3_dma_probe()
920 d->slave.device_prep_dma_cyclic = k3_dma_prep_dma_cyclic; in k3_dma_probe()
921 d->slave.device_issue_pending = k3_dma_issue_pending; in k3_dma_probe()
922 d->slave.device_config = k3_dma_config; in k3_dma_probe()
923 d->slave.device_pause = k3_dma_transfer_pause; in k3_dma_probe()
924 d->slave.device_resume = k3_dma_transfer_resume; in k3_dma_probe()
925 d->slave.device_terminate_all = k3_dma_terminate_all; in k3_dma_probe()
926 d->slave.device_synchronize = k3_dma_synchronize; in k3_dma_probe()
927 d->slave.copy_align = DMAENGINE_ALIGN_8_BYTES; in k3_dma_probe()
930 d->chans = devm_kcalloc(&op->dev, in k3_dma_probe()
931 d->dma_requests, sizeof(struct k3_dma_chan), GFP_KERNEL); in k3_dma_probe()
932 if (d->chans == NULL) in k3_dma_probe()
935 for (i = 0; i < d->dma_requests; i++) { in k3_dma_probe()
936 struct k3_dma_chan *c = &d->chans[i]; in k3_dma_probe()
941 vchan_init(&c->vc, &d->slave); in k3_dma_probe()
945 ret = clk_prepare_enable(d->clk); in k3_dma_probe()
951 k3_dma_enable_dma(d, true); in k3_dma_probe()
953 ret = dma_async_device_register(&d->slave); in k3_dma_probe()
958 k3_of_dma_simple_xlate, d); in k3_dma_probe()
962 spin_lock_init(&d->lock); in k3_dma_probe()
963 INIT_LIST_HEAD(&d->chan_pending); in k3_dma_probe()
964 tasklet_setup(&d->task, k3_dma_tasklet); in k3_dma_probe()
965 platform_set_drvdata(op, d); in k3_dma_probe()
971 dma_async_device_unregister(&d->slave); in k3_dma_probe()
973 clk_disable_unprepare(d->clk); in k3_dma_probe()
980 struct k3_dma_dev *d = platform_get_drvdata(op); in k3_dma_remove() local
982 dma_async_device_unregister(&d->slave); in k3_dma_remove()
985 devm_free_irq(&op->dev, d->irq, d); in k3_dma_remove()
987 list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) { in k3_dma_remove()
991 tasklet_kill(&d->task); in k3_dma_remove()
992 clk_disable_unprepare(d->clk); in k3_dma_remove()
999 struct k3_dma_dev *d = dev_get_drvdata(dev); in k3_dma_suspend_dev() local
1002 stat = k3_dma_get_chan_stat(d); in k3_dma_suspend_dev()
1004 dev_warn(d->slave.dev, in k3_dma_suspend_dev()
1008 k3_dma_enable_dma(d, false); in k3_dma_suspend_dev()
1009 clk_disable_unprepare(d->clk); in k3_dma_suspend_dev()
1015 struct k3_dma_dev *d = dev_get_drvdata(dev); in k3_dma_resume_dev() local
1018 ret = clk_prepare_enable(d->clk); in k3_dma_resume_dev()
1020 dev_err(d->slave.dev, "clk_prepare_enable failed: %d\n", ret); in k3_dma_resume_dev()
1023 k3_dma_enable_dma(d, true); in k3_dma_resume_dev()
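The PM callbacks bracket the clock: suspend refuses to proceed while CH_STAT still reports busy channels, then masks the interrupts and gates the clock; resume re-enables the clock before unmasking. A sketch reconstructed from the lines above; the warning text and return codes are assumptions:

static int k3_dma_suspend_dev(struct device *dev)
{
    struct k3_dma_dev *d = dev_get_drvdata(dev);
    u32 stat = k3_dma_get_chan_stat(d);

    if (stat) {
        /* Refuse to suspend while any channel is still running. */
        dev_warn(d->slave.dev,
                 "channels still busy (stat 0x%x), refusing to suspend\n",
                 stat);
        return -EBUSY;        /* assumed return code */
    }
    k3_dma_enable_dma(d, false);
    clk_disable_unprepare(d->clk);
    return 0;
}

static int k3_dma_resume_dev(struct device *dev)
{
    struct k3_dma_dev *d = dev_get_drvdata(dev);
    int ret;

    ret = clk_prepare_enable(d->clk);
    if (ret < 0) {
        dev_err(d->slave.dev, "clk_prepare_enable failed: %d\n", ret);
        return ret;
    }
    k3_dma_enable_dma(d, true);
    return 0;
}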