Lines matching refs: fsl_chan
46 void fsl_edma_tx_chan_handler(struct fsl_edma_chan *fsl_chan) in fsl_edma_tx_chan_handler() argument
48 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
50 if (!fsl_chan->edesc) { in fsl_edma_tx_chan_handler()
52 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
56 if (!fsl_chan->edesc->iscyclic) { in fsl_edma_tx_chan_handler()
57 list_del(&fsl_chan->edesc->vdesc.node); in fsl_edma_tx_chan_handler()
58 vchan_cookie_complete(&fsl_chan->edesc->vdesc); in fsl_edma_tx_chan_handler()
59 fsl_chan->edesc = NULL; in fsl_edma_tx_chan_handler()
60 fsl_chan->status = DMA_COMPLETE; in fsl_edma_tx_chan_handler()
61 fsl_chan->idle = true; in fsl_edma_tx_chan_handler()
63 vchan_cyclic_callback(&fsl_chan->edesc->vdesc); in fsl_edma_tx_chan_handler()
66 if (!fsl_chan->edesc) in fsl_edma_tx_chan_handler()
67 fsl_edma_xfer_desc(fsl_chan); in fsl_edma_tx_chan_handler()
69 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_chan_handler()
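Note: the handler runs under the virtual-channel lock and returns early if no descriptor is active. A finished non-cyclic descriptor is unlinked and completed through vchan_cookie_complete(), leaving the channel DMA_COMPLETE and idle; a cyclic descriptor stays installed and only signals the client via vchan_cyclic_callback(). If completion left the channel empty, fsl_edma_xfer_desc() immediately starts the next queued descriptor.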
72 static void fsl_edma3_enable_request(struct fsl_edma_chan *fsl_chan) in fsl_edma3_enable_request() argument
76 flags = fsl_edma_drvflags(fsl_chan); in fsl_edma3_enable_request()
77 val = edma_readl_chreg(fsl_chan, ch_sbr); in fsl_edma3_enable_request()
78 if (fsl_chan->is_rxchan) in fsl_edma3_enable_request()
83 if (fsl_chan->is_remote) in fsl_edma3_enable_request()
86 edma_writel_chreg(fsl_chan, val, ch_sbr); in fsl_edma3_enable_request()
93 if (!edma_readl(fsl_chan->edma, fsl_chan->mux_addr)) in fsl_edma3_enable_request()
94 edma_writel(fsl_chan->edma, fsl_chan->srcid, fsl_chan->mux_addr); in fsl_edma3_enable_request()
97 val = edma_readl_chreg(fsl_chan, ch_csr); in fsl_edma3_enable_request()
99 edma_writel_chreg(fsl_chan, val, ch_csr); in fsl_edma3_enable_request()
102 static void fsl_edma_enable_request(struct fsl_edma_chan *fsl_chan) in fsl_edma_enable_request() argument
104 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_enable_request()
105 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
107 if (fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_SPLIT_REG) in fsl_edma_enable_request()
108 return fsl_edma3_enable_request(fsl_chan); in fsl_edma_enable_request()
110 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_WRAP_IO) { in fsl_edma_enable_request()
111 edma_writeb(fsl_chan->edma, EDMA_SEEI_SEEI(ch), regs->seei); in fsl_edma_enable_request()
112 edma_writeb(fsl_chan->edma, ch, regs->serq); in fsl_edma_enable_request()
122 static void fsl_edma3_disable_request(struct fsl_edma_chan *fsl_chan) in fsl_edma3_disable_request() argument
124 u32 val = edma_readl_chreg(fsl_chan, ch_csr); in fsl_edma3_disable_request()
127 flags = fsl_edma_drvflags(fsl_chan); in fsl_edma3_disable_request()
130 edma_writel(fsl_chan->edma, 0, fsl_chan->mux_addr); in fsl_edma3_disable_request()
133 edma_writel_chreg(fsl_chan, val, ch_csr); in fsl_edma3_disable_request()
136 void fsl_edma_disable_request(struct fsl_edma_chan *fsl_chan) in fsl_edma_disable_request() argument
138 struct edma_regs *regs = &fsl_chan->edma->regs; in fsl_edma_disable_request()
139 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
141 if (fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_SPLIT_REG) in fsl_edma_disable_request()
142 return fsl_edma3_disable_request(fsl_chan); in fsl_edma_disable_request()
144 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_WRAP_IO) { in fsl_edma_disable_request()
145 edma_writeb(fsl_chan->edma, ch, regs->cerq); in fsl_edma_disable_request()
146 edma_writeb(fsl_chan->edma, EDMA_CEEI_CEEI(ch), regs->ceei); in fsl_edma_disable_request()
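Note: enable and disable are mirror images across two register layouts. On FSL_EDMA_DRV_SPLIT_REG hardware (eDMA3) the per-channel ch_sbr/ch_csr registers are programmed directly, and the channel mux is written only if it is not already set; on the legacy layout the shared SEEI/SERQ (enable) and CERQ/CEEI (disable) registers are indexed by channel id, via the edma_writeb() wrappers when FSL_EDMA_DRV_WRAP_IO is set (the non-wrapped path is elided in this listing).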
156 static void mux_configure8(struct fsl_edma_chan *fsl_chan, void __iomem *addr, in mux_configure8() argument
169 static void mux_configure32(struct fsl_edma_chan *fsl_chan, void __iomem *addr, in mux_configure32() argument
182 void fsl_edma_chan_mux(struct fsl_edma_chan *fsl_chan, in fsl_edma_chan_mux() argument
185 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
189 u32 dmamux_nr = fsl_chan->edma->drvdata->dmamuxs; in fsl_edma_chan_mux()
194 chans_per_mux = fsl_chan->edma->n_chans / dmamux_nr; in fsl_edma_chan_mux()
195 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
197 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_MUX_SWAP) in fsl_edma_chan_mux()
200 muxaddr = fsl_chan->edma->muxbase[ch / chans_per_mux]; in fsl_edma_chan_mux()
203 if (fsl_chan->edma->drvdata->flags & FSL_EDMA_DRV_CONFIG32) in fsl_edma_chan_mux()
204 mux_configure32(fsl_chan, muxaddr, ch_off, slot, enable); in fsl_edma_chan_mux()
206 mux_configure8(fsl_chan, muxaddr, ch_off, slot, enable); in fsl_edma_chan_mux()
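As a worked example of the mux arithmetic above, assuming n_chans = 32 and dmamux_nr = 2 (values not shown in this listing): chans_per_mux = 32 / 2 = 16, so channel 20 gives ch_off = 20 % 16 = 4 and is programmed through muxbase[20 / 16] = muxbase[1]. FSL_EDMA_DRV_CONFIG32 then selects the 32-bit register accessor (mux_configure32) over the default 8-bit one.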
234 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_terminate_all() local
238 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
239 fsl_edma_disable_request(fsl_chan); in fsl_edma_terminate_all()
240 fsl_chan->edesc = NULL; in fsl_edma_terminate_all()
241 fsl_chan->idle = true; in fsl_edma_terminate_all()
242 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
243 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
244 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
246 if (fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_HAS_PD) in fsl_edma_terminate_all()
247 pm_runtime_allow(fsl_chan->pd_dev); in fsl_edma_terminate_all()
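Note: termination follows the usual virt-dma pattern: the hardware request is disabled and the descriptor list is detached under the channel lock, while vchan_dma_desc_free_list() runs only after the lock is dropped, keeping descriptor freeing out of the spinlock-held section. Channels in a power domain (FSL_EDMA_DRV_HAS_PD) let the domain runtime-suspend again. Clients reach this path through the standard framework call, e.g.:

	/* hedged client-side sketch; chan is a previously requested channel */
	dmaengine_terminate_sync(chan);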
254 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_pause() local
257 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
258 if (fsl_chan->edesc) { in fsl_edma_pause()
259 fsl_edma_disable_request(fsl_chan); in fsl_edma_pause()
260 fsl_chan->status = DMA_PAUSED; in fsl_edma_pause()
261 fsl_chan->idle = true; in fsl_edma_pause()
263 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
269 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_resume() local
272 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
273 if (fsl_chan->edesc) { in fsl_edma_resume()
274 fsl_edma_enable_request(fsl_chan); in fsl_edma_resume()
275 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_resume()
276 fsl_chan->idle = false; in fsl_edma_resume()
278 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_resume()
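Note: pause and resume only gate the hardware request; fsl_chan->edesc is left in place, so a paused transfer resumes from the TCD state the engine already holds. Both operations are no-ops when no descriptor is active.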
282 static void fsl_edma_unprep_slave_dma(struct fsl_edma_chan *fsl_chan) in fsl_edma_unprep_slave_dma() argument
284 if (fsl_chan->dma_dir != DMA_NONE) in fsl_edma_unprep_slave_dma()
285 dma_unmap_resource(fsl_chan->vchan.chan.device->dev, in fsl_edma_unprep_slave_dma()
286 fsl_chan->dma_dev_addr, in fsl_edma_unprep_slave_dma()
287 fsl_chan->dma_dev_size, in fsl_edma_unprep_slave_dma()
288 fsl_chan->dma_dir, 0); in fsl_edma_unprep_slave_dma()
289 fsl_chan->dma_dir = DMA_NONE; in fsl_edma_unprep_slave_dma()
292 static bool fsl_edma_prep_slave_dma(struct fsl_edma_chan *fsl_chan, in fsl_edma_prep_slave_dma() argument
295 struct device *dev = fsl_chan->vchan.chan.device->dev; in fsl_edma_prep_slave_dma()
303 addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_slave_dma()
304 size = fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_dma()
308 addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_slave_dma()
309 size = fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_dma()
317 if (fsl_chan->dma_dir == dma_dir) in fsl_edma_prep_slave_dma()
320 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_prep_slave_dma()
322 fsl_chan->dma_dev_addr = dma_map_resource(dev, addr, size, dma_dir, 0); in fsl_edma_prep_slave_dma()
323 if (dma_mapping_error(dev, fsl_chan->dma_dev_addr)) in fsl_edma_prep_slave_dma()
325 fsl_chan->dma_dev_size = size; in fsl_edma_prep_slave_dma()
326 fsl_chan->dma_dir = dma_dir; in fsl_edma_prep_slave_dma()
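Note: the device FIFO address is mapped once with dma_map_resource() and cached in dma_dev_addr/dma_dev_size/dma_dir; later preparations in the same direction reuse the mapping, and a direction change first tears the old one down through fsl_edma_unprep_slave_dma().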
334 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_slave_config() local
336 memcpy(&fsl_chan->cfg, cfg, sizeof(*cfg)); in fsl_edma_slave_config()
337 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_slave_config()
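fsl_edma_slave_config() simply snapshots the client's dma_slave_config and invalidates any cached FIFO mapping. A minimal, hedged sketch of the client side supplying that configuration (the FIFO address and bus width are hypothetical; dmaengine_slave_config() is the standard framework call that lands here):

	#include <linux/dmaengine.h>

	static int my_setup_rx(struct dma_chan *chan, dma_addr_t fifo_phys)
	{
		struct dma_slave_config cfg = {
			.direction	= DMA_DEV_TO_MEM,
			.src_addr	= fifo_phys,	/* hypothetical device FIFO */
			.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
			.src_maxburst	= 4,
		};

		return dmaengine_slave_config(chan, &cfg);
	}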
342 static size_t fsl_edma_desc_residue(struct fsl_edma_chan *fsl_chan, in fsl_edma_desc_residue() argument
345 struct fsl_edma_desc *edesc = fsl_chan->edesc; in fsl_edma_desc_residue()
353 for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++) { in fsl_edma_desc_residue()
364 cur_addr = edma_read_tcdreg(fsl_chan, saddr); in fsl_edma_desc_residue()
366 cur_addr = edma_read_tcdreg(fsl_chan, daddr); in fsl_edma_desc_residue()
369 for (i = 0; i < fsl_chan->edesc->n_tcds; i++) { in fsl_edma_desc_residue()
394 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_tx_status() local
404 return fsl_chan->status; in fsl_edma_tx_status()
406 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
407 vdesc = vchan_find_desc(&fsl_chan->vchan, cookie); in fsl_edma_tx_status()
408 if (fsl_chan->edesc && cookie == fsl_chan->edesc->vdesc.tx.cookie) in fsl_edma_tx_status()
410 fsl_edma_desc_residue(fsl_chan, vdesc, true); in fsl_edma_tx_status()
413 fsl_edma_desc_residue(fsl_chan, vdesc, false); in fsl_edma_tx_status()
417 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_tx_status()
419 return fsl_chan->status; in fsl_edma_tx_status()
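Note: status reporting distinguishes three cases: an already-completed cookie returns early (check elided above); a cookie matching the active descriptor computes residue against the live TCD registers (in_progress = true, reading saddr or daddr depending on direction); a cookie still on the queue is reported with its full untransferred length (in_progress = false).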
422 static void fsl_edma_set_tcd_regs(struct fsl_edma_chan *fsl_chan, in fsl_edma_set_tcd_regs() argument
433 edma_write_tcdreg(fsl_chan, 0, csr); in fsl_edma_set_tcd_regs()
435 edma_write_tcdreg(fsl_chan, tcd->saddr, saddr); in fsl_edma_set_tcd_regs()
436 edma_write_tcdreg(fsl_chan, tcd->daddr, daddr); in fsl_edma_set_tcd_regs()
438 edma_write_tcdreg(fsl_chan, tcd->attr, attr); in fsl_edma_set_tcd_regs()
439 edma_write_tcdreg(fsl_chan, tcd->soff, soff); in fsl_edma_set_tcd_regs()
441 edma_write_tcdreg(fsl_chan, tcd->nbytes, nbytes); in fsl_edma_set_tcd_regs()
442 edma_write_tcdreg(fsl_chan, tcd->slast, slast); in fsl_edma_set_tcd_regs()
444 edma_write_tcdreg(fsl_chan, tcd->citer, citer); in fsl_edma_set_tcd_regs()
445 edma_write_tcdreg(fsl_chan, tcd->biter, biter); in fsl_edma_set_tcd_regs()
446 edma_write_tcdreg(fsl_chan, tcd->doff, doff); in fsl_edma_set_tcd_regs()
448 edma_write_tcdreg(fsl_chan, tcd->dlast_sga, dlast_sga); in fsl_edma_set_tcd_regs()
452 if (fsl_chan->is_sw) { in fsl_edma_set_tcd_regs()
462 if (((fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_CLEAR_DONE_E_SG) && in fsl_edma_set_tcd_regs()
464 ((fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_CLEAR_DONE_E_LINK) && in fsl_edma_set_tcd_regs()
466 edma_writel_chreg(fsl_chan, edma_readl_chreg(fsl_chan, ch_csr), ch_csr); in fsl_edma_set_tcd_regs()
469 edma_write_tcdreg(fsl_chan, tcd->csr, csr); in fsl_edma_set_tcd_regs()
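Note the write ordering: csr is cleared first and written last, so a stale ENABLE/START bit cannot launch the engine off a half-programmed TCD. On parts flagged FSL_EDMA_DRV_CLEAR_DONE_E_SG / _E_LINK the channel CSR is read back and rewritten to clear the DONE bit beforehand, since (per the driver's intent) the engine will not load a scatter-gather or linked TCD while DONE is still set.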
473 void fsl_edma_fill_tcd(struct fsl_edma_chan *fsl_chan, in fsl_edma_fill_tcd() argument
479 struct dma_slave_config *cfg = &fsl_chan->cfg; in fsl_edma_fill_tcd()
496 if (fsl_chan->is_multi_fifo) { in fsl_edma_fill_tcd()
529 if (fsl_chan->is_rxchan) in fsl_edma_fill_tcd()
532 if (fsl_chan->is_sw) in fsl_edma_fill_tcd()
538 static struct fsl_edma_desc *fsl_edma_alloc_desc(struct fsl_edma_chan *fsl_chan, in fsl_edma_alloc_desc() argument
548 fsl_desc->echan = fsl_chan; in fsl_edma_alloc_desc()
551 fsl_desc->tcd[i].vtcd = dma_pool_alloc(fsl_chan->tcd_pool, in fsl_edma_alloc_desc()
560 dma_pool_free(fsl_chan->tcd_pool, fsl_desc->tcd[i].vtcd, in fsl_edma_alloc_desc()
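Note: fsl_edma_alloc_desc() draws one hardware TCD per segment from the channel's dma_pool and, on allocation failure, unwinds by returning the already-allocated TCDs to the pool (the dma_pool_free() above) before freeing the descriptor.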
571 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_dma_cyclic() local
582 if (!fsl_edma_prep_slave_dma(fsl_chan, direction)) in fsl_edma_prep_dma_cyclic()
586 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_dma_cyclic()
594 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
595 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_dma_cyclic()
596 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_dma_cyclic()
597 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_dma_cyclic()
599 fsl_chan->attr = in fsl_edma_prep_dma_cyclic()
600 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_dma_cyclic()
601 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_dma_cyclic()
602 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_dma_cyclic()
616 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
617 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_dma_cyclic()
618 doff = fsl_chan->is_multi_fifo ? 4 : 0; in fsl_edma_prep_dma_cyclic()
620 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_dma_cyclic()
622 soff = fsl_chan->is_multi_fifo ? 4 : 0; in fsl_edma_prep_dma_cyclic()
623 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_dma_cyclic()
626 src_addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_dma_cyclic()
627 dst_addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_dma_cyclic()
632 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr, dst_addr, in fsl_edma_prep_dma_cyclic()
633 fsl_chan->attr, soff, nbytes, 0, iter, in fsl_edma_prep_dma_cyclic()
638 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_dma_cyclic()
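Each period of the cyclic buffer becomes one TCD: nbytes is addr_width * maxburst, and soff/doff are chosen so the device-side address stays fixed, or strides by 4 bytes on multi-FIFO channels. A hedged sketch of the client call that reaches this path (buffer layout hypothetical):

	/* 4 periods of 256 bytes each; DMA_PREP_INTERRUPT requests a
	 * per-period callback delivered via vchan_cyclic_callback() */
	static struct dma_async_tx_descriptor *
	my_prep_cyclic_rx(struct dma_chan *chan, dma_addr_t buf)
	{
		return dmaengine_prep_dma_cyclic(chan, buf, 4 * 256, 256,
						 DMA_DEV_TO_MEM,
						 DMA_PREP_INTERRUPT);
	}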
646 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_slave_sg() local
656 if (!fsl_edma_prep_slave_dma(fsl_chan, direction)) in fsl_edma_prep_slave_sg()
659 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_slave_sg()
666 fsl_chan->attr = in fsl_edma_prep_slave_sg()
667 fsl_edma_get_tcd_attr(fsl_chan->cfg.dst_addr_width); in fsl_edma_prep_slave_sg()
668 nbytes = fsl_chan->cfg.dst_addr_width * in fsl_edma_prep_slave_sg()
669 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_sg()
671 fsl_chan->attr = in fsl_edma_prep_slave_sg()
672 fsl_edma_get_tcd_attr(fsl_chan->cfg.src_addr_width); in fsl_edma_prep_slave_sg()
673 nbytes = fsl_chan->cfg.src_addr_width * in fsl_edma_prep_slave_sg()
674 fsl_chan->cfg.src_maxburst; in fsl_edma_prep_slave_sg()
680 dst_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
681 soff = fsl_chan->cfg.dst_addr_width; in fsl_edma_prep_slave_sg()
684 src_addr = fsl_chan->dma_dev_addr; in fsl_edma_prep_slave_sg()
687 doff = fsl_chan->cfg.src_addr_width; in fsl_edma_prep_slave_sg()
690 src_addr = fsl_chan->cfg.src_addr; in fsl_edma_prep_slave_sg()
691 dst_addr = fsl_chan->cfg.dst_addr; in fsl_edma_prep_slave_sg()
704 fsl_chan->cfg.src_maxburst : in fsl_edma_prep_slave_sg()
705 fsl_chan->cfg.dst_maxburst; in fsl_edma_prep_slave_sg()
721 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr, in fsl_edma_prep_slave_sg()
722 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
727 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[i].vtcd, src_addr, in fsl_edma_prep_slave_sg()
728 dst_addr, fsl_chan->attr, soff, in fsl_edma_prep_slave_sg()
734 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_slave_sg()
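Note: scatter-gather preparation mirrors the cyclic path but chains the per-entry TCDs in software: the two fsl_edma_fill_tcd() calls above correspond to non-final entries (linked to the next TCD) and the final entry, which terminates the chain; the elided lines appear to derive each entry's iteration count from its segment length and to accumulate the total size for residue reporting.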
741 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_prep_memcpy() local
744 fsl_desc = fsl_edma_alloc_desc(fsl_chan, 1); in fsl_edma_prep_memcpy()
749 fsl_chan->is_sw = true; in fsl_edma_prep_memcpy()
750 if (fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_MEM_REMOTE) in fsl_edma_prep_memcpy()
751 fsl_chan->is_remote = true; in fsl_edma_prep_memcpy()
754 fsl_edma_fill_tcd(fsl_chan, fsl_desc->tcd[0].vtcd, dma_src, dma_dst, in fsl_edma_prep_memcpy()
758 return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags); in fsl_edma_prep_memcpy()
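Note: memcpy builds a single TCD and sets is_sw, marking the transfer as software-triggered (the is_sw branch in fsl_edma_set_tcd_regs() above, body elided, presumably starts it without a hardware request); FSL_EDMA_DRV_MEM_REMOTE additionally flags remote memory. A hedged client sketch (addresses hypothetical):

	/* standard framework wrapper that reaches fsl_edma_prep_memcpy() */
	static struct dma_async_tx_descriptor *
	my_prep_copy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
		     size_t len)
	{
		return dmaengine_prep_dma_memcpy(chan, dst, src, len,
						 DMA_PREP_INTERRUPT);
	}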
761 void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan) in fsl_edma_xfer_desc() argument
765 lockdep_assert_held(&fsl_chan->vchan.lock); in fsl_edma_xfer_desc()
767 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_edma_xfer_desc()
770 fsl_chan->edesc = to_fsl_edma_desc(vdesc); in fsl_edma_xfer_desc()
771 fsl_edma_set_tcd_regs(fsl_chan, fsl_chan->edesc->tcd[0].vtcd); in fsl_edma_xfer_desc()
772 fsl_edma_enable_request(fsl_chan); in fsl_edma_xfer_desc()
773 fsl_chan->status = DMA_IN_PROGRESS; in fsl_edma_xfer_desc()
774 fsl_chan->idle = false; in fsl_edma_xfer_desc()
779 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_issue_pending() local
782 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
784 if (unlikely(fsl_chan->pm_state != RUNNING)) { in fsl_edma_issue_pending()
785 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
790 if (vchan_issue_pending(&fsl_chan->vchan) && !fsl_chan->edesc) in fsl_edma_issue_pending()
791 fsl_edma_xfer_desc(fsl_chan); in fsl_edma_issue_pending()
793 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_issue_pending()
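Note: issue_pending moves submitted descriptors to the issued list and, if the channel is idle (no fsl_chan->edesc) and not suspended (the pm_state check), starts the first one through fsl_edma_xfer_desc(), which programs the TCD and enables the request under the channel lock. The usual client sequence driving this, sketched with hypothetical buffer and callback names:

	static int my_start_rx(struct dma_chan *chan, dma_addr_t buf,
			       size_t len, struct completion *done)
	{
		struct dma_async_tx_descriptor *desc;
		dma_cookie_t cookie;

		desc = dmaengine_prep_slave_single(chan, buf, len,
						   DMA_DEV_TO_MEM,
						   DMA_PREP_INTERRUPT);
		if (!desc)
			return -ENOMEM;

		desc->callback = my_dma_done;	/* hypothetical completion callback */
		desc->callback_param = done;

		cookie = dmaengine_submit(desc);
		if (dma_submit_error(cookie))
			return -EIO;

		/* lands in fsl_edma_issue_pending() above */
		dma_async_issue_pending(chan);
		return 0;
	}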
798 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_alloc_chan_resources() local
800 if (fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_HAS_CHCLK) in fsl_edma_alloc_chan_resources()
801 clk_prepare_enable(fsl_chan->clk); in fsl_edma_alloc_chan_resources()
803 fsl_chan->tcd_pool = dma_pool_create("tcd_pool", chan->device->dev, in fsl_edma_alloc_chan_resources()
811 struct fsl_edma_chan *fsl_chan = to_fsl_edma_chan(chan); in fsl_edma_free_chan_resources() local
812 struct fsl_edma_engine *edma = fsl_chan->edma; in fsl_edma_free_chan_resources()
816 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
817 fsl_edma_disable_request(fsl_chan); in fsl_edma_free_chan_resources()
819 fsl_edma_chan_mux(fsl_chan, 0, false); in fsl_edma_free_chan_resources()
820 fsl_chan->edesc = NULL; in fsl_edma_free_chan_resources()
821 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
822 fsl_edma_unprep_slave_dma(fsl_chan); in fsl_edma_free_chan_resources()
823 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_free_chan_resources()
825 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_free_chan_resources()
826 dma_pool_destroy(fsl_chan->tcd_pool); in fsl_edma_free_chan_resources()
827 fsl_chan->tcd_pool = NULL; in fsl_edma_free_chan_resources()
828 fsl_chan->is_sw = false; in fsl_edma_free_chan_resources()
829 fsl_chan->srcid = 0; in fsl_edma_free_chan_resources()
830 fsl_chan->is_remote = false; in fsl_edma_free_chan_resources()
831 if (fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_HAS_CHCLK) in fsl_edma_free_chan_resources()
832 clk_disable_unprepare(fsl_chan->clk); in fsl_edma_free_chan_resources()
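Note: channel teardown reverses allocation: the request is disabled, the mux slot is cleared (behind an elided conditional), pending descriptors are detached under the lock and freed outside it, the TCD dma_pool created in fsl_edma_alloc_chan_resources() is destroyed, the software-transfer and remote flags and the source id are reset, and the optional per-channel clock (FSL_EDMA_DRV_HAS_CHCLK) is released.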