Lines matching full:pd in drivers/dma/pch_dma.c
132 #define dma_readl(pd, name) \ argument
133 readl((pd)->membase + PCH_DMA_##name)
134 #define dma_writel(pd, name, val) \ argument
135 writel((val), (pd)->membase + PCH_DMA_##name)
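The two accessor macros above use the ## token-paste operator, so the register name given at the call site selects a PCH_DMA_* offset at compile time. A minimal expansion sketch; the offset values here are illustrative assumptions, not the hardware's documented layout:

/* Illustrative offsets only; the real PCH_DMA_* constants are
 * defined elsewhere in pch_dma.c. */
#define PCH_DMA_CTL0	0x00
#define PCH_DMA_STS0	0x10

/* dma_readl(pd, CTL0) token-pastes to:
 *	readl((pd)->membase + PCH_DMA_CTL0)
 * i.e. a 32-bit MMIO read at membase + 0x00; dma_writel() is the
 * matching 32-bit MMIO write. */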
179 struct pch_dma *pd = to_pd(chan->device); in pdc_enable_irq() local
188 val = dma_readl(pd, CTL2); in pdc_enable_irq()
195 dma_writel(pd, CTL2, val); in pdc_enable_irq()
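pdc_enable_irq() is a read-modify-write on the shared CTL2 word: read the current enable mask, flip one channel's bit, write the whole word back. A sketch reusing the accessors above, assuming one enable bit per channel at bit position chan_id (the real driver may offset the position for higher-numbered channels):

#include <linux/bits.h>

/* Sketch: toggle one channel's interrupt enable in CTL2. */
static void pdc_enable_irq_sketch(struct pch_dma *pd, int chan_id, int enable)
{
	u32 val = dma_readl(pd, CTL2);	/* current enable mask */

	if (enable)
		val |= BIT(chan_id);	/* set this channel's bit */
	else
		val &= ~BIT(chan_id);	/* clear it */

	dma_writel(pd, CTL2, val);	/* write the whole word back */
}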
204 struct pch_dma *pd = to_pd(chan->device); in pdc_set_dir() local
210 val = dma_readl(pd, CTL0); in pdc_set_dir()
225 dma_writel(pd, CTL0, val); in pdc_set_dir()
228 val = dma_readl(pd, CTL3); in pdc_set_dir()
242 dma_writel(pd, CTL3, val); in pdc_set_dir()
251 struct pch_dma *pd = to_pd(chan->device); in pdc_set_mode() local
261 val = dma_readl(pd, CTL0); in pdc_set_mode()
265 dma_writel(pd, CTL0, val); in pdc_set_mode()
272 val = dma_readl(pd, CTL3); in pdc_set_mode()
276 dma_writel(pd, CTL3, val); in pdc_set_mode()
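pdc_set_dir() and pdc_set_mode() both update a per-channel field inside a shared control word, CTL0 for low-numbered channels and CTL3 for the rest, using a clear-then-set sequence so neighboring channels' fields survive. A sketch with an assumed field width and per-channel stride:

#include <linux/bits.h>

/* Sketch: install a per-channel mode field in CTL0. The 4-bit field
 * width and the stride are illustrative assumptions, not the
 * documented register layout. */
#define CH_FIELD_BITS	4
#define CH_FIELD_MASK	GENMASK(CH_FIELD_BITS - 1, 0)

static void pdc_set_field_sketch(struct pch_dma *pd, int chan_id, u32 mode)
{
	int shift = chan_id * CH_FIELD_BITS;
	u32 val = dma_readl(pd, CTL0);

	val &= ~(CH_FIELD_MASK << shift);		/* clear the old field */
	val |= (mode & CH_FIELD_MASK) << shift;		/* install the new one */
	dma_writel(pd, CTL0, val);
}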
285 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status0() local
288 val = dma_readl(pd, STS0); in pdc_get_status0()
295 struct pch_dma *pd = to_pd(pd_chan->chan.device); in pdc_get_status2() local
298 val = dma_readl(pd, STS2); in pdc_get_status2()
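The two status getters each read one shared word (STS0 or STS2); the caller then extracts its own channel's bits. A shift-and-mask sketch, where the 2-bit field width and stride are assumptions for illustration:

/* Sketch: pull one channel's status field out of STS0. */
static u32 pdc_get_status_sketch(struct pch_dma *pd, int chan_id)
{
	u32 val = dma_readl(pd, STS0);

	return (val >> (chan_id * 2)) & 0x3;	/* assumed 2-bit field */
}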
429 struct pch_dma *pd = to_pd(chan->device); in pdc_alloc_desc() local
432 desc = dma_pool_zalloc(pd->pool, flags, &addr); in pdc_alloc_desc()
530 struct pch_dma *pd = to_pd(chan->device); in pd_free_chan_resources() local
544 dma_pool_free(pd->pool, desc, desc->txd.phys); in pd_free_chan_resources()
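Descriptors come from a coherent DMA pool: dma_pool_zalloc() returns a zeroed CPU pointer and fills in the matching bus address, and dma_pool_free() must later be handed that same bus address, which is why the driver records it in txd.phys. A minimal sketch of the pairing; pch_dma_desc_sketch is a stand-in for the driver's real descriptor type:

#include <linux/dmapool.h>

struct pch_dma_desc_sketch {
	u32 dev_addr, mem_addr, size, next;	/* hardware-visible part */
	dma_addr_t phys;			/* bus address, kept for free */
};

static struct pch_dma_desc_sketch *desc_get(struct dma_pool *pool)
{
	struct pch_dma_desc_sketch *desc;
	dma_addr_t addr;

	desc = dma_pool_zalloc(pool, GFP_NOWAIT, &addr);
	if (desc)
		desc->phys = addr;	/* dma_pool_free() needs this later */
	return desc;
}

static void desc_put(struct dma_pool *pool, struct pch_dma_desc_sketch *desc)
{
	dma_pool_free(pool, desc, desc->phys);
}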
694 struct pch_dma *pd = (struct pch_dma *)devid; in pd_irq() local
702 sts0 = dma_readl(pd, STS0); in pd_irq()
703 sts2 = dma_readl(pd, STS2); in pd_irq()
705 dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0); in pd_irq()
707 for (i = 0; i < pd->dma.chancnt; i++) { in pd_irq()
708 pd_chan = &pd->channels[i]; in pd_irq()
731 dma_writel(pd, STS0, sts0); in pd_irq()
733 dma_writel(pd, STS2, sts2); in pd_irq()
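pd_irq() is registered with IRQF_SHARED (see the probe listing below), so it must first decide from STS0/STS2 whether this device actually raised the line, service each flagged channel, and finally acknowledge by writing the latched bits back (assumed write-to-clear semantics). Condensed sketch:

#include <linux/interrupt.h>

/* Sketch of the shared-IRQ flow in pd_irq(). */
static irqreturn_t pd_irq_sketch(int irq, void *devid)
{
	struct pch_dma *pd = devid;
	u32 sts0 = dma_readl(pd, STS0);

	if (!sts0)
		return IRQ_NONE;	/* shared line, not our interrupt */

	/* ... walk pd->channels[] and kick per-channel handling ... */

	dma_writel(pd, STS0, sts0);	/* acknowledge what we saw */
	return IRQ_HANDLED;
}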
738 static void __maybe_unused pch_dma_save_regs(struct pch_dma *pd) in pch_dma_save_regs() argument
744 pd->regs.dma_ctl0 = dma_readl(pd, CTL0); in pch_dma_save_regs()
745 pd->regs.dma_ctl1 = dma_readl(pd, CTL1); in pch_dma_save_regs()
746 pd->regs.dma_ctl2 = dma_readl(pd, CTL2); in pch_dma_save_regs()
747 pd->regs.dma_ctl3 = dma_readl(pd, CTL3); in pch_dma_save_regs()
749 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_save_regs()
752 pd->ch_regs[i].dev_addr = channel_readl(pd_chan, DEV_ADDR); in pch_dma_save_regs()
753 pd->ch_regs[i].mem_addr = channel_readl(pd_chan, MEM_ADDR); in pch_dma_save_regs()
754 pd->ch_regs[i].size = channel_readl(pd_chan, SIZE); in pch_dma_save_regs()
755 pd->ch_regs[i].next = channel_readl(pd_chan, NEXT); in pch_dma_save_regs()
761 static void __maybe_unused pch_dma_restore_regs(struct pch_dma *pd) in pch_dma_restore_regs() argument
767 dma_writel(pd, CTL0, pd->regs.dma_ctl0); in pch_dma_restore_regs()
768 dma_writel(pd, CTL1, pd->regs.dma_ctl1); in pch_dma_restore_regs()
769 dma_writel(pd, CTL2, pd->regs.dma_ctl2); in pch_dma_restore_regs()
770 dma_writel(pd, CTL3, pd->regs.dma_ctl3); in pch_dma_restore_regs()
772 list_for_each_entry_safe(chan, _c, &pd->dma.channels, device_node) { in pch_dma_restore_regs()
775 channel_writel(pd_chan, DEV_ADDR, pd->ch_regs[i].dev_addr); in pch_dma_restore_regs()
776 channel_writel(pd_chan, MEM_ADDR, pd->ch_regs[i].mem_addr); in pch_dma_restore_regs()
777 channel_writel(pd_chan, SIZE, pd->ch_regs[i].size); in pch_dma_restore_regs()
778 channel_writel(pd_chan, NEXT, pd->ch_regs[i].next); in pch_dma_restore_regs()
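Together the __maybe_unused pair shadows all four global control words plus each channel's four transfer registers into struct pch_dma across a power transition. A sketch of the shadow storage these loops fill; member names follow the listing, the exact declarations are assumed:

struct pch_dma_regs_sketch {
	u32 dma_ctl0;
	u32 dma_ctl1;
	u32 dma_ctl2;
	u32 dma_ctl3;
};

struct pch_dma_chan_regs_sketch {
	u32 dev_addr;	/* device-side bus address */
	u32 mem_addr;	/* memory-side bus address */
	u32 size;	/* transfer size / control */
	u32 next;	/* next descriptor pointer */
};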
786 struct pch_dma *pd = dev_get_drvdata(dev); in pch_dma_suspend() local
788 if (pd) in pch_dma_suspend()
789 pch_dma_save_regs(pd); in pch_dma_suspend()
796 struct pch_dma *pd = dev_get_drvdata(dev); in pch_dma_resume() local
798 if (pd) in pch_dma_resume()
799 pch_dma_restore_regs(pd); in pch_dma_resume()
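Both callbacks take a struct device *, which matches the dev_pm_ops suspend/resume prototypes. A sketch of the usual wiring; whether pch_dma binds them with exactly this macro is an assumption:

#include <linux/pm.h>

static SIMPLE_DEV_PM_OPS(pch_dma_pm_ops, pch_dma_suspend, pch_dma_resume);

/* Then, in the struct pci_driver declaration:
 *	.driver.pm = &pch_dma_pm_ops,
 */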
807 struct pch_dma *pd; in pch_dma_probe() local
814 pd = kzalloc(sizeof(*pd), GFP_KERNEL); in pch_dma_probe()
815 if (!pd) in pch_dma_probe()
818 pci_set_drvdata(pdev, pd); in pch_dma_probe()
844 regs = pd->membase = pci_iomap(pdev, 1, 0); in pch_dma_probe()
845 if (!pd->membase) { in pch_dma_probe()
852 pd->dma.dev = &pdev->dev; in pch_dma_probe()
854 err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd); in pch_dma_probe()
860 pd->pool = dma_pool_create("pch_dma_desc_pool", &pdev->dev, in pch_dma_probe()
862 if (!pd->pool) { in pch_dma_probe()
869 INIT_LIST_HEAD(&pd->dma.channels); in pch_dma_probe()
872 struct pch_dma_chan *pd_chan = &pd->channels[i]; in pch_dma_probe()
874 pd_chan->chan.device = &pd->dma; in pch_dma_probe()
886 list_add_tail(&pd_chan->chan.device_node, &pd->dma.channels); in pch_dma_probe()
889 dma_cap_zero(pd->dma.cap_mask); in pch_dma_probe()
890 dma_cap_set(DMA_PRIVATE, pd->dma.cap_mask); in pch_dma_probe()
891 dma_cap_set(DMA_SLAVE, pd->dma.cap_mask); in pch_dma_probe()
893 pd->dma.device_alloc_chan_resources = pd_alloc_chan_resources; in pch_dma_probe()
894 pd->dma.device_free_chan_resources = pd_free_chan_resources; in pch_dma_probe()
895 pd->dma.device_tx_status = pd_tx_status; in pch_dma_probe()
896 pd->dma.device_issue_pending = pd_issue_pending; in pch_dma_probe()
897 pd->dma.device_prep_slave_sg = pd_prep_slave_sg; in pch_dma_probe()
898 pd->dma.device_terminate_all = pd_device_terminate_all; in pch_dma_probe()
900 err = dma_async_device_register(&pd->dma); in pch_dma_probe()
909 dma_pool_destroy(pd->pool); in pch_dma_probe()
911 free_irq(pdev->irq, pd); in pch_dma_probe()
913 pci_iounmap(pdev, pd->membase); in pch_dma_probe()
919 kfree(pd); in pch_dma_probe()
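The probe path acquires resources in a fixed order (pd allocation, BAR 1 mapping, shared IRQ, descriptor pool, channel list, dmaengine registration) and unwinds them in reverse on failure: the error labels fall through from dma_pool_destroy() down to kfree(). A skeleton of that ladder; label names and the pool size/alignment arguments are illustrative assumptions:

	err = request_irq(pdev->irq, pd_irq, IRQF_SHARED, DRV_NAME, pd);
	if (err)
		goto err_iounmap;

	pd->pool = dma_pool_create("pch_dma_desc_pool", &pdev->dev,
				   sizeof(struct pch_dma_desc), 4, 0);
	if (!pd->pool) {
		err = -ENOMEM;
		goto err_free_irq;
	}

	err = dma_async_device_register(&pd->dma);
	if (err)
		goto err_free_pool;

	return 0;

err_free_pool:
	dma_pool_destroy(pd->pool);		/* undo dma_pool_create() */
err_free_irq:
	free_irq(pdev->irq, pd);		/* undo request_irq() */
err_iounmap:
	pci_iounmap(pdev, pd->membase);		/* undo pci_iomap() */
	kfree(pd);				/* undo kzalloc() */
	return err;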
925 struct pch_dma *pd = pci_get_drvdata(pdev); in pch_dma_remove() local
929 if (pd) { in pch_dma_remove()
930 dma_async_device_unregister(&pd->dma); in pch_dma_remove()
932 free_irq(pdev->irq, pd); in pch_dma_remove()
934 list_for_each_entry_safe(chan, _c, &pd->dma.channels, in pch_dma_remove()
941 dma_pool_destroy(pd->pool); in pch_dma_remove()
942 pci_iounmap(pdev, pd->membase); in pch_dma_remove()
945 kfree(pd); in pch_dma_remove()
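Removal mirrors probe in reverse: unregister from the dmaengine core first so no new work can arrive, then detach the interrupt, destroy the pool, unmap BAR 1, and free the device struct. The real function also quiesces each channel in between, as the channel loop above suggests. Condensed sketch:

static void pch_dma_remove_sketch(struct pci_dev *pdev)
{
	struct pch_dma *pd = pci_get_drvdata(pdev);

	if (!pd)
		return;

	dma_async_device_unregister(&pd->dma);	/* stop new clients first */
	free_irq(pdev->irq, pd);		/* no more interrupts */
	/* ... per-channel quiesce would go here ... */
	dma_pool_destroy(pd->pool);
	pci_iounmap(pdev, pd->membase);
	kfree(pd);
}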