Lines Matching +full:dma +full:- +full:poll +full:- +full:cnt

1 // SPDX-License-Identifier: GPL-2.0
5 * Copyright (C) 2015 - 2020 Xilinx, Inc.
15 #include <linux/dma/xilinx_dpdma.h>
28 #include <dt-bindings/dma/xlnx-zynqmp-dpdma.h>
31 #include "../virt-dma.h"
142 * struct xilinx_dpdma_hw_desc - DPDMA hardware descriptor
180 * struct xilinx_dpdma_sw_desc - DPDMA software descriptor
183 * @dma_addr: DMA address of the software descriptor
192 * struct xilinx_dpdma_tx_desc - DPDMA transaction descriptor
193 * @vdesc: virtual DMA descriptor
194 * @chan: DMA channel
209 * struct xilinx_dpdma_chan - DPDMA channel
210 * @vchan: virtual DMA channel
216 * @video_group: flag indicating whether multi-channel operation is needed for video channels
252 * struct xilinx_dpdma_device - DPDMA device
253 * @common: generic DMA device structure
273 /* -----------------------------------------------------------------------------
301 if (IS_ENABLED(CONFIG_DEBUG_FS) && chan->id == dpdma_debugfs.chan_id) in xilinx_dpdma_debugfs_desc_done_irq()
328 return -EINVAL; in xilinx_dpdma_debugfs_desc_done_irq_write()
332 return -EINVAL; in xilinx_dpdma_debugfs_desc_done_irq_write()
339 return -EINVAL; in xilinx_dpdma_debugfs_desc_done_irq_write()
366 return -EINVAL; in xilinx_dpdma_debugfs_read()
371 return -ENOMEM; in xilinx_dpdma_debugfs_read()
386 ret = -EFAULT; in xilinx_dpdma_debugfs_read()
407 return -EINVAL; in xilinx_dpdma_debugfs_write()
411 return -EBUSY; in xilinx_dpdma_debugfs_write()
415 return -ENOMEM; in xilinx_dpdma_debugfs_write()
431 ret = -EINVAL; in xilinx_dpdma_debugfs_write()
458 dent = debugfs_create_file("testcase", 0444, xdev->common.dbg_dev_root, in xilinx_dpdma_debugfs_init()
461 dev_err(xdev->dev, "Failed to create debugfs testcase file\n"); in xilinx_dpdma_debugfs_init()
464 /* -----------------------------------------------------------------------------
488 /* -----------------------------------------------------------------------------
493 * xilinx_dpdma_sw_desc_set_dma_addrs - Set DMA addresses in the descriptor
495 * @sw_desc: The software descriptor in which to set DMA addresses
497 * @dma_addr: array of DMA addresses
500 * Set all the DMA addresses in the hardware descriptor corresponding to @dev
502 * descriptor DMA address is set to the DMA address of @sw_desc. @prev may be
511 struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw; in xilinx_dpdma_sw_desc_set_dma_addrs()
514 hw_desc->src_addr = lower_32_bits(dma_addr[0]); in xilinx_dpdma_sw_desc_set_dma_addrs()
515 if (xdev->ext_addr) in xilinx_dpdma_sw_desc_set_dma_addrs()
516 hw_desc->addr_ext |= in xilinx_dpdma_sw_desc_set_dma_addrs()
521 u32 *addr = &hw_desc->src_addr2; in xilinx_dpdma_sw_desc_set_dma_addrs()
523 addr[i - 1] = lower_32_bits(dma_addr[i]); in xilinx_dpdma_sw_desc_set_dma_addrs()
525 if (xdev->ext_addr) { in xilinx_dpdma_sw_desc_set_dma_addrs()
526 u32 *addr_ext = &hw_desc->addr_ext_23; in xilinx_dpdma_sw_desc_set_dma_addrs()
530 addr_msb <<= 16 * ((i - 1) % 2); in xilinx_dpdma_sw_desc_set_dma_addrs()
531 addr_ext[(i - 1) / 2] |= addr_msb; in xilinx_dpdma_sw_desc_set_dma_addrs()
538 prev->hw.next_desc = lower_32_bits(sw_desc->dma_addr); in xilinx_dpdma_sw_desc_set_dma_addrs()
539 if (xdev->ext_addr) in xilinx_dpdma_sw_desc_set_dma_addrs()
540 prev->hw.addr_ext |= in xilinx_dpdma_sw_desc_set_dma_addrs()
542 upper_32_bits(sw_desc->dma_addr)); in xilinx_dpdma_sw_desc_set_dma_addrs()
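For reference, the loop above packs two 16-bit address MSBs into each 32-bit extension word (addr_ext_23 covers source addresses 2 and 3, addr_ext_45 addresses 4 and 5). A minimal sketch of that packing, assuming 48-bit DMA addresses and a plain 0xffff field mask (the helper name is hypothetical):

/* Sketch: combine the upper 16 bits of a pair of DMA addresses into one
 * extension register, first of the pair in bits [15:0], second in
 * bits [31:16], mirroring the shift-by-16*((i-1)%2) logic above.
 */
static u32 pack_addr_ext_pair(u64 first, u64 second)
{
	u32 ext = upper_32_bits(first) & 0xffff;

	ext |= (upper_32_bits(second) & 0xffff) << 16;
	return ext;
}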
546 * xilinx_dpdma_chan_alloc_sw_desc - Allocate a software descriptor
559 sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr); in xilinx_dpdma_chan_alloc_sw_desc()
563 sw_desc->dma_addr = dma_addr; in xilinx_dpdma_chan_alloc_sw_desc()
569 * xilinx_dpdma_chan_free_sw_desc - Free a software descriptor
579 dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr); in xilinx_dpdma_chan_free_sw_desc()
583 * xilinx_dpdma_chan_dump_tx_desc - Dump a tx descriptor
593 struct device *dev = chan->xdev->dev; in xilinx_dpdma_chan_dump_tx_desc()
596 dev_dbg(dev, "------- TX descriptor dump start -------\n"); in xilinx_dpdma_chan_dump_tx_desc()
597 dev_dbg(dev, "------- channel ID = %d -------\n", chan->id); in xilinx_dpdma_chan_dump_tx_desc()
599 list_for_each_entry(sw_desc, &tx_desc->descriptors, node) { in xilinx_dpdma_chan_dump_tx_desc()
600 struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw; in xilinx_dpdma_chan_dump_tx_desc()
602 dev_dbg(dev, "------- HW descriptor %d -------\n", i++); in xilinx_dpdma_chan_dump_tx_desc()
603 dev_dbg(dev, "descriptor DMA addr: %pad\n", &sw_desc->dma_addr); in xilinx_dpdma_chan_dump_tx_desc()
604 dev_dbg(dev, "control: 0x%08x\n", hw_desc->control); in xilinx_dpdma_chan_dump_tx_desc()
605 dev_dbg(dev, "desc_id: 0x%08x\n", hw_desc->desc_id); in xilinx_dpdma_chan_dump_tx_desc()
606 dev_dbg(dev, "xfer_size: 0x%08x\n", hw_desc->xfer_size); in xilinx_dpdma_chan_dump_tx_desc()
607 dev_dbg(dev, "hsize_stride: 0x%08x\n", hw_desc->hsize_stride); in xilinx_dpdma_chan_dump_tx_desc()
608 dev_dbg(dev, "timestamp_lsb: 0x%08x\n", hw_desc->timestamp_lsb); in xilinx_dpdma_chan_dump_tx_desc()
609 dev_dbg(dev, "timestamp_msb: 0x%08x\n", hw_desc->timestamp_msb); in xilinx_dpdma_chan_dump_tx_desc()
610 dev_dbg(dev, "addr_ext: 0x%08x\n", hw_desc->addr_ext); in xilinx_dpdma_chan_dump_tx_desc()
611 dev_dbg(dev, "next_desc: 0x%08x\n", hw_desc->next_desc); in xilinx_dpdma_chan_dump_tx_desc()
612 dev_dbg(dev, "src_addr: 0x%08x\n", hw_desc->src_addr); in xilinx_dpdma_chan_dump_tx_desc()
613 dev_dbg(dev, "addr_ext_23: 0x%08x\n", hw_desc->addr_ext_23); in xilinx_dpdma_chan_dump_tx_desc()
614 dev_dbg(dev, "addr_ext_45: 0x%08x\n", hw_desc->addr_ext_45); in xilinx_dpdma_chan_dump_tx_desc()
615 dev_dbg(dev, "src_addr2: 0x%08x\n", hw_desc->src_addr2); in xilinx_dpdma_chan_dump_tx_desc()
616 dev_dbg(dev, "src_addr3: 0x%08x\n", hw_desc->src_addr3); in xilinx_dpdma_chan_dump_tx_desc()
617 dev_dbg(dev, "src_addr4: 0x%08x\n", hw_desc->src_addr4); in xilinx_dpdma_chan_dump_tx_desc()
618 dev_dbg(dev, "src_addr5: 0x%08x\n", hw_desc->src_addr5); in xilinx_dpdma_chan_dump_tx_desc()
619 dev_dbg(dev, "crc: 0x%08x\n", hw_desc->crc); in xilinx_dpdma_chan_dump_tx_desc()
622 dev_dbg(dev, "------- TX descriptor dump end -------\n"); in xilinx_dpdma_chan_dump_tx_desc()
626 * xilinx_dpdma_chan_alloc_tx_desc - Allocate a transaction descriptor
642 INIT_LIST_HEAD(&tx_desc->descriptors); in xilinx_dpdma_chan_alloc_tx_desc()
643 tx_desc->chan = chan; in xilinx_dpdma_chan_alloc_tx_desc()
644 tx_desc->error = false; in xilinx_dpdma_chan_alloc_tx_desc()
650 * xilinx_dpdma_chan_free_tx_desc - Free a virtual DMA descriptor
651 * @vdesc: virtual DMA descriptor
653 * Free the virtual DMA descriptor @vdesc including its software descriptors.
665 list_for_each_entry_safe(sw_desc, next, &desc->descriptors, node) { in xilinx_dpdma_chan_free_tx_desc()
666 list_del(&sw_desc->node); in xilinx_dpdma_chan_free_tx_desc()
667 xilinx_dpdma_chan_free_sw_desc(desc->chan, sw_desc); in xilinx_dpdma_chan_free_tx_desc()
674 * xilinx_dpdma_chan_prep_interleaved_dma - Prepare an interleaved DMA
677 * @xt: DMA interleaved template
691 size_t hsize = xt->sgl[0].size; in xilinx_dpdma_chan_prep_interleaved_dma()
692 size_t stride = hsize + xt->sgl[0].icg; in xilinx_dpdma_chan_prep_interleaved_dma()
694 if (!IS_ALIGNED(xt->src_start, XILINX_DPDMA_ALIGN_BYTES)) { in xilinx_dpdma_chan_prep_interleaved_dma()
695 dev_err(chan->xdev->dev, in xilinx_dpdma_chan_prep_interleaved_dma()
697 chan->id, XILINX_DPDMA_ALIGN_BYTES); in xilinx_dpdma_chan_prep_interleaved_dma()
707 xilinx_dpdma_chan_free_tx_desc(&tx_desc->vdesc); in xilinx_dpdma_chan_prep_interleaved_dma()
711 xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc, in xilinx_dpdma_chan_prep_interleaved_dma()
712 &xt->src_start, 1); in xilinx_dpdma_chan_prep_interleaved_dma()
714 hw_desc = &sw_desc->hw; in xilinx_dpdma_chan_prep_interleaved_dma()
716 hw_desc->xfer_size = hsize * xt->numf; in xilinx_dpdma_chan_prep_interleaved_dma()
717 hw_desc->hsize_stride = in xilinx_dpdma_chan_prep_interleaved_dma()
721 hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_PREEMBLE; in xilinx_dpdma_chan_prep_interleaved_dma()
722 hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_COMPLETE_INTR; in xilinx_dpdma_chan_prep_interleaved_dma()
723 hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_IGNORE_DONE; in xilinx_dpdma_chan_prep_interleaved_dma()
724 hw_desc->control |= XILINX_DPDMA_DESC_CONTROL_LAST_OF_FRAME; in xilinx_dpdma_chan_prep_interleaved_dma()
726 list_add_tail(&sw_desc->node, &tx_desc->descriptors); in xilinx_dpdma_chan_prep_interleaved_dma()
731 /* -----------------------------------------------------------------------------
736 * xilinx_dpdma_chan_enable - Enable the channel
745 reg = (XILINX_DPDMA_INTR_CHAN_MASK << chan->id) in xilinx_dpdma_chan_enable()
747 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg); in xilinx_dpdma_chan_enable()
748 reg = (XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id) in xilinx_dpdma_chan_enable()
750 dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg); in xilinx_dpdma_chan_enable()
759 dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, reg); in xilinx_dpdma_chan_enable()
763 * xilinx_dpdma_chan_disable - Disable the channel
772 reg = XILINX_DPDMA_INTR_CHAN_MASK << chan->id; in xilinx_dpdma_chan_disable()
773 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, reg); in xilinx_dpdma_chan_disable()
774 reg = XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id; in xilinx_dpdma_chan_disable()
775 dpdma_write(chan->xdev->reg, XILINX_DPDMA_EIEN, reg); in xilinx_dpdma_chan_disable()
777 dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_ENABLE); in xilinx_dpdma_chan_disable()
781 * xilinx_dpdma_chan_pause - Pause the channel
788 dpdma_set(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE); in xilinx_dpdma_chan_pause()
792 * xilinx_dpdma_chan_unpause - Unpause the channel
799 dpdma_clr(chan->reg, XILINX_DPDMA_CH_CNTL, XILINX_DPDMA_CH_CNTL_PAUSE); in xilinx_dpdma_chan_unpause()
804 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_video_group_ready()
809 if (xdev->chan[i]->video_group && !xdev->chan[i]->running) in xilinx_dpdma_chan_video_group_ready()
812 if (xdev->chan[i]->video_group) in xilinx_dpdma_chan_video_group_ready()
820 * xilinx_dpdma_chan_queue_transfer - Queue the next transfer
828 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_queue_transfer()
835 lockdep_assert_held(&chan->lock); in xilinx_dpdma_chan_queue_transfer()
837 if (chan->desc.pending) in xilinx_dpdma_chan_queue_transfer()
840 if (!chan->running) { in xilinx_dpdma_chan_queue_transfer()
843 chan->first_frame = true; in xilinx_dpdma_chan_queue_transfer()
844 chan->running = true; in xilinx_dpdma_chan_queue_transfer()
847 vdesc = vchan_next_desc(&chan->vchan); in xilinx_dpdma_chan_queue_transfer()
852 chan->desc.pending = desc; in xilinx_dpdma_chan_queue_transfer()
853 list_del(&desc->vdesc.node); in xilinx_dpdma_chan_queue_transfer()
859 list_for_each_entry(sw_desc, &desc->descriptors, node) in xilinx_dpdma_chan_queue_transfer()
860 sw_desc->hw.desc_id = desc->vdesc.tx.cookie in xilinx_dpdma_chan_queue_transfer()
863 sw_desc = list_first_entry(&desc->descriptors, in xilinx_dpdma_chan_queue_transfer()
865 dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR, in xilinx_dpdma_chan_queue_transfer()
866 lower_32_bits(sw_desc->dma_addr)); in xilinx_dpdma_chan_queue_transfer()
867 if (xdev->ext_addr) in xilinx_dpdma_chan_queue_transfer()
868 dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE, in xilinx_dpdma_chan_queue_transfer()
870 upper_32_bits(sw_desc->dma_addr))); in xilinx_dpdma_chan_queue_transfer()
872 first_frame = chan->first_frame; in xilinx_dpdma_chan_queue_transfer()
873 chan->first_frame = false; in xilinx_dpdma_chan_queue_transfer()
875 if (chan->video_group) { in xilinx_dpdma_chan_queue_transfer()
884 channels = BIT(chan->id); in xilinx_dpdma_chan_queue_transfer()
892 dpdma_write(xdev->reg, XILINX_DPDMA_GBL, reg); in xilinx_dpdma_chan_queue_transfer()
896 * xilinx_dpdma_chan_ostand - Number of outstanding transactions
906 dpdma_read(chan->reg, XILINX_DPDMA_CH_STATUS)); in xilinx_dpdma_chan_ostand()
910 * xilinx_dpdma_chan_notify_no_ostand - Notify no outstanding transaction event
916 * should be re-enabled when this event is handled. If the channel status
920 * Return: 0 on success. On failure, -EWOULDBLOCK if there's still outstanding
925 u32 cnt; in xilinx_dpdma_chan_notify_no_ostand() local
927 cnt = xilinx_dpdma_chan_ostand(chan); in xilinx_dpdma_chan_notify_no_ostand()
928 if (cnt) { in xilinx_dpdma_chan_notify_no_ostand()
929 dev_dbg(chan->xdev->dev, in xilinx_dpdma_chan_notify_no_ostand()
931 chan->id, cnt); in xilinx_dpdma_chan_notify_no_ostand()
932 return -EWOULDBLOCK; in xilinx_dpdma_chan_notify_no_ostand()
936 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IDS, in xilinx_dpdma_chan_notify_no_ostand()
937 XILINX_DPDMA_INTR_NO_OSTAND(chan->id)); in xilinx_dpdma_chan_notify_no_ostand()
938 wake_up(&chan->wait_to_stop); in xilinx_dpdma_chan_notify_no_ostand()
944 * xilinx_dpdma_chan_wait_no_ostand - Wait for the no outstanding irq
950 * Return: 0 on success. On failure, -ETIMEDOUT on timeout, or the error code
958 ret = wait_event_interruptible_timeout(chan->wait_to_stop, in xilinx_dpdma_chan_wait_no_ostand()
962 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, in xilinx_dpdma_chan_wait_no_ostand()
963 XILINX_DPDMA_INTR_NO_OSTAND(chan->id)); in xilinx_dpdma_chan_wait_no_ostand()
967 dev_err(chan->xdev->dev, "chan%u: not ready to stop: %d trans\n", in xilinx_dpdma_chan_wait_no_ostand()
968 chan->id, xilinx_dpdma_chan_ostand(chan)); in xilinx_dpdma_chan_wait_no_ostand()
971 return -ETIMEDOUT; in xilinx_dpdma_chan_wait_no_ostand()
977 * xilinx_dpdma_chan_poll_no_ostand - Poll the outstanding transaction status
980 * Poll the outstanding transaction status, and return when there's no
984 * Return: 0 on success, or -ETIMEDOUT.
988 u32 cnt, loop = 50000; in xilinx_dpdma_chan_poll_no_ostand() local
990 /* Poll at least for 50ms (20 fps). */ in xilinx_dpdma_chan_poll_no_ostand()
992 cnt = xilinx_dpdma_chan_ostand(chan); in xilinx_dpdma_chan_poll_no_ostand()
994 } while (loop-- > 0 && cnt); in xilinx_dpdma_chan_poll_no_ostand()
997 dpdma_write(chan->xdev->reg, XILINX_DPDMA_IEN, in xilinx_dpdma_chan_poll_no_ostand()
998 XILINX_DPDMA_INTR_NO_OSTAND(chan->id)); in xilinx_dpdma_chan_poll_no_ostand()
1002 dev_err(chan->xdev->dev, "chan%u: not ready to stop: %d trans\n", in xilinx_dpdma_chan_poll_no_ostand()
1003 chan->id, xilinx_dpdma_chan_ostand(chan)); in xilinx_dpdma_chan_poll_no_ostand()
1005 return -ETIMEDOUT; in xilinx_dpdma_chan_poll_no_ostand()
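For comparison, the 50000-iteration loop above matches the shape of the iopoll helpers. A hedged equivalent of just the polling part (not what the driver uses), assuming a 1 us delay per read and the same 50 ms budget:

#include <linux/iopoll.h>

/* Sketch: re-read the outstanding-transaction count every microsecond
 * until it reaches zero or 50 ms elapse; returns 0 or -ETIMEDOUT. The
 * interrupt re-enable and error message in the function above would
 * still be needed around this call.
 */
static int xilinx_dpdma_chan_poll_no_ostand_sketch(struct xilinx_dpdma_chan *chan)
{
	u32 cnt;

	return read_poll_timeout_atomic(xilinx_dpdma_chan_ostand, cnt,
					cnt == 0, 1, 50 * USEC_PER_MSEC,
					false, chan);
}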
1009 * xilinx_dpdma_chan_stop - Stop the channel
1015 * Return: 0 on success, or -ETIMEDOUT if the channel failed to stop.
1026 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_stop()
1028 chan->running = false; in xilinx_dpdma_chan_stop()
1029 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_stop()
1035 * xilinx_dpdma_chan_done_irq - Handle hardware descriptor completion
1038 * Handle completion of the currently active descriptor (@chan->desc.active). As
1048 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_done_irq()
1052 active = chan->desc.active; in xilinx_dpdma_chan_done_irq()
1054 vchan_cyclic_callback(&active->vdesc); in xilinx_dpdma_chan_done_irq()
1056 dev_warn(chan->xdev->dev, in xilinx_dpdma_chan_done_irq()
1058 chan->id); in xilinx_dpdma_chan_done_irq()
1060 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_done_irq()
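On the client side, the vchan_cyclic_callback() dispatch above ends up invoking the descriptor's completion callback once per completed frame. A minimal sketch of wiring that up (frame_done() and the ctx pointer are hypothetical client code):

#include <linux/dmaengine.h>

/* Sketch: attach a per-frame completion callback before submission. */
static void frame_done(void *ctx)
{
	/* e.g. account a completed frame or signal vblank */
}

static dma_cookie_t submit_with_callback(struct dma_async_tx_descriptor *tx,
					 void *ctx)
{
	tx->callback = frame_done;
	tx->callback_param = ctx;

	return dmaengine_submit(tx);
}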
1064 * xilinx_dpdma_chan_vsync_irq - Handle hardware descriptor scheduling
1078 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_vsync_irq()
1080 pending = chan->desc.pending; in xilinx_dpdma_chan_vsync_irq()
1081 if (!chan->running || !pending) in xilinx_dpdma_chan_vsync_irq()
1084 desc_id = dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_ID) in xilinx_dpdma_chan_vsync_irq()
1088 sw_desc = list_first_entry(&pending->descriptors, in xilinx_dpdma_chan_vsync_irq()
1090 if (sw_desc->hw.desc_id != desc_id) { in xilinx_dpdma_chan_vsync_irq()
1091 dev_dbg(chan->xdev->dev, in xilinx_dpdma_chan_vsync_irq()
1093 chan->id, sw_desc->hw.desc_id, desc_id); in xilinx_dpdma_chan_vsync_irq()
1101 spin_lock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1102 if (chan->desc.active) in xilinx_dpdma_chan_vsync_irq()
1103 vchan_cookie_complete(&chan->desc.active->vdesc); in xilinx_dpdma_chan_vsync_irq()
1104 chan->desc.active = pending; in xilinx_dpdma_chan_vsync_irq()
1105 chan->desc.pending = NULL; in xilinx_dpdma_chan_vsync_irq()
1108 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_chan_vsync_irq()
1111 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_vsync_irq()
1115 * xilinx_dpdma_chan_err - Detect any channel error
1128 if (chan->running && in xilinx_dpdma_chan_err()
1129 ((isr & (XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id)) || in xilinx_dpdma_chan_err()
1130 (eisr & (XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id)))) in xilinx_dpdma_chan_err()
1137 * xilinx_dpdma_chan_handle_err - DPDMA channel error handling
1147 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_handle_err()
1151 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_handle_err()
1153 dev_dbg(xdev->dev, "chan%u: cur desc addr = 0x%04x%08x\n", in xilinx_dpdma_chan_handle_err()
1154 chan->id, in xilinx_dpdma_chan_handle_err()
1155 dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE), in xilinx_dpdma_chan_handle_err()
1156 dpdma_read(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR)); in xilinx_dpdma_chan_handle_err()
1157 dev_dbg(xdev->dev, "chan%u: cur payload addr = 0x%04x%08x\n", in xilinx_dpdma_chan_handle_err()
1158 chan->id, in xilinx_dpdma_chan_handle_err()
1159 dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDRE), in xilinx_dpdma_chan_handle_err()
1160 dpdma_read(chan->reg, XILINX_DPDMA_CH_PYLD_CUR_ADDR)); in xilinx_dpdma_chan_handle_err()
1163 chan->running = false; in xilinx_dpdma_chan_handle_err()
1165 if (!chan->desc.active) in xilinx_dpdma_chan_handle_err()
1168 active = chan->desc.active; in xilinx_dpdma_chan_handle_err()
1169 chan->desc.active = NULL; in xilinx_dpdma_chan_handle_err()
1173 if (active->error) in xilinx_dpdma_chan_handle_err()
1174 dev_dbg(xdev->dev, "chan%u: repeated error on desc\n", in xilinx_dpdma_chan_handle_err()
1175 chan->id); in xilinx_dpdma_chan_handle_err()
1178 if (!chan->desc.pending && in xilinx_dpdma_chan_handle_err()
1179 list_empty(&chan->vchan.desc_issued)) { in xilinx_dpdma_chan_handle_err()
1180 active->error = true; in xilinx_dpdma_chan_handle_err()
1181 list_add_tail(&active->vdesc.node, in xilinx_dpdma_chan_handle_err()
1182 &chan->vchan.desc_issued); in xilinx_dpdma_chan_handle_err()
1184 xilinx_dpdma_chan_free_tx_desc(&active->vdesc); in xilinx_dpdma_chan_handle_err()
1188 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_handle_err()
1191 /* -----------------------------------------------------------------------------
1192 * DMA Engine Operations
1203 if (xt->dir != DMA_MEM_TO_DEV) in xilinx_dpdma_prep_interleaved_dma()
1206 if (!xt->numf || !xt->sgl[0].size) in xilinx_dpdma_prep_interleaved_dma()
1216 vchan_tx_prep(&chan->vchan, &desc->vdesc, flags | DMA_CTRL_ACK); in xilinx_dpdma_prep_interleaved_dma()
1218 return &desc->vdesc.tx; in xilinx_dpdma_prep_interleaved_dma()
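For context, a hedged sketch of how a display client might build the single-chunk interleaved template this callback consumes, one chunk per line with the inter-chunk gap expressing the stride (all values are hypothetical; the flags are modeled on what a repeating-frame client would pass):

#include <linux/dmaengine.h>
#include <linux/slab.h>

/* Sketch: submit one framebuffer as an interleaved MEM_TO_DEV transfer. */
static int submit_frame(struct dma_chan *chan, dma_addr_t fb_addr,
			size_t width_bytes, size_t stride, size_t height)
{
	struct dma_interleaved_template *xt;
	struct dma_async_tx_descriptor *tx;

	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
	if (!xt)
		return -ENOMEM;

	xt->src_start = fb_addr;
	xt->dir = DMA_MEM_TO_DEV;
	xt->src_sgl = true;
	xt->frame_size = 1;			/* one chunk per line */
	xt->numf = height;			/* number of lines */
	xt->sgl[0].size = width_bytes;		/* hsize */
	xt->sgl[0].icg = stride - width_bytes;	/* gap up to the stride */

	tx = dmaengine_prep_interleaved_dma(chan, xt,
					    DMA_CTRL_ACK | DMA_PREP_REPEAT |
					    DMA_PREP_LOAD_EOT);
	kfree(xt);
	if (!tx)
		return -ENOMEM;

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);

	return 0;
}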
1222 * xilinx_dpdma_alloc_chan_resources - Allocate resources for the channel
1223 * @dchan: DMA channel
1227 * Return: 0 on success, or -ENOMEM if the descriptor pool cannot be allocated.
1234 chan->desc_pool = dma_pool_create(dev_name(chan->xdev->dev), in xilinx_dpdma_alloc_chan_resources()
1235 chan->xdev->dev, in xilinx_dpdma_alloc_chan_resources()
1238 if (!chan->desc_pool) { in xilinx_dpdma_alloc_chan_resources()
1239 dev_err(chan->xdev->dev, in xilinx_dpdma_alloc_chan_resources()
1241 chan->id); in xilinx_dpdma_alloc_chan_resources()
1242 return -ENOMEM; in xilinx_dpdma_alloc_chan_resources()
1249 * xilinx_dpdma_free_chan_resources - Free all resources for the channel
1250 * @dchan: DMA channel
1252 * Free resources associated with the virtual DMA channel, and destroy the
1259 vchan_free_chan_resources(&chan->vchan); in xilinx_dpdma_free_chan_resources()
1261 dma_pool_destroy(chan->desc_pool); in xilinx_dpdma_free_chan_resources()
1262 chan->desc_pool = NULL; in xilinx_dpdma_free_chan_resources()
1270 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_issue_pending()
1271 spin_lock(&chan->vchan.lock); in xilinx_dpdma_issue_pending()
1272 if (vchan_issue_pending(&chan->vchan)) in xilinx_dpdma_issue_pending()
1274 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_issue_pending()
1275 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_issue_pending()
1297 pconfig = config->peripheral_config; in xilinx_dpdma_config()
1298 if (WARN_ON(pconfig && config->peripheral_size != sizeof(*pconfig))) in xilinx_dpdma_config()
1299 return -EINVAL; in xilinx_dpdma_config()
1301 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_config()
1302 if (chan->id <= ZYNQMP_DPDMA_VIDEO2 && pconfig) in xilinx_dpdma_config()
1303 chan->video_group = pconfig->video_group; in xilinx_dpdma_config()
1304 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_config()
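A hedged example of how a client selects the video group through the peripheral_config mechanism handled above; the config struct comes from <linux/dma/xilinx_dpdma.h>, everything else is hypothetical client code:

#include <linux/dmaengine.h>
#include <linux/dma/xilinx_dpdma.h>

/* Sketch: mark a video channel as part of the multi-channel video group. */
static int enable_video_group(struct dma_chan *chan)
{
	struct xilinx_dpdma_peripheral_config pconfig = {
		.video_group = true,
	};
	struct dma_slave_config config = {
		.direction = DMA_MEM_TO_DEV,
		.peripheral_config = &pconfig,
		.peripheral_size = sizeof(pconfig),
	};

	return dmaengine_slave_config(chan, &config);
}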
1324 * xilinx_dpdma_terminate_all - Terminate the channel and descriptors
1325 * @dchan: DMA channel
1336 * Return: 0 on success, or -ETIMEDOUT if the channel failed to stop.
1341 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_terminate_all()
1347 if (chan->video_group) { in xilinx_dpdma_terminate_all()
1349 if (xdev->chan[i]->video_group && in xilinx_dpdma_terminate_all()
1350 xdev->chan[i]->running) { in xilinx_dpdma_terminate_all()
1351 xilinx_dpdma_chan_pause(xdev->chan[i]); in xilinx_dpdma_terminate_all()
1352 xdev->chan[i]->video_group = false; in xilinx_dpdma_terminate_all()
1360 spin_lock_irqsave(&chan->vchan.lock, flags); in xilinx_dpdma_terminate_all()
1361 vchan_get_all_descriptors(&chan->vchan, &descriptors); in xilinx_dpdma_terminate_all()
1362 spin_unlock_irqrestore(&chan->vchan.lock, flags); in xilinx_dpdma_terminate_all()
1364 vchan_dma_desc_free_list(&chan->vchan, &descriptors); in xilinx_dpdma_terminate_all()
1370 * xilinx_dpdma_synchronize - Synchronize callback execution
1371 * @dchan: DMA channel
1377 * This function waits for the DMA channel to stop. It assumes it has been
1389 spin_lock_irqsave(&chan->vchan.lock, flags); in xilinx_dpdma_synchronize()
1390 if (chan->desc.pending) { in xilinx_dpdma_synchronize()
1391 vchan_terminate_vdesc(&chan->desc.pending->vdesc); in xilinx_dpdma_synchronize()
1392 chan->desc.pending = NULL; in xilinx_dpdma_synchronize()
1394 if (chan->desc.active) { in xilinx_dpdma_synchronize()
1395 vchan_terminate_vdesc(&chan->desc.active->vdesc); in xilinx_dpdma_synchronize()
1396 chan->desc.active = NULL; in xilinx_dpdma_synchronize()
1398 spin_unlock_irqrestore(&chan->vchan.lock, flags); in xilinx_dpdma_synchronize()
1400 vchan_synchronize(&chan->vchan); in xilinx_dpdma_synchronize()
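As the comment above implies, clients pair termination with synchronization so no callback is left running. A minimal client-side sketch:

/* Sketch: terminate from a context that may not sleep, then wait for
 * in-flight callbacks from a sleepable context before freeing memory
 * they might touch.
 */
static void stop_channel(struct dma_chan *chan)
{
	dmaengine_terminate_async(chan);	/* ends in terminate_all above */
	dmaengine_synchronize(chan);		/* ends in synchronize above */
}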
1403 /* -----------------------------------------------------------------------------
1408 * xilinx_dpdma_err - Detect any global error
1424 * xilinx_dpdma_handle_err_irq - Handle DPDMA error interrupt
1430 * corresponding error interrupts, and those should be re-enabled once handling
1439 dev_dbg_ratelimited(xdev->dev, in xilinx_dpdma_handle_err_irq()
1444 dpdma_write(xdev->reg, XILINX_DPDMA_IDS, in xilinx_dpdma_handle_err_irq()
1446 dpdma_write(xdev->reg, XILINX_DPDMA_EIDS, in xilinx_dpdma_handle_err_irq()
1449 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) in xilinx_dpdma_handle_err_irq()
1450 if (err || xilinx_dpdma_chan_err(xdev->chan[i], isr, eisr)) in xilinx_dpdma_handle_err_irq()
1451 tasklet_schedule(&xdev->chan[i]->err_task); in xilinx_dpdma_handle_err_irq()
1455 * xilinx_dpdma_enable_irq - Enable interrupts
1462 dpdma_write(xdev->reg, XILINX_DPDMA_IEN, XILINX_DPDMA_INTR_ALL); in xilinx_dpdma_enable_irq()
1463 dpdma_write(xdev->reg, XILINX_DPDMA_EIEN, XILINX_DPDMA_EINTR_ALL); in xilinx_dpdma_enable_irq()
1467 * xilinx_dpdma_disable_irq - Disable interrupts
1474 dpdma_write(xdev->reg, XILINX_DPDMA_IDS, XILINX_DPDMA_INTR_ALL); in xilinx_dpdma_disable_irq()
1475 dpdma_write(xdev->reg, XILINX_DPDMA_EIDS, XILINX_DPDMA_EINTR_ALL); in xilinx_dpdma_disable_irq()
1479 * xilinx_dpdma_chan_err_task - Per-channel tasklet for error handling
1484 * re-enable channel error interrupts, and restart the channel if needed.
1489 struct xilinx_dpdma_device *xdev = chan->xdev; in xilinx_dpdma_chan_err_task()
1497 dpdma_write(xdev->reg, XILINX_DPDMA_IEN, in xilinx_dpdma_chan_err_task()
1498 XILINX_DPDMA_INTR_CHAN_ERR_MASK << chan->id); in xilinx_dpdma_chan_err_task()
1499 dpdma_write(xdev->reg, XILINX_DPDMA_EIEN, in xilinx_dpdma_chan_err_task()
1500 XILINX_DPDMA_EINTR_CHAN_ERR_MASK << chan->id); in xilinx_dpdma_chan_err_task()
1502 spin_lock_irqsave(&chan->lock, flags); in xilinx_dpdma_chan_err_task()
1503 spin_lock(&chan->vchan.lock); in xilinx_dpdma_chan_err_task()
1505 spin_unlock(&chan->vchan.lock); in xilinx_dpdma_chan_err_task()
1506 spin_unlock_irqrestore(&chan->lock, flags); in xilinx_dpdma_chan_err_task()
1517 status = dpdma_read(xdev->reg, XILINX_DPDMA_ISR); in xilinx_dpdma_irq_handler()
1518 error = dpdma_read(xdev->reg, XILINX_DPDMA_EISR); in xilinx_dpdma_irq_handler()
1522 dpdma_write(xdev->reg, XILINX_DPDMA_ISR, status); in xilinx_dpdma_irq_handler()
1523 dpdma_write(xdev->reg, XILINX_DPDMA_EISR, error); in xilinx_dpdma_irq_handler()
1530 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) { in xilinx_dpdma_irq_handler()
1531 struct xilinx_dpdma_chan *chan = xdev->chan[i]; in xilinx_dpdma_irq_handler()
1540 for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan)) in xilinx_dpdma_irq_handler()
1541 xilinx_dpdma_chan_done_irq(xdev->chan[i]); in xilinx_dpdma_irq_handler()
1546 for_each_set_bit(i, &mask, ARRAY_SIZE(xdev->chan)) in xilinx_dpdma_irq_handler()
1547 xilinx_dpdma_chan_notify_no_ostand(xdev->chan[i]); in xilinx_dpdma_irq_handler()
1557 /* -----------------------------------------------------------------------------
1566 chan = devm_kzalloc(xdev->dev, sizeof(*chan), GFP_KERNEL); in xilinx_dpdma_chan_init()
1568 return -ENOMEM; in xilinx_dpdma_chan_init()
1570 chan->id = chan_id; in xilinx_dpdma_chan_init()
1571 chan->reg = xdev->reg + XILINX_DPDMA_CH_BASE in xilinx_dpdma_chan_init()
1572 + XILINX_DPDMA_CH_OFFSET * chan->id; in xilinx_dpdma_chan_init()
1573 chan->running = false; in xilinx_dpdma_chan_init()
1574 chan->xdev = xdev; in xilinx_dpdma_chan_init()
1576 spin_lock_init(&chan->lock); in xilinx_dpdma_chan_init()
1577 init_waitqueue_head(&chan->wait_to_stop); in xilinx_dpdma_chan_init()
1579 tasklet_setup(&chan->err_task, xilinx_dpdma_chan_err_task); in xilinx_dpdma_chan_init()
1581 chan->vchan.desc_free = xilinx_dpdma_chan_free_tx_desc; in xilinx_dpdma_chan_init()
1582 vchan_init(&chan->vchan, &xdev->common); in xilinx_dpdma_chan_init()
1584 xdev->chan[chan->id] = chan; in xilinx_dpdma_chan_init()
1594 tasklet_kill(&chan->err_task); in xilinx_dpdma_chan_remove()
1595 list_del(&chan->vchan.chan.device_node); in xilinx_dpdma_chan_remove()
1601 struct xilinx_dpdma_device *xdev = ofdma->of_dma_data; in of_dma_xilinx_xlate()
1602 u32 chan_id = dma_spec->args[0]; in of_dma_xilinx_xlate()
1604 if (chan_id >= ARRAY_SIZE(xdev->chan)) in of_dma_xilinx_xlate()
1607 if (!xdev->chan[chan_id]) in of_dma_xilinx_xlate()
1610 return dma_get_slave_channel(&xdev->chan[chan_id]->vchan.chan); in of_dma_xilinx_xlate()
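The translation callback above turns the single cell of a dmas specifier into a channel; a consumer typically requests it by name instead, e.g. (the "vid0" name is hypothetical and would have to match a dma-names entry in the consumer's DT node):

#include <linux/dmaengine.h>

/* Sketch: request a DPDMA channel bound to this device in DT. */
static struct dma_chan *request_video_channel(struct device *dev)
{
	return dma_request_chan(dev, "vid0"); /* may return ERR_PTR */
}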
1622 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) { in dpdma_hw_init()
1623 reg = xdev->reg + XILINX_DPDMA_CH_BASE in dpdma_hw_init()
1629 dpdma_write(xdev->reg, XILINX_DPDMA_ISR, XILINX_DPDMA_INTR_ALL); in dpdma_hw_init()
1630 dpdma_write(xdev->reg, XILINX_DPDMA_EISR, XILINX_DPDMA_EINTR_ALL); in dpdma_hw_init()
1640 xdev = devm_kzalloc(&pdev->dev, sizeof(*xdev), GFP_KERNEL); in xilinx_dpdma_probe()
1642 return -ENOMEM; in xilinx_dpdma_probe()
1644 xdev->dev = &pdev->dev; in xilinx_dpdma_probe()
1645 xdev->ext_addr = sizeof(dma_addr_t) > 4; in xilinx_dpdma_probe()
1647 INIT_LIST_HEAD(&xdev->common.channels); in xilinx_dpdma_probe()
1651 xdev->axi_clk = devm_clk_get(xdev->dev, "axi_clk"); in xilinx_dpdma_probe()
1652 if (IS_ERR(xdev->axi_clk)) in xilinx_dpdma_probe()
1653 return PTR_ERR(xdev->axi_clk); in xilinx_dpdma_probe()
1655 xdev->reg = devm_platform_ioremap_resource(pdev, 0); in xilinx_dpdma_probe()
1656 if (IS_ERR(xdev->reg)) in xilinx_dpdma_probe()
1657 return PTR_ERR(xdev->reg); in xilinx_dpdma_probe()
1661 xdev->irq = platform_get_irq(pdev, 0); in xilinx_dpdma_probe()
1662 if (xdev->irq < 0) in xilinx_dpdma_probe()
1663 return xdev->irq; in xilinx_dpdma_probe()
1665 ret = request_irq(xdev->irq, xilinx_dpdma_irq_handler, IRQF_SHARED, in xilinx_dpdma_probe()
1666 dev_name(xdev->dev), xdev); in xilinx_dpdma_probe()
1668 dev_err(xdev->dev, "failed to request IRQ\n"); in xilinx_dpdma_probe()
1672 ddev = &xdev->common; in xilinx_dpdma_probe()
1673 ddev->dev = &pdev->dev; in xilinx_dpdma_probe()
1675 dma_cap_set(DMA_SLAVE, ddev->cap_mask); in xilinx_dpdma_probe()
1676 dma_cap_set(DMA_PRIVATE, ddev->cap_mask); in xilinx_dpdma_probe()
1677 dma_cap_set(DMA_INTERLEAVE, ddev->cap_mask); in xilinx_dpdma_probe()
1678 dma_cap_set(DMA_REPEAT, ddev->cap_mask); in xilinx_dpdma_probe()
1679 dma_cap_set(DMA_LOAD_EOT, ddev->cap_mask); in xilinx_dpdma_probe()
1680 ddev->copy_align = fls(XILINX_DPDMA_ALIGN_BYTES - 1); in xilinx_dpdma_probe()
1682 ddev->device_alloc_chan_resources = xilinx_dpdma_alloc_chan_resources; in xilinx_dpdma_probe()
1683 ddev->device_free_chan_resources = xilinx_dpdma_free_chan_resources; in xilinx_dpdma_probe()
1684 ddev->device_prep_interleaved_dma = xilinx_dpdma_prep_interleaved_dma; in xilinx_dpdma_probe()
1686 ddev->device_tx_status = dma_cookie_status; in xilinx_dpdma_probe()
1687 ddev->device_issue_pending = xilinx_dpdma_issue_pending; in xilinx_dpdma_probe()
1688 ddev->device_config = xilinx_dpdma_config; in xilinx_dpdma_probe()
1689 ddev->device_pause = xilinx_dpdma_pause; in xilinx_dpdma_probe()
1690 ddev->device_resume = xilinx_dpdma_resume; in xilinx_dpdma_probe()
1691 ddev->device_terminate_all = xilinx_dpdma_terminate_all; in xilinx_dpdma_probe()
1692 ddev->device_synchronize = xilinx_dpdma_synchronize; in xilinx_dpdma_probe()
1693 ddev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_UNDEFINED); in xilinx_dpdma_probe()
1694 ddev->directions = BIT(DMA_MEM_TO_DEV); in xilinx_dpdma_probe()
1695 ddev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR; in xilinx_dpdma_probe()
1697 for (i = 0; i < ARRAY_SIZE(xdev->chan); ++i) { in xilinx_dpdma_probe()
1700 dev_err(xdev->dev, "failed to initialize channel %u\n", in xilinx_dpdma_probe()
1706 ret = clk_prepare_enable(xdev->axi_clk); in xilinx_dpdma_probe()
1708 dev_err(xdev->dev, "failed to enable the axi clock\n"); in xilinx_dpdma_probe()
1714 dev_err(xdev->dev, "failed to register the dma device\n"); in xilinx_dpdma_probe()
1718 ret = of_dma_controller_register(xdev->dev->of_node, in xilinx_dpdma_probe()
1721 dev_err(xdev->dev, "failed to register DMA to DT DMA helper\n"); in xilinx_dpdma_probe()
1729 dev_info(&pdev->dev, "Xilinx DPDMA engine probed\n"); in xilinx_dpdma_probe()
1736 clk_disable_unprepare(xdev->axi_clk); in xilinx_dpdma_probe()
1738 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) in xilinx_dpdma_probe()
1739 xilinx_dpdma_chan_remove(xdev->chan[i]); in xilinx_dpdma_probe()
1741 free_irq(xdev->irq, xdev); in xilinx_dpdma_probe()
1752 free_irq(xdev->irq, xdev); in xilinx_dpdma_remove()
1755 of_dma_controller_free(pdev->dev.of_node); in xilinx_dpdma_remove()
1756 dma_async_device_unregister(&xdev->common); in xilinx_dpdma_remove()
1757 clk_disable_unprepare(xdev->axi_clk); in xilinx_dpdma_remove()
1759 for (i = 0; i < ARRAY_SIZE(xdev->chan); i++) in xilinx_dpdma_remove()
1760 xilinx_dpdma_chan_remove(xdev->chan[i]); in xilinx_dpdma_remove()
1766 { .compatible = "xlnx,zynqmp-dpdma",},
1775 .name = "xilinx-zynqmp-dpdma",