// SPDX-License-Identifier: GPL-2.0
// (C) 2017-2018 Synopsys, Inc. (www.synopsys.com)

/*
 * Synopsys DesignWare AXI DMA Controller driver.
 */
#include <linux/dma-mapping.h>
#include <linux/io-64-nonatomic-lo-hi.h>

#include "dw-axi-dmac.h"
#include "../virt-dma.h"
/*
 * The set of bus widths supported by the DMA controller. DW AXI DMAC supports
 * master data bus width up to 512 bits (for both AXI master interfaces), but
 * it depends on IP block configuration.
 */
/* in axi_dma_iowrite32(): */
	iowrite32(val, chip->regs + reg);

/* in axi_dma_ioread32(): */
	return ioread32(chip->regs + reg);

/* in axi_chan_iowrite32(): */
	iowrite32(val, chan->chan_regs + reg);

/* in axi_chan_ioread32(): */
	return ioread32(chan->chan_regs + reg);

/* in axi_chan_iowrite64(): */
	iowrite32(lower_32_bits(val), chan->chan_regs + reg);
	iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4);
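/*
 * Example: a minimal sketch of how the first accessor above is presumably
 * defined in full. The signature is inferred from its call sites
 * (chip->regs being the MMIO base), not quoted from the original file:
 */
static inline void axi_dma_iowrite32(struct axi_dma_chip *chip, u32 reg, u32 val)
{
	/* Relaxed-free MMIO write of one 32-bit controller register */
	iowrite32(val, chip->regs + reg);
}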
static void axi_chan_config_write(struct axi_dma_chan *chan,
				  struct axi_dma_chan_config *config)
{
	u32 cfg_lo, cfg_hi;

	cfg_lo = (config->dst_multblk_type << CH_CFG_L_DST_MULTBLK_TYPE_POS |
		  config->src_multblk_type << CH_CFG_L_SRC_MULTBLK_TYPE_POS);
	if (chan->chip->dw->hdata->reg_map_8_channels &&
	    !chan->chip->dw->hdata->use_cfg2) {
		cfg_hi = config->tt_fc << CH_CFG_H_TT_FC_POS |
			 config->hs_sel_src << CH_CFG_H_HS_SEL_SRC_POS |
			 config->hs_sel_dst << CH_CFG_H_HS_SEL_DST_POS |
			 config->src_per << CH_CFG_H_SRC_PER_POS |
			 config->dst_per << CH_CFG_H_DST_PER_POS |
			 config->prior << CH_CFG_H_PRIORITY_POS;
	} else {
		cfg_lo |= config->src_per << CH_CFG2_L_SRC_PER_POS |
			  config->dst_per << CH_CFG2_L_DST_PER_POS;
		cfg_hi = config->tt_fc << CH_CFG2_H_TT_FC_POS |
			 config->hs_sel_src << CH_CFG2_H_HS_SEL_SRC_POS |
			 config->hs_sel_dst << CH_CFG2_H_HS_SEL_DST_POS |
			 config->prior << CH_CFG2_H_PRIORITY_POS;
	}
	axi_chan_iowrite32(chan, CH_CFG_L, cfg_lo);
	axi_chan_iowrite32(chan, CH_CFG_H, cfg_hi);
}
/* in axi_chan_disable(): */
	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
	val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT);
	if (chan->chip->dw->hdata->reg_map_8_channels)
		val |= BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
	else
		val |= BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
	axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
/* in axi_chan_enable(): */
	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
	if (chan->chip->dw->hdata->reg_map_8_channels)
		val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
		       BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
	else
		val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
		       BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
	axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
/* in axi_chan_is_hw_enable(): */
	val = axi_dma_ioread32(chan->chip, DMAC_CHEN);

	return !!(val & (BIT(chan->id) << DMAC_CHAN_EN_SHIFT));
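/*
 * Worked example (shift values assumed: DMAC_CHAN_EN_SHIFT == 0,
 * DMAC_CHAN_EN_WE_SHIFT == 8): enabling channel 2 on the 8-channel
 * register map writes 0x404 to DMAC_CHEN - bit 2 sets CH2_EN and bit 10
 * is the matching write-enable bit, so the read-modify-write leaves the
 * other channels' enable bits untouched.
 */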
/* in axi_dma_hw_init(): */
	for (i = 0; i < chip->dw->hdata->nr_channels; i++) {
		axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
		axi_chan_disable(&chip->dw->chan[i]);
	}
	ret = dma_set_mask_and_coherent(chip->dev, DMA_BIT_MASK(64));
	if (ret)
		dev_warn(chip->dev, "Unable to set coherent mask\n");
/* in axi_chan_get_xfer_width(): */
	u32 max_width = chan->chip->dw->hdata->m_data_width;
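/*
 * The elided body presumably picks the widest transfer width that the
 * source address, destination address, length and master bus width all
 * allow; a sketch of that computation (src, dst and len are the
 * function's other parameters):
 */
	return __ffs(src | dst | len | BIT(max_width));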
/* in axi_chan_name(): */
	return dma_chan_name(&chan->vc.chan);
/* in axi_desc_alloc(): */
	desc->hw_desc = kcalloc(num, sizeof(*desc->hw_desc), GFP_NOWAIT);
	if (!desc->hw_desc) {
		kfree(desc);
		return NULL;
	}
	desc->nr_hw_descs = num;
/* in axi_desc_get(): */
	lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &phys);
	...
	atomic_inc(&chan->descs_allocated);
/* in axi_desc_put(): */
	struct axi_dma_chan *chan = desc->chan;
	int count = desc->nr_hw_descs;
	...
	for (descs_put = 0; descs_put < count; descs_put++) {
		hw_desc = &desc->hw_desc[descs_put];
		dma_pool_free(chan->desc_pool, hw_desc->lli, hw_desc->llp);
	}

	kfree(desc->hw_desc);
	...
	atomic_sub(descs_put, &chan->descs_allocated);
	dev_vdbg(chan2dev(chan), "%s: %d descs put, %d still allocated\n",
		 axi_chan_name(chan), descs_put,
		 atomic_read(&chan->descs_allocated));
/* in dma_chan_tx_status(): */
	spin_lock_irqsave(&chan->vc.lock, flags);

	vdesc = vchan_find_desc(&chan->vc, cookie);
	if (vdesc) {
		length = vd_to_axi_desc(vdesc)->length;
		completed_blocks = vd_to_axi_desc(vdesc)->completed_blocks;
		len = vd_to_axi_desc(vdesc)->hw_desc[0].len;
		completed_length = completed_blocks * len;
		bytes = length - completed_length;
	}

	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* in write_desc_llp(): */
	desc->lli->llp = cpu_to_le64(adr);
/* in dw_axi_dma_set_byte_halfword(): */
	if (!chan->chip->apb_regs) {
		dev_dbg(chan->chip->dev, "apb_regs not initialized\n");
		return;
	}

	reg_width = __ffs(chan->config.dst_addr_width);
	...
	val = ioread32(chan->chip->apb_regs + offset);

	if (set)
		val |= BIT(chan->id);
	else
		val &= ~BIT(chan->id);

	iowrite32(val, chan->chip->apb_regs + offset);
/* in axi_chan_block_xfer_start(): */
	u32 priority = chan->chip->dw->hdata->priority[chan->id];
	struct axi_dma_chan_config config = {};
	...
	if (unlikely(axi_chan_is_hw_enable(chan))) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return;
	}

	axi_dma_enable(chan->chip);

	config.dst_multblk_type = DWAXIDMAC_MBLK_TYPE_LL;
	config.src_multblk_type = DWAXIDMAC_MBLK_TYPE_LL;
	config.tt_fc = DWAXIDMAC_TT_FC_MEM_TO_MEM_DMAC;
	config.prior = priority;
	config.hs_sel_dst = DWAXIDMAC_HS_SEL_HW;
	config.hs_sel_src = DWAXIDMAC_HS_SEL_HW;
	switch (chan->direction) {
	case DMA_MEM_TO_DEV:
		dw_axi_dma_set_byte_halfword(chan, true);
		config.tt_fc = chan->config.device_fc ?
				DWAXIDMAC_TT_FC_MEM_TO_PER_DST :
				DWAXIDMAC_TT_FC_MEM_TO_PER_DMAC;
		if (chan->chip->apb_regs)
			config.dst_per = chan->id;
		else
			config.dst_per = chan->hw_handshake_num;
		break;
	case DMA_DEV_TO_MEM:
		config.tt_fc = chan->config.device_fc ?
				DWAXIDMAC_TT_FC_PER_TO_MEM_SRC :
				DWAXIDMAC_TT_FC_PER_TO_MEM_DMAC;
		if (chan->chip->apb_regs)
			config.src_per = chan->id;
		else
			config.src_per = chan->hw_handshake_num;
		break;
	default:
		break;
	}
	axi_chan_config_write(chan, &config);

	write_chan_llp(chan, first->hw_desc[0].llp | lms);
/* in axi_chan_start_first_queued(): */
	vd = vchan_next_desc(&chan->vc);
	if (!vd)
		return;

	desc = vd_to_axi_desc(vd);
	dev_vdbg(chan2dev(chan), "%s: started %u\n", axi_chan_name(chan),
		 vd->tx.cookie);
	axi_chan_block_xfer_start(chan, desc);
/* in dma_chan_issue_pending(): */
	spin_lock_irqsave(&chan->vc.lock, flags);
	if (vchan_issue_pending(&chan->vc))
		axi_chan_start_first_queued(chan);
	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* in dw_axi_dma_synchronize(): */
	vchan_synchronize(&chan->vc);
/* in dma_chan_alloc_chan_resources(): */
	if (axi_chan_is_hw_enable(chan)) {
		dev_err(chan2dev(chan), "%s is non-idle!\n",
			axi_chan_name(chan));
		return -EBUSY;
	}

	/* LLI address must be aligned to a 64-byte boundary */
	chan->desc_pool = dma_pool_create(dev_name(chan2dev(chan)),
					  chan->chip->dev,
					  sizeof(struct axi_dma_lli),
					  64, 0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "No memory for descriptors\n");
		return -ENOMEM;
	}

	pm_runtime_get(chan->chip->dev);
/* in dma_chan_free_chan_resources(): */
	if (axi_chan_is_hw_enable(chan))
		dev_err(dchan2dev(dchan), "%s is non-idle!\n",
			axi_chan_name(chan));
	...
	vchan_free_chan_resources(&chan->vc);

	dma_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
	dev_vdbg(dchan2dev(dchan),
		 "%s: free resources, descriptor still allocated: %u\n",
		 axi_chan_name(chan), atomic_read(&chan->descs_allocated));

	pm_runtime_put(chan->chip->dev);
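/*
 * Consumer view of the two callbacks above: the dmaengine core calls
 * device_alloc_chan_resources() when a channel is requested and
 * device_free_chan_resources() when it is released. A hedged sketch;
 * "tx" is an illustrative dma-names entry, not from this driver:
 */
static int example_acquire_channel(struct device *dev)
{
	struct dma_chan *chan;

	chan = dma_request_chan(dev, "tx");	/* -> dma_chan_alloc_chan_resources() */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	/* ... use the channel ... */

	dma_release_channel(chan);		/* -> dma_chan_free_chan_resources() */
	return 0;
}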
/* in dw_axi_dma_set_hw_channel(): */
	struct axi_dma_chip *chip = chan->chip;

	if (!chip->apb_regs) {
		dev_err(chip->dev, "apb_regs not initialized\n");
		return;
	}

	if (set)
		val = chan->hw_handshake_num;
	else
		val = UNUSED_CHANNEL;

	reg_value = lo_hi_readq(chip->apb_regs + DMAC_APB_HW_HS_SEL_0);
	reg_value &= ~(DMA_APB_HS_SEL_MASK <<
		       (chan->id * DMA_APB_HS_SEL_BIT_SIZE));
	reg_value |= (val << (chan->id * DMA_APB_HS_SEL_BIT_SIZE));
	lo_hi_writeq(reg_value, chip->apb_regs + DMAC_APB_HW_HS_SEL_0);
/* in set_desc_last(): */
	val = le32_to_cpu(desc->lli->ctl_hi);
	val |= CH_CTL_H_LLI_LAST;
	desc->lli->ctl_hi = cpu_to_le32(val);
/* in write_desc_sar(): */
	desc->lli->sar = cpu_to_le64(adr);

/* in write_desc_dar(): */
	desc->lli->dar = cpu_to_le64(adr);
/* in set_desc_src_master(): */
	val = le32_to_cpu(desc->lli->ctl_lo);
	...
	desc->lli->ctl_lo = cpu_to_le32(val);
/* in set_desc_dest_master(): */
	val = le32_to_cpu(hw_desc->lli->ctl_lo);
	if (desc->chan->chip->dw->hdata->nr_masters > 1)
		val |= CH_CTL_L_DST_MAST;
	else
		val &= ~CH_CTL_L_DST_MAST;

	hw_desc->lli->ctl_lo = cpu_to_le32(val);
/* in dw_axi_dma_set_hw_desc(): */
	unsigned int data_width = BIT(chan->chip->dw->hdata->m_data_width);
	...
	axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];
	...
	if (!IS_ALIGNED(mem_addr, 4)) {
		dev_err(chan->chip->dev, "invalid buffer alignment\n");
		return -EINVAL;
	}

	switch (chan->direction) {
	case DMA_MEM_TO_DEV:
		reg_width = __ffs(chan->config.dst_addr_width);
		device_addr = chan->config.dst_addr;
		...
		break;
	case DMA_DEV_TO_MEM:
		reg_width = __ffs(chan->config.src_addr_width);
		device_addr = chan->config.src_addr;
		...
		break;
	default:
		return -EINVAL;
	}

	if (block_ts > axi_block_ts)
		return -EINVAL;

	hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
	if (unlikely(!hw_desc->lli))
		return -ENOMEM;

	ctlhi = CH_CTL_H_LLI_VALID;
	if (chan->chip->dw->hdata->restrict_axi_burst_len) {
		burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
		...
	}

	hw_desc->lli->ctl_hi = cpu_to_le32(ctlhi);

	if (chan->direction == DMA_MEM_TO_DEV) {
		write_desc_sar(hw_desc, mem_addr);
		write_desc_dar(hw_desc, device_addr);
	} else {
		write_desc_sar(hw_desc, device_addr);
		write_desc_dar(hw_desc, mem_addr);
	}

	hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);
	...
	hw_desc->lli->ctl_lo = cpu_to_le32(ctllo);
	...
	hw_desc->len = len;
/* in calculate_block_len(): */
	axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];

	switch (direction) {
	case DMA_MEM_TO_DEV:
		data_width = BIT(chan->chip->dw->hdata->m_data_width);
		...
	case DMA_DEV_TO_MEM:
		reg_width = __ffs(chan->config.src_addr_width);
		...
	}
/* in dw_axi_dma_chan_prep_cyclic(): */
	chan->direction = direction;
	desc->chan = chan;
	chan->cyclic = true;
	desc->length = 0;
	desc->period_len = period_len;

	for (i = 0; i < total_segments; i++) {
		hw_desc = &desc->hw_desc[i];
		...
		desc->length += hw_desc->len;
		/* Set end-of-link to the linked descriptor, so that cyclic
		 * callback function can be triggered during interrupt.
		 */
		set_desc_last(hw_desc);
		...
	}

	llp = desc->hw_desc[0].llp;
	do {
		hw_desc = &desc->hw_desc[--total_segments];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (total_segments);
	...
	return vchan_tx_prep(&chan->vc, &desc->vd, flags);
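/*
 * How a peripheral driver reaches dw_axi_dma_chan_prep_cyclic(): via the
 * generic dmaengine wrapper. A hedged sketch; buf, len and period are
 * illustrative values already mapped by the caller:
 */
static int example_start_cyclic(struct dma_chan *chan, dma_addr_t buf,
				size_t len, size_t period)
{
	struct dma_async_tx_descriptor *txd;

	txd = dmaengine_prep_dma_cyclic(chan, buf, len, period,
					DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!txd)
		return -ENOMEM;

	dmaengine_submit(txd);
	dma_async_issue_pending(chan);	/* -> dma_chan_issue_pending() */
	return 0;
}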
/* in dw_axi_dma_chan_prep_slave_sg(): */
	desc->chan = chan;
	desc->length = 0;
	chan->direction = direction;

	for_each_sg(sgl, sg, sg_len, i) {
		...
		do {
			hw_desc = &desc->hw_desc[loop++];
			...
			desc->length += hw_desc->len;
			len -= segment_len;
			...
		} while (len >= segment_len);
	}

	/* Set end-of-link to the last link descriptor of list */
	set_desc_last(&desc->hw_desc[num_sgs - 1]);

	do {
		hw_desc = &desc->hw_desc[--num_sgs];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (num_sgs);
	...
	return vchan_tx_prep(&chan->vc, &desc->vd, flags);
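/*
 * Likewise for scatter-gather slave transfers; a hedged consumer-side
 * sketch (sgl/nents come from a prior dma_map_sg() by the caller):
 */
static struct dma_async_tx_descriptor *
example_prep_rx(struct dma_chan *chan, struct scatterlist *sgl,
		unsigned int nents)
{
	return dmaengine_prep_slave_sg(chan, sgl, nents, DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}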
/* in dma_chan_prep_dma_memcpy(): */
	max_block_ts = chan->chip->dw->hdata->block_size[chan->id];
	...
	desc->chan = chan;
	num = 0;
	desc->length = 0;
	while (len) {
		hw_desc = &desc->hw_desc[num];
		...
		/* BLOCK_TS register should be set to block_ts - 1. */
		...
		hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
		if (unlikely(!hw_desc->lli))
			goto err_desc_get;

		hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);
		...
		if (chan->chip->dw->hdata->restrict_axi_burst_len) {
			u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
			...
		}

		hw_desc->lli->ctl_hi = cpu_to_le32(reg);
		...
		hw_desc->lli->ctl_lo = cpu_to_le32(reg);
		...
		hw_desc->len = xfer_len;
		desc->length += hw_desc->len;
		len -= xfer_len;
		...
		num++;
	}

	/* Set end-of-link to the last link descriptor of list */
	set_desc_last(&desc->hw_desc[num - 1]);

	do {
		hw_desc = &desc->hw_desc[--num];
		write_desc_llp(hw_desc, llp | lms);
		llp = hw_desc->llp;
	} while (num);
	...
	return vchan_tx_prep(&chan->vc, &desc->vd, flags);
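/*
 * And the memcpy path: dmaengine_prep_dma_memcpy() lands in
 * dma_chan_prep_dma_memcpy() above. A hedged sketch; dst/src are
 * illustrative, already-mapped dma_addr_t values:
 */
	txd = dmaengine_prep_dma_memcpy(chan, dst, src, size,
					DMA_PREP_INTERRUPT);
	if (!txd)
		return -ENOMEM;
	dmaengine_submit(txd);
	dma_async_issue_pending(chan);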
/* in dw_axi_dma_chan_slave_config(..., struct dma_slave_config *config): */
	memcpy(&chan->config, config, sizeof(*config));
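/*
 * The memcpy above captures what dmaengine_slave_config() passes down;
 * a hedged example of fields this driver actually reads (the FIFO
 * address is illustrative):
 */
	struct dma_slave_config cfg = {
		.dst_addr = 0x40001000,		/* illustrative FIFO address */
		.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
		.device_fc = false,
	};

	ret = dmaengine_slave_config(chan, &cfg);	/* -> dw_axi_dma_chan_slave_config() */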
/* in axi_chan_dump_lli(): */
	if (!desc->lli) {
		dev_err(dchan2dev(&chan->vc.chan), "NULL LLI\n");
		return;
	}

	dev_err(dchan2dev(&chan->vc.chan),
		"SAR: 0x%llx DAR: 0x%llx LLP: 0x%llx BTS 0x%x CTL: 0x%x:%08x",
		le64_to_cpu(desc->lli->sar),
		le64_to_cpu(desc->lli->dar),
		le64_to_cpu(desc->lli->llp),
		le32_to_cpu(desc->lli->block_ts_lo),
		le32_to_cpu(desc->lli->ctl_hi),
		le32_to_cpu(desc->lli->ctl_lo));
/* in axi_chan_list_dump_lli(): */
	int count = atomic_read(&chan->descs_allocated);
	...
	for (i = 0; i < count; i++)
		axi_chan_dump_lli(chan, &desc_head->hw_desc[i]);
/* in axi_chan_handle_err(): */
	spin_lock_irqsave(&chan->vc.lock, flags);
	...
	/* The bad descriptor currently is in the head of vc list */
	vd = vchan_next_desc(&chan->vc);
	...
	/* Remove the completed descriptor from issued list */
	list_del(&vd->node);

	/* WARN about bad descriptor */
	dev_err(chan2dev(chan),
		"Bad descriptor submitted for %s, cookie: %d, irq: 0x%08x\n",
		axi_chan_name(chan), vd->tx.cookie, status);
	...
	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* in axi_chan_block_xfer_complete(): */
	int count = atomic_read(&chan->descs_allocated);
	...
	spin_lock_irqsave(&chan->vc.lock, flags);
	...
	vd = vchan_next_desc(&chan->vc);
	...
	if (chan->cyclic) {
		desc = vd_to_axi_desc(vd);
		llp = lo_hi_readq(chan->chan_regs + CH_LLP);
		for (i = 0; i < count; i++) {
			hw_desc = &desc->hw_desc[i];
			if (hw_desc->llp == llp) {
				axi_chan_irq_clear(chan, hw_desc->lli->status_lo);
				hw_desc->lli->ctl_hi |= CH_CTL_H_LLI_VALID;
				desc->completed_blocks = i;

				if (((hw_desc->len * (i + 1)) % desc->period_len) == 0)
					vchan_cyclic_callback(vd);
				break;
			}
		}
	} else {
		/* Remove the completed descriptor from issued list */
		list_del(&vd->node);
		vchan_cookie_complete(vd);
	}
	...
	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* in dw_axi_dma_interrupt(): */
	struct dw_axi_dma *dw = chip->dw;
	...
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		chan = &dw->chan[i];
		...
		dev_vdbg(chip->dev, "%s %u IRQ status: 0x%08x\n",
			 axi_chan_name(chan), i, status);
		...
	}

	/* Re-enable interrupts */
	axi_dma_irq_enable(chip);
/* in dma_chan_terminate_all(): */
	u32 chan_active = BIT(chan->id) << DMAC_CHAN_EN_SHIFT;
	...
	axi_chan_disable(chan);

	ret = readl_poll_timeout_atomic(chan->chip->regs + DMAC_CHEN, val,
					!(val & chan_active), 1000, 50000);
	if (ret == -ETIMEDOUT)
		dev_warn(dchan2dev(dchan),
			 "%s failed to stop\n", axi_chan_name(chan));

	if (chan->direction != DMA_MEM_TO_MEM)
		dw_axi_dma_set_hw_channel(chan, false);
	if (chan->direction == DMA_MEM_TO_DEV)
		dw_axi_dma_set_byte_halfword(chan, false);

	spin_lock_irqsave(&chan->vc.lock, flags);

	vchan_get_all_descriptors(&chan->vc, &head);

	chan->cyclic = false;
	spin_unlock_irqrestore(&chan->vc.lock, flags);

	vchan_dma_desc_free_list(&chan->vc, &head);
/* in dma_chan_pause(): */
	spin_lock_irqsave(&chan->vc.lock, flags);

	if (chan->chip->dw->hdata->reg_map_8_channels) {
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
		val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT |
		       BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT;
		axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
	} else {
		val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
		val |= BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT |
		       BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT;
		axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, val);
	}

	do {
		if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_CH_SUSPENDED)
			break;

		udelay(2);
	} while (--timeout);

	axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_CH_SUSPENDED);

	chan->is_paused = true;

	spin_unlock_irqrestore(&chan->vc.lock, flags);

	return timeout ? 0 : -EAGAIN;
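/*
 * dma_chan_pause()/dma_chan_resume() back the generic dmaengine_pause()
 * and dmaengine_resume() helpers; a minimal, hedged consumer sketch:
 */
	if (dmaengine_pause(chan))	/* -> dma_chan_pause(), -EAGAIN on timeout */
		pr_warn("channel failed to suspend\n");
	...
	dmaengine_resume(chan);		/* -> dma_chan_resume() */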
/* in axi_chan_resume(): */
	if (chan->chip->dw->hdata->reg_map_8_channels) {
		val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
		val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT);
		val |= (BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT);
		axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
	} else {
		val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
		val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT);
		val |= (BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT);
		axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, val);
	}

	chan->is_paused = false;
/* in dma_chan_resume(): */
	spin_lock_irqsave(&chan->vc.lock, flags);

	if (chan->is_paused)
		axi_chan_resume(chan);

	spin_unlock_irqrestore(&chan->vc.lock, flags);
/* in axi_dma_suspend(): */
	clk_disable_unprepare(chip->core_clk);
	clk_disable_unprepare(chip->cfgr_clk);
/* in axi_dma_resume(): */
	ret = clk_prepare_enable(chip->cfgr_clk);
	if (ret < 0)
		return ret;

	ret = clk_prepare_enable(chip->core_clk);
	if (ret < 0)
		return ret;
/* in dw_axi_dma_of_xlate(): */
	struct dw_axi_dma *dw = ofdma->of_dma_data;
	...
	dchan = dma_get_any_slave_channel(&dw->dma);
	if (!dchan)
		return NULL;

	chan = dchan_to_axi_dma_chan(dchan);
	chan->hw_handshake_num = dma_spec->args[0];
	return dchan;
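/*
 * The xlate above consumes a one-cell DMA specifier whose cell is the
 * hardware handshake number. A hedged device-tree sketch (node names
 * and handshake values are illustrative):
 *
 *	uart0 {
 *		dmas = <&axidma 4>, <&axidma 5>;
 *		dma-names = "tx", "rx";
 *	};
 */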
/* in parse_device_properties(): */
	struct device *dev = chip->dev;
	...
	ret = device_property_read_u32(dev, "dma-channels", &tmp);
	if (ret)
		return ret;
	if (tmp == 0 || tmp > DMAC_MAX_CHANNELS)
		return -EINVAL;

	chip->dw->hdata->nr_channels = tmp;
	if (tmp <= DMA_REG_MAP_CH_REF)
		chip->dw->hdata->reg_map_8_channels = true;

	ret = device_property_read_u32(dev, "snps,dma-masters", &tmp);
	if (ret)
		return ret;
	if (tmp == 0 || tmp > DMAC_MAX_MASTERS)
		return -EINVAL;

	chip->dw->hdata->nr_masters = tmp;

	ret = device_property_read_u32(dev, "snps,data-width", &tmp);
	if (ret)
		return ret;
	if (tmp > DWAXIDMAC_TRANS_WIDTH_MAX)
		return -EINVAL;

	chip->dw->hdata->m_data_width = tmp;

	ret = device_property_read_u32_array(dev, "snps,block-size", carr,
					     chip->dw->hdata->nr_channels);
	if (ret)
		return ret;
	for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
		if (carr[tmp] == 0 || carr[tmp] > DMAC_MAX_BLK_SIZE)
			return -EINVAL;

		chip->dw->hdata->block_size[tmp] = carr[tmp];
	}

	ret = device_property_read_u32_array(dev, "snps,priority", carr,
					     chip->dw->hdata->nr_channels);
	if (ret)
		return ret;
	/* Priority value must be programmed within [0:nr_channels-1] range */
	for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
		if (carr[tmp] >= chip->dw->hdata->nr_channels)
			return -EINVAL;

		chip->dw->hdata->priority[tmp] = carr[tmp];
	}

	/* axi-max-burst-len is optional property */
	ret = device_property_read_u32(dev, "snps,axi-max-burst-len", &tmp);
	if (!ret) {
		if (tmp > DWAXIDMAC_ARWLEN_MAX + 1)
			return -EINVAL;
		if (tmp < DWAXIDMAC_ARWLEN_MIN + 1)
			return -EINVAL;

		chip->dw->hdata->restrict_axi_burst_len = true;
		chip->dw->hdata->axi_rw_burst_len = tmp;
	}
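/*
 * A hedged example of the properties parsed above, for a 4-channel
 * configuration (all values are illustrative):
 *
 *	dma-channels = <4>;
 *	snps,dma-masters = <2>;
 *	snps,data-width = <4>;		// bus width: 2^4 == 16 bytes
 *	snps,block-size = <4096 4096 4096 4096>;
 *	snps,priority = <0 1 2 3>;
 *	snps,axi-max-burst-len = <16>;	// optional
 */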
/* in dw_probe(): */
	chip = devm_kzalloc(&pdev->dev, sizeof(*chip), GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	dw = devm_kzalloc(&pdev->dev, sizeof(*dw), GFP_KERNEL);
	if (!dw)
		return -ENOMEM;

	hdata = devm_kzalloc(&pdev->dev, sizeof(*hdata), GFP_KERNEL);
	if (!hdata)
		return -ENOMEM;

	chip->dw = dw;
	chip->dev = &pdev->dev;
	chip->dw->hdata = hdata;

	chip->irq = platform_get_irq(pdev, 0);
	if (chip->irq < 0)
		return chip->irq;

	chip->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(chip->regs))
		return PTR_ERR(chip->regs);

	flags = (uintptr_t)of_device_get_match_data(&pdev->dev);
	if (flags & AXI_DMA_FLAG_HAS_APB_REGS) {
		chip->apb_regs = devm_platform_ioremap_resource(pdev, 1);
		if (IS_ERR(chip->apb_regs))
			return PTR_ERR(chip->apb_regs);
	}

	if (flags & AXI_DMA_FLAG_HAS_RESETS) {
		resets = devm_reset_control_array_get_exclusive(&pdev->dev);
		if (IS_ERR(resets))
			return PTR_ERR(resets);

		ret = reset_control_deassert(resets);
		if (ret)
			return ret;
	}

	chip->dw->hdata->use_cfg2 = !!(flags & AXI_DMA_FLAG_USE_CFG2);

	chip->core_clk = devm_clk_get(chip->dev, "core-clk");
	if (IS_ERR(chip->core_clk))
		return PTR_ERR(chip->core_clk);

	chip->cfgr_clk = devm_clk_get(chip->dev, "cfgr-clk");
	if (IS_ERR(chip->cfgr_clk))
		return PTR_ERR(chip->cfgr_clk);

	ret = parse_device_properties(chip);
	if (ret)
		return ret;

	dw->chan = devm_kcalloc(chip->dev, hdata->nr_channels,
				sizeof(*dw->chan), GFP_KERNEL);
	if (!dw->chan)
		return -ENOMEM;

	ret = devm_request_irq(chip->dev, chip->irq, dw_axi_dma_interrupt,
			       IRQF_SHARED, KBUILD_MODNAME, chip);
	if (ret)
		return ret;

	INIT_LIST_HEAD(&dw->dma.channels);
	for (i = 0; i < hdata->nr_channels; i++) {
		struct axi_dma_chan *chan = &dw->chan[i];

		chan->chip = chip;
		chan->id = i;
		chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN;
		atomic_set(&chan->descs_allocated, 0);

		chan->vc.desc_free = vchan_desc_put;
		vchan_init(&chan->vc, &dw->dma);
	}

	/* Set capabilities */
	dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
	dma_cap_set(DMA_SLAVE, dw->dma.cap_mask);
	dma_cap_set(DMA_CYCLIC, dw->dma.cap_mask);

	/* DMA capabilities */
	dw->dma.max_burst = hdata->axi_rw_burst_len;
	dw->dma.src_addr_widths = AXI_DMA_BUSWIDTHS;
	dw->dma.dst_addr_widths = AXI_DMA_BUSWIDTHS;
	dw->dma.directions = BIT(DMA_MEM_TO_MEM);
	dw->dma.directions |= BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM);
	dw->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;

	dw->dma.dev = chip->dev;
	dw->dma.device_tx_status = dma_chan_tx_status;
	dw->dma.device_issue_pending = dma_chan_issue_pending;
	dw->dma.device_terminate_all = dma_chan_terminate_all;
	dw->dma.device_pause = dma_chan_pause;
	dw->dma.device_resume = dma_chan_resume;

	dw->dma.device_alloc_chan_resources = dma_chan_alloc_chan_resources;
	dw->dma.device_free_chan_resources = dma_chan_free_chan_resources;

	dw->dma.device_prep_dma_memcpy = dma_chan_prep_dma_memcpy;
	dw->dma.device_synchronize = dw_axi_dma_synchronize;
	dw->dma.device_config = dw_axi_dma_chan_slave_config;
	dw->dma.device_prep_slave_sg = dw_axi_dma_chan_prep_slave_sg;
	dw->dma.device_prep_dma_cyclic = dw_axi_dma_chan_prep_cyclic;
	...
	dw->dma.dev->dma_parms = &dw->dma_parms;
	dma_set_max_seg_size(&pdev->dev, MAX_BLOCK_SIZE);

	pm_runtime_enable(chip->dev);
	...
	pm_runtime_get_noresume(chip->dev);
	ret = axi_dma_resume(chip);
	if (ret < 0)
		goto err_pm_disable;

	axi_dma_hw_init(chip);

	pm_runtime_put(chip->dev);

	ret = dmaenginem_async_device_register(&dw->dma);
	if (ret)
		goto err_pm_disable;

	/* Register with OF helpers for DMA lookups */
	ret = of_dma_controller_register(pdev->dev.of_node,
					 dw_axi_dma_of_xlate, dw);
	if (ret < 0)
		dev_warn(&pdev->dev,
			 "Failed to register OF DMA controller, fallback to MEM_TO_MEM mode\n");

	dev_info(chip->dev, "DesignWare AXI DMA Controller, %d channels\n",
		 dw->hdata->nr_channels);

	return 0;

err_pm_disable:
	pm_runtime_disable(chip->dev);

	return ret;
/* in dw_remove(): */
	struct dw_axi_dma *dw = chip->dw;
	...
	/* Enable clk before accessing to registers */
	clk_prepare_enable(chip->cfgr_clk);
	clk_prepare_enable(chip->core_clk);
	axi_dma_irq_disable(chip);
	for (i = 0; i < dw->hdata->nr_channels; i++) {
		axi_chan_disable(&chip->dw->chan[i]);
		axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
	}
	axi_dma_disable(chip);

	pm_runtime_disable(chip->dev);
	axi_dma_suspend(chip);

	devm_free_irq(chip->dev, chip->irq, chip);

	of_dma_controller_free(chip->dev->of_node);

	list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
				 vc.chan.device_node) {
		list_del(&chan->vc.chan.device_node);
		tasklet_kill(&chan->vc.task);
	}
1575 .compatible = "snps,axi-dma-1.01a"
1577 .compatible = "intel,kmb-axi-dma",
1580 .compatible = "starfive,jh7110-axi-dma",
MODULE_DESCRIPTION("Synopsys DesignWare AXI DMA Controller platform driver");