// SPDX-License-Identifier: GPL-2.0-or-later
 * Driver for the Atmel AHB DMA Controller (aka HDMA or DMAC on AT91 systems)
 * This supports the Atmel AHB DMA Controller found in several Atmel SoCs.
 * The only Atmel DMA Controller that is not covered by this driver is the one
#include <dt-bindings/dma/at91.h>
#include <linux/dma-mapping.h>
#include "virt-dma.h"
 * --------
 * at_hdmac : Name of the Atmel AHB DMA Controller
 * at_dma_ / atdma : Atmel DMA controller entity related
 * atc_ / atchan : Atmel DMA Channel entity related
#define AT_DMA_IF_BIGEND(i) BIT((i)) /* AHB-Lite Interface i in Big-endian mode */
#define ATC_DSCR_IF GENMASK(1, 0) /* Descriptor fetched via AHB-Lite Interface */
#define ATC_SIF GENMASK(1, 0) /* Src tx done via AHB-Lite Interface i */
#define ATC_DIF GENMASK(5, 4) /* Dst tx done via AHB-Lite Interface i */
#define ATC_SRC_PIP BIT(8) /* Source Picture-in-Picture enabled */
#define ATC_DST_PIP BIT(12) /* Destination Picture-in-Picture enabled */
#define ATC_FC_MEM2MEM 0x0 /* Mem-to-Mem (DMA) */
#define ATC_FC_MEM2PER 0x1 /* Mem-to-Periph (DMA) */
#define ATC_FC_PER2MEM 0x2 /* Periph-to-Mem (DMA) */
#define ATC_FC_PER2PER 0x3 /* Periph-to-Periph (DMA) */
#define ATC_FC_PER2MEM_PER 0x4 /* Periph-to-Mem (Peripheral) */
#define ATC_FC_MEM2PER_PER 0x5 /* Mem-to-Periph (Peripheral) */
#define ATC_FC_PER2PER_SRCPER 0x6 /* Periph-to-Periph (Src Peripheral) */
#define ATC_FC_PER2PER_DSTPER 0x7 /* Periph-to-Periph (Dst Peripheral) */
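/*
 * These values select which block paces the transfer (the "flow
 * controller"). A minimal usage sketch; the ATC_FC field name is an
 * assumption based on the naming of the other CTRLB fields in this
 * file.
 */
u32 ctrlb = FIELD_PREP(ATC_FC, ATC_FC_MEM2PER);	/* DMAC paces mem->periph */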
/*-- descriptors -----------------------------------------------------*/
/* LLI == Linked List Item; aka DMA buffer descriptor */
 * struct atdma_sg - atdma scatter gather entry
 * @lli: linked list item that is passed to the DMA controller
 * struct at_desc - software descriptor
 * @vd: pointer to the virtual dma descriptor.
 * @atchan: pointer to the atmel dma channel.
 * @total_len: total transaction byte count
/*-- Channels --------------------------------------------------------*/
 * atc_status - information bits stored in channel status flag
 * struct at_dma_chan - internal representation of an Atmel HDMAC channel
 * @vc: virtual dma channel entry.
 * @desc: pointer to the atmel dma descriptor.
__raw_readl((atchan)->ch_regs + ATC_##name##_OFFSET)
__raw_writel((val), (atchan)->ch_regs + ATC_##name##_OFFSET)
 * 1 -> 0, 4 -> 1, 8 -> 2, 16 -> 3, 32 -> 4, 64 -> 5, 128 -> 6, 256 -> 7.
*maxburst = fls(*maxburst) - 2;
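/*
 * A sketch of the full conversion helper the line above belongs to,
 * assembled from the mapping documented above; the guard for
 * maxburst <= 1 is an assumption.
 */
static inline void convert_burst(u32 *maxburst)
{
	if (*maxburst > 1)
		*maxburst = fls(*maxburst) - 2;	/* e.g. 16 -> fls(16) - 2 = 3 */
	else
		*maxburst = 0;			/* a single beat encodes as 0 */
}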
 * 1 byte -> 0, 2 bytes -> 1, 4 bytes -> 2.
/* For 1 byte width or fallback */
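/*
 * Sketch of the width conversion implied by the mapping above: the
 * CTRLA width fields encode the bus width as log2(bytes). The enum
 * values come from <linux/dmaengine.h>; the exact signature is an
 * assumption.
 */
static inline u32 convert_buswidth(enum dma_slave_buswidth addr_width)
{
	switch (addr_width) {
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		return 1;
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		return 2;
	default:
		/* For 1 byte width or fallback */
		return 0;
	}
}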
/*-- Controller ------------------------------------------------------*/
 * struct at_dma - internal representation of an Atmel HDMA Controller
 * @atdma_devtype: identifier of DMA controller compatibility
 * @clk: dma controller clock
__raw_readl((atdma)->regs + AT_DMA_##name)
__raw_writel((val), (atdma)->regs + AT_DMA_##name)
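/*
 * Usage sketch for the two accessor families: controller-global
 * registers go through dma_readl()/dma_writel(), per-channel registers
 * through channel_readl()/channel_writel() defined earlier. Register
 * names mirror ones used elsewhere in this file.
 */
u32 pending = dma_readl(atdma, EBCISR);		/* read/clear irq status */
u32 ctrla = channel_readl(atchan, CTRLA);	/* per-channel control A */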
/*-- Helper functions ------------------------------------------------*/
return &chan->dev->device;
struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
dev_err(chan2dev(&atchan->vc.chan),
atchan->vc.chan.chan_id,
dev_err(chan2dev(&atchan->vc.chan),
dev_crit(chan2dev(&atchan->vc.chan),
&lli->saddr, &lli->daddr,
lli->ctrla, lli->ctrlb, &lli->dscr);
 * atc_chan_is_enabled - test if given channel is enabled
struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
return !!(dma_readl(atdma, CHSR) & atchan->mask);
 * atc_chan_is_paused - test channel pause/resume status
return test_bit(ATC_IS_PAUSED, &atchan->status);
 * atc_chan_is_cyclic - test if given channel has cyclic property set
return test_bit(ATC_IS_CYCLIC, &atchan->status);
 * set_lli_eol - set end-of-link to descriptor so it will end transfer
u32 ctrlb = desc->sg[i].lli->ctrlb;
desc->sg[i].lli->ctrlb = ctrlb;
desc->sg[i].lli->dscr = 0;
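/*
 * Sketch of how the surrounding helper terminates a chain, assembled
 * from the fragments above: unmask the block-transfer-complete
 * interrupt, stop descriptor fetches on both sides, and zero the
 * next-descriptor pointer. The ATC_IEN (active low),
 * ATC_SRC_DSCR_DIS and ATC_DST_DSCR_DIS bit names are assumptions
 * based on this driver's naming scheme.
 */
static void set_lli_eol(struct at_desc *desc, unsigned int i)
{
	u32 ctrlb = desc->sg[i].lli->ctrlb;

	ctrlb &= ~ATC_IEN;			/* enable BTC irq (active low) */
	ctrlb |= ATC_SRC_DSCR_DIS | ATC_DST_DSCR_DIS;

	desc->sg[i].lli->ctrlb = ctrlb;
	desc->sg[i].lli->dscr = 0;		/* no next LLI: end of chain */
}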
 * be increased during DMA usage.
 * struct at_dma_platform_data - Controller configuration parameters
 * struct at_dma_slave - Controller-specific information about a slave
 * @dma_dev: required DMA master device
 * @cfg: Platform-specific initializer for the CFG register
struct atdma_sg *atdma_sg = &desc->sg[i];
desc->sg[i - 1].lli->dscr = atdma_sg->lli_phys;
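/*
 * The line above is the heart of hardware chaining: each LLI's DSCR
 * field holds the physical address of the next LLI. A minimal sketch of
 * the helper it belongs to, assuming entry 0 has no predecessor:
 */
static void atdma_lli_chain(struct at_desc *desc, unsigned int i)
{
	struct atdma_sg *atdma_sg = &desc->sg[i];

	if (i)
		desc->sg[i - 1].lli->dscr = atdma_sg->lli_phys;
}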
 * atc_dostart - starts the DMA engine for real
struct virt_dma_desc *vd = vchan_next_desc(&atchan->vc);
atchan->desc = NULL;
list_del(&vd->node);
atchan->desc = desc = to_atdma_desc(&vd->tx);
channel_writel(atchan, DSCR, desc->sg[0].lli_phys);
FIELD_PREP(ATC_SPIP_HOLE, desc->src_hole) |
FIELD_PREP(ATC_SPIP_BOUNDARY, desc->boundary));
FIELD_PREP(ATC_DPIP_HOLE, desc->dst_hole) |
FIELD_PREP(ATC_DPIP_BOUNDARY, desc->boundary));
dma_writel(atchan->atdma, CHER, atchan->mask);
struct at_dma *atdma = to_at_dma(vd->tx.chan->device);
struct at_desc *desc = to_atdma_desc(&vd->tx);
for (i = 0; i < desc->sglen; i++) {
if (desc->sg[i].lli)
dma_pool_free(atdma->lli_pool, desc->sg[i].lli,
desc->sg[i].lli_phys);
if (desc->memset_buffer) {
dma_pool_free(atdma->memset_pool, desc->memset_vaddr,
desc->memset_paddr);
desc->memset_buffer = false;
 * atc_calc_bytes_left - calculates the number of bytes left according to the
return current_len - (btsize << src_width);
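/*
 * Sketch of the residue arithmetic above: CTRLA's BTSIZE field counts
 * transfer chunks of (1 << src_width) bytes, so the bytes already moved
 * are btsize << src_width. Field names follow this driver's ATC_*
 * conventions; the exact signature is an assumption.
 */
static inline u32 atc_calc_bytes_left(u32 current_len, u32 ctrla)
{
	u32 btsize = FIELD_GET(ATC_BTSIZE, ctrla);
	u32 src_width = FIELD_GET(ATC_SRC_WIDTH, ctrla);

	return current_len - (btsize << src_width);
}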
 * atc_get_llis_residue - Get residue for a hardware linked list transfer
 * DMA transfer progression in between the two reads.
 * One solution could have been to pause the DMA transfer, read the DSCR and
 * CTRLA then resume the DMA transfer. Nonetheless, this approach presents some
 * - If the DMA transfer is paused, RX overruns or TX underruns are more likely
 * example, it uses a cyclic DMA transfer to read data from the Receive
 * by any FIFO on most Atmel SoCs. So pausing the DMA transfer to compute the
 * - The atc_pause() function masks interrupts but we'd rather avoid doing so
 * Returns 0 on success, -errno otherwise.
len = desc->total_len;
 * If the DSCR register value has not changed inside the DMA
 * DSCR has changed inside the DMA controller, so the previously
return -ETIMEDOUT;
if (desc->sg[0].lli->dscr == dscr) {
len -= desc->sg[0].len;
for (i = 1; i < desc->sglen; i++) {
if (desc->sg[i].lli && desc->sg[i].lli->dscr == dscr)
len -= desc->sg[i].len;
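/*
 * Sketch of the double-read scheme described above: DSCR is read before
 * and after CTRLA, and the (DSCR, CTRLA) pair is only trusted once both
 * DSCR reads match, proving the controller did not move to another LLI
 * in between. ATC_MAX_DSCR_TRIALS (assumed from the -ETIMEDOUT path
 * above) bounds the retries.
 */
u32 dscr, ctrla;
unsigned int i;

dscr = channel_readl(atchan, DSCR);
rmb(); /* ensure DSCR is read before CTRLA */
ctrla = channel_readl(atchan, CTRLA);
for (i = 0; i < ATC_MAX_DSCR_TRIALS; ++i) {
	u32 new_dscr;

	rmb(); /* ensure DSCR is read after CTRLA */
	new_dscr = channel_readl(atchan, DSCR);
	if (new_dscr == dscr)
		break;

	/* DSCR changed under us: resample CTRLA against the new LLI. */
	dscr = new_dscr;
	rmb(); /* ensure DSCR is read before CTRLA */
	ctrla = channel_readl(atchan, CTRLA);
}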
 * atc_get_residue - get the number of bytes residue for a cookie.
 * @chan: DMA channel
 * Return 0 on success, -errno otherwise.
vd = vchan_find_desc(&atchan->vc, cookie);
desc = to_atdma_desc(&vd->tx);
else if (atchan->desc && atchan->desc->vd.tx.cookie == cookie)
desc = atchan->desc;
return -EINVAL;
if (desc->sg[0].lli->dscr)
len = desc->total_len;
 * atc_handle_error - handle errors reported by DMA controller
struct at_desc *desc = atchan->desc;
dma_writel(atchan->atdma, CHDR, AT_DMA_RES(i) | atchan->mask);
dev_crit(chan2dev(&atchan->vc.chan), "Bad descriptor submitted for DMA!\n");
dev_crit(chan2dev(&atchan->vc.chan), "cookie: %d\n",
desc->vd.tx.cookie);
for (i = 0; i < desc->sglen; i++)
atc_dump_lli(atchan, desc->sg[i].lli);
spin_lock(&atchan->vc.lock);
desc = atchan->desc;
vchan_cyclic_callback(&desc->vd);
vchan_cookie_complete(&desc->vd);
atchan->desc = NULL;
spin_unlock(&atchan->vc.lock);
dev_vdbg(atdma->dma_device.dev,
for (i = 0; i < atdma->dma_device.chancnt; i++) {
atchan = &atdma->chan[i];
/*-- DMA Engine API --------------------------------------------------*/
 * atc_prep_dma_interleaved - prepare memory to memory interleaved operation
struct at_dma *atdma = to_at_dma(chan->device);
if (unlikely(!xt || xt->numf != 1 || !xt->frame_size))
first = xt->sgl;
__func__, &xt->src_start, &xt->dst_start, xt->numf,
xt->frame_size, flags);
for (i = 0; i < xt->frame_size; i++) {
struct data_chunk *chunk = xt->sgl + i;
if ((chunk->size != xt->sgl->size) ||
len += chunk->size;
dwidth = atc_get_xfer_width(xt->src_start, xt->dst_start, len);
desc->sglen = 1;
atdma_sg = desc->sg;
atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
&atdma_sg->lli_phys);
if (!atdma_sg->lli) {
lli = atdma_sg->lli;
lli->saddr = xt->src_start;
lli->daddr = xt->dst_start;
lli->ctrla = ctrla | xfer_count;
lli->ctrlb = ctrlb;
desc->boundary = first->size >> dwidth;
desc->dst_hole = (dmaengine_get_dst_icg(xt, first) >> dwidth) + 1;
desc->src_hole = (dmaengine_get_src_icg(xt, first) >> dwidth) + 1;
atdma_sg->len = len;
desc->total_len = len;
return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
 * atc_prep_dma_memcpy - prepare a memcpy operation
struct at_dma *atdma = to_at_dma(chan->device);
desc->sglen = sg_len;
struct atdma_sg *atdma_sg = &desc->sg[i];
atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
&atdma_sg->lli_phys);
if (!atdma_sg->lli)
lli = atdma_sg->lli;
xfer_count = min_t(size_t, (len - offset) >> src_width,
lli->saddr = src + offset;
lli->daddr = dest + offset;
lli->ctrla = ctrla | xfer_count;
lli->ctrlb = ctrlb;
desc->sg[i].len = xfer_count << src_width;
desc->total_len = len;
/* set end-of-link to the last link descriptor of the list */
set_lli_eol(desc, i - 1);
return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
atdma_desc_free(&desc->vd);
struct at_dma *atdma = to_at_dma(chan->device);
return -EINVAL;
atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_NOWAIT,
&atdma_sg->lli_phys);
if (!atdma_sg->lli)
return -ENOMEM;
lli = atdma_sg->lli;
lli->saddr = psrc;
lli->daddr = pdst;
lli->ctrla = ctrla | xfer_count;
lli->ctrlb = ctrlb;
atdma_sg->len = len;
 * atc_prep_dma_memset - prepare a memset operation
struct at_dma *atdma = to_at_dma(chan->device);
if (!is_dma_fill_aligned(chan->device, dest, 0, len)) {
vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
/* Only the first byte of value is to be used according to dmaengine */
desc->sglen = 1;
ret = atdma_create_memset_lli(chan, desc->sg, paddr, dest, len);
desc->memset_paddr = paddr;
desc->memset_vaddr = vaddr;
desc->memset_buffer = true;
desc->total_len = len;
/* set end-of-link on the descriptor */
return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
dma_pool_free(atdma->memset_pool, vaddr, paddr);
struct at_dma *atdma = to_at_dma(chan->device);
vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr);
desc->sglen = sg_len;
if (!is_dma_fill_aligned(chan->device, dest, 0, len)) {
ret = atdma_create_memset_lli(chan, &desc->sg[i], paddr, dest,
desc->memset_paddr = paddr;
desc->memset_vaddr = vaddr;
desc->memset_buffer = true;
desc->total_len = total_len;
/* set end-of-link on the descriptor */
set_lli_eol(desc, i - 1);
return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
atdma_desc_free(&desc->vd);
dma_pool_free(atdma->memset_pool, vaddr, paddr);
 * atc_prep_slave_sg - prepare descriptors for a DMA_SLAVE transaction
 * @chan: DMA channel
 * @direction: DMA direction
struct at_dma *atdma = to_at_dma(chan->device);
struct at_dma_slave *atslave = chan->private;
struct dma_slave_config *sconfig = &atchan->dma_sconfig;
desc->sglen = sg_len;
ctrla = FIELD_PREP(ATC_SCSIZE, sconfig->src_maxburst) |
FIELD_PREP(ATC_DCSIZE, sconfig->dst_maxburst);
reg_width = convert_buswidth(sconfig->dst_addr_width);
FIELD_PREP(ATC_SIF, atchan->mem_if) |
FIELD_PREP(ATC_DIF, atchan->per_if);
reg = sconfig->dst_addr;
struct atdma_sg *atdma_sg = &desc->sg[i];
atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
&atdma_sg->lli_phys);
if (!atdma_sg->lli)
lli = atdma_sg->lli;
lli->saddr = mem;
lli->daddr = reg;
lli->ctrla = ctrla |
lli->ctrlb = ctrlb;
atdma_sg->len = len;
desc->sg[i].len = len;
reg_width = convert_buswidth(sconfig->src_addr_width);
FIELD_PREP(ATC_SIF, atchan->per_if) |
FIELD_PREP(ATC_DIF, atchan->mem_if);
reg = sconfig->src_addr;
struct atdma_sg *atdma_sg = &desc->sg[i];
atdma_sg->lli = dma_pool_alloc(atdma->lli_pool,
&atdma_sg->lli_phys);
if (!atdma_sg->lli)
lli = atdma_sg->lli;
lli->saddr = reg;
lli->daddr = mem;
lli->ctrla = ctrla |
lli->ctrlb = ctrlb;
desc->sg[i].len = len;
/* set end-of-link to the last link descriptor of the list */
set_lli_eol(desc, i - 1);
desc->total_len = total_len;
return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
atdma_desc_free(&desc->vd);
 * Check for too big/unaligned periods and unaligned DMA buffer
if (unlikely(period_len & ((1 << reg_width) - 1)))
if (unlikely(buf_addr & ((1 << reg_width) - 1)))
return -EINVAL;
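/*
 * Sketch of the surrounding validation helper, combining the checks
 * above: a period must fit in the BTSIZE field, and both the period
 * length and the buffer address must be aligned to the bus width.
 * ATC_BTSIZE_MAX is an assumption based on this driver's register
 * layout.
 */
static int
atc_dma_cyclic_check_values(unsigned int reg_width, dma_addr_t buf_addr,
			    unsigned int period_len)
{
	if (period_len > (ATC_BTSIZE_MAX << reg_width))
		goto err_out;
	if (unlikely(period_len & ((1 << reg_width) - 1)))
		goto err_out;
	if (unlikely(buf_addr & ((1 << reg_width) - 1)))
		goto err_out;

	return 0;

err_out:
	return -EINVAL;
}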
 * atc_dma_cyclic_fill_desc - Fill one period descriptor
struct at_dma *atdma = to_at_dma(chan->device);
struct dma_slave_config *sconfig = &atchan->dma_sconfig;
struct atdma_sg *atdma_sg = &desc->sg[i];
atdma_sg->lli = dma_pool_alloc(atdma->lli_pool, GFP_ATOMIC,
&atdma_sg->lli_phys);
if (!atdma_sg->lli)
return -ENOMEM;
lli = atdma_sg->lli;
lli->saddr = buf_addr + (period_len * i);
lli->daddr = sconfig->dst_addr;
lli->ctrlb = FIELD_PREP(ATC_DST_ADDR_MODE,
FIELD_PREP(ATC_SIF, atchan->mem_if) |
FIELD_PREP(ATC_DIF, atchan->per_if);
lli->saddr = sconfig->src_addr;
lli->daddr = buf_addr + (period_len * i);
lli->ctrlb = FIELD_PREP(ATC_DST_ADDR_MODE,
FIELD_PREP(ATC_SIF, atchan->per_if) |
FIELD_PREP(ATC_DIF, atchan->mem_if);
return -EINVAL;
lli->ctrla = FIELD_PREP(ATC_SCSIZE, sconfig->src_maxburst) |
FIELD_PREP(ATC_DCSIZE, sconfig->dst_maxburst) |
desc->sg[i].len = period_len;
 * atc_prep_dma_cyclic - prepare the cyclic DMA transfer
 * @chan: the DMA channel to prepare
 * @buf_addr: physical DMA address where the buffer starts
struct at_dma_slave *atslave = chan->private;
struct dma_slave_config *sconfig = &atchan->dma_sconfig;
dev_vdbg(chan2dev(chan), "prep_dma_cyclic: %s buf@%pad - %d (%d/%d)\n",
was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status);
reg_width = convert_buswidth(sconfig->dst_addr_width);
reg_width = convert_buswidth(sconfig->src_addr_width);
/* Check for too big/unaligned periods and unaligned DMA buffer */
desc->sglen = periods;
desc->total_len = buf_len;
desc->sg[i - 1].lli->dscr = desc->sg[0].lli_phys;
return vchan_tx_prep(&atchan->vc, &desc->vd, flags);
atdma_desc_free(&desc->vd);
clear_bit(ATC_IS_CYCLIC, &atchan->status);
if (!chan->private)
return -EINVAL;
memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig));
convert_burst(&atchan->dma_sconfig.src_maxburst);
convert_burst(&atchan->dma_sconfig.dst_maxburst);
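/*
 * Hypothetical client-side view of the conversion above: the raw units
 * a client passes to dmaengine_slave_config() are rewritten into this
 * controller's register encodings in the driver's private copy
 * (atchan->dma_sconfig); e.g. a 16-beat burst is stored as 3 by
 * convert_burst(). fifo_phys and chan are placeholders for the
 * client's FIFO address and channel.
 */
struct dma_slave_config cfg = {
	.direction	= DMA_MEM_TO_DEV,
	.dst_addr	= fifo_phys,	/* hypothetical peripheral FIFO */
	.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
	.dst_maxburst	= 16,		/* stored as 3 after convert_burst() */
};
int ret = dmaengine_slave_config(chan, &cfg);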
struct at_dma *atdma = to_at_dma(chan->device);
int chan_id = atchan->vc.chan.chan_id;
spin_lock_irqsave(&atchan->vc.lock, flags);
set_bit(ATC_IS_PAUSED, &atchan->status);
spin_unlock_irqrestore(&atchan->vc.lock, flags);
struct at_dma *atdma = to_at_dma(chan->device);
int chan_id = atchan->vc.chan.chan_id;
spin_lock_irqsave(&atchan->vc.lock, flags);
clear_bit(ATC_IS_PAUSED, &atchan->status);
spin_unlock_irqrestore(&atchan->vc.lock, flags);
struct at_dma *atdma = to_at_dma(chan->device);
int chan_id = atchan->vc.chan.chan_id;
spin_lock_irqsave(&atchan->vc.lock, flags);
dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask);
while (dma_readl(atdma, CHSR) & atchan->mask)
if (atchan->desc) {
vchan_terminate_vdesc(&atchan->desc->vd);
atchan->desc = NULL;
vchan_get_all_descriptors(&atchan->vc, &list);
clear_bit(ATC_IS_PAUSED, &atchan->status);
clear_bit(ATC_IS_CYCLIC, &atchan->status);
spin_unlock_irqrestore(&atchan->vc.lock, flags);
vchan_dma_desc_free_list(&atchan->vc, &list);
 * atc_tx_status - poll for transaction completion
 * @chan: DMA channel
 * the status of multiple cookies without re-checking hardware state.
spin_lock_irqsave(&atchan->vc.lock, flags);
spin_unlock_irqrestore(&atchan->vc.lock, flags);
spin_lock_irqsave(&atchan->vc.lock, flags);
if (vchan_issue_pending(&atchan->vc) && !atchan->desc) {
spin_unlock_irqrestore(&atchan->vc.lock, flags);
 * atc_alloc_chan_resources - allocate resources for DMA channel
 * return - the number of allocated descriptors
struct at_dma *atdma = to_at_dma(chan->device);
dev_dbg(chan2dev(chan), "DMA channel not idle?\n");
return -EIO;
atslave = chan->private;
 * We need controller-specific data to set up slave
BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_device.dev);
if (atslave->cfg)
cfg = atslave->cfg;
 * atc_free_chan_resources - free all channel resources
 * @chan: DMA channel
atchan->status = 0;
kfree(chan->private);
chan->private = NULL;
if (atslave->dma_dev == chan->device->dev) {
chan->private = atslave;
if (dma_spec->args_count != 2)
dmac_pdev = of_find_device_by_node(dma_spec->np);
put_device(&dmac_pdev->dev);
atslave->cfg = ATC_DST_H2SEL | ATC_SRC_H2SEL;
 * ignored depending on DMA transfer direction.
per_id = dma_spec->args[1] & AT91_DMA_CFG_PER_ID_MASK;
atslave->cfg |= ATC_DST_PER_ID(per_id) | ATC_SRC_PER_ID(per_id);
switch (dma_spec->args[1] & AT91_DMA_CFG_FIFOCFG_MASK) {
atslave->cfg |= FIELD_PREP(ATC_FIFOCFG,
atslave->cfg |= FIELD_PREP(ATC_FIFOCFG,
atslave->cfg |= FIELD_PREP(ATC_FIFOCFG, ATC_FIFOCFG_HALFFIFO);
atslave->dma_dev = &dmac_pdev->dev;
put_device(&dmac_pdev->dev);
atchan->per_if = dma_spec->args[0] & 0xff;
atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff;
/*-- Module Management -----------------------------------------------*/
/* cap_mask is a multi-u32 bitfield, fill it with proper C code. */
.compatible = "atmel,at91sam9rl-dma",
.compatible = "atmel,at91sam9g45-dma",
if (pdev->dev.of_node) {
match = of_match_node(atmel_dma_dt_ids, pdev->dev.of_node);
return match->data;
platform_get_device_id(pdev)->driver_data;
 * at_dma_off - disable DMA controller
dma_writel(atdma, EN, 0);
dma_writel(atdma, EBCIDR, -1L);
while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
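/*
 * Assembled view of the shutdown sequence above (a sketch; ordering
 * follows the fragments): disable the controller, mask every interrupt,
 * then spin until the channel status register reports all channels off.
 */
static void at_dma_off(struct at_dma *atdma)
{
	dma_writel(atdma, EN, 0);

	/* disable all interrupts */
	dma_writel(atdma, EBCIDR, -1L);

	/* confirm that all channels are disabled */
	while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
		cpu_relax();
}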
/* get DMA parameters from controller type */
return -ENODEV;
atdma = devm_kzalloc(&pdev->dev,
struct_size(atdma, chan, plat_dat->nr_channels),
return -ENOMEM;
atdma->regs = devm_platform_ioremap_resource(pdev, 0);
if (IS_ERR(atdma->regs))
return PTR_ERR(atdma->regs);
atdma->dma_device.cap_mask = plat_dat->cap_mask;
atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1;
atdma->clk = devm_clk_get(&pdev->dev, "dma_clk");
if (IS_ERR(atdma->clk))
return PTR_ERR(atdma->clk);
err = clk_prepare_enable(atdma->clk);
/* force dma off, just in case */
atdma->lli_pool = dma_pool_create("at_hdmac_lli_pool",
&pdev->dev, sizeof(struct at_lli),
if (!atdma->lli_pool) {
dev_err(&pdev->dev, "Unable to allocate DMA LLI descriptor pool\n");
err = -ENOMEM;
atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool",
&pdev->dev, sizeof(int), 4, 0);
if (!atdma->memset_pool) {
dev_err(&pdev->dev, "No memory for memset dma pool\n");
err = -ENOMEM;
INIT_LIST_HEAD(&atdma->dma_device.channels);
for (i = 0; i < plat_dat->nr_channels; i++) {
struct at_dma_chan *atchan = &atdma->chan[i];
atchan->mem_if = AT_DMA_MEM_IF;
atchan->per_if = AT_DMA_PER_IF;
atchan->ch_regs = atdma->regs + ch_regs(i);
atchan->mask = 1 << i;
atchan->atdma = atdma;
atchan->vc.desc_free = atdma_desc_free;
vchan_init(&atchan->vc, &atdma->dma_device);
atdma->dma_device.device_alloc_chan_resources = atc_alloc_chan_resources;
atdma->dma_device.device_free_chan_resources = atc_free_chan_resources;
atdma->dma_device.device_tx_status = atc_tx_status;
atdma->dma_device.device_issue_pending = atc_issue_pending;
atdma->dma_device.dev = &pdev->dev;
if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_device.cap_mask))
atdma->dma_device.device_prep_interleaved_dma = atc_prep_dma_interleaved;
if (dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask))
atdma->dma_device.device_prep_dma_memcpy = atc_prep_dma_memcpy;
if (dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask)) {
atdma->dma_device.device_prep_dma_memset = atc_prep_dma_memset;
atdma->dma_device.device_prep_dma_memset_sg = atc_prep_dma_memset_sg;
atdma->dma_device.fill_align = DMAENGINE_ALIGN_4_BYTES;
if (dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask)) {
atdma->dma_device.device_prep_slave_sg = atc_prep_slave_sg;
/* controller can do slave DMA: can trigger cyclic transfers */
dma_cap_set(DMA_CYCLIC, atdma->dma_device.cap_mask);
atdma->dma_device.device_prep_dma_cyclic = atc_prep_dma_cyclic;
atdma->dma_device.device_config = atc_config;
atdma->dma_device.device_pause = atc_pause;
atdma->dma_device.device_resume = atc_resume;
atdma->dma_device.device_terminate_all = atc_terminate_all;
atdma->dma_device.src_addr_widths = ATC_DMA_BUSWIDTHS;
atdma->dma_device.dst_addr_widths = ATC_DMA_BUSWIDTHS;
atdma->dma_device.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
atdma->dma_device.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
dma_writel(atdma, EN, AT_DMA_ENABLE);
dev_info(&pdev->dev, "Atmel AHB DMA Controller ( %s%s%s), %d channels\n",
dma_has_cap(DMA_MEMCPY, atdma->dma_device.cap_mask) ? "cpy " : "",
dma_has_cap(DMA_MEMSET, atdma->dma_device.cap_mask) ? "set " : "",
dma_has_cap(DMA_SLAVE, atdma->dma_device.cap_mask) ? "slave " : "",
plat_dat->nr_channels);
err = dma_async_device_register(&atdma->dma_device);
dev_err(&pdev->dev, "Unable to register: %d.\n", err);
if (pdev->dev.of_node) {
err = of_dma_controller_register(pdev->dev.of_node,
dev_err(&pdev->dev, "could not register of_dma_controller\n");
dma_async_device_unregister(&atdma->dma_device);
dma_pool_destroy(atdma->memset_pool);
dma_pool_destroy(atdma->lli_pool);
clk_disable_unprepare(atdma->clk);
if (pdev->dev.of_node)
of_dma_controller_free(pdev->dev.of_node);
dma_async_device_unregister(&atdma->dma_device);
dma_pool_destroy(atdma->memset_pool);
dma_pool_destroy(atdma->lli_pool);
list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
atc_disable_chan_irq(atdma, chan->chan_id);
list_del(&chan->device_node);
clk_disable_unprepare(atdma->clk);
clk_disable_unprepare(atdma->clk);
list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
return -EAGAIN;
struct dma_chan *chan = &atchan->vc.chan;
atchan->save_dscr = channel_readl(atchan, DSCR);
list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
atchan->save_cfg = channel_readl(atchan, CFG);
atdma->save_imr = dma_readl(atdma, EBCIMR);
/* disable DMA controller */
clk_disable_unprepare(atdma->clk);
struct at_dma *atdma = to_at_dma(atchan->vc.chan.device);
channel_writel(atchan, DSCR, atchan->save_dscr);
dma_writel(atdma, CHER, atchan->mask);
/* bring back DMA controller */
clk_prepare_enable(atdma->clk);
dma_writel(atdma, EN, AT_DMA_ENABLE);
dma_writel(atdma, EBCIER, atdma->save_imr);
list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels,
channel_writel(atchan, CFG, atchan->save_cfg);
MODULE_DESCRIPTION("Atmel AHB DMA Controller driver");