Lines Matching +full:1 +full:_chan

53 #define AT_DMA_SSREQ(x)		BIT((x) << 1)		/* Request a source single transfer on channel x */
54 #define AT_DMA_DSREQ(x) BIT(1 + ((x) << 1)) /* Request a destination single transfer on channel x */
58 #define AT_DMA_SCREQ(x) BIT((x) << 1) /* Request a source chunk transfer on channel x */
59 #define AT_DMA_DCREQ(x) BIT(1 + ((x) << 1)) /* Request a destination chunk transfer on channel x */
63 #define AT_DMA_SLAST(x) BIT((x) << 1) /* This src rq is last tx of buffer on channel x */
64 #define AT_DMA_DLAST(x) BIT(1 + ((x) << 1)) /* This dst rq is last tx of buffer on channel x */
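These request and last-transfer bits share one layout: channel x owns a pair of adjacent bits, the even bit ((x) << 1) for the source side and the odd bit (1 + ((x) << 1)) for the destination side. A minimal sketch of composing such a value; the helper name is illustrative, not part of the driver:

#include <linux/bits.h>
#include <linux/types.h>

/* Illustrative only: build a value that raises both the source and the
 * destination chunk-request bit for one channel. Bits interleave as
 * src = BIT(2 * chan), dst = BIT(2 * chan + 1).
 */
static u32 example_chunk_request(unsigned int chan)
{
	return BIT(chan << 1) | BIT(1 + (chan << 1));
}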
115 #define ATC_DSCR_IF GENMASK(1, 0) /* Dsc fetched via AHB-Lite Interface */
127 #define ATC_SIF GENMASK(1, 0) /* Src tx done via AHB-Lite Interface i */
130 #define AT_DMA_PER_IF 0x1 /* interface 1 as peripheral interface */
253 ATC_IS_PAUSED = 1,
297 * 1 -> 0, 4 -> 1, 8 -> 2, 16 -> 3, 32 -> 4, 64 -> 5, 128 -> 6, 256 -> 7.
303 if (*maxburst > 1) in convert_burst()
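The comment above spells out the burst encoding (1 -> 0, 4 -> 1, ..., 256 -> 7), which for bursts larger than one is simply fls(maxburst) - 2. A sketch of that conversion, assuming the mapping in the comment is complete:

#include <linux/bitops.h>
#include <linux/types.h>

/* Sketch: convert a dmaengine maxburst (1, 4, 8, ..., 256) in place into
 * the controller's 3-bit encoding; a single-beat burst encodes as 0.
 */
static inline void example_convert_burst(u32 *maxburst)
{
	if (*maxburst > 1)
		*maxburst = fls(*maxburst) - 2;
	else
		*maxburst = 0;
}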
311 * 1 byte -> 0, 2 bytes -> 1, 4 bytes -> 2.
317 return 1; in convert_buswidth()
321 /* For 1 byte width or fallback */ in convert_buswidth()
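The bus-width conversion works the same way: 1 byte -> 0, 2 bytes -> 1, 4 bytes -> 2, with anything else falling back to single-byte. A sketch consistent with the fragments listed above (the default branch is assumed):

#include <linux/dmaengine.h>
#include <linux/types.h>

/* Sketch: map a dma_slave_buswidth onto the controller encoding. */
static inline u32 example_convert_buswidth(enum dma_slave_buswidth addr_width)
{
	switch (addr_width) {
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		return 1;
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		return 2;
	default:
		/* For 1 byte width or fallback */
		return 0;
	}
}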
429 atc_setup_irq(atdma, chan_id, 1); in atc_enable_chan_irq()
530 else if (!((src | dst | len) & 1)) in atc_get_xfer_width()
531 width = 1; in atc_get_xfer_width()
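Lines 530-531 pick the widest transfer size the operands allow: words when source, destination and length are all 4-byte aligned, halfwords when only 2-byte aligned, bytes otherwise. A sketch of that selection; the word-aligned branch is assumed from context:

#include <linux/types.h>

/* Sketch: 0 = byte, 1 = halfword, 2 = word. OR-ing the operands lets one
 * mask test the alignment of all three at once.
 */
static u32 example_get_xfer_width(dma_addr_t src, dma_addr_t dst, size_t len)
{
	if (!((src | dst | len) & 3))
		return 2;
	else if (!((src | dst | len) & 1))
		return 1;
	return 0;
}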
543 desc->sg[i - 1].lli->dscr = atdma_sg->lli_phys; in atdma_lli_chain()
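Line 543 is the hardware linked-list chaining step: the previous item's descriptor pointer (dscr) is set to the bus address of the new item, so the controller fetches the next LLI on its own. A minimal illustration with simplified stand-ins for the driver's at_lli/atdma_sg structures:

#include <linux/types.h>

/* Illustrative structures only; the real LLI also carries saddr, daddr,
 * ctrla and ctrlb fields.
 */
struct example_lli {
	u32 dscr;			/* bus address of the next LLI, 0 ends the chain */
};

struct example_sg {
	struct example_lli *lli;	/* CPU view of the LLI */
	dma_addr_t lli_phys;		/* bus address of the same LLI */
};

/* Chain entry i behind entry i - 1. */
static void example_lli_chain(struct example_sg *sg, unsigned int i)
{
	if (i)
		sg[i - 1].lli->dscr = sg[i].lli_phys;
}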
713 for (i = 1; i < desc->sglen; i++) { in atc_get_llis_residue()
874 if (unlikely(!xt || xt->numf != 1 || !xt->frame_size)) in atc_prep_dma_interleaved()
922 desc = kzalloc(struct_size(desc, sg, 1), GFP_ATOMIC); in atc_prep_dma_interleaved()
925 desc->sglen = 1; in atc_prep_dma_interleaved()
942 desc->dst_hole = (dmaengine_get_dst_icg(xt, first) >> dwidth) + 1; in atc_prep_dma_interleaved()
943 desc->src_hole = (dmaengine_get_src_icg(xt, first) >> dwidth) + 1; in atc_prep_dma_interleaved()
1031 set_lli_eol(desc, i - 1); in atc_prep_dma_memcpy()
1124 desc = kzalloc(struct_size(desc, sg, 1), GFP_ATOMIC); in atc_prep_dma_memset()
1127 desc->sglen = 1; in atc_prep_dma_memset()
1218 set_lli_eol(desc, i - 1); in atc_prep_dma_memset_sg()
1378 set_lli_eol(desc, i - 1); in atc_prep_slave_sg()
1401 if (unlikely(period_len & ((1 << reg_width) - 1))) in atc_dma_cyclic_check_values()
1403 if (unlikely(buf_addr & ((1 << reg_width) - 1))) in atc_dma_cyclic_check_values()
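Both checks enforce alignment to the register width: 1 << reg_width is the transfer size in bytes, so masking with (1 << reg_width) - 1 exposes any misaligned low bits in the period length or buffer address. A sketch of the same validation; the function and parameter names are illustrative:

#include <linux/errno.h>
#include <linux/types.h>

/* Sketch: reject a cyclic transfer whose period length or buffer address
 * is not a multiple of the transfer width (reg_width = log2 of the width
 * in bytes).
 */
static int example_cyclic_check(dma_addr_t buf_addr, size_t period_len,
				unsigned int reg_width)
{
	if (period_len & ((1 << reg_width) - 1))
		return -EINVAL;
	if (buf_addr & ((1 << reg_width) - 1))
		return -EINVAL;
	return 0;
}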
1538 desc->sg[i - 1].lli->dscr = desc->sg[0].lli_phys; in atc_prep_dma_cyclic()
1817 per_id = dma_spec->args[1] & AT91_DMA_CFG_PER_ID_MASK; in at_dma_xlate()
1824 switch (dma_spec->args[1] & AT91_DMA_CFG_FIFOCFG_MASK) { in at_dma_xlate()
1921 dma_writel(atdma, EBCIDR, -1L); in at_dma_off()
1966 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1; in at_dma_probe()
2017 atchan->mask = 1 << i; in at_dma_probe()
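Lines 1966 and 2017 set up the masks used later for interrupt and status handling: the controller-wide mask has one bit per implemented channel, and channel i keeps the single bit 1 << i. A brief sketch of that initialization with the structures reduced to the two masks:

#include <linux/types.h>

/* Sketch: nr_channels low bits for the controller, bit i for channel i. */
static void example_init_masks(unsigned int nr_channels, u32 *all_chan_mask,
			       u32 *chan_mask)
{
	unsigned int i;

	*all_chan_mask = (1 << nr_channels) - 1;
	for (i = 0; i < nr_channels; i++)
		chan_mask[i] = 1 << i;
}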
2106 struct dma_chan *chan, *_chan; in at_dma_remove() local
2117 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_remove()
2140 struct dma_chan *chan, *_chan; in at_dma_prepare() local
2142 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_prepare()
2174 struct dma_chan *chan, *_chan; in at_dma_suspend_noirq() local
2177 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_suspend_noirq()
2215 struct dma_chan *chan, *_chan; in at_dma_resume_noirq() local
2227 list_for_each_entry_safe(chan, _chan, &atdma->dma_device.channels, in at_dma_resume_noirq()
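The chan/_chan pairs in the remove, prepare, suspend and resume paths are the standard list_for_each_entry_safe() idiom: _chan caches the next entry so the current channel can be dropped from dma_device.channels while walking the list. A condensed sketch of that pattern, showing only the list walk:

#include <linux/dmaengine.h>
#include <linux/list.h>

/* Sketch: iterate the controller's channels with a lookahead cursor so
 * list_del() on the current entry is safe mid-iteration.
 */
static void example_teardown_channels(struct dma_device *dma_dev)
{
	struct dma_chan *chan, *_chan;

	list_for_each_entry_safe(chan, _chan, &dma_dev->channels,
				 device_node) {
		/* per-channel teardown would go here */
		list_del(&chan->device_node);
	}
}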