Lines Matching refs:dir

221 enum dma_transfer_direction dir; member
274 enum dma_transfer_direction dir; member
474 if (uc->config.dir == DMA_MEM_TO_DEV || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
483 if (uc->config.dir == DMA_DEV_TO_MEM || uc->config.dir == DMA_MEM_TO_MEM) { in udma_dump_chan_stdata()
623 switch (uc->config.dir) { in udma_is_chan_paused()
657 switch (uc->config.dir) { in udma_push_to_ring()
683 if (uc->config.dir != DMA_DEV_TO_MEM) in udma_desc_is_rx_flush()
697 switch (uc->config.dir) { in udma_pop_from_ring()
731 switch (uc->config.dir) { in udma_reset_rings()
764 if (uc->desc->dir == DMA_DEV_TO_MEM) { in udma_decrement_byte_counters()
814 switch (uc->config.dir) { in udma_reset_chan()
852 if (uc->config.dir == DMA_DEV_TO_MEM) in udma_reset_chan()
868 (uc->cyclic || ucc->dir == DMA_DEV_TO_MEM)) { in udma_start_desc()
922 switch (uc->desc->dir) { in udma_start()
1010 switch (uc->config.dir) { in udma_stop()
1070 uc->config.dir != DMA_MEM_TO_DEV || !(uc->config.tx_flags & DMA_PREP_INTERRUPT)) in udma_is_desc_really_done()
1764 if (uc->config.dir == DMA_MEM_TO_MEM) in udma_alloc_rx_resources()
2187 if (uc->config.pkt_mode || uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
2190 if (uc->config.dir == DMA_MEM_TO_MEM) { in udma_alloc_chan_resources()
2219 switch (uc->config.dir) { in udma_alloc_chan_resources()
2289 __func__, uc->id, uc->config.dir); in udma_alloc_chan_resources()
2335 if (is_slave_direction(uc->config.dir) && !uc->config.pkt_mode) { in udma_alloc_chan_resources()
2400 switch (uc->config.dir) { in bcdma_alloc_chan_resources()
2458 __func__, uc->id, uc->config.dir); in bcdma_alloc_chan_resources()
2477 if (uc->config.dir == DMA_MEM_TO_MEM && !uc->config.tr_trigger_type) { in bcdma_alloc_chan_resources()
2494 } else if (uc->config.dir != DMA_MEM_TO_MEM) { in bcdma_alloc_chan_resources()
2524 if (is_slave_direction(uc->config.dir)) { in bcdma_alloc_chan_resources()
2609 switch (uc->config.dir) { in pktdma_alloc_chan_resources()
2651 __func__, uc->id, uc->config.dir); in pktdma_alloc_chan_resources()
2756 enum dma_transfer_direction dir) in udma_alloc_tr_desc() argument
2817 if (dir == DMA_DEV_TO_MEM) in udma_alloc_tr_desc()
2877 unsigned int sglen, enum dma_transfer_direction dir, in udma_prep_slave_sg_tr() argument
2900 d = udma_alloc_tr_desc(uc, tr_size, num_tr, dir); in udma_prep_slave_sg_tr()
2962 enum dma_transfer_direction dir, in udma_prep_slave_sg_triggered_tr() argument
2979 if (dir == DMA_DEV_TO_MEM) { in udma_prep_slave_sg_triggered_tr()
2984 } else if (dir == DMA_MEM_TO_DEV) { in udma_prep_slave_sg_triggered_tr()
3031 d = udma_alloc_tr_desc(uc, tr_size, num_tr, dir); in udma_prep_slave_sg_triggered_tr()
3069 if (dir == DMA_DEV_TO_MEM) { in udma_prep_slave_sg_triggered_tr()
3116 if (dir == DMA_DEV_TO_MEM) { in udma_prep_slave_sg_triggered_tr()
3202 if (uc->config.dir == DMA_DEV_TO_MEM && in udma_configure_statictr()
3214 unsigned int sglen, enum dma_transfer_direction dir, in udma_prep_slave_sg_pkt() argument
3231 if (dir == DMA_DEV_TO_MEM) in udma_prep_slave_sg_pkt()
3284 dir == DMA_MEM_TO_DEV) in udma_prep_slave_sg_pkt()
3322 if (d->dir == DMA_MEM_TO_DEV) in udma_attach_metadata()
3399 unsigned int sglen, enum dma_transfer_direction dir, in udma_prep_slave_sg() argument
3407 if (dir != uc->config.dir && in udma_prep_slave_sg()
3408 (uc->config.dir == DMA_MEM_TO_MEM && !uc->config.tr_trigger_type)) { in udma_prep_slave_sg()
3412 dmaengine_get_direction_text(uc->config.dir), in udma_prep_slave_sg()
3413 dmaengine_get_direction_text(dir)); in udma_prep_slave_sg()
3417 if (dir == DMA_DEV_TO_MEM) { in udma_prep_slave_sg()
3420 } else if (dir == DMA_MEM_TO_DEV) { in udma_prep_slave_sg()
3434 d = udma_prep_slave_sg_pkt(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
3436 else if (is_slave_direction(uc->config.dir)) in udma_prep_slave_sg()
3437 d = udma_prep_slave_sg_tr(uc, sgl, sglen, dir, tx_flags, in udma_prep_slave_sg()
3440 d = udma_prep_slave_sg_triggered_tr(uc, sgl, sglen, dir, in udma_prep_slave_sg()
3446 d->dir = dir; in udma_prep_slave_sg()
3470 enum dma_transfer_direction dir, unsigned long flags) in udma_prep_dma_cyclic_tr() argument
3490 d = udma_alloc_tr_desc(uc, tr_size, periods * num_tr, dir); in udma_prep_dma_cyclic_tr()
3540 enum dma_transfer_direction dir, unsigned long flags) in udma_prep_dma_cyclic_pkt() argument
3560 if (dir == DMA_DEV_TO_MEM) in udma_prep_dma_cyclic_pkt()
3607 size_t period_len, enum dma_transfer_direction dir, in udma_prep_dma_cyclic() argument
3615 if (dir != uc->config.dir) { in udma_prep_dma_cyclic()
3619 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_cyclic()
3620 dmaengine_get_direction_text(dir)); in udma_prep_dma_cyclic()
3626 if (dir == DMA_DEV_TO_MEM) { in udma_prep_dma_cyclic()
3629 } else if (dir == DMA_MEM_TO_DEV) { in udma_prep_dma_cyclic()
3642 dir, flags); in udma_prep_dma_cyclic()
3645 dir, flags); in udma_prep_dma_cyclic()
3652 d->dir = dir; in udma_prep_dma_cyclic()
3684 if (uc->config.dir != DMA_MEM_TO_MEM) { in udma_prep_dma_memcpy()
3688 dmaengine_get_direction_text(uc->config.dir), in udma_prep_dma_memcpy()
3705 d->dir = DMA_MEM_TO_MEM; in udma_prep_dma_memcpy()
3812 if (uc->desc->dir == DMA_MEM_TO_DEV) { in udma_tx_status()
3822 } else if (uc->desc->dir == DMA_DEV_TO_MEM) { in udma_tx_status()
3841 if (!residue && (uc->config.dir == DMA_DEV_TO_MEM || !delay)) { in udma_tx_status()
3863 switch (uc->config.dir) { in udma_pause()
3891 switch (uc->config.dir) { in udma_resume()
4136 ucc->dir = DMA_MEM_TO_MEM; in udma_dma_filter_fn()
4139 ucc->dir = DMA_MEM_TO_DEV; in udma_dma_filter_fn()
4141 ucc->dir = DMA_DEV_TO_MEM; in udma_dma_filter_fn()
4148 ucc->dir = DMA_MEM_TO_MEM; in udma_dma_filter_fn()
4160 ucc->dir = DMA_MEM_TO_MEM; in udma_dma_filter_fn()
4201 ucc->remote_thread_id, dmaengine_get_direction_text(ucc->dir)); in udma_dma_filter_fn()
5220 dmaengine_get_direction_text(uc->config.dir)); in udma_dbg_summary_show_chan()
5222 switch (uc->config.dir) { in udma_dbg_summary_show_chan()
5531 uc->config.dir = DMA_MEM_TO_MEM; in udma_probe()
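
The recurring pattern in the prep callbacks above (e.g. udma_prep_slave_sg() at 3407 and udma_prep_dma_cyclic() at 3615) is a direction sanity check: the callback refuses to build a descriptor whose dma_transfer_direction disagrees with the direction the channel was configured for. A minimal sketch of that check, assuming a hypothetical cut-down my_chan in place of the driver's udma_chan:

#include <linux/device.h>
#include <linux/dmaengine.h>

/* Hypothetical, cut-down channel state; the real driver keeps the
 * configured direction in uc->config.dir. */
struct my_chan {
	struct device *dev;
	int id;
	enum dma_transfer_direction cfg_dir;
};

/*
 * Sketch of the check at the top of udma_prep_dma_cyclic() (3615 above):
 * reject a request whose direction does not match the channel setup.
 */
static bool my_chan_dir_matches(struct my_chan *uc,
				enum dma_transfer_direction dir)
{
	if (dir == uc->cfg_dir)
		return true;

	dev_err(uc->dev, "%s: chan%d is for %s, not for %s\n",
		__func__, uc->id,
		dmaengine_get_direction_text(uc->cfg_dir),
		dmaengine_get_direction_text(dir));
	return false;
}

(udma_prep_slave_sg() only rejects a mismatch when the channel was configured for plain, non-triggered DMA_MEM_TO_MEM, per the tr_trigger_type test at 3408.)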