Lines Matching refs:desc (references to the DMA descriptor pointer desc in the Cirrus EP93xx DMA engine driver, drivers/dma/ep93xx_dma.c)
241 struct ep93xx_dma_desc *desc) in ep93xx_dma_set_active() argument
245 list_add_tail(&desc->node, &edmac->active); in ep93xx_dma_set_active()
248 while (!list_empty(&desc->tx_list)) { in ep93xx_dma_set_active()
249 struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list, in ep93xx_dma_set_active()
258 d->txd.callback = desc->txd.callback; in ep93xx_dma_set_active()
259 d->txd.callback_param = desc->txd.callback_param; in ep93xx_dma_set_active()
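
The hits at lines 241-259 are the core of ep93xx_dma_set_active(): the head descriptor is appended to the channel's active list and the chained descriptors on its tx_list are flattened in behind it, each one inheriting the head's callback so completion can be reported from any element of the chain. A minimal reconstruction around the listed fragments; the final list_move_tail() is an assumption, everything else follows the listing:

static void ep93xx_dma_set_active(struct ep93xx_dma_chan *edmac,
                                  struct ep93xx_dma_desc *desc)
{
        /* Listed: the head goes to the tail of the active list. */
        list_add_tail(&desc->node, &edmac->active);

        /* Listed: drain the chained descriptors behind the head. */
        while (!list_empty(&desc->tx_list)) {
                struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list,
                                        struct ep93xx_dma_desc, node);

                /*
                 * Listed: every chained descriptor carries the head's
                 * callback, so a completion never has to walk back to
                 * the head of the chain.
                 */
                d->txd.callback = desc->txd.callback;
                d->txd.callback_param = desc->txd.callback_param;

                /* Assumed: move it in behind the head on the active list. */
                list_move_tail(&d->node, &edmac->active);
        }
}
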
287 struct ep93xx_dma_desc *desc; in ep93xx_dma_advance_active() local
294 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_advance_active()
295 if (!desc) in ep93xx_dma_advance_active()
302 return !desc->txd.cookie; in ep93xx_dma_advance_active()
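
ep93xx_dma_advance_active() (lines 287-302) decides whether the hardware should simply be re-armed with the next chunk of the same transaction. The listed return statement relies on the fact that only the first descriptor of a chain is given a cookie at submit time; m2m_hw_interrupt() applies the same test at line 621. A sketch of how the listed lines fit together (the bool return type and the retirement of the finished head are assumptions):

static bool ep93xx_dma_advance_active(struct ep93xx_dma_chan *edmac)
{
        struct ep93xx_dma_desc *desc;

        /* Assumed: the finished head has already been retired/rotated here. */

        desc = ep93xx_dma_get_active(edmac);
        if (!desc)
                return false;

        /*
         * Listed: only the first descriptor of a transaction received a
         * cookie at submit time.  A zero cookie therefore means "still
         * inside the same chain, keep feeding the hardware"; a non-zero
         * cookie means the whole transaction has been consumed.
         */
        return !desc->txd.cookie;
}
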
365 struct ep93xx_dma_desc *desc; in m2p_fill_desc() local
368 desc = ep93xx_dma_get_active(edmac); in m2p_fill_desc()
369 if (!desc) { in m2p_fill_desc()
375 bus_addr = desc->src_addr; in m2p_fill_desc()
377 bus_addr = desc->dst_addr; in m2p_fill_desc()
380 writel(desc->size, edmac->regs + M2P_MAXCNT0); in m2p_fill_desc()
383 writel(desc->size, edmac->regs + M2P_MAXCNT1); in m2p_fill_desc()
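
m2p_fill_desc() (lines 365-383) programs one descriptor into the memory-to-peripheral engine. The listed lines show the two interesting points: the memory-side bus address is taken from src_addr or dst_addr depending on the transfer direction, and the size is written to either MAXCNT0 or MAXCNT1 because the engine is double buffered. A sketch of the surrounding logic; ep93xx_dma_chan_direction(), edmac->buffer and the M2P_BASE0/M2P_BASE1 writes are assumptions that do not appear in the listing:

static void m2p_fill_desc(struct ep93xx_dma_chan *edmac)
{
        struct ep93xx_dma_desc *desc;
        u32 bus_addr;

        desc = ep93xx_dma_get_active(edmac);
        if (!desc) {
                /* Assumed: nothing active, nothing to program. */
                return;
        }

        /* Listed: the memory-side address depends on the direction. */
        if (ep93xx_dma_chan_direction(&edmac->chan) == DMA_MEM_TO_DEV)
                bus_addr = desc->src_addr;
        else
                bus_addr = desc->dst_addr;

        /*
         * Listed/assumed: the M2P engine is double buffered.  Bank 0
         * (MAXCNT0/BASE0) and bank 1 (MAXCNT1/BASE1) are programmed in
         * turn so the next chunk can be staged while the current one runs.
         */
        if (edmac->buffer == 0) {
                writel(desc->size, edmac->regs + M2P_MAXCNT0);
                writel(bus_addr, edmac->regs + M2P_BASE0);
        } else {
                writel(desc->size, edmac->regs + M2P_MAXCNT1);
                writel(bus_addr, edmac->regs + M2P_BASE1);
        }

        edmac->buffer ^= 1;
}
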
411 struct ep93xx_dma_desc *desc = ep93xx_dma_get_active(edmac); in m2p_hw_interrupt() local
430 desc->txd.cookie, desc->src_addr, desc->dst_addr, in m2p_hw_interrupt()
431 desc->size); in m2p_hw_interrupt()
528 struct ep93xx_dma_desc *desc; in m2m_fill_desc() local
530 desc = ep93xx_dma_get_active(edmac); in m2m_fill_desc()
531 if (!desc) { in m2m_fill_desc()
537 writel(desc->src_addr, edmac->regs + M2M_SAR_BASE0); in m2m_fill_desc()
538 writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE0); in m2m_fill_desc()
539 writel(desc->size, edmac->regs + M2M_BCR0); in m2m_fill_desc()
541 writel(desc->src_addr, edmac->regs + M2M_SAR_BASE1); in m2m_fill_desc()
542 writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE1); in m2m_fill_desc()
543 writel(desc->size, edmac->regs + M2M_BCR1); in m2m_fill_desc()
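
m2m_fill_desc() (lines 528-543) mirrors the same double-buffered scheme for the memory-to-memory engine, except that each bank takes a full source/destination/byte-count triplet. A sketch of the assumed bank selection around the listed writel() calls (edmac->buffer is not in the listing):

        if (edmac->buffer == 0) {
                writel(desc->src_addr, edmac->regs + M2M_SAR_BASE0);
                writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE0);
                writel(desc->size, edmac->regs + M2M_BCR0);
        } else {
                writel(desc->src_addr, edmac->regs + M2M_SAR_BASE1);
                writel(desc->dst_addr, edmac->regs + M2M_DAR_BASE1);
                writel(desc->size, edmac->regs + M2M_BCR1);
        }
        edmac->buffer ^= 1;
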
605 struct ep93xx_dma_desc *desc; in m2m_hw_interrupt() local
620 desc = ep93xx_dma_get_active(edmac); in m2m_hw_interrupt()
621 last_done = !desc || desc->txd.cookie; in m2m_hw_interrupt()
679 struct ep93xx_dma_desc *desc, *_desc; in ep93xx_dma_desc_get() local
684 list_for_each_entry_safe(desc, _desc, &edmac->free_list, node) { in ep93xx_dma_desc_get()
685 if (async_tx_test_ack(&desc->txd)) { in ep93xx_dma_desc_get()
686 list_del_init(&desc->node); in ep93xx_dma_desc_get()
689 desc->src_addr = 0; in ep93xx_dma_desc_get()
690 desc->dst_addr = 0; in ep93xx_dma_desc_get()
691 desc->size = 0; in ep93xx_dma_desc_get()
692 desc->complete = false; in ep93xx_dma_desc_get()
693 desc->txd.cookie = 0; in ep93xx_dma_desc_get()
694 desc->txd.callback = NULL; in ep93xx_dma_desc_get()
695 desc->txd.callback_param = NULL; in ep93xx_dma_desc_get()
697 ret = desc; in ep93xx_dma_desc_get()
706 struct ep93xx_dma_desc *desc) in ep93xx_dma_desc_put() argument
708 if (desc) { in ep93xx_dma_desc_put()
712 list_splice_init(&desc->tx_list, &edmac->free_list); in ep93xx_dma_desc_put()
713 list_add(&desc->node, &edmac->free_list); in ep93xx_dma_desc_put()
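
Lines 679-713 are the channel's descriptor pool. ep93xx_dma_desc_get() scans the free list for a descriptor the client has already ACKed, resets its per-transfer state and hands it out; ep93xx_dma_desc_put() splices the descriptor's tx_list chain and then the descriptor itself back onto the free list, so returning the head of a chain recycles the whole chain. A reconstruction built almost entirely from the listed lines; the edmac->lock spinlock and the break after a hit are assumptions:

static struct ep93xx_dma_desc *
ep93xx_dma_desc_get(struct ep93xx_dma_chan *edmac)
{
        struct ep93xx_dma_desc *desc, *_desc;
        struct ep93xx_dma_desc *ret = NULL;
        unsigned long flags;

        spin_lock_irqsave(&edmac->lock, flags);         /* assumed lock */
        list_for_each_entry_safe(desc, _desc, &edmac->free_list, node) {
                /* Listed: skip descriptors the client has not ACKed yet. */
                if (async_tx_test_ack(&desc->txd)) {
                        list_del_init(&desc->node);

                        /* Listed: wipe per-transfer state before reuse. */
                        desc->src_addr = 0;
                        desc->dst_addr = 0;
                        desc->size = 0;
                        desc->complete = false;
                        desc->txd.cookie = 0;
                        desc->txd.callback = NULL;
                        desc->txd.callback_param = NULL;

                        ret = desc;
                        break;                          /* assumed */
                }
        }
        spin_unlock_irqrestore(&edmac->lock, flags);
        return ret;
}

static void ep93xx_dma_desc_put(struct ep93xx_dma_chan *edmac,
                                struct ep93xx_dma_desc *desc)
{
        if (desc) {
                unsigned long flags;

                spin_lock_irqsave(&edmac->lock, flags); /* assumed lock */
                /* Listed: the whole chain goes back, then the head itself. */
                list_splice_init(&desc->tx_list, &edmac->free_list);
                list_add(&desc->node, &edmac->free_list);
                spin_unlock_irqrestore(&edmac->lock, flags);
        }
}
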
751 struct ep93xx_dma_desc *desc, *d; in ep93xx_dma_tasklet() local
762 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_tasklet()
763 if (desc) { in ep93xx_dma_tasklet()
764 if (desc->complete) { in ep93xx_dma_tasklet()
767 dma_cookie_complete(&desc->txd); in ep93xx_dma_tasklet()
770 dmaengine_desc_get_callback(&desc->txd, &cb); in ep93xx_dma_tasklet()
778 list_for_each_entry_safe(desc, d, &list, node) { in ep93xx_dma_tasklet()
779 dma_descriptor_unmap(&desc->txd); in ep93xx_dma_tasklet()
780 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_tasklet()
789 struct ep93xx_dma_desc *desc; in ep93xx_dma_interrupt() local
794 desc = ep93xx_dma_get_active(edmac); in ep93xx_dma_interrupt()
795 if (!desc) { in ep93xx_dma_interrupt()
804 desc->complete = true; in ep93xx_dma_interrupt()
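
Completion is split in two. The interrupt handler fragments (lines 789-804) only look up the active descriptor, bail out if there is none, and mark it with desc->complete = true before deferring to the tasklet. The tasklet fragments (lines 751-780) then complete the cookie, capture the client callback under the lock, and unmap and recycle the finished descriptors outside it. A sketch of the tasklet half; the prototype, the locking and the list_splice_init() of the active list are assumptions, while the dmaengine calls are the ones shown in the listing:

static void ep93xx_dma_tasklet(unsigned long data)
{
        struct ep93xx_dma_chan *edmac = (struct ep93xx_dma_chan *)data;
        struct ep93xx_dma_desc *desc, *d;
        struct dmaengine_desc_callback cb;
        LIST_HEAD(list);

        memset(&cb, 0, sizeof(cb));

        spin_lock_irq(&edmac->lock);                    /* assumed lock */
        desc = ep93xx_dma_get_active(edmac);
        if (desc) {
                if (desc->complete) {
                        /* Listed: mark the cookie done for status queries. */
                        dma_cookie_complete(&desc->txd);
                        /* Assumed: detach the finished chain for recycling. */
                        list_splice_init(&edmac->active, &list);
                }
                /* Listed: capture the callback while still under the lock. */
                dmaengine_desc_get_callback(&desc->txd, &cb);
        }
        spin_unlock_irq(&edmac->lock);

        /* Assumed: re-arm the hardware with the next queued transaction here. */

        /* Listed: unmap and recycle finished descriptors outside the lock. */
        list_for_each_entry_safe(desc, d, &list, node) {
                dma_descriptor_unmap(&desc->txd);
                ep93xx_dma_desc_put(edmac, desc);
        }

        /* Assumed: finally invoke the captured client callback. */
        dmaengine_desc_callback_invoke(&cb, NULL);
}
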
834 struct ep93xx_dma_desc *desc; in ep93xx_dma_tx_submit() local
841 desc = container_of(tx, struct ep93xx_dma_desc, txd); in ep93xx_dma_tx_submit()
849 ep93xx_dma_set_active(edmac, desc); in ep93xx_dma_tx_submit()
852 list_add_tail(&desc->node, &edmac->queue); in ep93xx_dma_tx_submit()
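
ep93xx_dma_tx_submit() (lines 834-852) is the tx_submit hook installed on every descriptor: it assigns the cookie and either activates the descriptor immediately (idle channel) or appends it to the software queue for later. A sketch under those assumptions; to_ep93xx_dma_chan(), the edmac->lock spinlock and the hardware kick are names and steps not present in the listing:

static dma_cookie_t ep93xx_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
        struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(tx->chan); /* assumed */
        struct ep93xx_dma_desc *desc;
        dma_cookie_t cookie;
        unsigned long flags;

        spin_lock_irqsave(&edmac->lock, flags);         /* assumed lock */
        cookie = dma_cookie_assign(tx);

        /* Listed: recover the driver descriptor from the dmaengine one. */
        desc = container_of(tx, struct ep93xx_dma_desc, txd);

        if (list_empty(&edmac->active)) {
                /* Listed: idle channel, take the descriptor directly. */
                ep93xx_dma_set_active(edmac, desc);
                /* Assumed: kick the hardware to start on this descriptor. */
        } else {
                /* Listed: busy channel, queue it for later. */
                list_add_tail(&desc->node, &edmac->queue);
        }

        spin_unlock_irqrestore(&edmac->lock, flags);
        return cookie;
}
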
917 struct ep93xx_dma_desc *desc; in ep93xx_dma_alloc_chan_resources() local
919 desc = kzalloc(sizeof(*desc), GFP_KERNEL); in ep93xx_dma_alloc_chan_resources()
920 if (!desc) { in ep93xx_dma_alloc_chan_resources()
925 INIT_LIST_HEAD(&desc->tx_list); in ep93xx_dma_alloc_chan_resources()
927 dma_async_tx_descriptor_init(&desc->txd, chan); in ep93xx_dma_alloc_chan_resources()
928 desc->txd.flags = DMA_CTRL_ACK; in ep93xx_dma_alloc_chan_resources()
929 desc->txd.tx_submit = ep93xx_dma_tx_submit; in ep93xx_dma_alloc_chan_resources()
931 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_alloc_chan_resources()
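
ep93xx_dma_alloc_chan_resources() (lines 917-931) preallocates the pool: each descriptor is zero-allocated, its tx_list initialised, its dmaengine descriptor bound to the channel with DMA_CTRL_ACK preset (so async_tx_test_ack() accepts it straight away) and tx_submit wired up, then parked on the free list via ep93xx_dma_desc_put(). The matching free path (lines 954-970) simply kfree()s them. A sketch of the allocation loop; the loop bound NUM_DESCRIPTORS is hypothetical:

        /* Inside ep93xx_dma_alloc_chan_resources(); 'chan' is the dmaengine
         * channel, 'edmac' its driver wrapper (assumed from context). */
        for (i = 0; i < NUM_DESCRIPTORS; i++) {
                struct ep93xx_dma_desc *desc;

                desc = kzalloc(sizeof(*desc), GFP_KERNEL);
                if (!desc)
                        break;          /* keep whatever was allocated */

                INIT_LIST_HEAD(&desc->tx_list);

                dma_async_tx_descriptor_init(&desc->txd, chan);
                desc->txd.flags = DMA_CTRL_ACK;      /* ACKed: reusable at once */
                desc->txd.tx_submit = ep93xx_dma_tx_submit;

                ep93xx_dma_desc_put(edmac, desc);    /* park it on the free list */
        }
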
954 struct ep93xx_dma_desc *desc, *d; in ep93xx_dma_free_chan_resources() local
969 list_for_each_entry_safe(desc, d, &list, node) in ep93xx_dma_free_chan_resources()
970 kfree(desc); in ep93xx_dma_free_chan_resources()
991 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_dma_memcpy() local
996 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_dma_memcpy()
997 if (!desc) { in ep93xx_dma_prep_dma_memcpy()
1004 desc->src_addr = src + offset; in ep93xx_dma_prep_dma_memcpy()
1005 desc->dst_addr = dest + offset; in ep93xx_dma_prep_dma_memcpy()
1006 desc->size = bytes; in ep93xx_dma_prep_dma_memcpy()
1009 first = desc; in ep93xx_dma_prep_dma_memcpy()
1011 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_dma_memcpy()
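
ep93xx_dma_prep_dma_memcpy() (lines 991-1011) splits a copy into chunks no larger than one hardware descriptor can carry: the first chunk becomes the head of the transaction and every further chunk is linked onto first->tx_list, exactly the chain that ep93xx_dma_set_active() later flattens. A sketch with the standard dmaengine prototype; to_ep93xx_dma_chan(), the DMA_MAX_CHAN_BYTES limit name and the failure path are assumptions:

static struct dma_async_tx_descriptor *
ep93xx_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest,
                           dma_addr_t src, size_t len, unsigned long flags)
{
        struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan); /* assumed */
        struct ep93xx_dma_desc *desc, *first = NULL;
        size_t bytes, offset;

        for (offset = 0; offset < len; offset += bytes) {
                desc = ep93xx_dma_desc_get(edmac);
                if (!desc)
                        goto fail;

                /* Assumed limit name: one hardware descriptor per chunk. */
                bytes = min_t(size_t, len - offset, DMA_MAX_CHAN_BYTES);

                desc->src_addr = src + offset;          /* listed */
                desc->dst_addr = dest + offset;         /* listed */
                desc->size = bytes;                     /* listed */

                if (!first)
                        first = desc;                   /* head of the transaction */
                else
                        list_add_tail(&desc->node, &first->tx_list);
        }

        first->txd.flags = flags;                       /* assumed */
        return &first->txd;

fail:
        ep93xx_dma_desc_put(edmac, first);      /* put() releases the whole chain */
        return NULL;
}
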
1040 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_slave_sg() local
1068 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_slave_sg()
1069 if (!desc) { in ep93xx_dma_prep_slave_sg()
1075 desc->src_addr = sg_dma_address(sg); in ep93xx_dma_prep_slave_sg()
1076 desc->dst_addr = edmac->runtime_addr; in ep93xx_dma_prep_slave_sg()
1078 desc->src_addr = edmac->runtime_addr; in ep93xx_dma_prep_slave_sg()
1079 desc->dst_addr = sg_dma_address(sg); in ep93xx_dma_prep_slave_sg()
1081 desc->size = len; in ep93xx_dma_prep_slave_sg()
1084 first = desc; in ep93xx_dma_prep_slave_sg()
1086 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_slave_sg()
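
ep93xx_dma_prep_slave_sg() (lines 1040-1086) uses the same head-plus-tx_list chaining, one descriptor per scatterlist entry: the memory side comes from sg_dma_address() and the device side is always edmac->runtime_addr, the FIFO address set through the slave configuration. A sketch of the per-entry body; the surrounding for_each_sg() loop, the dir parameter and the cleanup label are assumptions:

        /* Assumed context: for_each_sg(sgl, sg, sg_len, i) { ... } */
        size_t len = sg_dma_len(sg);

        desc = ep93xx_dma_desc_get(edmac);
        if (!desc)
                goto fail;                      /* assumed: same cleanup as memcpy */

        if (dir == DMA_MEM_TO_DEV) {
                desc->src_addr = sg_dma_address(sg);    /* listed: memory -> FIFO */
                desc->dst_addr = edmac->runtime_addr;
        } else {
                desc->src_addr = edmac->runtime_addr;   /* listed: FIFO -> memory */
                desc->dst_addr = sg_dma_address(sg);
        }
        desc->size = len;

        if (!first)
                first = desc;
        else
                list_add_tail(&desc->node, &first->tx_list);
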
1122 struct ep93xx_dma_desc *desc, *first; in ep93xx_dma_prep_dma_cyclic() local
1148 desc = ep93xx_dma_desc_get(edmac); in ep93xx_dma_prep_dma_cyclic()
1149 if (!desc) { in ep93xx_dma_prep_dma_cyclic()
1155 desc->src_addr = dma_addr + offset; in ep93xx_dma_prep_dma_cyclic()
1156 desc->dst_addr = edmac->runtime_addr; in ep93xx_dma_prep_dma_cyclic()
1158 desc->src_addr = edmac->runtime_addr; in ep93xx_dma_prep_dma_cyclic()
1159 desc->dst_addr = dma_addr + offset; in ep93xx_dma_prep_dma_cyclic()
1162 desc->size = period_len; in ep93xx_dma_prep_dma_cyclic()
1165 first = desc; in ep93xx_dma_prep_dma_cyclic()
1167 list_add_tail(&desc->node, &first->tx_list); in ep93xx_dma_prep_dma_cyclic()
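
ep93xx_dma_prep_dma_cyclic() (lines 1122-1167) builds an analogous ring, one descriptor per period stepping through the DMA buffer, which is why ep93xx_dma_set_active() copies the callback onto every chained descriptor: the client then hears about each elapsed period. A hypothetical consumer of this path using only generic dmaengine calls (audio_period_elapsed and substream are made-up names):

        struct dma_async_tx_descriptor *txd;

        txd = dmaengine_prep_dma_cyclic(chan, buf_dma, buf_len, period_len,
                                        DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
        if (!txd)
                return -ENOMEM;                 /* driver ran out of descriptors */

        txd->callback = audio_period_elapsed;   /* hypothetical: once per period */
        txd->callback_param = substream;        /* hypothetical context pointer */

        dmaengine_submit(txd);
        dma_async_issue_pending(chan);
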
1209 struct ep93xx_dma_desc *desc, *_d; in ep93xx_dma_terminate_all() local
1226 list_for_each_entry_safe(desc, _d, &list, node) in ep93xx_dma_terminate_all()
1227 ep93xx_dma_desc_put(edmac, desc); in ep93xx_dma_terminate_all()
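
ep93xx_dma_terminate_all() (lines 1209-1227) tears a channel down by moving everything, active and queued, onto a private list and handing each descriptor back to the pool, where ep93xx_dma_desc_put() also reclaims any chained parts. A sketch under those assumptions; the channel helper, the lock and the hardware shutdown step are not in the listing:

static int ep93xx_dma_terminate_all(struct dma_chan *chan)
{
        struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(chan); /* assumed */
        struct ep93xx_dma_desc *desc, *_d;
        unsigned long flags;
        LIST_HEAD(list);

        spin_lock_irqsave(&edmac->lock, flags);         /* assumed lock */
        /* Assumed: stop the hardware before touching the lists. */
        list_splice_init(&edmac->active, &list);
        list_splice_init(&edmac->queue, &list);
        spin_unlock_irqrestore(&edmac->lock, flags);

        /* Listed: everything goes back to the free pool, chains included. */
        list_for_each_entry_safe(desc, _d, &list, node)
                ep93xx_dma_desc_put(edmac, desc);

        return 0;
}
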