Lines matching references to desc in the Renesas R-Car DMA controller driver (drivers/dma/rcar-dmac.c). Each entry shows the source line number, the matching code, and the enclosing function; the trailing tag (member, local, argument) is the cross-referencer's classification of the reference.
183 } desc; member
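
Line 183 closes an anonymous struct member named desc inside the channel structure. Its shape can be reconstructed from the chan->desc.* references throughout this listing; the sketch below is assembled from those references, not copied from the driver, so field order and any omitted members are assumptions.

    /* Per-channel descriptor bookkeeping, reconstructed from the
     * references in this listing. */
    struct rcar_dmac_chan {
        /* ... other channel state, including a spinlock ... */
        struct {
            struct list_head free;          /* unused descriptors */
            struct list_head pending;       /* submitted, not yet issued */
            struct list_head active;        /* issued to the hardware */
            struct list_head done;          /* completed, callback pending */
            struct list_head wait;          /* completed, awaiting ack */
            struct rcar_dmac_desc *running; /* descriptor being executed */

            struct list_head chunks_free;   /* unused transfer chunks */
            struct list_head pages;         /* backing page allocations */
        } desc;
    };
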
379 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_chan_start_xfer() local
380 u32 chcr = desc->chcr; in rcar_dmac_chan_start_xfer()
387 if (desc->hwdescs.use) { in rcar_dmac_chan_start_xfer()
389 list_first_entry(&desc->chunks, in rcar_dmac_chan_start_xfer()
394 chan->index, desc, desc->nchunks, &desc->hwdescs.dma); in rcar_dmac_chan_start_xfer()
402 desc->hwdescs.dma >> 32); in rcar_dmac_chan_start_xfer()
405 (desc->hwdescs.dma & 0xfffffff0) | in rcar_dmac_chan_start_xfer()
408 RCAR_DMACHCRB_DCNT(desc->nchunks - 1) | in rcar_dmac_chan_start_xfer()
434 if (!desc->cyclic) in rcar_dmac_chan_start_xfer()
440 else if (desc->async_tx.callback) in rcar_dmac_chan_start_xfer()
449 struct rcar_dmac_xfer_chunk *chunk = desc->running; in rcar_dmac_chan_start_xfer()
467 chunk->size >> desc->xfer_shift); in rcar_dmac_chan_start_xfer()
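
rcar_dmac_chan_start_xfer programs the channel in one of two modes: with desc->hwdescs.use set it hands the controller a DMA-mapped array of hardware descriptors (lines 387-408), otherwise it programs the single running chunk directly (lines 449-467). A condensed sketch of that split, with interrupt setup and the address register writes elided:

    static void start_xfer_sketch(struct rcar_dmac_chan *chan)
    {
        struct rcar_dmac_desc *desc = chan->desc.running;

        if (desc->hwdescs.use) {
            /* Descriptor mode: point the controller at the coherent
             * hardware-descriptor array and let it walk the chunks. */
            rcar_dmac_chan_write(chan, RCAR_DMAFIXDPBASE,
                                 desc->hwdescs.dma >> 32);
            rcar_dmac_chan_write(chan, RCAR_DMADPBASE,
                                 (desc->hwdescs.dma & 0xfffffff0) |
                                 RCAR_DMADPBASE_SEL);
        } else {
            /* Chunk mode: program only the running chunk (addresses
             * elided); the next chunk is loaded from the transfer-end
             * interrupt. */
            struct rcar_dmac_xfer_chunk *chunk = desc->running;

            rcar_dmac_chan_write(chan, RCAR_DMATCR,
                                 chunk->size >> desc->xfer_shift);
        }

        /* Setting the enable bit starts the transfer. */
        rcar_dmac_chan_write(chan, RCAR_DMACHCR,
                             desc->chcr | RCAR_DMACHCR_DE);
    }
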
501 struct rcar_dmac_desc *desc = to_rcar_dmac_desc(tx); in rcar_dmac_tx_submit() local
510 chan->index, tx->cookie, desc); in rcar_dmac_tx_submit()
512 list_add_tail(&desc->node, &chan->desc.pending); in rcar_dmac_tx_submit()
513 desc->running = list_first_entry(&desc->chunks, in rcar_dmac_tx_submit()
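
rcar_dmac_tx_submit is the standard dmaengine submit hook: assign a cookie, queue the descriptor on the pending list, and point desc->running at the first chunk so the start path knows where to begin. A minimal sketch (dma_cookie_assign comes from drivers/dma/dmaengine.h):

    static dma_cookie_t tx_submit_sketch(struct dma_async_tx_descriptor *tx)
    {
        struct rcar_dmac_desc *desc = to_rcar_dmac_desc(tx);
        struct rcar_dmac_chan *chan = to_rcar_dmac_chan(tx->chan);
        unsigned long flags;
        dma_cookie_t cookie;

        spin_lock_irqsave(&chan->lock, flags);

        cookie = dma_cookie_assign(tx);

        /* Queue for rcar_dmac_issue_pending() and remember the first
         * chunk as the transfer's starting point. */
        list_add_tail(&desc->node, &chan->desc.pending);
        desc->running = list_first_entry(&desc->chunks,
                                         struct rcar_dmac_xfer_chunk, node);

        spin_unlock_irqrestore(&chan->lock, flags);
        return cookie;
    }
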
542 struct rcar_dmac_desc *desc = &page->descs[i]; in rcar_dmac_desc_alloc() local
544 dma_async_tx_descriptor_init(&desc->async_tx, &chan->chan); in rcar_dmac_desc_alloc()
545 desc->async_tx.tx_submit = rcar_dmac_tx_submit; in rcar_dmac_desc_alloc()
546 INIT_LIST_HEAD(&desc->chunks); in rcar_dmac_desc_alloc()
548 list_add_tail(&desc->node, &list); in rcar_dmac_desc_alloc()
552 list_splice_tail(&list, &chan->desc.free); in rcar_dmac_desc_alloc()
553 list_add_tail(&page->node, &chan->desc.pages); in rcar_dmac_desc_alloc()
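
Descriptors are not allocated one at a time: rcar_dmac_desc_alloc grabs a zeroed page, initializes every descriptor in it, and splices the whole batch onto desc.free, while the page itself is tracked on desc.pages so it can be released at channel teardown (line 1111). A sketch of the batching pattern, with error handling trimmed:

    static int desc_alloc_sketch(struct rcar_dmac_chan *chan, gfp_t gfp)
    {
        struct rcar_dmac_desc_page *page;
        unsigned int i;
        LIST_HEAD(list);

        page = (void *)get_zeroed_page(gfp);
        if (!page)
            return -ENOMEM;

        for (i = 0; i < RCAR_DMAC_DESCS_PER_PAGE; ++i) {
            struct rcar_dmac_desc *desc = &page->descs[i];

            dma_async_tx_descriptor_init(&desc->async_tx, &chan->chan);
            desc->async_tx.tx_submit = rcar_dmac_tx_submit;
            INIT_LIST_HEAD(&desc->chunks);
            list_add_tail(&desc->node, &list);
        }

        spin_lock_irq(&chan->lock);
        list_splice_tail(&list, &chan->desc.free);
        list_add_tail(&page->node, &chan->desc.pages);
        spin_unlock_irq(&chan->lock);

        return 0;
    }
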
572 struct rcar_dmac_desc *desc) in rcar_dmac_desc_put() argument
577 list_splice_tail_init(&desc->chunks, &chan->desc.chunks_free); in rcar_dmac_desc_put()
578 list_add(&desc->node, &chan->desc.free); in rcar_dmac_desc_put()
584 struct rcar_dmac_desc *desc, *_desc; in rcar_dmac_desc_recycle_acked() local
595 list_splice_init(&chan->desc.wait, &list); in rcar_dmac_desc_recycle_acked()
598 list_for_each_entry_safe(desc, _desc, &list, node) { in rcar_dmac_desc_recycle_acked()
599 if (async_tx_test_ack(&desc->async_tx)) { in rcar_dmac_desc_recycle_acked()
600 list_del(&desc->node); in rcar_dmac_desc_recycle_acked()
601 rcar_dmac_desc_put(chan, desc); in rcar_dmac_desc_recycle_acked()
610 list_splice(&list, &chan->desc.wait); in rcar_dmac_desc_recycle_acked()
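
rcar_dmac_desc_put (lines 572-578) returns a descriptor's chunks to chunks_free and the descriptor itself to free. rcar_dmac_desc_recycle_acked applies that to every descriptor on desc.wait that the client has acked; splicing the wait list onto a local list first keeps the lock from being held across the ack tests. Sketch:

    static void recycle_acked_sketch(struct rcar_dmac_chan *chan)
    {
        struct rcar_dmac_desc *desc, *_desc;
        LIST_HEAD(list);

        /* Detach the whole wait list under the lock, then inspect the
         * entries without it. */
        spin_lock_irq(&chan->lock);
        list_splice_init(&chan->desc.wait, &list);
        spin_unlock_irq(&chan->lock);

        list_for_each_entry_safe(desc, _desc, &list, node) {
            if (async_tx_test_ack(&desc->async_tx)) {
                list_del(&desc->node);
                rcar_dmac_desc_put(chan, desc); /* back to desc.free */
            }
        }

        /* Descriptors not acked yet go back on the wait list. */
        spin_lock_irq(&chan->lock);
        list_splice(&list, &chan->desc.wait);
        spin_unlock_irq(&chan->lock);
    }
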
625 struct rcar_dmac_desc *desc; in rcar_dmac_desc_get() local
634 while (list_empty(&chan->desc.free)) { in rcar_dmac_desc_get()
648 desc = list_first_entry(&chan->desc.free, struct rcar_dmac_desc, node); in rcar_dmac_desc_get()
649 list_del(&desc->node); in rcar_dmac_desc_get()
653 return desc; in rcar_dmac_desc_get()
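
rcar_dmac_desc_get pops a descriptor off desc.free, refilling the pool with a fresh page whenever the list runs empty (the real function also recycles acked descriptors first). Together with desc_put this forms a lock-protected free-list allocator that never releases memory on the hot path; the transfer-chunk pool below (lines 679-718) follows exactly the same alloc/get pattern. Sketch of the refill loop:

    static struct rcar_dmac_desc *desc_get_sketch(struct rcar_dmac_chan *chan)
    {
        struct rcar_dmac_desc *desc;
        int ret;

        spin_lock_irq(&chan->lock);

        while (list_empty(&chan->desc.free)) {
            /* Refill with the lock dropped; GFP_NOWAIT because this
             * can run from a non-sleepable prep path. */
            spin_unlock_irq(&chan->lock);
            ret = rcar_dmac_desc_alloc(chan, GFP_NOWAIT);
            if (ret < 0)
                return NULL;
            spin_lock_irq(&chan->lock);
        }

        desc = list_first_entry(&chan->desc.free, struct rcar_dmac_desc,
                                node);
        list_del(&desc->node);

        spin_unlock_irq(&chan->lock);
        return desc;
    }
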
679 list_splice_tail(&list, &chan->desc.chunks_free); in rcar_dmac_xfer_chunk_alloc()
680 list_add_tail(&page->node, &chan->desc.pages); in rcar_dmac_xfer_chunk_alloc()
704 while (list_empty(&chan->desc.chunks_free)) { in rcar_dmac_xfer_chunk_get()
718 chunk = list_first_entry(&chan->desc.chunks_free, in rcar_dmac_xfer_chunk_get()
728 struct rcar_dmac_desc *desc, size_t size) in rcar_dmac_realloc_hwdesc() argument
738 if (desc->hwdescs.size == size) in rcar_dmac_realloc_hwdesc()
741 if (desc->hwdescs.mem) { in rcar_dmac_realloc_hwdesc()
742 dma_free_coherent(chan->chan.device->dev, desc->hwdescs.size, in rcar_dmac_realloc_hwdesc()
743 desc->hwdescs.mem, desc->hwdescs.dma); in rcar_dmac_realloc_hwdesc()
744 desc->hwdescs.mem = NULL; in rcar_dmac_realloc_hwdesc()
745 desc->hwdescs.size = 0; in rcar_dmac_realloc_hwdesc()
751 desc->hwdescs.mem = dma_alloc_coherent(chan->chan.device->dev, size, in rcar_dmac_realloc_hwdesc()
752 &desc->hwdescs.dma, GFP_NOWAIT); in rcar_dmac_realloc_hwdesc()
753 if (!desc->hwdescs.mem) in rcar_dmac_realloc_hwdesc()
756 desc->hwdescs.size = size; in rcar_dmac_realloc_hwdesc()
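
Hardware descriptors live in DMA-coherent memory sized to the chunk count. rcar_dmac_realloc_hwdesc reallocates only when the size actually changes, and a size of 0 acts as a plain free (used from rcar_dmac_free_chan_resources at line 1109). Sketch:

    static void realloc_hwdesc_sketch(struct rcar_dmac_chan *chan,
                                      struct rcar_dmac_desc *desc,
                                      size_t size)
    {
        /* Fast path: the buffer is already the right size. */
        if (desc->hwdescs.size == size)
            return;

        if (desc->hwdescs.mem) {
            dma_free_coherent(chan->chan.device->dev, desc->hwdescs.size,
                              desc->hwdescs.mem, desc->hwdescs.dma);
            desc->hwdescs.mem = NULL;
            desc->hwdescs.size = 0;
        }

        if (!size)
            return; /* size 0 means free only */

        desc->hwdescs.mem = dma_alloc_coherent(chan->chan.device->dev,
                                               size, &desc->hwdescs.dma,
                                               GFP_NOWAIT);
        if (!desc->hwdescs.mem)
            return; /* caller falls back to chunk mode */

        desc->hwdescs.size = size;
    }
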
760 struct rcar_dmac_desc *desc) in rcar_dmac_fill_hwdesc() argument
765 rcar_dmac_realloc_hwdesc(chan, desc, desc->nchunks * sizeof(*hwdesc)); in rcar_dmac_fill_hwdesc()
767 hwdesc = desc->hwdescs.mem; in rcar_dmac_fill_hwdesc()
771 list_for_each_entry(chunk, &desc->chunks, node) { in rcar_dmac_fill_hwdesc()
774 hwdesc->tcr = chunk->size >> desc->xfer_shift; in rcar_dmac_fill_hwdesc()
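
rcar_dmac_fill_hwdesc then serializes desc->chunks into that buffer, one hardware descriptor per chunk, with the transfer count expressed in units of 1 << xfer_shift (line 774). In the sketch below the sar/dar fields are assumptions based on the usual source/destination layout; only the tcr computation appears in this listing.

    static int fill_hwdesc_sketch(struct rcar_dmac_chan *chan,
                                  struct rcar_dmac_desc *desc)
    {
        struct rcar_dmac_hw_desc *hwdesc;
        struct rcar_dmac_xfer_chunk *chunk;

        rcar_dmac_realloc_hwdesc(chan, desc, desc->nchunks * sizeof(*hwdesc));

        hwdesc = desc->hwdescs.mem;
        if (!hwdesc)
            return -ENOMEM;

        list_for_each_entry(chunk, &desc->chunks, node) {
            hwdesc->sar = chunk->src_addr;  /* assumed field names */
            hwdesc->dar = chunk->dst_addr;
            hwdesc->tcr = chunk->size >> desc->xfer_shift;
            hwdesc++;
        }

        return 0;
    }
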
827 struct rcar_dmac_desc *desc, *_desc; in rcar_dmac_chan_reinit() local
834 list_splice_init(&chan->desc.pending, &descs); in rcar_dmac_chan_reinit()
835 list_splice_init(&chan->desc.active, &descs); in rcar_dmac_chan_reinit()
836 list_splice_init(&chan->desc.done, &descs); in rcar_dmac_chan_reinit()
837 list_splice_init(&chan->desc.wait, &descs); in rcar_dmac_chan_reinit()
839 chan->desc.running = NULL; in rcar_dmac_chan_reinit()
843 list_for_each_entry_safe(desc, _desc, &descs, node) { in rcar_dmac_chan_reinit()
844 list_del(&desc->node); in rcar_dmac_chan_reinit()
845 rcar_dmac_desc_put(chan, desc); in rcar_dmac_chan_reinit()
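
On error or terminate, rcar_dmac_chan_reinit gathers every descriptor, whatever its state, onto a temporary list under the lock, clears running, and recycles them outside the critical section. Sketch:

    static void chan_reinit_sketch(struct rcar_dmac_chan *chan)
    {
        struct rcar_dmac_desc *desc, *_desc;
        unsigned long flags;
        LIST_HEAD(descs);

        spin_lock_irqsave(&chan->lock, flags);

        /* Every descriptor goes back to the pool, whatever list it
         * was on. */
        list_splice_init(&chan->desc.pending, &descs);
        list_splice_init(&chan->desc.active, &descs);
        list_splice_init(&chan->desc.done, &descs);
        list_splice_init(&chan->desc.wait, &descs);

        chan->desc.running = NULL;

        spin_unlock_irqrestore(&chan->lock, flags);

        list_for_each_entry_safe(desc, _desc, &descs, node) {
            list_del(&desc->node);
            rcar_dmac_desc_put(chan, desc);
        }
    }
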
880 struct rcar_dmac_desc *desc) in rcar_dmac_chan_configure_desc() argument
892 switch (desc->direction) { in rcar_dmac_chan_configure_desc()
913 desc->xfer_shift = ilog2(xfer_size); in rcar_dmac_chan_configure_desc()
914 desc->chcr = chcr | chcr_ts[desc->xfer_shift]; in rcar_dmac_chan_configure_desc()
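
rcar_dmac_chan_configure_desc derives the hardware transfer size from the direction-specific slave configuration, then caches both the shift used to convert byte counts to transfer counts (see lines 467, 774 and 1417) and the matching CHCR TS bits. A worked example of the arithmetic:

    /* With a 4-byte slave transfer size:
     *   desc->xfer_shift = ilog2(4) = 2;
     * so a 4096-byte chunk programs TCR = 4096 >> 2 = 1024 transfers,
     * and CHCR picks up the 4-byte TS bit pattern from chcr_ts[2]. */
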
934 struct rcar_dmac_desc *desc; in rcar_dmac_chan_prep_sg() local
946 desc = rcar_dmac_desc_get(chan); in rcar_dmac_chan_prep_sg()
947 if (!desc) in rcar_dmac_chan_prep_sg()
950 desc->async_tx.flags = dma_flags; in rcar_dmac_chan_prep_sg()
951 desc->async_tx.cookie = -EBUSY; in rcar_dmac_chan_prep_sg()
953 desc->cyclic = cyclic; in rcar_dmac_chan_prep_sg()
954 desc->direction = dir; in rcar_dmac_chan_prep_sg()
956 rcar_dmac_chan_configure_desc(chan, desc); in rcar_dmac_chan_prep_sg()
958 max_chunk_size = RCAR_DMATCR_MASK << desc->xfer_shift; in rcar_dmac_chan_prep_sg()
1000 rcar_dmac_desc_put(chan, desc); in rcar_dmac_chan_prep_sg()
1016 chan->index, chunk, desc, i, sg, size, len, in rcar_dmac_chan_prep_sg()
1025 list_add_tail(&chunk->node, &desc->chunks); in rcar_dmac_chan_prep_sg()
1030 desc->nchunks = nchunks; in rcar_dmac_chan_prep_sg()
1031 desc->size = full_size; in rcar_dmac_chan_prep_sg()
1041 desc->hwdescs.use = !cross_boundary && nchunks > 1; in rcar_dmac_chan_prep_sg()
1042 if (desc->hwdescs.use) { in rcar_dmac_chan_prep_sg()
1043 if (rcar_dmac_fill_hwdesc(chan, desc) < 0) in rcar_dmac_chan_prep_sg()
1044 desc->hwdescs.use = false; in rcar_dmac_chan_prep_sg()
1047 return &desc->async_tx; in rcar_dmac_chan_prep_sg()
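
rcar_dmac_chan_prep_sg is the central construction path: take a descriptor from the pool, configure it, slice the scatterlist into chunks no larger than the transfer count register allows, and decide whether hardware-descriptor mode can be used. A heavily reduced skeleton follows; the scatterlist walk, boundary checks and most error handling are elided, and nchunks/full_size/cross_boundary stand in for values that walk would compute:

    static struct dma_async_tx_descriptor *
    prep_sg_sketch(struct rcar_dmac_chan *chan,
                   enum dma_transfer_direction dir,
                   unsigned long dma_flags, bool cyclic)
    {
        unsigned int nchunks = 0;      /* counted by the elided sg walk */
        size_t full_size = 0;          /* summed by the elided sg walk */
        bool cross_boundary = false;   /* set by the elided sg walk */
        size_t max_chunk_size;
        struct rcar_dmac_desc *desc;

        desc = rcar_dmac_desc_get(chan);
        if (!desc)
            return NULL;

        desc->async_tx.flags = dma_flags;
        desc->async_tx.cookie = -EBUSY;
        desc->cyclic = cyclic;
        desc->direction = dir;

        rcar_dmac_chan_configure_desc(chan, desc);

        /* No chunk may exceed what fits in the transfer count register. */
        max_chunk_size = RCAR_DMATCR_MASK << desc->xfer_shift;

        /* ... split each sg entry into chunks <= max_chunk_size and
         * list_add_tail(&chunk->node, &desc->chunks) ... */

        desc->nchunks = nchunks;
        desc->size = full_size;

        /* Descriptor mode needs more than one chunk and all addresses
         * within one 4 GiB region, since the high address bits are
         * programmed once in fixed registers; if building the array
         * fails, fall back to chunk-by-chunk mode. */
        desc->hwdescs.use = !cross_boundary && nchunks > 1;
        if (desc->hwdescs.use) {
            if (rcar_dmac_fill_hwdesc(chan, desc) < 0)
                desc->hwdescs.use = false;
        }

        return &desc->async_tx;
    }
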
1059 INIT_LIST_HEAD(&rchan->desc.chunks_free); in rcar_dmac_alloc_chan_resources()
1060 INIT_LIST_HEAD(&rchan->desc.pages); in rcar_dmac_alloc_chan_resources()
1080 struct rcar_dmac_desc *desc; in rcar_dmac_free_chan_resources() local
1100 list_splice_init(&rchan->desc.free, &list); in rcar_dmac_free_chan_resources()
1101 list_splice_init(&rchan->desc.pending, &list); in rcar_dmac_free_chan_resources()
1102 list_splice_init(&rchan->desc.active, &list); in rcar_dmac_free_chan_resources()
1103 list_splice_init(&rchan->desc.done, &list); in rcar_dmac_free_chan_resources()
1104 list_splice_init(&rchan->desc.wait, &list); in rcar_dmac_free_chan_resources()
1106 rchan->desc.running = NULL; in rcar_dmac_free_chan_resources()
1108 list_for_each_entry(desc, &list, node) in rcar_dmac_free_chan_resources()
1109 rcar_dmac_realloc_hwdesc(rchan, desc, 0); in rcar_dmac_free_chan_resources()
1111 list_for_each_entry_safe(page, _page, &rchan->desc.pages, node) { in rcar_dmac_free_chan_resources()
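
Teardown inverts all of the above: every list is drained into one local list, running is cleared, each descriptor's coherent buffer is released by calling realloc_hwdesc with size 0 (line 1109), and the backing pages are freed last. Sketch of the tail, where the free_page() call is an assumption consistent with get_zeroed_page() in the allocator:

    static void free_resources_tail_sketch(struct rcar_dmac_chan *rchan,
                                           struct list_head *list)
    {
        struct rcar_dmac_desc_page *page, *_page;
        struct rcar_dmac_desc *desc;

        /* Release each descriptor's coherent hardware-descriptor
         * buffer... */
        list_for_each_entry(desc, list, node)
            rcar_dmac_realloc_hwdesc(rchan, desc, 0);

        /* ...then the pages backing descriptors and chunks. */
        list_for_each_entry_safe(page, _page, &rchan->desc.pages, node) {
            list_del(&page->node);
            free_page((unsigned long)page);
        }
    }
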
1229 struct dma_async_tx_descriptor *desc; in rcar_dmac_prep_dma_cyclic() local
1272 desc = rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, rchan->map.addr, in rcar_dmac_prep_dma_cyclic()
1276 return desc; in rcar_dmac_prep_dma_cyclic()
1318 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_chan_get_residue() local
1328 if (!desc) in rcar_dmac_chan_get_residue()
1351 if (cookie != desc->async_tx.cookie) { in rcar_dmac_chan_get_residue()
1352 list_for_each_entry(desc, &chan->desc.done, node) { in rcar_dmac_chan_get_residue()
1353 if (cookie == desc->async_tx.cookie) in rcar_dmac_chan_get_residue()
1356 list_for_each_entry(desc, &chan->desc.pending, node) { in rcar_dmac_chan_get_residue()
1357 if (cookie == desc->async_tx.cookie) in rcar_dmac_chan_get_residue()
1358 return desc->size; in rcar_dmac_chan_get_residue()
1360 list_for_each_entry(desc, &chan->desc.active, node) { in rcar_dmac_chan_get_residue()
1361 if (cookie == desc->async_tx.cookie) in rcar_dmac_chan_get_residue()
1362 return desc->size; in rcar_dmac_chan_get_residue()
1398 if (desc->hwdescs.use) { in rcar_dmac_chan_get_residue()
1401 dptr = desc->nchunks; in rcar_dmac_chan_get_residue()
1403 WARN_ON(dptr >= desc->nchunks); in rcar_dmac_chan_get_residue()
1405 running = desc->running; in rcar_dmac_chan_get_residue()
1409 list_for_each_entry_reverse(chunk, &desc->chunks, node) { in rcar_dmac_chan_get_residue()
1410 if (chunk == running || ++dptr == desc->nchunks) in rcar_dmac_chan_get_residue()
1417 residue += tcrb << desc->xfer_shift; in rcar_dmac_chan_get_residue()
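
Residue reporting splits into two cases. A cookie that is not the running descriptor is looked up by list: found on done means fully transferred, while pending or active means nothing transferred yet, so the full desc->size is returned (lines 1351-1362). For the running descriptor, the currently executing chunk is identified (from the hardware descriptor pointer in descriptor mode, from desc->running otherwise), every chunk not yet started contributes its full size, and the hardware's remaining transfer count (tcrb) is converted back to bytes. A sketch of that summation, with the running/dptr/tcrb inputs passed in for clarity:

    static unsigned int residue_sketch(struct rcar_dmac_desc *desc,
                                       struct rcar_dmac_xfer_chunk *running,
                                       unsigned int dptr, u32 tcrb)
    {
        struct rcar_dmac_xfer_chunk *chunk;
        unsigned int residue = 0;

        /* Walk backwards: every chunk after the running one has not
         * started, so its whole size is still outstanding. */
        list_for_each_entry_reverse(chunk, &desc->chunks, node) {
            if (chunk == running || ++dptr == desc->nchunks)
                break;
            residue += chunk->size;
        }

        /* Plus what the hardware reports left in the running chunk,
         * converted from transfer units back to bytes. */
        return residue + (tcrb << desc->xfer_shift);
    }
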
1438 cyclic = rchan->desc.running ? rchan->desc.running->cyclic : false; in rcar_dmac_tx_status()
1457 if (list_empty(&rchan->desc.pending)) in rcar_dmac_issue_pending()
1461 list_splice_tail_init(&rchan->desc.pending, &rchan->desc.active); in rcar_dmac_issue_pending()
1467 if (!rchan->desc.running) { in rcar_dmac_issue_pending()
1468 struct rcar_dmac_desc *desc; in rcar_dmac_issue_pending() local
1470 desc = list_first_entry(&rchan->desc.active, in rcar_dmac_issue_pending()
1472 rchan->desc.running = desc; in rcar_dmac_issue_pending()
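
rcar_dmac_issue_pending moves everything submitted so far from pending to active in one splice and only kicks the hardware when no descriptor is running; otherwise the transfer-end interrupt chains into the next active descriptor (lines 1563-1571). Sketch:

    static void issue_pending_sketch(struct rcar_dmac_chan *rchan)
    {
        unsigned long flags;

        spin_lock_irqsave(&rchan->lock, flags);

        if (list_empty(&rchan->desc.pending))
            goto done;

        /* Everything submitted becomes active in one go. */
        list_splice_tail_init(&rchan->desc.pending, &rchan->desc.active);

        /* Only start the hardware if it is idle. */
        if (!rchan->desc.running) {
            struct rcar_dmac_desc *desc;

            desc = list_first_entry(&rchan->desc.active,
                                    struct rcar_dmac_desc, node);
            rchan->desc.running = desc;
            rcar_dmac_chan_start_xfer(rchan);
        }

    done:
        spin_unlock_irqrestore(&rchan->lock, flags);
    }
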
1494 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_isr_desc_stage_end() local
1497 if (WARN_ON(!desc || !desc->cyclic)) { in rcar_dmac_isr_desc_stage_end()
1516 struct rcar_dmac_desc *desc = chan->desc.running; in rcar_dmac_isr_transfer_end() local
1519 if (WARN_ON_ONCE(!desc)) { in rcar_dmac_isr_transfer_end()
1533 if (!desc->hwdescs.use) { in rcar_dmac_isr_transfer_end()
1539 if (!list_is_last(&desc->running->node, &desc->chunks)) { in rcar_dmac_isr_transfer_end()
1540 desc->running = list_next_entry(desc->running, node); in rcar_dmac_isr_transfer_end()
1541 if (!desc->cyclic) in rcar_dmac_isr_transfer_end()
1550 if (desc->cyclic) { in rcar_dmac_isr_transfer_end()
1551 desc->running = in rcar_dmac_isr_transfer_end()
1552 list_first_entry(&desc->chunks, in rcar_dmac_isr_transfer_end()
1560 list_move_tail(&desc->node, &chan->desc.done); in rcar_dmac_isr_transfer_end()
1563 if (!list_empty(&chan->desc.active)) in rcar_dmac_isr_transfer_end()
1564 chan->desc.running = list_first_entry(&chan->desc.active, in rcar_dmac_isr_transfer_end()
1568 chan->desc.running = NULL; in rcar_dmac_isr_transfer_end()
1571 if (chan->desc.running) in rcar_dmac_isr_transfer_end()
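
The transfer-end interrupt drives the state machine. In chunk mode it advances desc->running and restarts the channel for each remaining chunk, wrapping back to the first chunk forever when the descriptor is cyclic; in descriptor mode the hardware walks (and loops over) the chunks itself. Only a fully transferred, non-cyclic descriptor moves to done. Condensed sketch of the control flow, assuming the channel lock is held as in the real handler (the WARN_ON checks and return value are elided):

    static void transfer_end_sketch(struct rcar_dmac_chan *chan)
    {
        struct rcar_dmac_desc *desc = chan->desc.running;

        if (!desc->hwdescs.use) {
            if (!list_is_last(&desc->running->node, &desc->chunks)) {
                /* More chunks left: advance and restart. */
                desc->running = list_next_entry(desc->running, node);
                goto restart;
            }

            if (desc->cyclic) {
                /* Cyclic descriptors wrap instead of completing. */
                desc->running = list_first_entry(&desc->chunks,
                                struct rcar_dmac_xfer_chunk, node);
                goto restart;
            }
        }

        /* Fully transferred: queue for the threaded handler and pick
         * the next active descriptor, if any. */
        list_move_tail(&desc->node, &chan->desc.done);
        chan->desc.running = list_empty(&chan->desc.active) ? NULL :
                list_first_entry(&chan->desc.active,
                                 struct rcar_dmac_desc, node);

    restart:
        if (chan->desc.running)
            rcar_dmac_chan_start_xfer(chan);
    }
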
1630 struct rcar_dmac_desc *desc; in rcar_dmac_isr_channel_thread() local
1636 if (chan->desc.running && chan->desc.running->cyclic) { in rcar_dmac_isr_channel_thread()
1637 desc = chan->desc.running; in rcar_dmac_isr_channel_thread()
1638 dmaengine_desc_get_callback(&desc->async_tx, &cb); in rcar_dmac_isr_channel_thread()
1651 while (!list_empty(&chan->desc.done)) { in rcar_dmac_isr_channel_thread()
1652 desc = list_first_entry(&chan->desc.done, struct rcar_dmac_desc, in rcar_dmac_isr_channel_thread()
1654 dma_cookie_complete(&desc->async_tx); in rcar_dmac_isr_channel_thread()
1655 list_del(&desc->node); in rcar_dmac_isr_channel_thread()
1657 dmaengine_desc_get_callback(&desc->async_tx, &cb); in rcar_dmac_isr_channel_thread()
1669 list_add_tail(&desc->node, &chan->desc.wait); in rcar_dmac_isr_channel_thread()
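
Completion callbacks run in the threaded half of the handler: a running cyclic descriptor fires its period callback on every interrupt (lines 1636-1638), and each descriptor on done has its cookie completed and its callback invoked before being parked on wait until the client acks it, at which point recycle_acked returns it to the pool. Sketch of the done-list drain, with the lock dropped around each callback as in the real code:

    static void drain_done_sketch(struct rcar_dmac_chan *chan)
    {
        struct dmaengine_desc_callback cb;
        struct rcar_dmac_desc *desc;

        while (!list_empty(&chan->desc.done)) {
            desc = list_first_entry(&chan->desc.done,
                                    struct rcar_dmac_desc, node);
            dma_cookie_complete(&desc->async_tx);
            list_del(&desc->node);

            dmaengine_desc_get_callback(&desc->async_tx, &cb);
            if (dmaengine_desc_callback_valid(&cb)) {
                spin_unlock_irq(&chan->lock);
                dmaengine_desc_callback_invoke(&cb, NULL);
                spin_lock_irq(&chan->lock);
            }

            /* Park until the client acks the descriptor. */
            list_add_tail(&desc->node, &chan->desc.wait);
        }
    }
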
1774 INIT_LIST_HEAD(&rchan->desc.free); in rcar_dmac_chan_probe()
1775 INIT_LIST_HEAD(&rchan->desc.pending); in rcar_dmac_chan_probe()
1776 INIT_LIST_HEAD(&rchan->desc.active); in rcar_dmac_chan_probe()
1777 INIT_LIST_HEAD(&rchan->desc.done); in rcar_dmac_chan_probe()
1778 INIT_LIST_HEAD(&rchan->desc.wait); in rcar_dmac_chan_probe()
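
Probe initializes the five descriptor-state lists; chunks_free and pages follow in rcar_dmac_alloc_chan_resources (lines 1059-1060), since they are only needed once a client opens the channel. Taken together, the lists implement this lifecycle (a summary drawn from the functions above, not a comment in the driver):

    /*
     * free --(desc_get)--> prep_sg --(tx_submit)--> pending
     * pending --(issue_pending)--> active --(start_xfer)--> running
     * running --(transfer-end ISR)--> done --(callback)--> wait
     * wait --(client ack + recycle_acked)--> free
     */
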