Lines Matching refs:desc

65 struct hsu_dma_desc *desc = hsuc->desc; in hsu_dma_chan_start() local
85 count = desc->nents - desc->active; in hsu_dma_chan_start()
87 hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr); in hsu_dma_chan_start()
88 hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len); in hsu_dma_chan_start()
94 desc->active++; in hsu_dma_chan_start()
123 hsuc->desc = NULL; in hsu_dma_start_transfer()
128 hsuc->desc = to_hsu_dma_desc(vdesc); in hsu_dma_start_transfer()
214 struct hsu_dma_desc *desc; in hsu_dma_do_irq() local
225 desc = hsuc->desc; in hsu_dma_do_irq()
226 if (desc) { in hsu_dma_do_irq()
228 desc->status = DMA_ERROR; in hsu_dma_do_irq()
229 } else if (desc->active < desc->nents) { in hsu_dma_do_irq()
232 vchan_cookie_complete(&desc->vdesc); in hsu_dma_do_irq()
233 desc->status = DMA_COMPLETE; in hsu_dma_do_irq()
234 stat->bytes_transferred += desc->length; in hsu_dma_do_irq()
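The hsu_dma_do_irq() hits above show the per-descriptor completion pattern: a hardware error marks the descriptor DMA_ERROR, remaining unprogrammed sg entries cause the channel to be reprogrammed, and otherwise the cookie is completed and the next descriptor is started. Below is a minimal sketch of that pattern built on the virt-dma helper visible in the listing; the error test and the reprogram/start helpers are driver internals not shown here, so they appear only as a hypothetical hw_error flag and comments, and the sketch_* types are illustrative stand-ins rather than the driver's real structures.

#include <linux/dmaengine.h>
#include "virt-dma.h"   /* drivers/dma/virt-dma.h: struct virt_dma_desc, vchan_cookie_complete() */

/* Hypothetical minimal descriptor/channel types, for illustration only. */
struct sketch_desc {
    struct virt_dma_desc vdesc;
    enum dma_status status;
    unsigned int nents;     /* total sg entries in the descriptor */
    unsigned int active;    /* entries already handed to the hardware */
};

struct sketch_chan {
    struct virt_dma_chan vchan;
    struct sketch_desc *desc;   /* descriptor currently on the hardware */
};

/* Called with the channel lock held, as vchan_cookie_complete() expects. */
static void sketch_handle_desc_irq(struct sketch_chan *c, bool hw_error)
{
    struct sketch_desc *desc = c->desc;

    if (!desc)
        return;

    if (hw_error) {
        /* Keep the descriptor; the failure is reported via tx_status. */
        desc->status = DMA_ERROR;
    } else if (desc->active < desc->nents) {
        /* Unprogrammed sg entries remain: reprogram the channel (elided). */
    } else {
        /* Whole scatterlist done: complete the cookie, pick the next one. */
        vchan_cookie_complete(&desc->vdesc);
        desc->status = DMA_COMPLETE;
        /* ...then start the next queued descriptor (elided). */
    }
}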
246 struct hsu_dma_desc *desc; in hsu_dma_alloc_desc() local
248 desc = kzalloc(sizeof(*desc), GFP_NOWAIT); in hsu_dma_alloc_desc()
249 if (!desc) in hsu_dma_alloc_desc()
252 desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT); in hsu_dma_alloc_desc()
253 if (!desc->sg) { in hsu_dma_alloc_desc()
254 kfree(desc); in hsu_dma_alloc_desc()
258 return desc; in hsu_dma_alloc_desc()
263 struct hsu_dma_desc *desc = to_hsu_dma_desc(vdesc); in hsu_dma_desc_free() local
265 kfree(desc->sg); in hsu_dma_desc_free()
266 kfree(desc); in hsu_dma_desc_free()
275 struct hsu_dma_desc *desc; in hsu_dma_prep_slave_sg() local
279 desc = hsu_dma_alloc_desc(sg_len); in hsu_dma_prep_slave_sg()
280 if (!desc) in hsu_dma_prep_slave_sg()
284 desc->sg[i].addr = sg_dma_address(sg); in hsu_dma_prep_slave_sg()
285 desc->sg[i].len = sg_dma_len(sg); in hsu_dma_prep_slave_sg()
287 desc->length += sg_dma_len(sg); in hsu_dma_prep_slave_sg()
290 desc->nents = sg_len; in hsu_dma_prep_slave_sg()
291 desc->direction = direction; in hsu_dma_prep_slave_sg()
293 desc->status = DMA_IN_PROGRESS; in hsu_dma_prep_slave_sg()
295 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
304 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
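hsu_dma_prep_slave_sg() and hsu_dma_issue_pending() are reached through the generic dmaengine slave API rather than called directly. A minimal consumer-side sketch follows, assuming the channel was requested and the scatterlist DMA-mapped elsewhere; the direction, flags and error codes are illustrative choices, not taken from the match list.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/*
 * Prepare, submit and kick a slave scatter-gather transfer on an already
 * requested channel. "chan", "sgl" and "sg_len" are assumed to be set up
 * by the caller (dma_request_chan(), dma_map_sg()).
 */
static int sketch_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
                           unsigned int sg_len)
{
    struct dma_async_tx_descriptor *tx;
    dma_cookie_t cookie;

    /* Ends up in the provider's ->device_prep_slave_sg(), here hsu_dma_prep_slave_sg(). */
    tx = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
                                 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
    if (!tx)
        return -ENOMEM;

    cookie = dmaengine_submit(tx);      /* queue on the virtual channel */
    if (dma_submit_error(cookie))
        return -EIO;

    dma_async_issue_pending(chan);      /* calls ->device_issue_pending() */
    return 0;
}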
311 struct hsu_dma_desc *desc = hsuc->desc; in hsu_dma_active_desc_size() local
315 for (i = desc->active; i < desc->nents; i++) in hsu_dma_active_desc_size()
316 bytes += desc->sg[i].len; in hsu_dma_active_desc_size()
341 if (hsuc->desc && cookie == hsuc->desc->vdesc.tx.cookie) { in hsu_dma_tx_status()
344 status = hsuc->desc->status; in hsu_dma_tx_status()
370 if (hsuc->desc && hsuc->desc->status == DMA_IN_PROGRESS) { in hsu_dma_pause()
372 hsuc->desc->status = DMA_PAUSED; in hsu_dma_pause()
385 if (hsuc->desc && hsuc->desc->status == DMA_PAUSED) { in hsu_dma_resume()
386 hsuc->desc->status = DMA_IN_PROGRESS; in hsu_dma_resume()
403 if (hsuc->desc) { in hsu_dma_terminate_all()
404 hsu_dma_desc_free(&hsuc->desc->vdesc); in hsu_dma_terminate_all()
405 hsuc->desc = NULL; in hsu_dma_terminate_all()
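The tx_status, pause, resume and terminate_all paths above map onto the corresponding dmaengine consumer helpers. A minimal sketch, assuming "chan" and "cookie" come from a transfer submitted as in the previous sketch; the debug print and the unconditional pause/resume/terminate sequence are purely illustrative.

#include <linux/dmaengine.h>
#include <linux/printk.h>

/*
 * Consumer-side view of the control paths listed above: query the residue
 * of an in-flight cookie, pause/resume the channel, then drop whatever is
 * still queued.
 */
static void sketch_control(struct dma_chan *chan, dma_cookie_t cookie)
{
    struct dma_tx_state state;
    enum dma_status status;

    /* Calls ->device_tx_status(); residue covers the not-yet-transferred bytes. */
    status = dmaengine_tx_status(chan, cookie, &state);
    pr_debug("status %d, residue %u\n", status, state.residue);

    dmaengine_pause(chan);      /* ->device_pause() */
    dmaengine_resume(chan);     /* ->device_resume() */

    /* ->device_terminate_all(): frees the active and queued descriptors. */
    dmaengine_terminate_sync(chan);
}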