Lines Matching refs:tdmac (drivers/dma/mmp_tdma.c)

134 	struct mmp_tdma_chan		*tdmac[TDMA_CHANNEL_NUM];  member
143 static void mmp_tdma_chan_set_desc(struct mmp_tdma_chan *tdmac, dma_addr_t phys) in mmp_tdma_chan_set_desc() argument
145 writel(phys, tdmac->reg_base + TDNDPR); in mmp_tdma_chan_set_desc()
146 writel(readl(tdmac->reg_base + TDCR) | TDCR_FETCHND, in mmp_tdma_chan_set_desc()
147 tdmac->reg_base + TDCR); in mmp_tdma_chan_set_desc()
150 static void mmp_tdma_enable_irq(struct mmp_tdma_chan *tdmac, bool enable) in mmp_tdma_enable_irq() argument
153 writel(TDIMR_COMP, tdmac->reg_base + TDIMR); in mmp_tdma_enable_irq()
155 writel(0, tdmac->reg_base + TDIMR); in mmp_tdma_enable_irq()
158 static void mmp_tdma_enable_chan(struct mmp_tdma_chan *tdmac) in mmp_tdma_enable_chan() argument
161 writel(readl(tdmac->reg_base + TDCR) | TDCR_CHANEN, in mmp_tdma_enable_chan()
162 tdmac->reg_base + TDCR); in mmp_tdma_enable_chan()
163 tdmac->status = DMA_IN_PROGRESS; in mmp_tdma_enable_chan()
168 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_disable_chan() local
171 tdcr = readl(tdmac->reg_base + TDCR); in mmp_tdma_disable_chan()
174 writel(tdcr, tdmac->reg_base + TDCR); in mmp_tdma_disable_chan()
176 tdmac->status = DMA_COMPLETE; in mmp_tdma_disable_chan()
183 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_resume_chan() local
185 writel(readl(tdmac->reg_base + TDCR) | TDCR_CHANEN, in mmp_tdma_resume_chan()
186 tdmac->reg_base + TDCR); in mmp_tdma_resume_chan()
187 tdmac->status = DMA_IN_PROGRESS; in mmp_tdma_resume_chan()
194 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_pause_chan() local
196 writel(readl(tdmac->reg_base + TDCR) & ~TDCR_CHANEN, in mmp_tdma_pause_chan()
197 tdmac->reg_base + TDCR); in mmp_tdma_pause_chan()
198 tdmac->status = DMA_PAUSED; in mmp_tdma_pause_chan()
205 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_config_chan() local
210 if (tdmac->dir == DMA_MEM_TO_DEV) in mmp_tdma_config_chan()
212 else if (tdmac->dir == DMA_DEV_TO_MEM) in mmp_tdma_config_chan()
215 if (tdmac->type == MMP_AUD_TDMA) { in mmp_tdma_config_chan()
218 switch (tdmac->burst_sz) { in mmp_tdma_config_chan()
238 dev_err(tdmac->dev, "unknown burst size.\n"); in mmp_tdma_config_chan()
242 switch (tdmac->buswidth) { in mmp_tdma_config_chan()
253 dev_err(tdmac->dev, "unknown bus size.\n"); in mmp_tdma_config_chan()
256 } else if (tdmac->type == PXA910_SQU) { in mmp_tdma_config_chan()
259 switch (tdmac->burst_sz) { in mmp_tdma_config_chan()
279 dev_err(tdmac->dev, "unknown burst size.\n"); in mmp_tdma_config_chan()
284 writel(tdcr, tdmac->reg_base + TDCR); in mmp_tdma_config_chan()
288 static int mmp_tdma_clear_chan_irq(struct mmp_tdma_chan *tdmac) in mmp_tdma_clear_chan_irq() argument
290 u32 reg = readl(tdmac->reg_base + TDISR); in mmp_tdma_clear_chan_irq()
295 writel(reg, tdmac->reg_base + TDISR); in mmp_tdma_clear_chan_irq()
302 static size_t mmp_tdma_get_pos(struct mmp_tdma_chan *tdmac) in mmp_tdma_get_pos() argument
306 if (tdmac->idx == 0) { in mmp_tdma_get_pos()
307 reg = __raw_readl(tdmac->reg_base + TDSAR); in mmp_tdma_get_pos()
308 reg -= tdmac->desc_arr[0].src_addr; in mmp_tdma_get_pos()
309 } else if (tdmac->idx == 1) { in mmp_tdma_get_pos()
310 reg = __raw_readl(tdmac->reg_base + TDDAR); in mmp_tdma_get_pos()
311 reg -= tdmac->desc_arr[0].dst_addr; in mmp_tdma_get_pos()
320 struct mmp_tdma_chan *tdmac = dev_id; in mmp_tdma_chan_handler() local
322 if (mmp_tdma_clear_chan_irq(tdmac) == 0) { in mmp_tdma_chan_handler()
323 tasklet_schedule(&tdmac->tasklet); in mmp_tdma_chan_handler()
336 struct mmp_tdma_chan *tdmac = tdev->tdmac[i]; in mmp_tdma_int_handler() local
338 ret = mmp_tdma_chan_handler(irq, tdmac); in mmp_tdma_int_handler()
351 struct mmp_tdma_chan *tdmac = from_tasklet(tdmac, t, tasklet); in dma_do_tasklet() local
353 dmaengine_desc_get_callback_invoke(&tdmac->desc, NULL); in dma_do_tasklet()
356 static void mmp_tdma_free_descriptor(struct mmp_tdma_chan *tdmac) in mmp_tdma_free_descriptor() argument
359 int size = tdmac->desc_num * sizeof(struct mmp_tdma_desc); in mmp_tdma_free_descriptor()
361 gpool = tdmac->pool; in mmp_tdma_free_descriptor()
362 if (gpool && tdmac->desc_arr) in mmp_tdma_free_descriptor()
363 gen_pool_free(gpool, (unsigned long)tdmac->desc_arr, in mmp_tdma_free_descriptor()
365 tdmac->desc_arr = NULL; in mmp_tdma_free_descriptor()
366 if (tdmac->status == DMA_ERROR) in mmp_tdma_free_descriptor()
367 tdmac->status = DMA_COMPLETE; in mmp_tdma_free_descriptor()
374 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(tx->chan); in mmp_tdma_tx_submit() local
376 mmp_tdma_chan_set_desc(tdmac, tdmac->desc_arr_phys); in mmp_tdma_tx_submit()
383 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_alloc_chan_resources() local
386 dma_async_tx_descriptor_init(&tdmac->desc, chan); in mmp_tdma_alloc_chan_resources()
387 tdmac->desc.tx_submit = mmp_tdma_tx_submit; in mmp_tdma_alloc_chan_resources()
389 if (tdmac->irq) { in mmp_tdma_alloc_chan_resources()
390 ret = devm_request_irq(tdmac->dev, tdmac->irq, in mmp_tdma_alloc_chan_resources()
391 mmp_tdma_chan_handler, 0, "tdma", tdmac); in mmp_tdma_alloc_chan_resources()
400 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_free_chan_resources() local
402 if (tdmac->irq) in mmp_tdma_free_chan_resources()
403 devm_free_irq(tdmac->dev, tdmac->irq, tdmac); in mmp_tdma_free_chan_resources()
404 mmp_tdma_free_descriptor(tdmac); in mmp_tdma_free_chan_resources()
408 static struct mmp_tdma_desc *mmp_tdma_alloc_descriptor(struct mmp_tdma_chan *tdmac) in mmp_tdma_alloc_descriptor() argument
411 int size = tdmac->desc_num * sizeof(struct mmp_tdma_desc); in mmp_tdma_alloc_descriptor()
413 gpool = tdmac->pool; in mmp_tdma_alloc_descriptor()
417 tdmac->desc_arr = gen_pool_dma_alloc(gpool, size, &tdmac->desc_arr_phys); in mmp_tdma_alloc_descriptor()
419 return tdmac->desc_arr; in mmp_tdma_alloc_descriptor()
427 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_prep_dma_cyclic() local
433 dev_err(tdmac->dev, "unsupported transfer direction\n"); in mmp_tdma_prep_dma_cyclic()
437 if (tdmac->status != DMA_COMPLETE) { in mmp_tdma_prep_dma_cyclic()
438 dev_err(tdmac->dev, "controller busy"); in mmp_tdma_prep_dma_cyclic()
443 dev_err(tdmac->dev, in mmp_tdma_prep_dma_cyclic()
449 tdmac->status = DMA_IN_PROGRESS; in mmp_tdma_prep_dma_cyclic()
450 tdmac->desc_num = num_periods; in mmp_tdma_prep_dma_cyclic()
451 desc = mmp_tdma_alloc_descriptor(tdmac); in mmp_tdma_prep_dma_cyclic()
455 if (mmp_tdma_config_write(chan, direction, &tdmac->slave_config)) in mmp_tdma_prep_dma_cyclic()
459 desc = &tdmac->desc_arr[i]; in mmp_tdma_prep_dma_cyclic()
462 desc->nxt_desc = tdmac->desc_arr_phys; in mmp_tdma_prep_dma_cyclic()
464 desc->nxt_desc = tdmac->desc_arr_phys + in mmp_tdma_prep_dma_cyclic()
469 desc->dst_addr = tdmac->dev_addr; in mmp_tdma_prep_dma_cyclic()
471 desc->src_addr = tdmac->dev_addr; in mmp_tdma_prep_dma_cyclic()
482 mmp_tdma_enable_irq(tdmac, true); in mmp_tdma_prep_dma_cyclic()
484 tdmac->buf_len = buf_len; in mmp_tdma_prep_dma_cyclic()
485 tdmac->period_len = period_len; in mmp_tdma_prep_dma_cyclic()
486 tdmac->pos = 0; in mmp_tdma_prep_dma_cyclic()
488 return &tdmac->desc; in mmp_tdma_prep_dma_cyclic()
491 tdmac->status = DMA_ERROR; in mmp_tdma_prep_dma_cyclic()
497 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_terminate_all() local
501 mmp_tdma_enable_irq(tdmac, false); in mmp_tdma_terminate_all()
509 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_config() local
511 memcpy(&tdmac->slave_config, dmaengine_cfg, sizeof(*dmaengine_cfg)); in mmp_tdma_config()
520 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_config_write() local
523 tdmac->dev_addr = dmaengine_cfg->src_addr; in mmp_tdma_config_write()
524 tdmac->burst_sz = dmaengine_cfg->src_maxburst; in mmp_tdma_config_write()
525 tdmac->buswidth = dmaengine_cfg->src_addr_width; in mmp_tdma_config_write()
527 tdmac->dev_addr = dmaengine_cfg->dst_addr; in mmp_tdma_config_write()
528 tdmac->burst_sz = dmaengine_cfg->dst_maxburst; in mmp_tdma_config_write()
529 tdmac->buswidth = dmaengine_cfg->dst_addr_width; in mmp_tdma_config_write()
531 tdmac->dir = dir; in mmp_tdma_config_write()
539 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_tx_status() local
541 tdmac->pos = mmp_tdma_get_pos(tdmac); in mmp_tdma_tx_status()
543 tdmac->buf_len - tdmac->pos); in mmp_tdma_tx_status()
545 return tdmac->status; in mmp_tdma_tx_status()
550 struct mmp_tdma_chan *tdmac = to_mmp_tdma_chan(chan); in mmp_tdma_issue_pending() local
552 mmp_tdma_enable_chan(tdmac); in mmp_tdma_issue_pending()
567 struct mmp_tdma_chan *tdmac; in mmp_tdma_chan_init() local
575 tdmac = devm_kzalloc(tdev->dev, sizeof(*tdmac), GFP_KERNEL); in mmp_tdma_chan_init()
576 if (!tdmac) in mmp_tdma_chan_init()
580 tdmac->irq = irq; in mmp_tdma_chan_init()
581 tdmac->dev = tdev->dev; in mmp_tdma_chan_init()
582 tdmac->chan.device = &tdev->device; in mmp_tdma_chan_init()
583 tdmac->idx = idx; in mmp_tdma_chan_init()
584 tdmac->type = type; in mmp_tdma_chan_init()
585 tdmac->reg_base = tdev->base + idx * 4; in mmp_tdma_chan_init()
586 tdmac->pool = pool; in mmp_tdma_chan_init()
587 tdmac->status = DMA_COMPLETE; in mmp_tdma_chan_init()
588 tdev->tdmac[tdmac->idx] = tdmac; in mmp_tdma_chan_init()
589 tasklet_setup(&tdmac->tasklet, dma_do_tasklet); in mmp_tdma_chan_init()
592 list_add_tail(&tdmac->chan.device_node, in mmp_tdma_chan_init()
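
The lines above all come from the dmaengine callbacks of the MMP TDMA driver: channel setup, cyclic descriptor preparation, IRQ handling and channel init. As a rough, client-side sketch only (not code from the driver itself), the example below shows how those callbacks would typically be reached through the generic dmaengine API; the channel name "tx", the FIFO address, burst size and bus width are illustrative assumptions.

/*
 * Minimal client-side sketch: drive a cyclic mem-to-device transfer through
 * the generic dmaengine API, which dispatches to the driver callbacks listed
 * above (mmp_tdma_config()/mmp_tdma_config_write(), mmp_tdma_prep_dma_cyclic(),
 * mmp_tdma_tx_submit(), mmp_tdma_issue_pending()). Names and values here are
 * placeholders, not taken from the driver.
 */
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static int example_start_cyclic_tx(struct device *dev, dma_addr_t buf,
				   size_t buf_len, size_t period_len,
				   dma_addr_t fifo_addr,
				   dma_async_tx_callback period_done,
				   void *cb_arg)
{
	struct dma_slave_config cfg = {
		.dst_addr	= fifo_addr,			/* assumed device FIFO address */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,	/* assumed bus width */
		.dst_maxburst	= 32,				/* assumed burst size */
	};
	struct dma_async_tx_descriptor *desc;
	struct dma_chan *chan;
	int ret;

	chan = dma_request_chan(dev, "tx");	/* hypothetical channel name */
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	/* Stored by mmp_tdma_config(), applied later via mmp_tdma_config_write() */
	ret = dmaengine_slave_config(chan, &cfg);
	if (ret)
		goto err_release;

	/* Builds the circular descriptor ring in mmp_tdma_prep_dma_cyclic() */
	desc = dmaengine_prep_dma_cyclic(chan, buf, buf_len, period_len,
					 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!desc) {
		ret = -ENOMEM;
		goto err_release;
	}

	desc->callback = period_done;		/* invoked from dma_do_tasklet() */
	desc->callback_param = cb_arg;

	dmaengine_submit(desc);			/* -> mmp_tdma_tx_submit() */
	dma_async_issue_pending(chan);		/* -> mmp_tdma_issue_pending() */
	return 0;

err_release:
	dma_release_channel(chan);
	return ret;
}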