Lines Matching refs: mv_chan

178 static void mv_chan_start_new_chain(struct mv_xor_chan *mv_chan,  in mv_chan_start_new_chain()  argument
181 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: sw_desc %p\n", in mv_chan_start_new_chain()
185 mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys); in mv_chan_start_new_chain()
187 mv_chan->pending++; in mv_chan_start_new_chain()
188 mv_xor_issue_pending(&mv_chan->dmachan); in mv_chan_start_new_chain()
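
Taken together, lines 178-188 describe the chain-start path: the engine's next-descriptor register is pointed at the new software descriptor's physical address, the descriptor is counted as pending, and mv_xor_issue_pending() is left to decide when to actually activate the channel. A minimal sketch reconstructed from these fragments (braces and the tail of the dev_dbg argument list are filled in as assumptions):

    static void mv_chan_start_new_chain(struct mv_xor_chan *mv_chan,
                                        struct mv_xor_desc_slot *sw_desc)
    {
        dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: sw_desc %p\n",
                __func__, __LINE__, sw_desc);

        /* Point the engine's next-descriptor register at the new chain. */
        mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys);

        mv_chan->pending++;
        mv_xor_issue_pending(&mv_chan->dmachan);
    }
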
193 struct mv_xor_chan *mv_chan, in mv_desc_run_tx_complete_actions() argument
215 mv_chan_clean_completed_slots(struct mv_xor_chan *mv_chan) in mv_chan_clean_completed_slots() argument
219 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d\n", __func__, __LINE__); in mv_chan_clean_completed_slots()
220 list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots, in mv_chan_clean_completed_slots()
224 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_chan_clean_completed_slots()
227 &mv_chan->free_slots); in mv_chan_clean_completed_slots()
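
Lines 215-227 are the recycling pass over completed_slots. A sketch of that loop, assuming the standard async_tx acknowledgement test (async_tx_test_ack()) decides when a descriptor may return to free_slots; the second &mv_chan->free_slots fragment (line 227) suggests an extra splice of a per-descriptor sub-list, elided here:

    static void mv_chan_clean_completed_slots(struct mv_xor_chan *mv_chan)
    {
        struct mv_xor_desc_slot *iter, *_iter;

        dev_dbg(mv_chan_to_devp(mv_chan), "%s %d\n", __func__, __LINE__);
        list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots,
                                 node) {
            /* Only recycle descriptors the client has acknowledged. */
            if (async_tx_test_ack(&iter->async_tx))
                list_move_tail(&iter->node, &mv_chan->free_slots);
        }
    }
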
236 struct mv_xor_chan *mv_chan) in mv_desc_clean_slot() argument
238 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: desc %p flags %d\n", in mv_desc_clean_slot()
246 list_move_tail(&desc->node, &mv_chan->completed_slots); in mv_desc_clean_slot()
249 &mv_chan->completed_slots); in mv_desc_clean_slot()
252 list_move_tail(&desc->node, &mv_chan->free_slots); in mv_desc_clean_slot()
255 &mv_chan->free_slots); in mv_desc_clean_slot()
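
Lines 236-255 make the per-descriptor retirement decision: a descriptor the client has not yet acknowledged is parked on completed_slots, while an acknowledged one goes straight back to free_slots. A sketch, with the branch condition assumed to be async_tx_test_ack() and the sub-list splices (lines 249, 255) elided:

    static void mv_desc_clean_slot(struct mv_xor_desc_slot *desc,
                                   struct mv_xor_chan *mv_chan)
    {
        dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: desc %p flags %d\n",
                __func__, __LINE__, desc, desc->async_tx.flags);

        if (!async_tx_test_ack(&desc->async_tx))
            /* Client may still attach dependencies; keep it visible. */
            list_move_tail(&desc->node, &mv_chan->completed_slots);
        else
            /* Already acknowledged: return it to the free pool now. */
            list_move_tail(&desc->node, &mv_chan->free_slots);
    }
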
263 static void mv_chan_slot_cleanup(struct mv_xor_chan *mv_chan) in mv_chan_slot_cleanup() argument
267 int busy = mv_chan_is_busy(mv_chan); in mv_chan_slot_cleanup()
268 u32 current_desc = mv_chan_get_current_desc(mv_chan); in mv_chan_slot_cleanup()
272 dev_dbg(mv_chan_to_devp(mv_chan), "%s %d\n", __func__, __LINE__); in mv_chan_slot_cleanup()
273 dev_dbg(mv_chan_to_devp(mv_chan), "current_desc %x\n", current_desc); in mv_chan_slot_cleanup()
274 mv_chan_clean_completed_slots(mv_chan); in mv_chan_slot_cleanup()
280 list_for_each_entry_safe(iter, _iter, &mv_chan->chain, in mv_chan_slot_cleanup()
286 cookie = mv_desc_run_tx_complete_actions(iter, mv_chan, in mv_chan_slot_cleanup()
290 mv_desc_clean_slot(iter, mv_chan); in mv_chan_slot_cleanup()
305 if ((busy == 0) && !list_empty(&mv_chan->chain)) { in mv_chan_slot_cleanup()
311 iter = list_entry(mv_chan->chain.next, in mv_chan_slot_cleanup()
314 mv_chan_start_new_chain(mv_chan, iter); in mv_chan_slot_cleanup()
316 if (!list_is_last(&iter->node, &mv_chan->chain)) { in mv_chan_slot_cleanup()
324 mv_chan_start_new_chain(mv_chan, iter); in mv_chan_slot_cleanup()
330 tasklet_schedule(&mv_chan->irq_tasklet); in mv_chan_slot_cleanup()
336 mv_chan->dmachan.completed_cookie = cookie; in mv_chan_slot_cleanup()
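
Lines 263-336 form the core cleanup routine: it compares the hardware's current descriptor against the software chain, runs completion actions for everything the engine has finished, retires those slots, restarts the chain if the engine went idle with work still queued (lines 305-324), and records the last completed cookie. A condensed sketch of that flow; the exact descriptor-matching and restart tests are assumptions:

    static void mv_chan_slot_cleanup(struct mv_xor_chan *mv_chan)
    {
        struct mv_xor_desc_slot *iter, *_iter;
        dma_cookie_t cookie = 0;
        int busy = mv_chan_is_busy(mv_chan);
        u32 current_desc = mv_chan_get_current_desc(mv_chan);

        mv_chan_clean_completed_slots(mv_chan);

        list_for_each_entry_safe(iter, _iter, &mv_chan->chain, node) {
            /* Stop at the descriptor the engine is still working on. */
            if (iter->async_tx.phys == current_desc && busy)
                break;

            cookie = mv_desc_run_tx_complete_actions(iter, mv_chan, cookie);
            mv_desc_clean_slot(iter, mv_chan);
        }

        /* Engine idle but descriptors still queued: restart the chain. */
        if (!busy && !list_empty(&mv_chan->chain)) {
            iter = list_entry(mv_chan->chain.next,
                              struct mv_xor_desc_slot, node);
            mv_chan_start_new_chain(mv_chan, iter);
        }

        if (cookie > 0)
            mv_chan->dmachan.completed_cookie = cookie;
    }
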
349 mv_chan_alloc_slot(struct mv_xor_chan *mv_chan) in mv_chan_alloc_slot() argument
353 spin_lock_bh(&mv_chan->lock); in mv_chan_alloc_slot()
355 if (!list_empty(&mv_chan->free_slots)) { in mv_chan_alloc_slot()
356 iter = list_first_entry(&mv_chan->free_slots, in mv_chan_alloc_slot()
360 list_move_tail(&iter->node, &mv_chan->allocated_slots); in mv_chan_alloc_slot()
362 spin_unlock_bh(&mv_chan->lock); in mv_chan_alloc_slot()
372 spin_unlock_bh(&mv_chan->lock); in mv_chan_alloc_slot()
375 tasklet_schedule(&mv_chan->irq_tasklet); in mv_chan_alloc_slot()
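
Lines 349-375 show slot allocation: under the channel spinlock the first entry of free_slots moves to allocated_slots; when the free list is empty the IRQ tasklet is scheduled so a cleanup pass can recycle descriptors. A sketch (any per-slot re-initialisation is elided):

    static struct mv_xor_desc_slot *
    mv_chan_alloc_slot(struct mv_xor_chan *mv_chan)
    {
        struct mv_xor_desc_slot *iter;

        spin_lock_bh(&mv_chan->lock);

        if (!list_empty(&mv_chan->free_slots)) {
            iter = list_first_entry(&mv_chan->free_slots,
                                    struct mv_xor_desc_slot, node);
            list_move_tail(&iter->node, &mv_chan->allocated_slots);
            spin_unlock_bh(&mv_chan->lock);
            return iter;
        }

        spin_unlock_bh(&mv_chan->lock);

        /* No free descriptor: ask the tasklet to clean up, retry later. */
        tasklet_schedule(&mv_chan->irq_tasklet);
        return NULL;
    }
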
385 struct mv_xor_chan *mv_chan = to_mv_xor_chan(tx->chan); in mv_xor_tx_submit() local
390 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_tx_submit()
394 spin_lock_bh(&mv_chan->lock); in mv_xor_tx_submit()
397 if (list_empty(&mv_chan->chain)) in mv_xor_tx_submit()
398 list_move_tail(&sw_desc->node, &mv_chan->chain); in mv_xor_tx_submit()
402 old_chain_tail = list_entry(mv_chan->chain.prev, in mv_xor_tx_submit()
405 list_move_tail(&sw_desc->node, &mv_chan->chain); in mv_xor_tx_submit()
407 dev_dbg(mv_chan_to_devp(mv_chan), "Append to last desc %pa\n", in mv_xor_tx_submit()
414 if (!mv_chan_is_busy(mv_chan)) { in mv_xor_tx_submit()
415 u32 current_desc = mv_chan_get_current_desc(mv_chan); in mv_xor_tx_submit()
426 mv_chan_start_new_chain(mv_chan, sw_desc); in mv_xor_tx_submit()
428 spin_unlock_bh(&mv_chan->lock); in mv_xor_tx_submit()
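
Lines 385-428 queue a prepared descriptor: under the lock it is moved onto the channel's chain, appended after the old tail when the chain is non-empty, and the hardware chain is (re)started if the engine is idle and has already consumed the old tail. A sketch of that path; the cookie assignment via dma_cookie_assign() is an assumption, and the hardware linking of the new descriptor to the old tail is elided:

    static dma_cookie_t mv_xor_tx_submit(struct dma_async_tx_descriptor *tx)
    {
        struct mv_xor_desc_slot *sw_desc =
            container_of(tx, struct mv_xor_desc_slot, async_tx);
        struct mv_xor_chan *mv_chan = to_mv_xor_chan(tx->chan);
        struct mv_xor_desc_slot *old_chain_tail;
        dma_cookie_t cookie;
        int new_hw_chain = 1;

        spin_lock_bh(&mv_chan->lock);
        cookie = dma_cookie_assign(tx);

        if (list_empty(&mv_chan->chain)) {
            list_move_tail(&sw_desc->node, &mv_chan->chain);
        } else {
            new_hw_chain = 0;
            old_chain_tail = list_entry(mv_chan->chain.prev,
                                        struct mv_xor_desc_slot, node);
            list_move_tail(&sw_desc->node, &mv_chan->chain);

            /* Hardware linking after old_chain_tail elided. */

            /* If the engine already ran past the old tail, restart it. */
            if (!mv_chan_is_busy(mv_chan)) {
                u32 current_desc = mv_chan_get_current_desc(mv_chan);

                if (current_desc == old_chain_tail->async_tx.phys)
                    new_hw_chain = 1;
            }
        }

        if (new_hw_chain)
            mv_chan_start_new_chain(mv_chan, sw_desc);

        spin_unlock_bh(&mv_chan->lock);
        return cookie;
    }
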
439 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_alloc_chan_resources() local
444 idx = mv_chan->slots_allocated; in mv_xor_alloc_chan_resources()
448 dev_info(mv_chan_to_devp(mv_chan), in mv_xor_alloc_chan_resources()
453 virt_desc = mv_chan->dma_desc_pool_virt; in mv_xor_alloc_chan_resources()
460 dma_desc = mv_chan->dma_desc_pool; in mv_xor_alloc_chan_resources()
464 spin_lock_bh(&mv_chan->lock); in mv_xor_alloc_chan_resources()
465 mv_chan->slots_allocated = idx; in mv_xor_alloc_chan_resources()
466 list_add_tail(&slot->node, &mv_chan->free_slots); in mv_xor_alloc_chan_resources()
467 spin_unlock_bh(&mv_chan->lock); in mv_xor_alloc_chan_resources()
470 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_alloc_chan_resources()
472 mv_chan->slots_allocated); in mv_xor_alloc_chan_resources()
474 return mv_chan->slots_allocated ? : -ENOMEM; in mv_xor_alloc_chan_resources()
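
Lines 439-474 carve the coherent descriptor pool into software slots: each new slot is pointed at one stride of dma_desc_pool_virt / dma_desc_pool and appended to free_slots under the lock, and the function returns the slot count or -ENOMEM. A condensed sketch; the stride macro MV_XOR_SLOT_SIZE, the hw_desc field, and the async_tx initialisation (elided) are assumptions:

    static int mv_xor_alloc_chan_resources(struct dma_chan *chan)
    {
        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        struct mv_xor_desc_slot *slot;
        int idx = mv_chan->slots_allocated;

        while (idx < MV_XOR_POOL_SIZE / MV_XOR_SLOT_SIZE) {
            slot = kzalloc(sizeof(*slot), GFP_KERNEL);
            if (!slot)
                break;

            /* Each slot owns one stride of the coherent pool. */
            slot->hw_desc = (char *)mv_chan->dma_desc_pool_virt +
                            idx * MV_XOR_SLOT_SIZE;
            slot->async_tx.phys = mv_chan->dma_desc_pool +
                                  idx * MV_XOR_SLOT_SIZE;
            idx++;

            spin_lock_bh(&mv_chan->lock);
            mv_chan->slots_allocated = idx;
            list_add_tail(&slot->node, &mv_chan->free_slots);
            spin_unlock_bh(&mv_chan->lock);
        }

        return mv_chan->slots_allocated ? : -ENOMEM;
    }
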
483 static int mv_xor_add_io_win(struct mv_xor_chan *mv_chan, u32 addr) in mv_xor_add_io_win() argument
485 struct mv_xor_device *xordev = mv_chan->xordev; in mv_xor_add_io_win()
486 void __iomem *base = mv_chan->mmr_high_base; in mv_xor_add_io_win()
558 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_prep_dma_xor() local
567 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_prep_dma_xor()
572 ret = mv_xor_add_io_win(mv_chan, dest); in mv_xor_prep_dma_xor()
576 sw_desc = mv_chan_alloc_slot(mv_chan); in mv_xor_prep_dma_xor()
581 if (mv_chan->op_in_desc == XOR_MODE_IN_DESC) in mv_xor_prep_dma_xor()
585 ret = mv_xor_add_io_win(mv_chan, src[src_cnt]); in mv_xor_prep_dma_xor()
592 dev_dbg(mv_chan_to_devp(mv_chan), in mv_xor_prep_dma_xor()
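
Lines 558-592 outline the XOR prep path: the destination and every source must be covered by an I/O window (mv_xor_add_io_win()), a slot is taken from the free pool, and the operation type is encoded in the descriptor itself when the channel runs in XOR_MODE_IN_DESC. A sketch of the control flow with the descriptor-programming helpers elided:

    static struct dma_async_tx_descriptor *
    mv_xor_prep_dma_xor(struct dma_chan *chan, dma_addr_t dest,
                        dma_addr_t *src, unsigned int src_cnt,
                        size_t len, unsigned long flags)
    {
        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        struct mv_xor_desc_slot *sw_desc;
        int ret;

        /* The destination must be reachable through an I/O window. */
        ret = mv_xor_add_io_win(mv_chan, dest);
        if (ret)
            return NULL;

        sw_desc = mv_chan_alloc_slot(mv_chan);
        if (!sw_desc)
            return NULL;

        /* In XOR_MODE_IN_DESC the operation type is written into the
         * descriptor here; in register mode the channel was configured
         * for XOR at probe time (descriptor programming elided). */

        while (src_cnt--) {
            /* Each source must also be covered by an I/O window. */
            ret = mv_xor_add_io_win(mv_chan, src[src_cnt]);
            if (ret)
                return NULL;
        }

        return &sw_desc->async_tx;
    }
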
612 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_prep_dma_interrupt() local
616 src = mv_chan->dummy_src_addr; in mv_xor_prep_dma_interrupt()
617 dest = mv_chan->dummy_dst_addr; in mv_xor_prep_dma_interrupt()
629 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_free_chan_resources() local
633 spin_lock_bh(&mv_chan->lock); in mv_xor_free_chan_resources()
635 mv_chan_slot_cleanup(mv_chan); in mv_xor_free_chan_resources()
637 list_for_each_entry_safe(iter, _iter, &mv_chan->chain, in mv_xor_free_chan_resources()
640 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_xor_free_chan_resources()
642 list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots, in mv_xor_free_chan_resources()
645 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_xor_free_chan_resources()
647 list_for_each_entry_safe(iter, _iter, &mv_chan->allocated_slots, in mv_xor_free_chan_resources()
650 list_move_tail(&iter->node, &mv_chan->free_slots); in mv_xor_free_chan_resources()
653 iter, _iter, &mv_chan->free_slots, node) { in mv_xor_free_chan_resources()
656 mv_chan->slots_allocated--; in mv_xor_free_chan_resources()
659 dev_dbg(mv_chan_to_devp(mv_chan), "%s slots_allocated %d\n", in mv_xor_free_chan_resources()
660 __func__, mv_chan->slots_allocated); in mv_xor_free_chan_resources()
661 spin_unlock_bh(&mv_chan->lock); in mv_xor_free_chan_resources()
664 dev_err(mv_chan_to_devp(mv_chan), in mv_xor_free_chan_resources()
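
Lines 629-664 tear down a channel's descriptors: after a final cleanup pass, anything still on chain, completed_slots, or allocated_slots is drained back to free_slots, the free list is then walked to release each slot, and a dev_err (line 664) warns if descriptors are still in flight. A sketch of the drain-and-free sequence, with the in-flight accounting elided:

    static void mv_xor_free_chan_resources(struct dma_chan *chan)
    {
        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        struct mv_xor_desc_slot *iter, *_iter;

        spin_lock_bh(&mv_chan->lock);

        mv_chan_slot_cleanup(mv_chan);

        /* Drain every remaining list back onto free_slots ... */
        list_for_each_entry_safe(iter, _iter, &mv_chan->chain, node)
            list_move_tail(&iter->node, &mv_chan->free_slots);
        list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots, node)
            list_move_tail(&iter->node, &mv_chan->free_slots);
        list_for_each_entry_safe(iter, _iter, &mv_chan->allocated_slots, node)
            list_move_tail(&iter->node, &mv_chan->free_slots);

        /* ... then release the slots themselves. */
        list_for_each_entry_safe(iter, _iter, &mv_chan->free_slots, node) {
            list_del(&iter->node);
            kfree(iter);
            mv_chan->slots_allocated--;
        }

        spin_unlock_bh(&mv_chan->lock);
    }
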
678 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_status() local
685 spin_lock_bh(&mv_chan->lock); in mv_xor_status()
686 mv_chan_slot_cleanup(mv_chan); in mv_xor_status()
687 spin_unlock_bh(&mv_chan->lock); in mv_xor_status()
749 struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan); in mv_xor_issue_pending() local
751 if (mv_chan->pending >= MV_XOR_THRESHOLD) { in mv_xor_issue_pending()
752 mv_chan->pending = 0; in mv_xor_issue_pending()
753 mv_chan_activate(mv_chan); in mv_xor_issue_pending()
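
Lines 749-753 show the activation policy: submissions only bump mv_chan->pending, and the engine is actually kicked once the count reaches MV_XOR_THRESHOLD. A sketch reconstructed from these fragments:

    static void mv_xor_issue_pending(struct dma_chan *chan)
    {
        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);

        /* Batch doorbell writes: only activate the engine once
         * MV_XOR_THRESHOLD descriptors are pending. */
        if (mv_chan->pending >= MV_XOR_THRESHOLD) {
            mv_chan->pending = 0;
            mv_chan_activate(mv_chan);
        }
    }
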
761 static int mv_chan_memcpy_self_test(struct mv_xor_chan *mv_chan) in mv_chan_memcpy_self_test() argument
786 dma_chan = &mv_chan->dmachan; in mv_chan_memcpy_self_test()
872 mv_chan_xor_self_test(struct mv_xor_chan *mv_chan) in mv_chan_xor_self_test() argument
919 dma_chan = &mv_chan->dmachan; in mv_chan_xor_self_test()
1009 static int mv_xor_channel_remove(struct mv_xor_chan *mv_chan) in mv_xor_channel_remove() argument
1012 struct device *dev = mv_chan->dmadev.dev; in mv_xor_channel_remove()
1014 dma_async_device_unregister(&mv_chan->dmadev); in mv_xor_channel_remove()
1017 mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool); in mv_xor_channel_remove()
1018 dma_unmap_single(dev, mv_chan->dummy_src_addr, in mv_xor_channel_remove()
1020 dma_unmap_single(dev, mv_chan->dummy_dst_addr, in mv_xor_channel_remove()
1023 list_for_each_entry_safe(chan, _chan, &mv_chan->dmadev.channels, in mv_xor_channel_remove()
1028 free_irq(mv_chan->irq, mv_chan); in mv_xor_channel_remove()
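
Lines 1009-1028 give the removal order: unregister from the dmaengine core, release the descriptor pool, unmap the two dummy buffers, unlink the channels from the device list, and free the IRQ last. A sketch; the listing only shows the pool and dummy-buffer arguments, so pairing the pool release with dma_free_wc() (the counterpart of the dma_alloc_wc() at line 1072) is an assumption:

    static int mv_xor_channel_remove(struct mv_xor_chan *mv_chan)
    {
        struct dma_chan *chan, *_chan;
        struct device *dev = mv_chan->dmadev.dev;

        dma_async_device_unregister(&mv_chan->dmadev);

        dma_free_wc(dev, MV_XOR_POOL_SIZE,
                    mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool);
        dma_unmap_single(dev, mv_chan->dummy_src_addr,
                         MV_XOR_MIN_BYTE_COUNT, DMA_FROM_DEVICE);
        dma_unmap_single(dev, mv_chan->dummy_dst_addr,
                         MV_XOR_MIN_BYTE_COUNT, DMA_TO_DEVICE);

        list_for_each_entry_safe(chan, _chan, &mv_chan->dmadev.channels,
                                 device_node)
            list_del(&chan->device_node);

        free_irq(mv_chan->irq, mv_chan);
        return 0;
    }
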
1039 struct mv_xor_chan *mv_chan; in mv_xor_channel_add() local
1042 mv_chan = devm_kzalloc(&pdev->dev, sizeof(*mv_chan), GFP_KERNEL); in mv_xor_channel_add()
1043 if (!mv_chan) in mv_xor_channel_add()
1046 mv_chan->idx = idx; in mv_xor_channel_add()
1047 mv_chan->irq = irq; in mv_xor_channel_add()
1049 mv_chan->op_in_desc = XOR_MODE_IN_REG; in mv_xor_channel_add()
1051 mv_chan->op_in_desc = XOR_MODE_IN_DESC; in mv_xor_channel_add()
1053 dma_dev = &mv_chan->dmadev; in mv_xor_channel_add()
1055 mv_chan->xordev = xordev; in mv_xor_channel_add()
1062 mv_chan->dummy_src_addr = dma_map_single(dma_dev->dev, in mv_xor_channel_add()
1063 mv_chan->dummy_src, MV_XOR_MIN_BYTE_COUNT, DMA_FROM_DEVICE); in mv_xor_channel_add()
1064 mv_chan->dummy_dst_addr = dma_map_single(dma_dev->dev, in mv_xor_channel_add()
1065 mv_chan->dummy_dst, MV_XOR_MIN_BYTE_COUNT, DMA_TO_DEVICE); in mv_xor_channel_add()
1071 mv_chan->dma_desc_pool_virt = in mv_xor_channel_add()
1072 dma_alloc_wc(&pdev->dev, MV_XOR_POOL_SIZE, &mv_chan->dma_desc_pool, in mv_xor_channel_add()
1074 if (!mv_chan->dma_desc_pool_virt) in mv_xor_channel_add()
1098 mv_chan->mmr_base = xordev->xor_base; in mv_xor_channel_add()
1099 mv_chan->mmr_high_base = xordev->xor_high_base; in mv_xor_channel_add()
1100 tasklet_setup(&mv_chan->irq_tasklet, mv_xor_tasklet); in mv_xor_channel_add()
1103 mv_chan_clear_err_status(mv_chan); in mv_xor_channel_add()
1105 ret = request_irq(mv_chan->irq, mv_xor_interrupt_handler, in mv_xor_channel_add()
1106 0, dev_name(&pdev->dev), mv_chan); in mv_xor_channel_add()
1110 mv_chan_unmask_interrupts(mv_chan); in mv_xor_channel_add()
1112 if (mv_chan->op_in_desc == XOR_MODE_IN_DESC) in mv_xor_channel_add()
1113 mv_chan_set_mode(mv_chan, XOR_OPERATION_MODE_IN_DESC); in mv_xor_channel_add()
1115 mv_chan_set_mode(mv_chan, XOR_OPERATION_MODE_XOR); in mv_xor_channel_add()
1117 spin_lock_init(&mv_chan->lock); in mv_xor_channel_add()
1118 INIT_LIST_HEAD(&mv_chan->chain); in mv_xor_channel_add()
1119 INIT_LIST_HEAD(&mv_chan->completed_slots); in mv_xor_channel_add()
1120 INIT_LIST_HEAD(&mv_chan->free_slots); in mv_xor_channel_add()
1121 INIT_LIST_HEAD(&mv_chan->allocated_slots); in mv_xor_channel_add()
1122 mv_chan->dmachan.device = dma_dev; in mv_xor_channel_add()
1123 dma_cookie_init(&mv_chan->dmachan); in mv_xor_channel_add()
1125 list_add_tail(&mv_chan->dmachan.device_node, &dma_dev->channels); in mv_xor_channel_add()
1128 ret = mv_chan_memcpy_self_test(mv_chan); in mv_xor_channel_add()
1135 ret = mv_chan_xor_self_test(mv_chan); in mv_xor_channel_add()
1142 mv_chan->op_in_desc ? "Descriptor Mode" : "Registers Mode", in mv_xor_channel_add()
1151 return mv_chan; in mv_xor_channel_add()
1154 free_irq(mv_chan->irq, mv_chan); in mv_xor_channel_add()
1157 mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool); in mv_xor_channel_add()
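
Lines 1039-1157 cover the per-channel probe sequence. Reading the fragments in order: allocate mv_chan, record idx, irq and the operation mode, map the dummy source/destination buffers, allocate the write-combining descriptor pool, wire up the register bases and the tasklet, clear the error status, request the IRQ, unmask interrupts, select descriptor or register mode, initialise the lock, lists and cookie, run the memcpy and XOR self-tests, and register the device. A condensed sketch of that order; capability setup, the op_in_desc probe argument, and the error unwinding (lines 1154-1157) are elided or assumed:

    /* inside mv_xor_channel_add(), error unwinding omitted */
    mv_chan = devm_kzalloc(&pdev->dev, sizeof(*mv_chan), GFP_KERNEL);
    if (!mv_chan)
        return ERR_PTR(-ENOMEM);

    mv_chan->idx = idx;
    mv_chan->irq = irq;
    mv_chan->op_in_desc = op_in_desc ? XOR_MODE_IN_DESC : XOR_MODE_IN_REG;
    mv_chan->xordev = xordev;
    dma_dev = &mv_chan->dmadev;

    /* Dummy buffers backing the interrupt-only descriptor. */
    mv_chan->dummy_src_addr = dma_map_single(dma_dev->dev,
            mv_chan->dummy_src, MV_XOR_MIN_BYTE_COUNT, DMA_FROM_DEVICE);
    mv_chan->dummy_dst_addr = dma_map_single(dma_dev->dev,
            mv_chan->dummy_dst, MV_XOR_MIN_BYTE_COUNT, DMA_TO_DEVICE);

    /* Write-combined pool that holds the hardware descriptors. */
    mv_chan->dma_desc_pool_virt = dma_alloc_wc(&pdev->dev, MV_XOR_POOL_SIZE,
            &mv_chan->dma_desc_pool, GFP_KERNEL);
    if (!mv_chan->dma_desc_pool_virt)
        return ERR_PTR(-ENOMEM);

    mv_chan->mmr_base = xordev->xor_base;
    mv_chan->mmr_high_base = xordev->xor_high_base;
    tasklet_setup(&mv_chan->irq_tasklet, mv_xor_tasklet);

    mv_chan_clear_err_status(mv_chan);
    ret = request_irq(mv_chan->irq, mv_xor_interrupt_handler,
                      0, dev_name(&pdev->dev), mv_chan);
    if (ret)
        return ERR_PTR(ret);
    mv_chan_unmask_interrupts(mv_chan);

    mv_chan_set_mode(mv_chan, mv_chan->op_in_desc == XOR_MODE_IN_DESC ?
                     XOR_OPERATION_MODE_IN_DESC : XOR_OPERATION_MODE_XOR);

    spin_lock_init(&mv_chan->lock);
    INIT_LIST_HEAD(&mv_chan->chain);
    INIT_LIST_HEAD(&mv_chan->completed_slots);
    INIT_LIST_HEAD(&mv_chan->free_slots);
    INIT_LIST_HEAD(&mv_chan->allocated_slots);
    mv_chan->dmachan.device = dma_dev;
    dma_cookie_init(&mv_chan->dmachan);
    list_add_tail(&mv_chan->dmachan.device_node, &dma_dev->channels);

    /* Exercise the hardware before exposing the channel. */
    ret = mv_chan_memcpy_self_test(mv_chan);
    if (!ret)
        ret = mv_chan_xor_self_test(mv_chan);
    if (ret)
        return ERR_PTR(ret);

    dma_async_device_register(dma_dev);
    return mv_chan;
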
1237 struct mv_xor_chan *mv_chan = xordev->channels[i]; in mv_xor_suspend() local
1239 if (!mv_chan) in mv_xor_suspend()
1242 mv_chan->saved_config_reg = in mv_xor_suspend()
1243 readl_relaxed(XOR_CONFIG(mv_chan)); in mv_xor_suspend()
1244 mv_chan->saved_int_mask_reg = in mv_xor_suspend()
1245 readl_relaxed(XOR_INTR_MASK(mv_chan)); in mv_xor_suspend()
1258 struct mv_xor_chan *mv_chan = xordev->channels[i]; in mv_xor_resume() local
1260 if (!mv_chan) in mv_xor_resume()
1263 writel_relaxed(mv_chan->saved_config_reg, in mv_xor_resume()
1264 XOR_CONFIG(mv_chan)); in mv_xor_resume()
1265 writel_relaxed(mv_chan->saved_int_mask_reg, in mv_xor_resume()
1266 XOR_INTR_MASK(mv_chan)); in mv_xor_resume()
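
Lines 1237-1266 pair suspend and resume: each channel's configuration and interrupt-mask registers are snapshotted into saved_config_reg / saved_int_mask_reg on suspend and written back on resume. A sketch of the two loops; the channel count MV_XOR_MAX_CHANNELS and the surrounding platform suspend/resume plumbing are assumptions:

    /* Suspend: snapshot the per-channel registers. */
    for (i = 0; i < MV_XOR_MAX_CHANNELS; i++) {
        struct mv_xor_chan *mv_chan = xordev->channels[i];

        if (!mv_chan)
            continue;

        mv_chan->saved_config_reg =
            readl_relaxed(XOR_CONFIG(mv_chan));
        mv_chan->saved_int_mask_reg =
            readl_relaxed(XOR_INTR_MASK(mv_chan));
    }

    /* Resume: restore what suspend saved. */
    for (i = 0; i < MV_XOR_MAX_CHANNELS; i++) {
        struct mv_xor_chan *mv_chan = xordev->channels[i];

        if (!mv_chan)
            continue;

        writel_relaxed(mv_chan->saved_config_reg,
                       XOR_CONFIG(mv_chan));
        writel_relaxed(mv_chan->saved_int_mask_reg,
                       XOR_INTR_MASK(mv_chan));
    }
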