Lines Matching refs:d40c (the struct d40_chan pointer used throughout drivers/dma/ste_dma40.c)

628 static struct device *chan2dev(struct d40_chan *d40c) in chan2dev() argument
630 return &d40c->chan.dev->device; in chan2dev()
652 #define chan_err(d40c, format, arg...) \ argument
653 d40_err(chan2dev(d40c), format, ## arg)
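
chan_err() above just routes the channel's struct device into d40_err(); the body of d40_err() is not part of this listing, but it is a thin dev_err()-style wrapper, so a call site behaves roughly like the sketch below (illustrative only):

    /* Sketch: chan_err(d40c, "stop failed\n") ends up as a dev_err() on the
     * channel's device, obtained via chan2dev() shown above. */
    dev_err(&d40c->chan.dev->device, "stop failed\n");
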
659 static int d40_pool_lli_alloc(struct d40_chan *d40c, struct d40_desc *d40d, in d40_pool_lli_alloc() argument
662 bool is_log = chan_is_logical(d40c); in d40_pool_lli_alloc()
694 d40d->lli_pool.dma_addr = dma_map_single(d40c->base->dev, in d40_pool_lli_alloc()
699 if (dma_mapping_error(d40c->base->dev, in d40_pool_lli_alloc()
711 static void d40_pool_lli_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_pool_lli_free() argument
714 dma_unmap_single(d40c->base->dev, d40d->lli_pool.dma_addr, in d40_pool_lli_free()
726 static int d40_lcla_alloc_one(struct d40_chan *d40c, in d40_lcla_alloc_one() argument
733 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
740 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_alloc_one()
742 if (!d40c->base->lcla_pool.alloc_map[idx]) { in d40_lcla_alloc_one()
743 d40c->base->lcla_pool.alloc_map[idx] = d40d; in d40_lcla_alloc_one()
750 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_alloc_one()
755 static int d40_lcla_free_all(struct d40_chan *d40c, in d40_lcla_free_all() argument
762 if (chan_is_physical(d40c)) in d40_lcla_free_all()
765 spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
768 int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i; in d40_lcla_free_all()
770 if (d40c->base->lcla_pool.alloc_map[idx] == d40d) { in d40_lcla_free_all()
771 d40c->base->lcla_pool.alloc_map[idx] = NULL; in d40_lcla_free_all()
780 spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags); in d40_lcla_free_all()
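
d40_lcla_alloc_one()/d40_lcla_free_all() above treat lcla_pool.alloc_map as one group of D40_LCLA_LINK_PER_EVENT_GRP slots per physical channel, guarded by lcla_pool.lock. A minimal sketch of claiming a slot follows; the loop bounds are not visible in this listing and are illustrative, and i, ret, flags and d40d are as in the surrounding function:

    /* Sketch: claim the first free LCLA link slot for this channel.
     * idx layout matches the lines above: physical channel number times
     * D40_LCLA_LINK_PER_EVENT_GRP plus the slot index. */
    spin_lock_irqsave(&d40c->base->lcla_pool.lock, flags);
    for (i = 0; i < D40_LCLA_LINK_PER_EVENT_GRP; i++) {
            int idx = d40c->phy_chan->num * D40_LCLA_LINK_PER_EVENT_GRP + i;

            if (!d40c->base->lcla_pool.alloc_map[idx]) {
                    d40c->base->lcla_pool.alloc_map[idx] = d40d; /* slot taken */
                    ret = i;
                    break;
            }
    }
    spin_unlock_irqrestore(&d40c->base->lcla_pool.lock, flags);
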
791 static struct d40_desc *d40_desc_get(struct d40_chan *d40c) in d40_desc_get() argument
795 if (!list_empty(&d40c->client)) { in d40_desc_get()
799 list_for_each_entry_safe(d, _d, &d40c->client, node) { in d40_desc_get()
810 desc = kmem_cache_zalloc(d40c->base->desc_slab, GFP_NOWAIT); in d40_desc_get()
818 static void d40_desc_free(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_free() argument
821 d40_pool_lli_free(d40c, d40d); in d40_desc_free()
822 d40_lcla_free_all(d40c, d40d); in d40_desc_free()
823 kmem_cache_free(d40c->base->desc_slab, d40d); in d40_desc_free()
826 static void d40_desc_submit(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_submit() argument
828 list_add_tail(&desc->node, &d40c->active); in d40_desc_submit()
848 static void d40_desc_done(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_done() argument
850 list_add_tail(&desc->node, &d40c->done); in d40_desc_done()
965 static void d40_desc_load(struct d40_chan *d40c, struct d40_desc *d40d) in d40_desc_load() argument
967 if (chan_is_physical(d40c)) { in d40_desc_load()
968 d40_phy_lli_load(d40c, d40d); in d40_desc_load()
971 d40_log_lli_to_lcxa(d40c, d40d); in d40_desc_load()
974 static struct d40_desc *d40_first_active_get(struct d40_chan *d40c) in d40_first_active_get() argument
976 return list_first_entry_or_null(&d40c->active, struct d40_desc, node); in d40_first_active_get()
980 static void d40_desc_queue(struct d40_chan *d40c, struct d40_desc *desc) in d40_desc_queue() argument
984 list_add_tail(&desc->node, &d40c->pending_queue); in d40_desc_queue()
987 static struct d40_desc *d40_first_pending(struct d40_chan *d40c) in d40_first_pending() argument
989 return list_first_entry_or_null(&d40c->pending_queue, struct d40_desc, in d40_first_pending()
993 static struct d40_desc *d40_first_queued(struct d40_chan *d40c) in d40_first_queued() argument
995 return list_first_entry_or_null(&d40c->queue, struct d40_desc, node); in d40_first_queued()
998 static struct d40_desc *d40_first_done(struct d40_chan *d40c) in d40_first_done() argument
1000 return list_first_entry_or_null(&d40c->done, struct d40_desc, node); in d40_first_done()
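
The queue helpers above implement a small per-channel descriptor lifecycle; read together with the submit, issue and interrupt paths later in this listing, the flow is roughly:

    /*
     * Descriptor flow (inferred from this listing, illustrative):
     *   d40_tx_submit()     -> d40_desc_queue():  tail of d40c->pending_queue
     *   d40_issue_pending() -> list_splice_tail_init(pending_queue -> queue)
     *   d40_queue_start()   -> d40_first_queued() + d40_desc_submit(): queue -> active
     *   dma_tc_handle()     -> d40_desc_done():   active -> done (or reload LLIs)
     *   dma_tasklet()       -> d40_desc_free() or park on d40c->client for reuse
     */
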
1063 static int __d40_execute_command_phy(struct d40_chan *d40c, in __d40_execute_command_phy() argument
1074 ret = __d40_execute_command_phy(d40c, D40_DMA_SUSPEND_REQ); in __d40_execute_command_phy()
1079 spin_lock_irqsave(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
1081 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_phy()
1082 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_phy()
1084 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_phy()
1088 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1089 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1095 wmask = 0xffffffff & ~(D40_CHAN_POS_MASK(d40c->phy_chan->num)); in __d40_execute_command_phy()
1096 writel(wmask | (command << D40_CHAN_POS(d40c->phy_chan->num)), in __d40_execute_command_phy()
1103 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_phy()
1104 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_phy()
1119 chan_err(d40c, in __d40_execute_command_phy()
1121 d40c->phy_chan->num, d40c->log_num, in __d40_execute_command_phy()
1129 spin_unlock_irqrestore(&d40c->base->execmd_lock, flags); in __d40_execute_command_phy()
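
The physical command path above packs a 2-bit command/status field per channel into D40_DREG_ACTIVE (even channel numbers) or D40_DREG_ACTIVO (odd ones). A condensed sketch of that register access pattern follows; `command` is the enum d40_command value being issued, and the meaning of the all-ones fill for the other channels' fields is assumed from the write pattern in the lines above, not stated in this listing:

    /* Sketch: pick the status register for this physical channel, read its
     * 2-bit field, then write the new command into just that field. */
    void __iomem *active_reg = d40c->base->virtbase +
            (d40c->phy_chan->num % 2 == 0 ? D40_DREG_ACTIVE : D40_DREG_ACTIVO);
    u32 status = (readl(active_reg) & D40_CHAN_POS_MASK(d40c->phy_chan->num)) >>
                 D40_CHAN_POS(d40c->phy_chan->num);
    u32 wmask  = 0xffffffff & ~(D40_CHAN_POS_MASK(d40c->phy_chan->num));

    writel(wmask | (command << D40_CHAN_POS(d40c->phy_chan->num)), active_reg);
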
1133 static void d40_term_all(struct d40_chan *d40c) in d40_term_all() argument
1139 while ((d40d = d40_first_done(d40c))) { in d40_term_all()
1141 d40_desc_free(d40c, d40d); in d40_term_all()
1145 while ((d40d = d40_first_active_get(d40c))) { in d40_term_all()
1147 d40_desc_free(d40c, d40d); in d40_term_all()
1151 while ((d40d = d40_first_queued(d40c))) { in d40_term_all()
1153 d40_desc_free(d40c, d40d); in d40_term_all()
1157 while ((d40d = d40_first_pending(d40c))) { in d40_term_all()
1159 d40_desc_free(d40c, d40d); in d40_term_all()
1163 if (!list_empty(&d40c->client)) in d40_term_all()
1164 list_for_each_entry_safe(d40d, _d, &d40c->client, node) { in d40_term_all()
1166 d40_desc_free(d40c, d40d); in d40_term_all()
1170 if (!list_empty(&d40c->prepare_queue)) in d40_term_all()
1172 &d40c->prepare_queue, node) { in d40_term_all()
1174 d40_desc_free(d40c, d40d); in d40_term_all()
1177 d40c->pending_tx = 0; in d40_term_all()
1180 static void __d40_config_set_event(struct d40_chan *d40c, in __d40_config_set_event() argument
1184 void __iomem *addr = chan_base(d40c) + reg; in __d40_config_set_event()
1224 chan_err(d40c, in __d40_config_set_event()
1226 "status %x\n", d40c->phy_chan->num, in __d40_config_set_event()
1227 d40c->log_num, status); in __d40_config_set_event()
1248 dev_dbg(chan2dev(d40c), in __d40_config_set_event()
1263 static void d40_config_set_event(struct d40_chan *d40c, in d40_config_set_event() argument
1266 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_config_set_event()
1269 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_config_set_event()
1270 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_config_set_event()
1271 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1274 if (d40c->dma_cfg.dir != DMA_DEV_TO_MEM) in d40_config_set_event()
1275 __d40_config_set_event(d40c, event_type, event, in d40_config_set_event()
1279 static u32 d40_chan_has_events(struct d40_chan *d40c) in d40_chan_has_events() argument
1281 void __iomem *chanbase = chan_base(d40c); in d40_chan_has_events()
1291 __d40_execute_command_log(struct d40_chan *d40c, enum d40_command command) in __d40_execute_command_log() argument
1298 if (d40c->phy_chan->num % 2 == 0) in __d40_execute_command_log()
1299 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in __d40_execute_command_log()
1301 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in __d40_execute_command_log()
1304 spin_lock_irqsave(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1311 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in __d40_execute_command_log()
1312 D40_CHAN_POS(d40c->phy_chan->num); in __d40_execute_command_log()
1315 d40_config_set_event(d40c, D40_SUSPEND_REQ_EVENTLINE); in __d40_execute_command_log()
1317 d40_config_set_event(d40c, D40_DEACTIVATE_EVENTLINE); in __d40_execute_command_log()
1319 if (!d40_chan_has_events(d40c) && (command == D40_DMA_STOP)) in __d40_execute_command_log()
1320 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1326 d40_config_set_event(d40c, D40_ACTIVATE_EVENTLINE); in __d40_execute_command_log()
1327 ret = __d40_execute_command_phy(d40c, command); in __d40_execute_command_log()
1335 spin_unlock_irqrestore(&d40c->phy_chan->lock, flags); in __d40_execute_command_log()
1339 static int d40_channel_execute_command(struct d40_chan *d40c, in d40_channel_execute_command() argument
1342 if (chan_is_logical(d40c)) in d40_channel_execute_command()
1343 return __d40_execute_command_log(d40c, command); in d40_channel_execute_command()
1345 return __d40_execute_command_phy(d40c, command); in d40_channel_execute_command()
1348 static u32 d40_get_prmo(struct d40_chan *d40c) in d40_get_prmo() argument
1367 if (chan_is_physical(d40c)) in d40_get_prmo()
1368 return phy_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1370 return log_map[d40c->dma_cfg.mode_opt]; in d40_get_prmo()
1373 static void d40_config_write(struct d40_chan *d40c) in d40_config_write() argument
1379 addr_base = (d40c->phy_chan->num % 2) * 4; in d40_config_write()
1381 var = ((u32)(chan_is_logical(d40c)) + 1) << in d40_config_write()
1382 D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1383 writel(var, d40c->base->virtbase + D40_DREG_PRMSE + addr_base); in d40_config_write()
1386 var = d40_get_prmo(d40c) << D40_CHAN_POS(d40c->phy_chan->num); in d40_config_write()
1388 writel(var, d40c->base->virtbase + D40_DREG_PRMOE + addr_base); in d40_config_write()
1390 if (chan_is_logical(d40c)) { in d40_config_write()
1391 int lidx = (d40c->phy_chan->num << D40_SREG_ELEM_LOG_LIDX_POS) in d40_config_write()
1393 void __iomem *chanbase = chan_base(d40c); in d40_config_write()
1396 writel(d40c->src_def_cfg, chanbase + D40_CHAN_REG_SSCFG); in d40_config_write()
1397 writel(d40c->dst_def_cfg, chanbase + D40_CHAN_REG_SDCFG); in d40_config_write()
1409 static u32 d40_residue(struct d40_chan *d40c) in d40_residue() argument
1413 if (chan_is_logical(d40c)) in d40_residue()
1414 num_elt = (readl(&d40c->lcpa->lcsp2) & D40_MEM_LCSP2_ECNT_MASK) in d40_residue()
1417 u32 val = readl(chan_base(d40c) + D40_CHAN_REG_SDELT); in d40_residue()
1422 return num_elt * d40c->dma_cfg.dst_info.data_width; in d40_residue()
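
d40_residue() above converts the remaining element count (ECNT) into bytes using the configured destination data width. A minimal sketch, assuming the *_ECNT_POS shift constants pair with the masks shown above and that data_width is expressed in bytes:

    /* Sketch: bytes left = remaining elements * element width in bytes. */
    u32 num_elt;

    if (chan_is_logical(d40c))
            num_elt = (readl(&d40c->lcpa->lcsp2) & D40_MEM_LCSP2_ECNT_MASK)
                      >> D40_MEM_LCSP2_ECNT_POS;
    else
            num_elt = (readl(chan_base(d40c) + D40_CHAN_REG_SDELT)
                       & D40_SREG_ELEM_PHY_ECNT_MASK)
                      >> D40_SREG_ELEM_PHY_ECNT_POS;

    return num_elt * d40c->dma_cfg.dst_info.data_width;
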
1425 static bool d40_tx_is_linked(struct d40_chan *d40c) in d40_tx_is_linked() argument
1429 if (chan_is_logical(d40c)) in d40_tx_is_linked()
1430 is_link = readl(&d40c->lcpa->lcsp3) & D40_MEM_LCSP3_DLOS_MASK; in d40_tx_is_linked()
1432 is_link = readl(chan_base(d40c) + D40_CHAN_REG_SDLNK) in d40_tx_is_linked()
1440 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_pause() local
1444 if (d40c->phy_chan == NULL) { in d40_pause()
1445 chan_err(d40c, "Channel is not allocated!\n"); in d40_pause()
1449 if (!d40c->busy) in d40_pause()
1452 spin_lock_irqsave(&d40c->lock, flags); in d40_pause()
1453 pm_runtime_get_sync(d40c->base->dev); in d40_pause()
1455 res = d40_channel_execute_command(d40c, D40_DMA_SUSPEND_REQ); in d40_pause()
1457 pm_runtime_mark_last_busy(d40c->base->dev); in d40_pause()
1458 pm_runtime_put_autosuspend(d40c->base->dev); in d40_pause()
1459 spin_unlock_irqrestore(&d40c->lock, flags); in d40_pause()
1465 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_resume() local
1469 if (d40c->phy_chan == NULL) { in d40_resume()
1470 chan_err(d40c, "Channel is not allocated!\n"); in d40_resume()
1474 if (!d40c->busy) in d40_resume()
1477 spin_lock_irqsave(&d40c->lock, flags); in d40_resume()
1478 pm_runtime_get_sync(d40c->base->dev); in d40_resume()
1481 if (d40_residue(d40c) || d40_tx_is_linked(d40c)) in d40_resume()
1482 res = d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_resume()
1484 pm_runtime_mark_last_busy(d40c->base->dev); in d40_resume()
1485 pm_runtime_put_autosuspend(d40c->base->dev); in d40_resume()
1486 spin_unlock_irqrestore(&d40c->lock, flags); in d40_resume()
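
d40_pause() and d40_resume() above share one bracket: take the channel lock, hold a runtime-PM reference across the command, then release it with autosuspend. Condensed:

    /* Sketch of the pause/resume bracket (error handling elided; the command
     * is D40_DMA_SUSPEND_REQ for pause, D40_DMA_RUN for resume). */
    spin_lock_irqsave(&d40c->lock, flags);
    pm_runtime_get_sync(d40c->base->dev);

    res = d40_channel_execute_command(d40c, command);

    pm_runtime_mark_last_busy(d40c->base->dev);
    pm_runtime_put_autosuspend(d40c->base->dev);
    spin_unlock_irqrestore(&d40c->lock, flags);
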
1492 struct d40_chan *d40c = container_of(tx->chan, in d40_tx_submit() local
1499 spin_lock_irqsave(&d40c->lock, flags); in d40_tx_submit()
1501 d40_desc_queue(d40c, d40d); in d40_tx_submit()
1502 spin_unlock_irqrestore(&d40c->lock, flags); in d40_tx_submit()
1507 static int d40_start(struct d40_chan *d40c) in d40_start() argument
1509 return d40_channel_execute_command(d40c, D40_DMA_RUN); in d40_start()
1512 static struct d40_desc *d40_queue_start(struct d40_chan *d40c) in d40_queue_start() argument
1518 d40d = d40_first_queued(d40c); in d40_queue_start()
1521 if (!d40c->busy) { in d40_queue_start()
1522 d40c->busy = true; in d40_queue_start()
1523 pm_runtime_get_sync(d40c->base->dev); in d40_queue_start()
1530 d40_desc_submit(d40c, d40d); in d40_queue_start()
1533 d40_desc_load(d40c, d40d); in d40_queue_start()
1536 err = d40_start(d40c); in d40_queue_start()
1546 static void dma_tc_handle(struct d40_chan *d40c) in dma_tc_handle() argument
1551 d40d = d40_first_active_get(d40c); in dma_tc_handle()
1564 && !d40_tx_is_linked(d40c) in dma_tc_handle()
1565 && !d40_residue(d40c)) { in dma_tc_handle()
1566 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1567 d40_desc_load(d40c, d40d); in dma_tc_handle()
1568 (void) d40_start(d40c); in dma_tc_handle()
1574 d40_lcla_free_all(d40c, d40d); in dma_tc_handle()
1577 d40_desc_load(d40c, d40d); in dma_tc_handle()
1579 (void) d40_start(d40c); in dma_tc_handle()
1583 if (d40_queue_start(d40c) == NULL) { in dma_tc_handle()
1584 d40c->busy = false; in dma_tc_handle()
1586 pm_runtime_mark_last_busy(d40c->base->dev); in dma_tc_handle()
1587 pm_runtime_put_autosuspend(d40c->base->dev); in dma_tc_handle()
1591 d40_desc_done(d40c, d40d); in dma_tc_handle()
1594 d40c->pending_tx++; in dma_tc_handle()
1595 tasklet_schedule(&d40c->tasklet); in dma_tc_handle()
1601 struct d40_chan *d40c = from_tasklet(d40c, t, tasklet); in dma_tasklet() local
1607 spin_lock_irqsave(&d40c->lock, flags); in dma_tasklet()
1610 d40d = d40_first_done(d40c); in dma_tasklet()
1613 d40d = d40_first_active_get(d40c); in dma_tasklet()
1625 if (d40c->pending_tx == 0) { in dma_tasklet()
1626 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1637 d40_desc_free(d40c, d40d); in dma_tasklet()
1640 d40_lcla_free_all(d40c, d40d); in dma_tasklet()
1641 list_add_tail(&d40d->node, &d40c->client); in dma_tasklet()
1646 d40c->pending_tx--; in dma_tasklet()
1648 if (d40c->pending_tx) in dma_tasklet()
1649 tasklet_schedule(&d40c->tasklet); in dma_tasklet()
1651 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
1659 if (d40c->pending_tx > 0) in dma_tasklet()
1660 d40c->pending_tx--; in dma_tasklet()
1661 spin_unlock_irqrestore(&d40c->lock, flags); in dma_tasklet()
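
dma_tc_handle() and dma_tasklet() above split completion work between interrupt and softirq context; an outline of the hand-off, inferred from this listing:

    /*
     * Completion hand-off (illustrative outline):
     *   dma_tc_handle()  (IRQ):     reload the active descriptor if more LLIs
     *                               remain, otherwise d40_desc_done() and start
     *                               the next queued one (or drop busy plus the
     *                               runtime-PM reference); then pending_tx++ and
     *                               tasklet_schedule(&d40c->tasklet).
     *   dma_tasklet()  (softirq):   pick d40_first_done()/d40_first_active_get(),
     *                               complete it towards the client, then either
     *                               d40_desc_free() or move the descriptor to
     *                               d40c->client; pending_tx-- and reschedule
     *                               itself while pending_tx remains.
     */
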
1670 struct d40_chan *d40c; in d40_handle_interrupt() local
1695 d40c = base->lookup_phy_chans[idx]; in d40_handle_interrupt()
1697 d40c = base->lookup_log_chans[il[row].offset + idx]; in d40_handle_interrupt()
1699 if (!d40c) { in d40_handle_interrupt()
1710 spin_lock(&d40c->lock); in d40_handle_interrupt()
1713 dma_tc_handle(d40c); in d40_handle_interrupt()
1718 spin_unlock(&d40c->lock); in d40_handle_interrupt()
1726 static int d40_validate_conf(struct d40_chan *d40c, in d40_validate_conf() argument
1733 chan_err(d40c, "Invalid direction.\n"); in d40_validate_conf()
1737 if ((is_log && conf->dev_type > d40c->base->num_log_chans) || in d40_validate_conf()
1738 (!is_log && conf->dev_type > d40c->base->num_phy_chans) || in d40_validate_conf()
1740 chan_err(d40c, "Invalid device type (%d)\n", conf->dev_type); in d40_validate_conf()
1749 chan_err(d40c, "periph to periph not supported\n"); in d40_validate_conf()
1762 chan_err(d40c, "src (burst x width) != dst (burst x width)\n"); in d40_validate_conf()
1856 static int d40_allocate_channel(struct d40_chan *d40c, bool *first_phy_user) in d40_allocate_channel() argument
1858 int dev_type = d40c->dma_cfg.dev_type; in d40_allocate_channel()
1867 bool is_log = d40c->dma_cfg.mode == STEDMA40_MODE_LOGICAL; in d40_allocate_channel()
1869 phys = d40c->base->phy_res; in d40_allocate_channel()
1870 num_phy_chans = d40c->base->num_phy_chans; in d40_allocate_channel()
1872 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_allocate_channel()
1875 } else if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_allocate_channel()
1876 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1887 if (d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_allocate_channel()
1889 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1890 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1904 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1917 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1918 d40c->log_num = D40_PHY_CHAN; in d40_allocate_channel()
1925 for (j = 0; j < d40c->base->num_phy_chans; j += 8) { in d40_allocate_channel()
1928 if (d40c->dma_cfg.use_fixed_channel) { in d40_allocate_channel()
1929 i = d40c->dma_cfg.phy_channel; in d40_allocate_channel()
1932 dev_err(chan2dev(d40c), in d40_allocate_channel()
1941 dev_err(chan2dev(d40c), in d40_allocate_channel()
1970 d40c->phy_chan = &phys[i]; in d40_allocate_channel()
1971 d40c->log_num = log_num; in d40_allocate_channel()
1975 d40c->base->lookup_log_chans[d40c->log_num] = d40c; in d40_allocate_channel()
1977 d40c->base->lookup_phy_chans[d40c->phy_chan->num] = d40c; in d40_allocate_channel()
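
d40_allocate_channel() above resolves the channel type before touching the hardware; a rough outline of the decision, pieced together from the lines shown:

    /*
     * Allocation outline (illustrative):
     *   - dma_cfg.mode == STEDMA40_MODE_LOGICAL selects logical vs. physical.
     *   - DMA_MEM_TO_MEM on a physical channel: scan base->phy_res[] (or take
     *     dma_cfg.phy_channel when use_fixed_channel is set) and mark
     *     log_num = D40_PHY_CHAN.
     *   - device transfers on a logical channel: find a physical channel whose
     *     event group can carry dma_cfg.dev_type, then record the logical number.
     *   - publish the result in lookup_log_chans[] or lookup_phy_chans[].
     */
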
1983 static int d40_config_memcpy(struct d40_chan *d40c) in d40_config_memcpy() argument
1985 dma_cap_mask_t cap = d40c->chan.device->cap_mask; in d40_config_memcpy()
1988 d40c->dma_cfg = dma40_memcpy_conf_log; in d40_config_memcpy()
1989 d40c->dma_cfg.dev_type = dma40_memcpy_channels[d40c->chan.chan_id]; in d40_config_memcpy()
1991 d40_log_cfg(&d40c->dma_cfg, in d40_config_memcpy()
1992 &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_config_memcpy()
1996 d40c->dma_cfg = dma40_memcpy_conf_phy; in d40_config_memcpy()
1999 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_TIM_POS); in d40_config_memcpy()
2002 d40c->src_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
2003 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_EIM_POS); in d40_config_memcpy()
2006 chan_err(d40c, "No memcpy\n"); in d40_config_memcpy()
2013 static int d40_free_dma(struct d40_chan *d40c) in d40_free_dma() argument
2017 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_free_dma()
2018 struct d40_phy_res *phy = d40c->phy_chan; in d40_free_dma()
2022 d40_term_all(d40c); in d40_free_dma()
2025 chan_err(d40c, "phy == null\n"); in d40_free_dma()
2031 chan_err(d40c, "channel already free\n"); in d40_free_dma()
2035 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_free_dma()
2036 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) in d40_free_dma()
2038 else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_free_dma()
2041 chan_err(d40c, "Unknown direction\n"); in d40_free_dma()
2045 pm_runtime_get_sync(d40c->base->dev); in d40_free_dma()
2046 res = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_free_dma()
2048 chan_err(d40c, "stop failed\n"); in d40_free_dma()
2052 d40_alloc_mask_free(phy, is_src, chan_is_logical(d40c) ? event : 0); in d40_free_dma()
2054 if (chan_is_logical(d40c)) in d40_free_dma()
2055 d40c->base->lookup_log_chans[d40c->log_num] = NULL; in d40_free_dma()
2057 d40c->base->lookup_phy_chans[phy->num] = NULL; in d40_free_dma()
2059 if (d40c->busy) { in d40_free_dma()
2060 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2061 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2064 d40c->busy = false; in d40_free_dma()
2065 d40c->phy_chan = NULL; in d40_free_dma()
2066 d40c->configured = false; in d40_free_dma()
2068 pm_runtime_mark_last_busy(d40c->base->dev); in d40_free_dma()
2069 pm_runtime_put_autosuspend(d40c->base->dev); in d40_free_dma()
2073 static bool d40_is_paused(struct d40_chan *d40c) in d40_is_paused() argument
2075 void __iomem *chanbase = chan_base(d40c); in d40_is_paused()
2080 u32 event = D40_TYPE_TO_EVENT(d40c->dma_cfg.dev_type); in d40_is_paused()
2082 spin_lock_irqsave(&d40c->lock, flags); in d40_is_paused()
2084 if (chan_is_physical(d40c)) { in d40_is_paused()
2085 if (d40c->phy_chan->num % 2 == 0) in d40_is_paused()
2086 active_reg = d40c->base->virtbase + D40_DREG_ACTIVE; in d40_is_paused()
2088 active_reg = d40c->base->virtbase + D40_DREG_ACTIVO; in d40_is_paused()
2091 D40_CHAN_POS_MASK(d40c->phy_chan->num)) >> in d40_is_paused()
2092 D40_CHAN_POS(d40c->phy_chan->num); in d40_is_paused()
2098 if (d40c->dma_cfg.dir == DMA_MEM_TO_DEV || in d40_is_paused()
2099 d40c->dma_cfg.dir == DMA_MEM_TO_MEM) { in d40_is_paused()
2101 } else if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) { in d40_is_paused()
2104 chan_err(d40c, "Unknown direction\n"); in d40_is_paused()
2114 spin_unlock_irqrestore(&d40c->lock, flags); in d40_is_paused()
2121 struct d40_chan *d40c = in stedma40_residue() local
2126 spin_lock_irqsave(&d40c->lock, flags); in stedma40_residue()
2127 bytes_left = d40_residue(d40c); in stedma40_residue()
2128 spin_unlock_irqrestore(&d40c->lock, flags); in stedma40_residue()
2299 struct d40_chan *d40c = in stedma40_filter() local
2304 err = d40_validate_conf(d40c, info); in stedma40_filter()
2306 d40c->dma_cfg = *info; in stedma40_filter()
2308 err = d40_config_memcpy(d40c); in stedma40_filter()
2311 d40c->configured = true; in stedma40_filter()
2316 static void __d40_set_prio_rt(struct d40_chan *d40c, int dev_type, bool src) in __d40_set_prio_rt() argument
2318 bool realtime = d40c->dma_cfg.realtime; in __d40_set_prio_rt()
2319 bool highprio = d40c->dma_cfg.high_priority; in __d40_set_prio_rt()
2325 struct d40_gen_dmac *dmac = &d40c->base->gen_dmac; in __d40_set_prio_rt()
2336 if (!src && chan_is_logical(d40c)) in __d40_set_prio_rt()
2345 writel(bit, d40c->base->virtbase + prioreg + group * 4); in __d40_set_prio_rt()
2346 writel(bit, d40c->base->virtbase + rtreg + group * 4); in __d40_set_prio_rt()
2349 static void d40_set_prio_realtime(struct d40_chan *d40c) in d40_set_prio_realtime() argument
2351 if (d40c->base->rev < 3) in d40_set_prio_realtime()
2354 if ((d40c->dma_cfg.dir == DMA_DEV_TO_MEM) || in d40_set_prio_realtime()
2355 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2356 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, true); in d40_set_prio_realtime()
2358 if ((d40c->dma_cfg.dir == DMA_MEM_TO_DEV) || in d40_set_prio_realtime()
2359 (d40c->dma_cfg.dir == DMA_DEV_TO_DEV)) in d40_set_prio_realtime()
2360 __d40_set_prio_rt(d40c, d40c->dma_cfg.dev_type, false); in d40_set_prio_realtime()
2416 struct d40_chan *d40c = in d40_alloc_chan_resources() local
2419 spin_lock_irqsave(&d40c->lock, flags); in d40_alloc_chan_resources()
2424 if (!d40c->configured) { in d40_alloc_chan_resources()
2425 err = d40_config_memcpy(d40c); in d40_alloc_chan_resources()
2427 chan_err(d40c, "Failed to configure memcpy channel\n"); in d40_alloc_chan_resources()
2432 err = d40_allocate_channel(d40c, &is_free_phy); in d40_alloc_chan_resources()
2434 chan_err(d40c, "Failed to allocate channel\n"); in d40_alloc_chan_resources()
2435 d40c->configured = false; in d40_alloc_chan_resources()
2439 pm_runtime_get_sync(d40c->base->dev); in d40_alloc_chan_resources()
2441 d40_set_prio_realtime(d40c); in d40_alloc_chan_resources()
2443 if (chan_is_logical(d40c)) { in d40_alloc_chan_resources()
2444 if (d40c->dma_cfg.dir == DMA_DEV_TO_MEM) in d40_alloc_chan_resources()
2445 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2446 d40c->dma_cfg.dev_type * D40_LCPA_CHAN_SIZE; in d40_alloc_chan_resources()
2448 d40c->lcpa = d40c->base->lcpa_base + in d40_alloc_chan_resources()
2449 d40c->dma_cfg.dev_type * in d40_alloc_chan_resources()
2453 d40c->src_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2454 d40c->dst_def_cfg |= BIT(D40_SREG_CFG_LOG_GIM_POS); in d40_alloc_chan_resources()
2457 dev_dbg(chan2dev(d40c), "allocated %s channel (phy %d%s)\n", in d40_alloc_chan_resources()
2458 chan_is_logical(d40c) ? "logical" : "physical", in d40_alloc_chan_resources()
2459 d40c->phy_chan->num, in d40_alloc_chan_resources()
2460 d40c->dma_cfg.use_fixed_channel ? ", fixed" : ""); in d40_alloc_chan_resources()
2469 d40_config_write(d40c); in d40_alloc_chan_resources()
2471 pm_runtime_mark_last_busy(d40c->base->dev); in d40_alloc_chan_resources()
2472 pm_runtime_put_autosuspend(d40c->base->dev); in d40_alloc_chan_resources()
2473 spin_unlock_irqrestore(&d40c->lock, flags); in d40_alloc_chan_resources()
2479 struct d40_chan *d40c = in d40_free_chan_resources() local
2484 if (d40c->phy_chan == NULL) { in d40_free_chan_resources()
2485 chan_err(d40c, "Cannot free unallocated channel\n"); in d40_free_chan_resources()
2489 spin_lock_irqsave(&d40c->lock, flags); in d40_free_chan_resources()
2491 err = d40_free_dma(d40c); in d40_free_chan_resources()
2494 chan_err(d40c, "Failed to free channel\n"); in d40_free_chan_resources()
2495 spin_unlock_irqrestore(&d40c->lock, flags); in d40_free_chan_resources()
2565 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_tx_status() local
2568 if (d40c->phy_chan == NULL) { in d40_tx_status()
2569 chan_err(d40c, "Cannot read status of unallocated channel\n"); in d40_tx_status()
2577 if (d40_is_paused(d40c)) in d40_tx_status()
2585 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_issue_pending() local
2588 if (d40c->phy_chan == NULL) { in d40_issue_pending()
2589 chan_err(d40c, "Channel is not allocated!\n"); in d40_issue_pending()
2593 spin_lock_irqsave(&d40c->lock, flags); in d40_issue_pending()
2595 list_splice_tail_init(&d40c->pending_queue, &d40c->queue); in d40_issue_pending()
2598 if (!d40c->busy) in d40_issue_pending()
2599 (void) d40_queue_start(d40c); in d40_issue_pending()
2601 spin_unlock_irqrestore(&d40c->lock, flags); in d40_issue_pending()
2607 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_terminate_all() local
2610 if (d40c->phy_chan == NULL) { in d40_terminate_all()
2611 chan_err(d40c, "Channel is not allocated!\n"); in d40_terminate_all()
2615 spin_lock_irqsave(&d40c->lock, flags); in d40_terminate_all()
2617 pm_runtime_get_sync(d40c->base->dev); in d40_terminate_all()
2618 ret = d40_channel_execute_command(d40c, D40_DMA_STOP); in d40_terminate_all()
2620 chan_err(d40c, "Failed to stop channel\n"); in d40_terminate_all()
2622 d40_term_all(d40c); in d40_terminate_all()
2623 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2624 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2625 if (d40c->busy) { in d40_terminate_all()
2626 pm_runtime_mark_last_busy(d40c->base->dev); in d40_terminate_all()
2627 pm_runtime_put_autosuspend(d40c->base->dev); in d40_terminate_all()
2629 d40c->busy = false; in d40_terminate_all()
2631 spin_unlock_irqrestore(&d40c->lock, flags); in d40_terminate_all()
2636 dma40_config_to_halfchannel(struct d40_chan *d40c, in dma40_config_to_halfchannel() argument
2642 if (chan_is_logical(d40c)) { in dma40_config_to_halfchannel()
2671 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_set_runtime_config() local
2673 memcpy(&d40c->slave_config, config, sizeof(*config)); in d40_set_runtime_config()
2683 struct d40_chan *d40c = container_of(chan, struct d40_chan, chan); in d40_set_runtime_config_write() local
2684 struct stedma40_chan_cfg *cfg = &d40c->dma_cfg; in d40_set_runtime_config_write()
2690 if (d40c->phy_chan == NULL) { in d40_set_runtime_config_write()
2691 chan_err(d40c, "Channel is not allocated!\n"); in d40_set_runtime_config_write()
2704 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2720 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2732 dev_err(d40c->base->dev, in d40_set_runtime_config_write()
2739 dev_err(d40c->base->dev, "no address supplied\n"); in d40_set_runtime_config_write()
2744 dev_err(d40c->base->dev, in d40_set_runtime_config_write()
2773 ret = dma40_config_to_halfchannel(d40c, &cfg->src_info, in d40_set_runtime_config_write()
2778 ret = dma40_config_to_halfchannel(d40c, &cfg->dst_info, in d40_set_runtime_config_write()
2784 if (chan_is_logical(d40c)) in d40_set_runtime_config_write()
2785 d40_log_cfg(cfg, &d40c->log_def.lcsp1, &d40c->log_def.lcsp3); in d40_set_runtime_config_write()
2787 d40_phy_cfg(cfg, &d40c->src_def_cfg, &d40c->dst_def_cfg); in d40_set_runtime_config_write()
2790 d40c->runtime_addr = config_addr; in d40_set_runtime_config_write()
2791 d40c->runtime_direction = direction; in d40_set_runtime_config_write()
2792 dev_dbg(d40c->base->dev, in d40_set_runtime_config_write()
2810 struct d40_chan *d40c; in d40_chan_init() local
2815 d40c = &chans[i]; in d40_chan_init()
2816 d40c->base = base; in d40_chan_init()
2817 d40c->chan.device = dma; in d40_chan_init()
2819 spin_lock_init(&d40c->lock); in d40_chan_init()
2821 d40c->log_num = D40_PHY_CHAN; in d40_chan_init()
2823 INIT_LIST_HEAD(&d40c->done); in d40_chan_init()
2824 INIT_LIST_HEAD(&d40c->active); in d40_chan_init()
2825 INIT_LIST_HEAD(&d40c->queue); in d40_chan_init()
2826 INIT_LIST_HEAD(&d40c->pending_queue); in d40_chan_init()
2827 INIT_LIST_HEAD(&d40c->client); in d40_chan_init()
2828 INIT_LIST_HEAD(&d40c->prepare_queue); in d40_chan_init()
2830 tasklet_setup(&d40c->tasklet, dma_tasklet); in d40_chan_init()
2832 list_add_tail(&d40c->chan.device_node, in d40_chan_init()