Lines Matching refs:cmt

91 struct sh_cmt_device *cmt; member
244 return ch->cmt->info->read_control(ch->iostart, 0); in sh_cmt_read_cmstr()
246 return ch->cmt->info->read_control(ch->cmt->mapbase, 0); in sh_cmt_read_cmstr()
255 ch->cmt->info->write_control(ch->iostart, 0, value); in sh_cmt_write_cmstr()
256 udelay(ch->cmt->reg_delay); in sh_cmt_write_cmstr()
258 ch->cmt->info->write_control(ch->cmt->mapbase, 0, value); in sh_cmt_write_cmstr()
259 udelay(ch->cmt->reg_delay); in sh_cmt_write_cmstr()
266 return ch->cmt->info->read_control(ch->ioctrl, CMCSR); in sh_cmt_read_cmcsr()
274 ch->cmt->info->write_control(ch->ioctrl, CMCSR, value); in sh_cmt_write_cmcsr()
275 udelay(ch->cmt->reg_delay); in sh_cmt_write_cmcsr()
281 return ch->cmt->info->read_count(ch->ioctrl, CMCNT); in sh_cmt_read_cmcnt()
287 unsigned int cmcnt_delay = DIV_ROUND_UP(3 * ch->cmt->reg_delay, 2); in sh_cmt_write_cmcnt()
290 if (ch->cmt->info->model > SH_CMT_16BIT) { in sh_cmt_write_cmcnt()
298 ch->cmt->info->write_count(ch->ioctrl, CMCNT, value); in sh_cmt_write_cmcnt()
305 u32 old_value = ch->cmt->info->read_count(ch->ioctrl, CMCOR); in sh_cmt_write_cmcor()
308 ch->cmt->info->write_count(ch->ioctrl, CMCOR, value); in sh_cmt_write_cmcor()
309 udelay(ch->cmt->reg_delay); in sh_cmt_write_cmcor()
318 o1 = sh_cmt_read_cmcsr(ch) & ch->cmt->info->overflow_bit; in sh_cmt_get_counter()
326 o1 = sh_cmt_read_cmcsr(ch) & ch->cmt->info->overflow_bit; in sh_cmt_get_counter()
340 raw_spin_lock_irqsave(&ch->cmt->lock, flags); in sh_cmt_start_stop_ch()
349 raw_spin_unlock_irqrestore(&ch->cmt->lock, flags); in sh_cmt_start_stop_ch()
356 dev_pm_syscore_device(&ch->cmt->pdev->dev, true); in sh_cmt_enable()
359 ret = clk_enable(ch->cmt->clk); in sh_cmt_enable()
361 dev_err(&ch->cmt->pdev->dev, "ch%u: cannot enable clock\n", in sh_cmt_enable()
370 if (ch->cmt->info->width == 16) { in sh_cmt_enable()
374 u32 cmtout = ch->cmt->info->model <= SH_CMT_48BIT ? in sh_cmt_enable()
385 dev_err(&ch->cmt->pdev->dev, "ch%u: cannot clear CMCNT\n", in sh_cmt_enable()
396 clk_disable(ch->cmt->clk); in sh_cmt_enable()
411 clk_disable(ch->cmt->clk); in sh_cmt_disable()
413 dev_pm_syscore_device(&ch->cmt->pdev->dev, false); in sh_cmt_disable()
503 dev_warn(&ch->cmt->pdev->dev, "ch%u: too long delay\n", in sh_cmt_clock_event_program_verify()
512 dev_warn(&ch->cmt->pdev->dev, "ch%u: delta out of range\n", in __sh_cmt_set_next()
535 ch->cmt->info->clear_bits); in sh_cmt_interrupt()
587 pm_runtime_get_sync(&ch->cmt->pdev->dev); in sh_cmt_start()
593 pm_runtime_get_sync(&ch->cmt->pdev->dev); in sh_cmt_start()
602 if (ch->cmt->num_channels == 1 && in sh_cmt_start()
624 pm_runtime_put(&ch->cmt->pdev->dev); in sh_cmt_stop()
634 pm_runtime_put(&ch->cmt->pdev->dev); in sh_cmt_stop()
647 if (ch->cmt->num_channels == 1) { in sh_cmt_clocksource_read()
700 dev_pm_genpd_suspend(&ch->cmt->pdev->dev); in sh_cmt_clocksource_suspend()
710 dev_pm_genpd_resume(&ch->cmt->pdev->dev); in sh_cmt_clocksource_resume()
726 cs->mask = CLOCKSOURCE_MASK(ch->cmt->info->width); in sh_cmt_register_clocksource()
729 dev_info(&ch->cmt->pdev->dev, "ch%u: used as clock source\n", in sh_cmt_register_clocksource()
732 clocksource_register_hz(cs, ch->cmt->rate); in sh_cmt_register_clocksource()
746 sh_cmt_set_next(ch, ((ch->cmt->rate + HZ/2) / HZ) - 1); in sh_cmt_clock_event_start()
768 dev_info(&ch->cmt->pdev->dev, "ch%u: used for %s clock events\n", in sh_cmt_clock_event_set_state()
808 dev_pm_genpd_suspend(&ch->cmt->pdev->dev); in sh_cmt_clock_event_suspend()
809 clk_unprepare(ch->cmt->clk); in sh_cmt_clock_event_suspend()
816 clk_prepare(ch->cmt->clk); in sh_cmt_clock_event_resume()
817 dev_pm_genpd_resume(&ch->cmt->pdev->dev); in sh_cmt_clock_event_resume()
827 irq = platform_get_irq(ch->cmt->pdev, ch->index); in sh_cmt_register_clockevent()
833 dev_name(&ch->cmt->pdev->dev), ch); in sh_cmt_register_clockevent()
835 dev_err(&ch->cmt->pdev->dev, "ch%u: failed to request irq %d\n", in sh_cmt_register_clockevent()
854 ced->mult = div_sc(ch->cmt->rate, NSEC_PER_SEC, ced->shift); in sh_cmt_register_clockevent()
860 dev_info(&ch->cmt->pdev->dev, "ch%u: used for clock events\n", in sh_cmt_register_clockevent()
873 ch->cmt->has_clockevent = true; in sh_cmt_register()
880 ch->cmt->has_clocksource = true; in sh_cmt_register()
889 bool clocksource, struct sh_cmt_device *cmt) in sh_cmt_setup_channel() argument
898 ch->cmt = cmt; in sh_cmt_setup_channel()
908 switch (cmt->info->model) { in sh_cmt_setup_channel()
910 ch->ioctrl = cmt->mapbase + 2 + ch->hwidx * 6; in sh_cmt_setup_channel()
914 ch->ioctrl = cmt->mapbase + 0x10 + ch->hwidx * 0x10; in sh_cmt_setup_channel()
918 ch->iostart = cmt->mapbase + ch->hwidx * 0x100; in sh_cmt_setup_channel()
923 value = ioread32(cmt->mapbase + CMCLKE); in sh_cmt_setup_channel()
925 iowrite32(value, cmt->mapbase + CMCLKE); in sh_cmt_setup_channel()
929 if (cmt->info->width == (sizeof(ch->max_match_value) * 8)) in sh_cmt_setup_channel()
932 ch->max_match_value = (1 << cmt->info->width) - 1; in sh_cmt_setup_channel()
937 ret = sh_cmt_register(ch, dev_name(&cmt->pdev->dev), in sh_cmt_setup_channel()
940 dev_err(&cmt->pdev->dev, "ch%u: registration failed\n", in sh_cmt_setup_channel()
949 static int sh_cmt_map_memory(struct sh_cmt_device *cmt) in sh_cmt_map_memory() argument
953 mem = platform_get_resource(cmt->pdev, IORESOURCE_MEM, 0); in sh_cmt_map_memory()
955 dev_err(&cmt->pdev->dev, "failed to get I/O memory\n"); in sh_cmt_map_memory()
959 cmt->mapbase = ioremap(mem->start, resource_size(mem)); in sh_cmt_map_memory()
960 if (cmt->mapbase == NULL) { in sh_cmt_map_memory()
961 dev_err(&cmt->pdev->dev, "failed to remap I/O memory\n"); in sh_cmt_map_memory()
1022 static int sh_cmt_setup(struct sh_cmt_device *cmt, struct platform_device *pdev) in sh_cmt_setup() argument
1028 cmt->pdev = pdev; in sh_cmt_setup()
1029 raw_spin_lock_init(&cmt->lock); in sh_cmt_setup()
1032 cmt->info = of_device_get_match_data(&pdev->dev); in sh_cmt_setup()
1033 cmt->hw_channels = cmt->info->channels_mask; in sh_cmt_setup()
1038 cmt->info = (const struct sh_cmt_info *)id->driver_data; in sh_cmt_setup()
1039 cmt->hw_channels = cfg->channels_mask; in sh_cmt_setup()
1041 dev_err(&cmt->pdev->dev, "missing platform data\n"); in sh_cmt_setup()
1046 cmt->clk = clk_get(&cmt->pdev->dev, "fck"); in sh_cmt_setup()
1047 if (IS_ERR(cmt->clk)) { in sh_cmt_setup()
1048 dev_err(&cmt->pdev->dev, "cannot get clock\n"); in sh_cmt_setup()
1049 return PTR_ERR(cmt->clk); in sh_cmt_setup()
1052 ret = clk_prepare(cmt->clk); in sh_cmt_setup()
1057 ret = clk_enable(cmt->clk); in sh_cmt_setup()
1061 rate = clk_get_rate(cmt->clk); in sh_cmt_setup()
1068 if (cmt->info->model >= SH_CMT_48BIT) in sh_cmt_setup()
1069 cmt->reg_delay = DIV_ROUND_UP(2UL * USEC_PER_SEC, rate); in sh_cmt_setup()
1070 cmt->rate = rate / (cmt->info->width == 16 ? 512 : 8); in sh_cmt_setup()
1073 ret = sh_cmt_map_memory(cmt); in sh_cmt_setup()
1078 cmt->num_channels = hweight8(cmt->hw_channels); in sh_cmt_setup()
1079 cmt->channels = kcalloc(cmt->num_channels, sizeof(*cmt->channels), in sh_cmt_setup()
1081 if (cmt->channels == NULL) { in sh_cmt_setup()
1090 for (i = 0, mask = cmt->hw_channels; i < cmt->num_channels; ++i) { in sh_cmt_setup()
1092 bool clocksource = i == 1 || cmt->num_channels == 1; in sh_cmt_setup()
1095 ret = sh_cmt_setup_channel(&cmt->channels[i], i, hwidx, in sh_cmt_setup()
1096 clockevent, clocksource, cmt); in sh_cmt_setup()
1103 clk_disable(cmt->clk); in sh_cmt_setup()
1105 platform_set_drvdata(pdev, cmt); in sh_cmt_setup()
1110 kfree(cmt->channels); in sh_cmt_setup()
1111 iounmap(cmt->mapbase); in sh_cmt_setup()
1113 clk_disable(cmt->clk); in sh_cmt_setup()
1115 clk_unprepare(cmt->clk); in sh_cmt_setup()
1117 clk_put(cmt->clk); in sh_cmt_setup()
1123 struct sh_cmt_device *cmt = platform_get_drvdata(pdev); in sh_cmt_probe() local
1131 if (cmt) { in sh_cmt_probe()
1136 cmt = kzalloc(sizeof(*cmt), GFP_KERNEL); in sh_cmt_probe()
1137 if (cmt == NULL) in sh_cmt_probe()
1140 ret = sh_cmt_setup(cmt, pdev); in sh_cmt_probe()
1142 kfree(cmt); in sh_cmt_probe()
1150 if (cmt->has_clockevent || cmt->has_clocksource) in sh_cmt_probe()
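
Taken together, these matches trace the driver's data model: every per-channel operation reaches shared state through the ch->cmt back-pointer (clock, register window, per-model info, platform device, lock), while sh_cmt_setup() owns the device-wide fields. Below is a minimal sketch of that relationship, reconstructed only from the fields visible in the matches above; the real definitions in drivers/clocksource/sh_cmt.c contain additional members and may type or order fields differently.

/* Sketch only: reconstructed from the references listed above, not the
 * complete upstream definitions. */
struct sh_cmt_channel {
	struct sh_cmt_device *cmt;	/* back-pointer dereferenced as ch->cmt-> above */
	unsigned int index;		/* logical channel number ("ch%u" in messages) */
	unsigned int hwidx;		/* hardware channel, used to derive iostart/ioctrl */
	void __iomem *iostart;		/* per-channel CMSTR block, when the model has one */
	void __iomem *ioctrl;		/* per-channel CMCSR/CMCNT/CMCOR registers */
	u32 max_match_value;		/* (1 << width) - 1, or full mask when width == 32 */
};

struct sh_cmt_device {
	struct platform_device *pdev;	/* used for dev_*() messages, IRQs, runtime PM */
	const struct sh_cmt_info *info;	/* model, width, overflow_bit, register accessors */
	void __iomem *mapbase;		/* shared register block mapped in sh_cmt_map_memory() */
	struct clk *clk;		/* "fck" functional clock */
	unsigned long rate;		/* rate / (width == 16 ? 512 : 8), see sh_cmt_setup() */
	unsigned int reg_delay;		/* post-write settle delay on SH_CMT_48BIT and newer */
	raw_spinlock_t lock;		/* serializes the shared start/stop register */
	struct sh_cmt_channel *channels;
	unsigned int num_channels;
	unsigned int hw_channels;	/* channel mask from match data / platform data */
	bool has_clockevent;
	bool has_clocksource;
};

Keeping only a back-pointer in the channel keeps each sh_cmt_channel small while all channels share one clock, one lock and one register window, which is why nearly every line above dereferences ch->cmt-> rather than a per-channel copy.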