Lines Matching +full:parent +full:- +full:clk
1 // SPDX-License-Identifier: GPL-2.0
7 * Based on renesas-cpg-mssr.c
15 #include <linux/clk.h>
16 #include <linux/clk-provider.h>
17 #include <linux/clk/renesas.h>
28 #include <linux/reset-controller.h>
32 #include <dt-bindings/clock/renesas-cpg-mssr.h>
34 #include "rzg2l-cpg.h"
85 * struct rzg2l_cpg_priv - Clock Pulse Generator Private Data
94 * @num_resets: Number of Module Resets in info->resets[]
106 struct clk **clks;
124 static struct clk * __init
126 struct clk **clks, in rzg2l_cpg_div_clk_register()
130 struct device *dev = priv->dev; in rzg2l_cpg_div_clk_register()
131 const struct clk *parent; in rzg2l_cpg_div_clk_register() local
135 parent = clks[core->parent & 0xffff]; in rzg2l_cpg_div_clk_register()
136 if (IS_ERR(parent)) in rzg2l_cpg_div_clk_register()
137 return ERR_CAST(parent); in rzg2l_cpg_div_clk_register()
139 parent_name = __clk_get_name(parent); in rzg2l_cpg_div_clk_register()
141 if (core->dtable) in rzg2l_cpg_div_clk_register()
142 clk_hw = clk_hw_register_divider_table(dev, core->name, in rzg2l_cpg_div_clk_register()
144 base + GET_REG_OFFSET(core->conf), in rzg2l_cpg_div_clk_register()
145 GET_SHIFT(core->conf), in rzg2l_cpg_div_clk_register()
146 GET_WIDTH(core->conf), in rzg2l_cpg_div_clk_register()
147 core->flag, in rzg2l_cpg_div_clk_register()
148 core->dtable, in rzg2l_cpg_div_clk_register()
149 &priv->rmw_lock); in rzg2l_cpg_div_clk_register()
151 clk_hw = clk_hw_register_divider(dev, core->name, in rzg2l_cpg_div_clk_register()
153 base + GET_REG_OFFSET(core->conf), in rzg2l_cpg_div_clk_register()
154 GET_SHIFT(core->conf), in rzg2l_cpg_div_clk_register()
155 GET_WIDTH(core->conf), in rzg2l_cpg_div_clk_register()
156 core->flag, &priv->rmw_lock); in rzg2l_cpg_div_clk_register()
161 return clk_hw->clk; in rzg2l_cpg_div_clk_register()
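
The fragment above registers a core divider either through clk_hw_register_divider_table(), when core->dtable supplies explicit field-value/divisor pairs, or through the generic clk_hw_register_divider() otherwise. A minimal standalone sketch of how such a table maps a register field to a divisor; the table contents are invented for illustration (the real tables live alongside the per-SoC clock lists):

```c
/* Standalone sketch (not kernel code): how a divider table maps a register
 * field value to a divisor, as clk_hw_register_divider_table() does for the
 * RZ/G2L core clocks. Table values here are illustrative only. */
#include <stdio.h>

struct div_table { unsigned int val; unsigned int div; };

static const struct div_table example_dtable[] = {
        { 0, 1 }, { 1, 2 }, { 2, 4 }, { 3, 8 },
        { 0, 0 }        /* sentinel entry terminates the table */
};

static unsigned long divider_recalc(unsigned long parent_rate,
                                    unsigned int field,
                                    const struct div_table *t)
{
        for (; t->div; t++)
                if (t->val == field)
                        return parent_rate / t->div;
        return 0;       /* field value not described by the table */
}

int main(void)
{
        /* e.g. a 1100 MHz parent with the register field set to 2 -> /4 */
        printf("%lu\n", divider_recalc(1100000000UL, 2, example_dtable));
        return 0;
}
```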
164 static struct clk * __init
171 clk_hw = devm_clk_hw_register_mux(priv->dev, core->name, in rzg2l_cpg_mux_clk_register()
172 core->parent_names, core->num_parents, in rzg2l_cpg_mux_clk_register()
173 core->flag, in rzg2l_cpg_mux_clk_register()
174 base + GET_REG_OFFSET(core->conf), in rzg2l_cpg_mux_clk_register()
175 GET_SHIFT(core->conf), in rzg2l_cpg_mux_clk_register()
176 GET_WIDTH(core->conf), in rzg2l_cpg_mux_clk_register()
177 core->mux_flags, &priv->rmw_lock); in rzg2l_cpg_mux_clk_register()
181 return clk_hw->clk; in rzg2l_cpg_mux_clk_register()
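
Both register helpers above locate their control field through GET_REG_OFFSET(), GET_SHIFT() and GET_WIDTH(), which unpack a single conf word per clock. The packing below is purely hypothetical and only illustrates the idea of one packed descriptor per clock; the real macros are defined in rzg2l-cpg.h:

```c
/* Standalone sketch: unpacking a packed 'conf' descriptor. The bit layout
 * here is made up for illustration, not the driver's actual packing. */
#include <stdio.h>
#include <stdint.h>

#define CONF_PACK(off, shift, width) \
        (((uint32_t)(off) << 20) | ((uint32_t)(shift) << 12) | (width))
#define CONF_OFFSET(c)  (((c) >> 20) & 0xfff)
#define CONF_SHIFT(c)   (((c) >> 12) & 0xff)
#define CONF_WIDTH(c)   ((c) & 0xfff)

int main(void)
{
        uint32_t conf = CONF_PACK(0x200, 8, 2); /* hypothetical field */

        printf("off=0x%x shift=%u width=%u\n",
               (unsigned)CONF_OFFSET(conf), (unsigned)CONF_SHIFT(conf),
               (unsigned)CONF_WIDTH(conf));
        return 0;
}
```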
187 struct rzg2l_cpg_priv *priv = hwdata->priv; in rzg2l_cpg_sd_clk_mux_set_parent()
188 u32 off = GET_REG_OFFSET(hwdata->conf); in rzg2l_cpg_sd_clk_mux_set_parent()
189 u32 shift = GET_SHIFT(hwdata->conf); in rzg2l_cpg_sd_clk_mux_set_parent()
206 bitmask = (GENMASK(GET_WIDTH(hwdata->conf) - 1, 0) << shift) << 16; in rzg2l_cpg_sd_clk_mux_set_parent()
208 spin_lock_irqsave(&priv->rmw_lock, flags); in rzg2l_cpg_sd_clk_mux_set_parent()
210 writel(bitmask | ((clk_src_266 + 1) << shift), priv->base + off); in rzg2l_cpg_sd_clk_mux_set_parent()
212 ret = readl_poll_timeout_atomic(priv->base + CPG_CLKSTATUS, val, in rzg2l_cpg_sd_clk_mux_set_parent()
219 writel(bitmask | ((index + 1) << shift), priv->base + off); in rzg2l_cpg_sd_clk_mux_set_parent()
221 ret = readl_poll_timeout_atomic(priv->base + CPG_CLKSTATUS, val, in rzg2l_cpg_sd_clk_mux_set_parent()
225 spin_unlock_irqrestore(&priv->rmw_lock, flags); in rzg2l_cpg_sd_clk_mux_set_parent()
228 dev_err(priv->dev, "failed to switch clk source\n"); in rzg2l_cpg_sd_clk_mux_set_parent()
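
Two details of the SD mux fragment are worth noting: every store carries a write-enable mask in the upper 16 bits, so one field changes without a read-modify-write, and the field encodes the parent index plus one (the get_parent lines just below undo the +1). The driver also steps through the intermediate clk_src_266 source and polls CPG_CLKSTATUS after each write; the standalone sketch below models only the write pattern itself, with an illustrative field placement:

```c
/* Standalone sketch of the write-enable store pattern: bits [31:16] of the
 * written word select which of bits [15:0] the hardware may change. */
#include <stdio.h>
#include <stdint.h>

static uint16_t mux_reg;        /* stand-in for the SD mux control register */

/* Model of how the hardware interprets a 32-bit store to that register. */
static void hw_store(uint32_t word)
{
        uint16_t wen = word >> 16;      /* write-enable bits */
        uint16_t val = word & 0xffff;

        mux_reg = (mux_reg & (uint16_t)~wen) | (val & wen);
}

int main(void)
{
        unsigned int shift = 0, width = 2, index = 1;
        uint32_t bitmask = (((1u << width) - 1) << shift) << 16;

        /* Same shape as the driver's writel(): write-enable bits plus the
         * new field, stored as parent index + 1. */
        hw_store(bitmask | ((index + 1) << shift));
        printf("raw field = %u, parent index = %u\n",
               mux_reg & 0x3, (mux_reg & 0x3) - 1);
        return 0;
}
```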
236 struct rzg2l_cpg_priv *priv = hwdata->priv; in rzg2l_cpg_sd_clk_mux_get_parent()
237 u32 val = readl(priv->base + GET_REG_OFFSET(hwdata->conf)); in rzg2l_cpg_sd_clk_mux_get_parent()
239 val >>= GET_SHIFT(hwdata->conf); in rzg2l_cpg_sd_clk_mux_get_parent()
240 val &= GENMASK(GET_WIDTH(hwdata->conf) - 1, 0); in rzg2l_cpg_sd_clk_mux_get_parent()
242 return val ? val - 1 : 0; in rzg2l_cpg_sd_clk_mux_get_parent()
251 static struct clk * __init
261 clk_hw_data = devm_kzalloc(priv->dev, sizeof(*clk_hw_data), GFP_KERNEL); in rzg2l_cpg_sd_mux_clk_register()
263 return ERR_PTR(-ENOMEM); in rzg2l_cpg_sd_mux_clk_register()
265 clk_hw_data->priv = priv; in rzg2l_cpg_sd_mux_clk_register()
266 clk_hw_data->conf = core->conf; in rzg2l_cpg_sd_mux_clk_register()
268 init.name = GET_SHIFT(core->conf) ? "sd1" : "sd0"; in rzg2l_cpg_sd_mux_clk_register()
271 init.num_parents = core->num_parents; in rzg2l_cpg_sd_mux_clk_register()
272 init.parent_names = core->parent_names; in rzg2l_cpg_sd_mux_clk_register()
274 clk_hw = &clk_hw_data->hw; in rzg2l_cpg_sd_mux_clk_register()
275 clk_hw->init = &init; in rzg2l_cpg_sd_mux_clk_register()
277 ret = devm_clk_hw_register(priv->dev, clk_hw); in rzg2l_cpg_sd_mux_clk_register()
281 return clk_hw->clk; in rzg2l_cpg_sd_mux_clk_register()
290 params->pl5_intin = rate / MEGA; in rzg2l_cpg_get_foutpostdiv_rate()
291 params->pl5_fracin = div_u64(((u64)rate % MEGA) << 24, MEGA); in rzg2l_cpg_get_foutpostdiv_rate()
292 params->pl5_refdiv = 2; in rzg2l_cpg_get_foutpostdiv_rate()
293 params->pl5_postdiv1 = 1; in rzg2l_cpg_get_foutpostdiv_rate()
294 params->pl5_postdiv2 = 1; in rzg2l_cpg_get_foutpostdiv_rate()
295 params->pl5_spread = 0x16; in rzg2l_cpg_get_foutpostdiv_rate()
298 (params->pl5_intin << 24) + params->pl5_fracin), in rzg2l_cpg_get_foutpostdiv_rate()
299 params->pl5_refdiv) >> 24; in rzg2l_cpg_get_foutpostdiv_rate()
301 params->pl5_postdiv1 * params->pl5_postdiv2); in rzg2l_cpg_get_foutpostdiv_rate()
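
The parameter fragment above fixes pl5_refdiv to 2 and both post-dividers to 1, and splits the requested rate into an integer MHz part (pl5_intin) plus a 24-bit fraction (pl5_fracin), i.e. an 8.24 fixed-point multiplier. A standalone sketch of that arithmetic, assuming the 24 MHz EXTAL reference the driver is written around and plain truncating division in place of the kernel rounding helpers:

```c
/* Standalone sketch of the FOUTPOSTDIV arithmetic, assuming a 24 MHz EXTAL
 * and the fixed refdiv=2, postdiv1=postdiv2=1 parameters set above. */
#include <stdio.h>
#include <stdint.h>

#define MEGA            1000000ULL
#define EXTAL_MHZ       24ULL   /* assumed reference crystal frequency */

static uint64_t foutpostdiv(uint64_t rate)
{
        uint64_t intin = rate / MEGA;                   /* integer MHz part */
        uint64_t fracin = ((rate % MEGA) << 24) / MEGA; /* 24-bit fraction  */
        uint64_t refdiv = 2, postdiv1 = 1, postdiv2 = 1;
        uint64_t out;

        out = (EXTAL_MHZ * MEGA * (((intin << 24) + fracin) / refdiv)) >> 24;
        return out / (postdiv1 * postdiv2);
}

int main(void)
{
        /* A 148.5 MHz request yields a 1782000000 Hz FOUTPOSTDIV. */
        printf("%llu Hz\n", (unsigned long long)foutpostdiv(148500000ULL));
        return 0;
}
```

With these fixed parameters FOUTPOSTDIV comes out at roughly twelve times the requested rate; the /2 (FOUT1PH0), /2 (DSI divider A) and /3 (DSI divider B) stages further down the listing bring it back to the pixel clock.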
319 unsigned long rate = dsi_div->rate; in rzg2l_cpg_dsi_div_recalc_rate()
331 struct rzg2l_cpg_priv *priv = dsi_div->priv; in rzg2l_cpg_get_vclk_parent_rate()
337 if (priv->mux_dsi_div_params.clksrc) in rzg2l_cpg_get_vclk_parent_rate()
346 if (req->rate > MAX_VCLK_FREQ) in rzg2l_cpg_dsi_div_determine_rate()
347 req->rate = MAX_VCLK_FREQ; in rzg2l_cpg_dsi_div_determine_rate()
349 req->best_parent_rate = rzg2l_cpg_get_vclk_parent_rate(hw, req->rate); in rzg2l_cpg_dsi_div_determine_rate()
359 struct rzg2l_cpg_priv *priv = dsi_div->priv; in rzg2l_cpg_dsi_div_set_rate()
362 * MUX -->DIV_DSI_{A,B} -->M3 -->VCLK in rzg2l_cpg_dsi_div_set_rate()
365 * calculates the pll parameters for generating FOUTPOSTDIV and the clk in rzg2l_cpg_dsi_div_set_rate()
370 return -EINVAL; in rzg2l_cpg_dsi_div_set_rate()
372 dsi_div->rate = rate; in rzg2l_cpg_dsi_div_set_rate()
374 (priv->mux_dsi_div_params.dsi_div_a << 0) | in rzg2l_cpg_dsi_div_set_rate()
375 (priv->mux_dsi_div_params.dsi_div_b << 8), in rzg2l_cpg_dsi_div_set_rate()
376 priv->base + CPG_PL5_SDIV); in rzg2l_cpg_dsi_div_set_rate()
387 static struct clk * __init
389 struct clk **clks, in rzg2l_cpg_dsi_div_clk_register()
393 const struct clk *parent; in rzg2l_cpg_dsi_div_clk_register() local
399 parent = clks[core->parent & 0xffff]; in rzg2l_cpg_dsi_div_clk_register()
400 if (IS_ERR(parent)) in rzg2l_cpg_dsi_div_clk_register()
401 return ERR_CAST(parent); in rzg2l_cpg_dsi_div_clk_register()
403 clk_hw_data = devm_kzalloc(priv->dev, sizeof(*clk_hw_data), GFP_KERNEL); in rzg2l_cpg_dsi_div_clk_register()
405 return ERR_PTR(-ENOMEM); in rzg2l_cpg_dsi_div_clk_register()
407 clk_hw_data->priv = priv; in rzg2l_cpg_dsi_div_clk_register()
409 parent_name = __clk_get_name(parent); in rzg2l_cpg_dsi_div_clk_register()
410 init.name = core->name; in rzg2l_cpg_dsi_div_clk_register()
416 clk_hw = &clk_hw_data->hw; in rzg2l_cpg_dsi_div_clk_register()
417 clk_hw->init = &init; in rzg2l_cpg_dsi_div_clk_register()
419 ret = devm_clk_hw_register(priv->dev, clk_hw); in rzg2l_cpg_dsi_div_clk_register()
423 return clk_hw->clk; in rzg2l_cpg_dsi_div_clk_register()
438 struct clk_hw *parent; in rzg2l_cpg_pll5_4_clk_mux_determine_rate() local
440 struct rzg2l_cpg_priv *priv = hwdata->priv; in rzg2l_cpg_pll5_4_clk_mux_determine_rate()
442 parent = clk_hw_get_parent_by_index(hw, priv->mux_dsi_div_params.clksrc); in rzg2l_cpg_pll5_4_clk_mux_determine_rate()
443 req->best_parent_hw = parent; in rzg2l_cpg_pll5_4_clk_mux_determine_rate()
444 req->best_parent_rate = req->rate; in rzg2l_cpg_pll5_4_clk_mux_determine_rate()
452 struct rzg2l_cpg_priv *priv = hwdata->priv; in rzg2l_cpg_pll5_4_clk_mux_set_parent()
455 * FOUTPOSTDIV--->| in rzg2l_cpg_pll5_4_clk_mux_set_parent()
456 * | | -->MUX -->DIV_DSIA_B -->M3 -->VCLK in rzg2l_cpg_pll5_4_clk_mux_set_parent()
457 * |--FOUT1PH0-->| in rzg2l_cpg_pll5_4_clk_mux_set_parent()
459 * Based on the dot clock, the DSI divider clock calculates the parent in rzg2l_cpg_pll5_4_clk_mux_set_parent()
460 * rate and clk source for the MUX. It propagates that info to in rzg2l_cpg_pll5_4_clk_mux_set_parent()
465 priv->base + CPG_OTHERFUNC1_REG); in rzg2l_cpg_pll5_4_clk_mux_set_parent()
473 struct rzg2l_cpg_priv *priv = hwdata->priv; in rzg2l_cpg_pll5_4_clk_mux_get_parent()
475 return readl(priv->base + GET_REG_OFFSET(hwdata->conf)); in rzg2l_cpg_pll5_4_clk_mux_get_parent()
484 static struct clk * __init
493 clk_hw_data = devm_kzalloc(priv->dev, sizeof(*clk_hw_data), GFP_KERNEL); in rzg2l_cpg_pll5_4_mux_clk_register()
495 return ERR_PTR(-ENOMEM); in rzg2l_cpg_pll5_4_mux_clk_register()
497 clk_hw_data->priv = priv; in rzg2l_cpg_pll5_4_mux_clk_register()
498 clk_hw_data->conf = core->conf; in rzg2l_cpg_pll5_4_mux_clk_register()
500 init.name = core->name; in rzg2l_cpg_pll5_4_mux_clk_register()
503 init.num_parents = core->num_parents; in rzg2l_cpg_pll5_4_mux_clk_register()
504 init.parent_names = core->parent_names; in rzg2l_cpg_pll5_4_mux_clk_register()
506 clk_hw = &clk_hw_data->hw; in rzg2l_cpg_pll5_4_mux_clk_register()
507 clk_hw->init = &init; in rzg2l_cpg_pll5_4_mux_clk_register()
509 ret = devm_clk_hw_register(priv->dev, clk_hw); in rzg2l_cpg_pll5_4_mux_clk_register()
513 return clk_hw->clk; in rzg2l_cpg_pll5_4_mux_clk_register()
529 struct rzg2l_cpg_priv *priv = sipll5->priv; in rzg2l_cpg_get_vclk_rate()
532 vclk = rate / ((1 << priv->mux_dsi_div_params.dsi_div_a) * in rzg2l_cpg_get_vclk_rate()
533 (priv->mux_dsi_div_params.dsi_div_b + 1)); in rzg2l_cpg_get_vclk_rate()
535 if (priv->mux_dsi_div_params.clksrc) in rzg2l_cpg_get_vclk_rate()
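
The VCLK fragment divides the PLL output by 2^dsi_div_a and by (dsi_div_b + 1), and halves it once more when the FOUT1PH0 source is selected. A standalone sketch using the default parameters set at SIPLL5 registration further down the listing (clksrc = 1, dsi_div_a = 1, dsi_div_b = 2), which brings the 1782 MHz result of the previous example back to 148.5 MHz:

```c
/* Standalone sketch of the VCLK arithmetic with the driver's default
 * mux/divider parameters. */
#include <stdio.h>

int main(void)
{
        unsigned long rate = 1782000000UL;      /* FOUTPOSTDIV */
        unsigned int clksrc = 1, div_a = 1, div_b = 2;
        unsigned long vclk = rate / ((1UL << div_a) * (div_b + 1));

        if (clksrc)     /* FOUT1PH0 = FOUTPOSTDIV / 2 selected */
                vclk /= 2;

        printf("VCLK = %lu Hz\n", vclk);        /* prints 148500000 */
        return 0;
}
```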
545 unsigned long pll5_rate = sipll5->foutpostdiv_rate; in rzg2l_cpg_sipll5_recalc_rate()
565 struct rzg2l_cpg_priv *priv = sipll5->priv; in rzg2l_cpg_sipll5_set_rate()
572 * OSC --> PLL5 --> FOUTPOSTDIV-->| in rzg2l_cpg_sipll5_set_rate()
573 * | | -->MUX -->DIV_DSIA_B -->M3 -->VCLK in rzg2l_cpg_sipll5_set_rate()
574 * |--FOUT1PH0-->| in rzg2l_cpg_sipll5_set_rate()
576 * Based on the dot clock, the DSI divider clock calculates the parent in rzg2l_cpg_sipll5_set_rate()
580 * OSC --> PLL5 --> FOUTPOSTDIV in rzg2l_cpg_sipll5_set_rate()
584 return -EINVAL; in rzg2l_cpg_sipll5_set_rate()
587 sipll5->foutpostdiv_rate = in rzg2l_cpg_sipll5_set_rate()
591 writel(CPG_SIPLL5_STBY_RESETB_WEN, priv->base + CPG_SIPLL5_STBY); in rzg2l_cpg_sipll5_set_rate()
592 ret = readl_poll_timeout(priv->base + CPG_SIPLL5_MON, val, in rzg2l_cpg_sipll5_set_rate()
595 dev_err(priv->dev, "failed to release pll5 lock"); in rzg2l_cpg_sipll5_set_rate()
601 (params.pl5_refdiv << 8), priv->base + CPG_SIPLL5_CLK1); in rzg2l_cpg_sipll5_set_rate()
604 writel((params.pl5_fracin << 8), priv->base + CPG_SIPLL5_CLK3); in rzg2l_cpg_sipll5_set_rate()
608 priv->base + CPG_SIPLL5_CLK4); in rzg2l_cpg_sipll5_set_rate()
611 writel(params.pl5_spread, priv->base + CPG_SIPLL5_CLK5); in rzg2l_cpg_sipll5_set_rate()
616 priv->base + CPG_SIPLL5_STBY); in rzg2l_cpg_sipll5_set_rate()
619 ret = readl_poll_timeout(priv->base + CPG_SIPLL5_MON, val, in rzg2l_cpg_sipll5_set_rate()
622 dev_err(priv->dev, "failed to lock pll5"); in rzg2l_cpg_sipll5_set_rate()
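
The set_rate fragment follows a fixed bring-up sequence: clear RESETB (through its write-enable bit) to put the PLL into standby, wait for the monitor register to report that lock has been released, program CLK1/CLK3/CLK4/CLK5 with the computed parameters, re-assert RESETB, and wait for lock again. Both waits use readl_poll_timeout(); a standalone sketch of that poll-until-set pattern, with a made-up bit position and attempt budget:

```c
/* Standalone sketch of the poll-until-set pattern used via
 * readl_poll_timeout(): re-read a status word until the expected bit
 * appears or the budget runs out. The register and lock bit are simulated;
 * in the driver they are CPG_SIPLL5_MON and its lock flag. */
#include <stdio.h>
#include <stdint.h>

static volatile uint32_t fake_mon;      /* stand-in for the MON register */

static int poll_bit_set(volatile uint32_t *reg, uint32_t bit, unsigned int tries)
{
        while (tries--) {
                if (*reg & bit)
                        return 0;
                /* a real implementation would delay/relax the CPU here */
        }
        return -1;      /* the kernel helper returns -ETIMEDOUT */
}

int main(void)
{
        uint32_t lock_bit = 1u << 4;    /* made-up bit position */

        fake_mon = lock_bit;            /* pretend the PLL reports lock */
        printf("%s\n", poll_bit_set(&fake_mon, lock_bit, 1000) ? "timeout" : "locked");
        return 0;
}
```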
635 static struct clk * __init
637 struct clk **clks, in rzg2l_cpg_sipll5_register()
640 const struct clk *parent; in rzg2l_cpg_sipll5_register() local
647 parent = clks[core->parent & 0xffff]; in rzg2l_cpg_sipll5_register()
648 if (IS_ERR(parent)) in rzg2l_cpg_sipll5_register()
649 return ERR_CAST(parent); in rzg2l_cpg_sipll5_register()
651 sipll5 = devm_kzalloc(priv->dev, sizeof(*sipll5), GFP_KERNEL); in rzg2l_cpg_sipll5_register()
653 return ERR_PTR(-ENOMEM); in rzg2l_cpg_sipll5_register()
655 init.name = core->name; in rzg2l_cpg_sipll5_register()
656 parent_name = __clk_get_name(parent); in rzg2l_cpg_sipll5_register()
662 sipll5->hw.init = &init; in rzg2l_cpg_sipll5_register()
663 sipll5->conf = core->conf; in rzg2l_cpg_sipll5_register()
664 sipll5->priv = priv; in rzg2l_cpg_sipll5_register()
667 CPG_SIPLL5_STBY_RESETB, priv->base + CPG_SIPLL5_STBY); in rzg2l_cpg_sipll5_register()
669 clk_hw = &sipll5->hw; in rzg2l_cpg_sipll5_register()
670 clk_hw->init = &init; in rzg2l_cpg_sipll5_register()
672 ret = devm_clk_hw_register(priv->dev, clk_hw); in rzg2l_cpg_sipll5_register()
676 priv->mux_dsi_div_params.clksrc = 1; /* Use clk src 1 for DSI */ in rzg2l_cpg_sipll5_register()
677 priv->mux_dsi_div_params.dsi_div_a = 1; /* Divided by 2 */ in rzg2l_cpg_sipll5_register()
678 priv->mux_dsi_div_params.dsi_div_b = 2; /* Divided by 3 */ in rzg2l_cpg_sipll5_register()
680 return clk_hw->clk; in rzg2l_cpg_sipll5_register()
697 struct rzg2l_cpg_priv *priv = pll_clk->priv; in rzg2l_cpg_pll_clk_recalc_rate()
701 if (pll_clk->type != CLK_TYPE_SAM_PLL) in rzg2l_cpg_pll_clk_recalc_rate()
704 val1 = readl(priv->base + GET_REG_SAMPLL_CLK1(pll_clk->conf)); in rzg2l_cpg_pll_clk_recalc_rate()
705 val2 = readl(priv->base + GET_REG_SAMPLL_CLK2(pll_clk->conf)); in rzg2l_cpg_pll_clk_recalc_rate()
717 static struct clk * __init
719 struct clk **clks, in rzg2l_cpg_pll_clk_register()
723 struct device *dev = priv->dev; in rzg2l_cpg_pll_clk_register()
724 const struct clk *parent; in rzg2l_cpg_pll_clk_register() local
729 parent = clks[core->parent & 0xffff]; in rzg2l_cpg_pll_clk_register()
730 if (IS_ERR(parent)) in rzg2l_cpg_pll_clk_register()
731 return ERR_CAST(parent); in rzg2l_cpg_pll_clk_register()
735 return ERR_PTR(-ENOMEM); in rzg2l_cpg_pll_clk_register()
737 parent_name = __clk_get_name(parent); in rzg2l_cpg_pll_clk_register()
738 init.name = core->name; in rzg2l_cpg_pll_clk_register()
744 pll_clk->hw.init = &init; in rzg2l_cpg_pll_clk_register()
745 pll_clk->conf = core->conf; in rzg2l_cpg_pll_clk_register()
746 pll_clk->base = base; in rzg2l_cpg_pll_clk_register()
747 pll_clk->priv = priv; in rzg2l_cpg_pll_clk_register()
748 pll_clk->type = core->type; in rzg2l_cpg_pll_clk_register()
750 return clk_register(NULL, &pll_clk->hw); in rzg2l_cpg_pll_clk_register()
753 static struct clk
757 unsigned int clkidx = clkspec->args[1]; in rzg2l_cpg_clk_src_twocell_get()
759 struct device *dev = priv->dev; in rzg2l_cpg_clk_src_twocell_get()
761 struct clk *clk; in rzg2l_cpg_clk_src_twocell_get() local
763 switch (clkspec->args[0]) { in rzg2l_cpg_clk_src_twocell_get()
766 if (clkidx > priv->last_dt_core_clk) { in rzg2l_cpg_clk_src_twocell_get()
768 return ERR_PTR(-EINVAL); in rzg2l_cpg_clk_src_twocell_get()
770 clk = priv->clks[clkidx]; in rzg2l_cpg_clk_src_twocell_get()
775 if (clkidx >= priv->num_mod_clks) { in rzg2l_cpg_clk_src_twocell_get()
778 return ERR_PTR(-EINVAL); in rzg2l_cpg_clk_src_twocell_get()
780 clk = priv->clks[priv->num_core_clks + clkidx]; in rzg2l_cpg_clk_src_twocell_get()
784 dev_err(dev, "Invalid CPG clock type %u\n", clkspec->args[0]); in rzg2l_cpg_clk_src_twocell_get()
785 return ERR_PTR(-EINVAL); in rzg2l_cpg_clk_src_twocell_get()
788 if (IS_ERR(clk)) in rzg2l_cpg_clk_src_twocell_get()
790 PTR_ERR(clk)); in rzg2l_cpg_clk_src_twocell_get()
793 clkspec->args[0], clkspec->args[1], clk, in rzg2l_cpg_clk_src_twocell_get()
794 clk_get_rate(clk)); in rzg2l_cpg_clk_src_twocell_get()
795 return clk; in rzg2l_cpg_clk_src_twocell_get()
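
Consumers reference these clocks with two cells, <type index>, where type is CPG_CORE or CPG_MOD from dt-bindings/clock/renesas-cpg-mssr.h, and both kinds share one clks[] array with module clocks stored after the core clocks. A standalone sketch of that index mapping; the array sizes are invented, and the driver additionally bounds core indices by last_dt_core_clk rather than the raw core-clock count:

```c
/* Standalone sketch of the two-cell lookup above. */
#include <stdio.h>

#define CPG_CORE 0      /* values from dt-bindings/clock/renesas-cpg-mssr.h */
#define CPG_MOD  1

static int clk_index(unsigned int type, unsigned int idx,
                     unsigned int num_core_clks, unsigned int num_mod_clks)
{
        if (type == CPG_CORE)   /* the driver bounds this by last_dt_core_clk */
                return idx < num_core_clks ? (int)idx : -1;
        if (type == CPG_MOD)    /* module clocks follow the core clocks */
                return idx < num_mod_clks ? (int)(num_core_clks + idx) : -1;
        return -1;
}

int main(void)
{
        /* e.g. clocks = <&cpg CPG_MOD 7> with 62 core clocks registered */
        printf("array slot = %d\n", clk_index(CPG_MOD, 7, 62, 200));
        return 0;
}
```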
803 struct clk *clk = ERR_PTR(-EOPNOTSUPP), *parent; in rzg2l_cpg_register_core_clk() local
804 struct device *dev = priv->dev; in rzg2l_cpg_register_core_clk()
805 unsigned int id = core->id, div = core->div; in rzg2l_cpg_register_core_clk()
808 WARN_DEBUG(id >= priv->num_core_clks); in rzg2l_cpg_register_core_clk()
809 WARN_DEBUG(PTR_ERR(priv->clks[id]) != -ENOENT); in rzg2l_cpg_register_core_clk()
811 if (!core->name) { in rzg2l_cpg_register_core_clk()
816 switch (core->type) { in rzg2l_cpg_register_core_clk()
818 clk = of_clk_get_by_name(priv->dev->of_node, core->name); in rzg2l_cpg_register_core_clk()
821 WARN_DEBUG(core->parent >= priv->num_core_clks); in rzg2l_cpg_register_core_clk()
822 parent = priv->clks[core->parent]; in rzg2l_cpg_register_core_clk()
823 if (IS_ERR(parent)) { in rzg2l_cpg_register_core_clk()
824 clk = parent; in rzg2l_cpg_register_core_clk()
828 parent_name = __clk_get_name(parent); in rzg2l_cpg_register_core_clk()
829 clk = clk_register_fixed_factor(NULL, core->name, in rzg2l_cpg_register_core_clk()
831 core->mult, div); in rzg2l_cpg_register_core_clk()
834 clk = rzg2l_cpg_pll_clk_register(core, priv->clks, in rzg2l_cpg_register_core_clk()
835 priv->base, priv); in rzg2l_cpg_register_core_clk()
838 clk = rzg2l_cpg_sipll5_register(core, priv->clks, priv); in rzg2l_cpg_register_core_clk()
841 clk = rzg2l_cpg_div_clk_register(core, priv->clks, in rzg2l_cpg_register_core_clk()
842 priv->base, priv); in rzg2l_cpg_register_core_clk()
845 clk = rzg2l_cpg_mux_clk_register(core, priv->base, priv); in rzg2l_cpg_register_core_clk()
848 clk = rzg2l_cpg_sd_mux_clk_register(core, priv->base, priv); in rzg2l_cpg_register_core_clk()
851 clk = rzg2l_cpg_pll5_4_mux_clk_register(core, priv); in rzg2l_cpg_register_core_clk()
854 clk = rzg2l_cpg_dsi_div_clk_register(core, priv->clks, priv); in rzg2l_cpg_register_core_clk()
860 if (IS_ERR_OR_NULL(clk)) in rzg2l_cpg_register_core_clk()
863 dev_dbg(dev, "Core clock %pC at %lu Hz\n", clk, clk_get_rate(clk)); in rzg2l_cpg_register_core_clk()
864 priv->clks[id] = clk; in rzg2l_cpg_register_core_clk()
869 core->name, PTR_ERR(clk)); in rzg2l_cpg_register_core_clk()
873 * struct mstp_clock - MSTP gating clock
875 * @hw: handle between common and hardware-specific interfaces
896 struct rzg2l_cpg_priv *priv = clock->priv; in rzg2l_mod_clock_endisable()
897 unsigned int reg = clock->off; in rzg2l_mod_clock_endisable()
898 struct device *dev = priv->dev; in rzg2l_mod_clock_endisable()
900 u32 bitmask = BIT(clock->bit); in rzg2l_mod_clock_endisable()
904 if (!clock->off) { in rzg2l_mod_clock_endisable()
905 dev_dbg(dev, "%pC does not support ON/OFF\n", hw->clk); in rzg2l_mod_clock_endisable()
909 dev_dbg(dev, "CLK_ON %u/%pC %s\n", CLK_ON_R(reg), hw->clk, in rzg2l_mod_clock_endisable()
911 spin_lock_irqsave(&priv->rmw_lock, flags); in rzg2l_mod_clock_endisable()
917 writel(value, priv->base + CLK_ON_R(reg)); in rzg2l_mod_clock_endisable()
919 spin_unlock_irqrestore(&priv->rmw_lock, flags); in rzg2l_mod_clock_endisable()
924 if (!priv->info->has_clk_mon_regs) in rzg2l_mod_clock_endisable()
927 error = readl_poll_timeout_atomic(priv->base + CLK_MON_R(reg), value, in rzg2l_mod_clock_endisable()
931 priv->base + CLK_ON_R(reg)); in rzg2l_mod_clock_endisable()
940 if (clock->sibling) { in rzg2l_mod_clock_enable()
941 struct rzg2l_cpg_priv *priv = clock->priv; in rzg2l_mod_clock_enable()
945 spin_lock_irqsave(&priv->rmw_lock, flags); in rzg2l_mod_clock_enable()
946 enabled = clock->sibling->enabled; in rzg2l_mod_clock_enable()
947 clock->enabled = true; in rzg2l_mod_clock_enable()
948 spin_unlock_irqrestore(&priv->rmw_lock, flags); in rzg2l_mod_clock_enable()
960 if (clock->sibling) { in rzg2l_mod_clock_disable()
961 struct rzg2l_cpg_priv *priv = clock->priv; in rzg2l_mod_clock_disable()
965 spin_lock_irqsave(&priv->rmw_lock, flags); in rzg2l_mod_clock_disable()
966 enabled = clock->sibling->enabled; in rzg2l_mod_clock_disable()
967 clock->enabled = false; in rzg2l_mod_clock_disable()
968 spin_unlock_irqrestore(&priv->rmw_lock, flags); in rzg2l_mod_clock_disable()
979 struct rzg2l_cpg_priv *priv = clock->priv; in rzg2l_mod_clock_is_enabled()
980 u32 bitmask = BIT(clock->bit); in rzg2l_mod_clock_is_enabled()
983 if (!clock->off) { in rzg2l_mod_clock_is_enabled()
984 dev_dbg(priv->dev, "%pC does not support ON/OFF\n", hw->clk); in rzg2l_mod_clock_is_enabled()
988 if (clock->sibling) in rzg2l_mod_clock_is_enabled()
989 return clock->enabled; in rzg2l_mod_clock_is_enabled()
991 if (priv->info->has_clk_mon_regs) in rzg2l_mod_clock_is_enabled()
992 value = readl(priv->base + CLK_MON_R(clock->off)); in rzg2l_mod_clock_is_enabled()
994 value = readl(priv->base + clock->off); in rzg2l_mod_clock_is_enabled()
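
The sibling fragments above implement coupled module clocks: two mstp_clock instances share one ON/OFF bit, their software enable states are tracked under the rmw lock, and the hardware gate is only touched when the pair's combined state actually changes. A standalone sketch of that bookkeeping, with the locking and register access left out:

```c
/* Standalone sketch of the coupled-clock bookkeeping. */
#include <stdio.h>
#include <stdbool.h>

struct coupled_clk {
        bool enabled;
        struct coupled_clk *sibling;
};

static void hw_gate(bool on)
{
        printf("hardware gate -> %s\n", on ? "on" : "off");
}

static void coupled_enable(struct coupled_clk *c)
{
        bool sibling_on = c->sibling && c->sibling->enabled;

        c->enabled = true;
        if (!sibling_on)        /* first user of the shared bit */
                hw_gate(true);
}

static void coupled_disable(struct coupled_clk *c)
{
        c->enabled = false;
        if (!(c->sibling && c->sibling->enabled))       /* last user */
                hw_gate(false);
}

int main(void)
{
        struct coupled_clk a = { 0 }, b = { 0 };

        a.sibling = &b;
        b.sibling = &a;
        coupled_enable(&a);     /* gates on  */
        coupled_enable(&b);     /* no write  */
        coupled_disable(&a);    /* no write  */
        coupled_disable(&b);    /* gates off */
        return 0;
}
```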
1012 for (i = 0; i < priv->num_mod_clks; i++) { in rzg2l_mod_clock_get_sibling()
1013 struct mstp_clock *clk; in rzg2l_mod_clock_get_sibling() local
1015 if (priv->clks[priv->num_core_clks + i] == ERR_PTR(-ENOENT)) in rzg2l_mod_clock_get_sibling()
1018 hw = __clk_get_hw(priv->clks[priv->num_core_clks + i]); in rzg2l_mod_clock_get_sibling()
1019 clk = to_mod_clock(hw); in rzg2l_mod_clock_get_sibling()
1020 if (clock->off == clk->off && clock->bit == clk->bit) in rzg2l_mod_clock_get_sibling()
1021 return clk; in rzg2l_mod_clock_get_sibling()
1033 struct device *dev = priv->dev; in rzg2l_cpg_register_mod_clk()
1034 unsigned int id = mod->id; in rzg2l_cpg_register_mod_clk()
1036 struct clk *parent, *clk; in rzg2l_cpg_register_mod_clk() local
1040 WARN_DEBUG(id < priv->num_core_clks); in rzg2l_cpg_register_mod_clk()
1041 WARN_DEBUG(id >= priv->num_core_clks + priv->num_mod_clks); in rzg2l_cpg_register_mod_clk()
1042 WARN_DEBUG(mod->parent >= priv->num_core_clks + priv->num_mod_clks); in rzg2l_cpg_register_mod_clk()
1043 WARN_DEBUG(PTR_ERR(priv->clks[id]) != -ENOENT); in rzg2l_cpg_register_mod_clk()
1045 if (!mod->name) { in rzg2l_cpg_register_mod_clk()
1050 parent = priv->clks[mod->parent]; in rzg2l_cpg_register_mod_clk()
1051 if (IS_ERR(parent)) { in rzg2l_cpg_register_mod_clk()
1052 clk = parent; in rzg2l_cpg_register_mod_clk()
1058 clk = ERR_PTR(-ENOMEM); in rzg2l_cpg_register_mod_clk()
1062 init.name = mod->name; in rzg2l_cpg_register_mod_clk()
1065 for (i = 0; i < info->num_crit_mod_clks; i++) in rzg2l_cpg_register_mod_clk()
1066 if (id == info->crit_mod_clks[i]) { in rzg2l_cpg_register_mod_clk()
1068 mod->name); in rzg2l_cpg_register_mod_clk()
1073 parent_name = __clk_get_name(parent); in rzg2l_cpg_register_mod_clk()
1077 clock->off = mod->off; in rzg2l_cpg_register_mod_clk()
1078 clock->bit = mod->bit; in rzg2l_cpg_register_mod_clk()
1079 clock->priv = priv; in rzg2l_cpg_register_mod_clk()
1080 clock->hw.init = &init; in rzg2l_cpg_register_mod_clk()
1082 clk = clk_register(NULL, &clock->hw); in rzg2l_cpg_register_mod_clk()
1083 if (IS_ERR(clk)) in rzg2l_cpg_register_mod_clk()
1086 dev_dbg(dev, "Module clock %pC at %lu Hz\n", clk, clk_get_rate(clk)); in rzg2l_cpg_register_mod_clk()
1087 priv->clks[id] = clk; in rzg2l_cpg_register_mod_clk()
1089 if (mod->is_coupled) { in rzg2l_cpg_register_mod_clk()
1092 clock->enabled = rzg2l_mod_clock_is_enabled(&clock->hw); in rzg2l_cpg_register_mod_clk()
1095 clock->sibling = sibling; in rzg2l_cpg_register_mod_clk()
1096 sibling->sibling = clock; in rzg2l_cpg_register_mod_clk()
1104 mod->name, PTR_ERR(clk)); in rzg2l_cpg_register_mod_clk()
1113 const struct rzg2l_cpg_info *info = priv->info; in rzg2l_cpg_assert()
1114 unsigned int reg = info->resets[id].off; in rzg2l_cpg_assert()
1115 u32 mask = BIT(info->resets[id].bit); in rzg2l_cpg_assert()
1116 s8 monbit = info->resets[id].monbit; in rzg2l_cpg_assert()
1119 dev_dbg(rcdev->dev, "assert id:%ld offset:0x%x\n", id, CLK_RST_R(reg)); in rzg2l_cpg_assert()
1121 writel(value, priv->base + CLK_RST_R(reg)); in rzg2l_cpg_assert()
1123 if (info->has_clk_mon_regs) { in rzg2l_cpg_assert()
1134 return readl_poll_timeout_atomic(priv->base + reg, value, in rzg2l_cpg_assert()
1142 const struct rzg2l_cpg_info *info = priv->info; in rzg2l_cpg_deassert()
1143 unsigned int reg = info->resets[id].off; in rzg2l_cpg_deassert()
1144 u32 mask = BIT(info->resets[id].bit); in rzg2l_cpg_deassert()
1145 s8 monbit = info->resets[id].monbit; in rzg2l_cpg_deassert()
1148 dev_dbg(rcdev->dev, "deassert id:%ld offset:0x%x\n", id, in rzg2l_cpg_deassert()
1151 writel(value, priv->base + CLK_RST_R(reg)); in rzg2l_cpg_deassert()
1153 if (info->has_clk_mon_regs) { in rzg2l_cpg_deassert()
1164 return readl_poll_timeout_atomic(priv->base + reg, value, in rzg2l_cpg_deassert()
1184 const struct rzg2l_cpg_info *info = priv->info; in rzg2l_cpg_status()
1185 s8 monbit = info->resets[id].monbit; in rzg2l_cpg_status()
1189 if (info->has_clk_mon_regs) { in rzg2l_cpg_status()
1190 reg = CLK_MRST_R(info->resets[id].off); in rzg2l_cpg_status()
1191 bitmask = BIT(info->resets[id].bit); in rzg2l_cpg_status()
1196 return -ENOTSUPP; in rzg2l_cpg_status()
1199 return !!(readl(priv->base + reg) & bitmask); in rzg2l_cpg_status()
1213 const struct rzg2l_cpg_info *info = priv->info; in rzg2l_cpg_reset_xlate()
1214 unsigned int id = reset_spec->args[0]; in rzg2l_cpg_reset_xlate()
1216 if (id >= rcdev->nr_resets || !info->resets[id].off) { in rzg2l_cpg_reset_xlate()
1217 dev_err(rcdev->dev, "Invalid reset index %u\n", id); in rzg2l_cpg_reset_xlate()
1218 return -EINVAL; in rzg2l_cpg_reset_xlate()
1226 priv->rcdev.ops = &rzg2l_cpg_reset_ops; in rzg2l_cpg_reset_controller_register()
1227 priv->rcdev.of_node = priv->dev->of_node; in rzg2l_cpg_reset_controller_register()
1228 priv->rcdev.dev = priv->dev; in rzg2l_cpg_reset_controller_register()
1229 priv->rcdev.of_reset_n_cells = 1; in rzg2l_cpg_reset_controller_register()
1230 priv->rcdev.of_xlate = rzg2l_cpg_reset_xlate; in rzg2l_cpg_reset_controller_register()
1231 priv->rcdev.nr_resets = priv->num_resets; in rzg2l_cpg_reset_controller_register()
1233 return devm_reset_controller_register(priv->dev, &priv->rcdev); in rzg2l_cpg_reset_controller_register()
1239 const struct rzg2l_cpg_info *info = priv->info; in rzg2l_cpg_is_pm_clk()
1243 if (clkspec->args_count != 2) in rzg2l_cpg_is_pm_clk()
1246 if (clkspec->args[0] != CPG_MOD) in rzg2l_cpg_is_pm_clk()
1249 id = clkspec->args[1] + info->num_total_core_clks; in rzg2l_cpg_is_pm_clk()
1250 for (i = 0; i < info->num_no_pm_mod_clks; i++) { in rzg2l_cpg_is_pm_clk()
1251 if (info->no_pm_mod_clks[i] == id) in rzg2l_cpg_is_pm_clk()
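
The check above decides whether a consumer's clock takes part in the PM clock domain: the specifier must be a two-cell CPG_MOD reference and the module must not appear on the SoC's no-PM list. A standalone sketch of the same decision; the list contents are invented, and where the driver compares IDs offset by the core-clock count, the sketch compares raw module indices for brevity:

```c
/* Standalone sketch of the PM-clock eligibility check. */
#include <stdio.h>
#include <stdbool.h>

#define CPG_MOD 1

static const unsigned int no_pm_mod_clks[] = { 3, 12 };  /* hypothetical */

static bool is_pm_clk(int args_count, unsigned int type, unsigned int idx)
{
        unsigned int i;

        if (args_count != 2 || type != CPG_MOD)
                return false;
        for (i = 0; i < sizeof(no_pm_mod_clks) / sizeof(no_pm_mod_clks[0]); i++)
                if (no_pm_mod_clks[i] == idx)
                        return false;
        return true;
}

int main(void)
{
        printf("%d %d\n", is_pm_clk(2, CPG_MOD, 7), is_pm_clk(2, CPG_MOD, 12));
        return 0;
}
```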
1261 struct device_node *np = dev->of_node; in rzg2l_cpg_attach_dev()
1264 struct clk *clk; in rzg2l_cpg_attach_dev() local
1268 while (!of_parse_phandle_with_args(np, "clocks", "#clock-cells", i, in rzg2l_cpg_attach_dev()
1279 clk = of_clk_get_from_provider(&clkspec); in rzg2l_cpg_attach_dev()
1281 if (IS_ERR(clk)) { in rzg2l_cpg_attach_dev()
1282 error = PTR_ERR(clk); in rzg2l_cpg_attach_dev()
1286 error = pm_clk_add_clk(dev, clk); in rzg2l_cpg_attach_dev()
1301 clk_put(clk); in rzg2l_cpg_attach_dev()
1322 struct device *dev = priv->dev; in rzg2l_cpg_add_clk_domain()
1323 struct device_node *np = dev->of_node; in rzg2l_cpg_add_clk_domain()
1324 struct generic_pm_domain *genpd = &priv->genpd; in rzg2l_cpg_add_clk_domain()
1327 genpd->name = np->name; in rzg2l_cpg_add_clk_domain()
1328 genpd->flags = GENPD_FLAG_PM_CLK | GENPD_FLAG_ALWAYS_ON | in rzg2l_cpg_add_clk_domain()
1330 genpd->attach_dev = rzg2l_cpg_attach_dev; in rzg2l_cpg_add_clk_domain()
1331 genpd->detach_dev = rzg2l_cpg_detach_dev; in rzg2l_cpg_add_clk_domain()
1345 struct device *dev = &pdev->dev; in rzg2l_cpg_probe()
1346 struct device_node *np = dev->of_node; in rzg2l_cpg_probe()
1350 struct clk **clks; in rzg2l_cpg_probe()
1357 return -ENOMEM; in rzg2l_cpg_probe()
1359 priv->dev = dev; in rzg2l_cpg_probe()
1360 priv->info = info; in rzg2l_cpg_probe()
1361 spin_lock_init(&priv->rmw_lock); in rzg2l_cpg_probe()
1363 priv->base = devm_platform_ioremap_resource(pdev, 0); in rzg2l_cpg_probe()
1364 if (IS_ERR(priv->base)) in rzg2l_cpg_probe()
1365 return PTR_ERR(priv->base); in rzg2l_cpg_probe()
1367 nclks = info->num_total_core_clks + info->num_hw_mod_clks; in rzg2l_cpg_probe()
1370 return -ENOMEM; in rzg2l_cpg_probe()
1373 priv->clks = clks; in rzg2l_cpg_probe()
1374 priv->num_core_clks = info->num_total_core_clks; in rzg2l_cpg_probe()
1375 priv->num_mod_clks = info->num_hw_mod_clks; in rzg2l_cpg_probe()
1376 priv->num_resets = info->num_resets; in rzg2l_cpg_probe()
1377 priv->last_dt_core_clk = info->last_dt_core_clk; in rzg2l_cpg_probe()
1380 clks[i] = ERR_PTR(-ENOENT); in rzg2l_cpg_probe()
1382 for (i = 0; i < info->num_core_clks; i++) in rzg2l_cpg_probe()
1383 rzg2l_cpg_register_core_clk(&info->core_clks[i], info, priv); in rzg2l_cpg_probe()
1385 for (i = 0; i < info->num_mod_clks; i++) in rzg2l_cpg_probe()
1386 rzg2l_cpg_register_mod_clk(&info->mod_clks[i], info, priv); in rzg2l_cpg_probe()
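
The probe fragment pre-fills every slot of the clock array with ERR_PTR(-ENOENT) before registering anything, which is why the registration helpers can use IS_ERR() on a parent to detect ordering problems and the WARN_DEBUG checks compare against -ENOENT. A standalone sketch of that sentinel pattern, mirroring the kernel's ERR_PTR()/IS_ERR() pointer encoding:

```c
/* Standalone sketch of the error-pointer sentinel pattern. */
#include <stdio.h>
#include <stdint.h>
#include <errno.h>

#define ERR_PTR(e)      ((void *)(intptr_t)(-(e)))
#define IS_ERR(p)       ((uintptr_t)(p) >= (uintptr_t)-4095)

int main(void)
{
        void *clks[4];
        unsigned int i;

        /* Every slot starts out as "no such clock". */
        for (i = 0; i < 4; i++)
                clks[i] = ERR_PTR(ENOENT);

        clks[1] = &clks;        /* pretend slot 1 got a real clock */

        for (i = 0; i < 4; i++)
                printf("clks[%u]: %s\n", i,
                       IS_ERR(clks[i]) ? "-ENOENT" : "registered");
        return 0;
}
```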
1410 .compatible = "renesas,r9a07g043-cpg",
1416 .compatible = "renesas,r9a07g044-cpg",
1422 .compatible = "renesas,r9a07g054-cpg",
1428 .compatible = "renesas,r9a09g011-cpg",
1437 .name = "rzg2l-cpg",