// SPDX-License-Identifier: GPL-2.0-only
/*
 *  Atheros AR71XX/AR724X/AR913X common routines
 *
 *  Copyright (C) 2010-2011 Jaiganesh Narayanan <jnarayanan@atheros.com>
 *  Copyright (C) 2011 Gabor Juhos <juhosg@openwrt.org>
 *
 *  Parts of this file are based on Atheros' 2.6.15/2.6.31 BSP
 */

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/clkdev.h>
#include <linux/clk-provider.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <dt-bindings/clock/ath79-clk.h>

#include <asm/div64.h>

#include <asm/mach-ath79/ath79.h>
#include <asm/mach-ath79/ar71xx_regs.h>
#include "common.h"

#define AR71XX_BASE_FREQ	40000000
#define AR724X_BASE_FREQ	40000000

static struct clk *clks[ATH79_CLK_END];
static struct clk_onecell_data clk_data = {
	.clks = clks,
	.clk_num = ARRAY_SIZE(clks),
};

static const char * const clk_names[ATH79_CLK_END] = {
	[ATH79_CLK_CPU] = "cpu",
	[ATH79_CLK_DDR] = "ddr",
	[ATH79_CLK_AHB] = "ahb",
	[ATH79_CLK_REF] = "ref",
	[ATH79_CLK_MDIO] = "mdio",
};

static const char * __init ath79_clk_name(int type)
{
	BUG_ON(type >= ARRAY_SIZE(clk_names) || !clk_names[type]);
	return clk_names[type];
}

static void __init __ath79_set_clk(int type, const char *name, struct clk *clk)
{
	if (IS_ERR(clk))
		panic("failed to allocate %s clock structure", clk_names[type]);

	clks[type] = clk;
	clk_register_clkdev(clk, name, NULL);
}

static struct clk * __init ath79_set_clk(int type, unsigned long rate)
{
	const char *name = ath79_clk_name(type);
	struct clk *clk;

	clk = clk_register_fixed_rate(NULL, name, NULL, 0, rate);
	__ath79_set_clk(type, name, clk);
	return clk;
}

static struct clk * __init ath79_set_ff_clk(int type, const char *parent,
					    unsigned int mult, unsigned int div)
{
	const char *name = ath79_clk_name(type);
	struct clk *clk;

	clk = clk_register_fixed_factor(NULL, name, parent, 0, mult, div);
	__ath79_set_clk(type, name, clk);
	return clk;
}

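/*
 * If a "ref" clock was already provided (e.g. via the device tree lookup in
 * ath79_clocks_init_dt()), use its rate; otherwise register a fixed-rate
 * "ref" clock at the rate the caller derived from the bootstrap settings.
 */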
static unsigned long __init ath79_setup_ref_clk(unsigned long rate)
{
	struct clk *clk = clks[ATH79_CLK_REF];

	if (clk)
		rate = clk_get_rate(clk);
	else
		clk = ath79_set_clk(ATH79_CLK_REF, rate);

	return rate;
}

static void __init ar71xx_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll;
	u32 freq;
	u32 div;

	ref_rate = ath79_setup_ref_clk(AR71XX_BASE_FREQ);

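	/*
	 * The CPU PLL runs at ref * (PLL_FB + 1); CPU and DDR are the PLL
	 * divided by (CPU_DIV + 1) and (DDR_DIV + 1), and AHB is the CPU
	 * clock divided by 2 * (AHB_DIV + 1).
	 */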
	pll = __raw_readl(pll_base + AR71XX_PLL_REG_CPU_CONFIG);

	div = ((pll >> AR71XX_PLL_FB_SHIFT) & AR71XX_PLL_FB_MASK) + 1;
	freq = div * ref_rate;

	div = ((pll >> AR71XX_CPU_DIV_SHIFT) & AR71XX_CPU_DIV_MASK) + 1;
	cpu_rate = freq / div;

	div = ((pll >> AR71XX_DDR_DIV_SHIFT) & AR71XX_DDR_DIV_MASK) + 1;
	ddr_rate = freq / div;

	div = (((pll >> AR71XX_AHB_DIV_SHIFT) & AR71XX_AHB_DIV_MASK) + 1) * 2;
	ahb_rate = cpu_rate / div;

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

static void __init ar724x_clocks_init(void __iomem *pll_base)
{
	u32 mult, div, ddr_div, ahb_div;
	u32 pll;

	ath79_setup_ref_clk(AR71XX_BASE_FREQ);

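	/*
	 * CPU, DDR and AHB are registered as fixed-factor children of "ref":
	 * cpu = ref * PLL_FB / (2 * PLL_REF_DIV), ddr = cpu / (DDR_DIV + 1),
	 * ahb = cpu / (2 * (AHB_DIV + 1)).
	 */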
	pll = __raw_readl(pll_base + AR724X_PLL_REG_CPU_CONFIG);

	mult = ((pll >> AR724X_PLL_FB_SHIFT) & AR724X_PLL_FB_MASK);
	div = ((pll >> AR724X_PLL_REF_DIV_SHIFT) & AR724X_PLL_REF_DIV_MASK) * 2;

	ddr_div = ((pll >> AR724X_DDR_DIV_SHIFT) & AR724X_DDR_DIV_MASK) + 1;
	ahb_div = (((pll >> AR724X_AHB_DIV_SHIFT) & AR724X_AHB_DIV_MASK) + 1) * 2;

	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", mult, div);
	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", mult, div * ddr_div);
	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", mult, div * ahb_div);
}

static void __init ar933x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	u32 clock_ctrl;
	u32 ref_div;
	u32 ninit_mul;
	u32 out_div;

	u32 cpu_div;
	u32 ddr_div;
	u32 ahb_div;
	u32 t;

	t = ath79_reset_rr(AR933X_RESET_REG_BOOTSTRAP);
	if (t & AR933X_BOOTSTRAP_REF_CLK_40)
		ref_rate = (40 * 1000 * 1000);
	else
		ref_rate = (25 * 1000 * 1000);

	ath79_setup_ref_clk(ref_rate);

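	/*
	 * With the PLL bypassed all clocks run straight off "ref" (every
	 * factor below is 1). Otherwise each clock is a fixed factor of
	 * "ref": rate = ref * NINT / (REFDIV * OUTDIV * div), where OUTDIV
	 * is a power of two (a zero OUTDIV field is treated as 1) and div is
	 * the per-clock divider from the clock control register plus one.
	 */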
	clock_ctrl = __raw_readl(pll_base + AR933X_PLL_CLOCK_CTRL_REG);
	if (clock_ctrl & AR933X_PLL_CLOCK_CTRL_BYPASS) {
		ref_div = 1;
		ninit_mul = 1;
		out_div = 1;

		cpu_div = 1;
		ddr_div = 1;
		ahb_div = 1;
	} else {
		u32 cpu_config;
		u32 t;

		cpu_config = __raw_readl(pll_base + AR933X_PLL_CPU_CONFIG_REG);

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_REFDIV_MASK;
		ref_div = t;

		ninit_mul = (cpu_config >> AR933X_PLL_CPU_CONFIG_NINT_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_NINT_MASK;

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_OUTDIV_MASK;
		if (t == 0)
			t = 1;

		out_div = (1 << t);

		cpu_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_CPU_DIV_SHIFT) &
		     AR933X_PLL_CLOCK_CTRL_CPU_DIV_MASK) + 1;

		ddr_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_DDR_DIV_SHIFT) &
		      AR933X_PLL_CLOCK_CTRL_DDR_DIV_MASK) + 1;

		ahb_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_AHB_DIV_SHIFT) &
		     AR933X_PLL_CLOCK_CTRL_AHB_DIV_MASK) + 1;
	}

	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", ninit_mul,
			 ref_div * out_div * cpu_div);
	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", ninit_mul,
			 ref_div * out_div * ddr_div);
	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", ninit_mul,
			 ref_div * out_div * ahb_div);
}

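/*
 * pll = ref * nint / ref_div + ref * nfrac / (ref_div * frac), divided by
 * 2^out_div. "frac" is the scale of the fractional field (2^6, 2^10 or 2^18
 * depending on which register the values were read from).
 */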
static u32 __init ar934x_get_pll_freq(u32 ref, u32 ref_div, u32 nint, u32 nfrac,
				      u32 frac, u32 out_div)
{
	u64 t;
	u32 ret;

	t = ref;
	t *= nint;
	do_div(t, ref_div);
	ret = t;

	t = ref;
	t *= nfrac;
	do_div(t, ref_div * frac);
	ret += t;

	ret /= (1 << out_div);
	return ret;
}

static void __init ar934x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, nfrac, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;
	void __iomem *dpll_base;

	dpll_base = ioremap(AR934X_SRIF_BASE, AR934X_SRIF_SIZE);

	bootstrap = ath79_reset_rr(AR934X_RESET_REG_BOOTSTRAP);
	if (bootstrap & AR934X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

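	/*
	 * The CPU and DDR PLL settings can come either from the "local" DPLL
	 * in the SRIF block or from the regular PLL configuration registers;
	 * the SRIF values are used whenever DPLL2 indicates the local PLL is
	 * active. Note the different fractional scales: 2^18 for the SRIF
	 * DPLL, 2^6 (CPU) and 2^10 (DDR) for the PLL configuration registers.
	 */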
	pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = __raw_readl(pll_base + AR934X_PLL_CPU_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
			AR934X_PLL_CPU_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
			  AR934X_PLL_CPU_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_CPU_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_CPU_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_CPU_CONFIG_NFRAC_MASK;
		frac = 1 << 6;
	}

	cpu_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = __raw_readl(pll_base + AR934X_PLL_DDR_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
			  AR934X_PLL_DDR_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
			   AR934X_PLL_DDR_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_DDR_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_DDR_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_DDR_CONFIG_NFRAC_MASK;
		frac = 1 << 10;
	}

	ddr_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_CPU_DDR_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = cpu_pll / (postdiv + 1);
	else
		cpu_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = ddr_pll / (postdiv + 1);
	else
		ddr_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);

	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_SWITCH_CLOCK_CONTROL_REG);
	if (clk_ctrl & AR934X_PLL_SWITCH_CLOCK_CONTROL_MDIO_CLK_SEL)
		ath79_set_clk(ATH79_CLK_MDIO, 100 * 1000 * 1000);

	iounmap(dpll_base);
}

static void __init qca953x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	bootstrap = ath79_reset_rr(QCA953X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA953X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

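	/*
	 * pll = ref * NINT / REFDIV + ref * NFRAC / (REFDIV * 2^n), divided
	 * by 2^OUTDIV; the CPU PLL uses 6 fractional bits, the DDR PLL below
	 * uses 10.
	 */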
	pll = __raw_readl(pll_base + QCA953X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA953X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA953X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA953X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA953X_PLL_CPU_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA953X_PLL_CPU_CONFIG_NINT_SHIFT) &
	       QCA953X_PLL_CPU_CONFIG_NINT_MASK;
	frac = (pll >> QCA953X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
	       QCA953X_PLL_CPU_CONFIG_NFRAC_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += frac * (ref_rate >> 6) / ref_div;
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA953X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA953X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA953X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA953X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA953X_PLL_DDR_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA953X_PLL_DDR_CONFIG_NINT_SHIFT) &
	       QCA953X_PLL_DDR_CONFIG_NINT_MASK;
	frac = (pll >> QCA953X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
	       QCA953X_PLL_DDR_CONFIG_NFRAC_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += frac * (ref_rate >> 6) / (ref_div << 4);
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA953X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = cpu_pll / (postdiv + 1);
	else
		cpu_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = ddr_pll / (postdiv + 1);
	else
		ddr_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

static void __init qca955x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	bootstrap = ath79_reset_rr(QCA955X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA955X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA955X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_CPU_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NFRAC_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += frac * ref_rate / (ref_div * (1 << 6));
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA955X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_DDR_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NFRAC_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += frac * ref_rate / (ref_div * (1 << 10));
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA955X_PLL_CLK_CTRL_REG);

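	/*
	 * Note: the CPUCLK_FROM_CPUPLL and DDRCLK_FROM_DDRPLL cases below
	 * select the opposite PLL compared to the ar934x/qca953x handlers
	 * above (CPU from the DDR PLL and DDR from the CPU PLL).
	 */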
	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = ddr_pll / (postdiv + 1);
	else
		cpu_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = cpu_pll / (postdiv + 1);
	else
		ddr_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

static void __init qca956x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, hfrac, lfrac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	/*
	 * The QCA956x timer init workaround has to be applied right before
	 * setting up the clocks; otherwise the timer interrupt never fires
	 * and jiffies do not advance.
	 */
	u32 misc;

	misc = ath79_reset_rr(AR71XX_RESET_REG_MISC_INT_ENABLE);
	misc |= MISC_INT_MIPS_SI_TIMERINT_MASK;
	ath79_reset_wr(AR71XX_RESET_REG_MISC_INT_ENABLE, misc);

	bootstrap = ath79_reset_rr(QCA956X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA956X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA956X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA956X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA956X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA956X_PLL_CPU_CONFIG_REFDIV_MASK;

	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG1_REG);
	nint = (pll >> QCA956X_PLL_CPU_CONFIG1_NINT_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NINT_MASK;
	hfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_H_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NFRAC_H_MASK;
	lfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_L_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NFRAC_L_MASK;

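	/*
	 * The fractional part is split across the NFRAC_H/NFRAC_L fields:
	 * pll = ref * NINT / REFDIV + ref * NFRAC_L / (REFDIV * 25 * 2^13)
	 *     + ref * (NFRAC_H >> 13) / REFDIV, divided by 2^OUTDIV.
	 */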
	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
	cpu_pll += (hfrac >> 13) * ref_rate / ref_div;
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA956X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA956X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA956X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA956X_PLL_DDR_CONFIG_REFDIV_MASK;
	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG1_REG);
	nint = (pll >> QCA956X_PLL_DDR_CONFIG1_NINT_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NINT_MASK;
	hfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_H_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NFRAC_H_MASK;
	lfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_L_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NFRAC_L_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
	ddr_pll += (hfrac >> 13) * ref_rate / ref_div;
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA956X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_CPUPLL)
		cpu_rate = ddr_pll / (postdiv + 1);
	else
		cpu_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_DDRPLL)
		ddr_rate = cpu_pll / (postdiv + 1);
	else
		ddr_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

static void __init ath79_clocks_init_dt(struct device_node *np)
{
	struct clk *ref_clk;
	void __iomem *pll_base;

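	/*
	 * A fixed "ref" clock may be supplied through the device tree; if it
	 * is, ath79_setup_ref_clk() uses its rate instead of registering one
	 * based on the bootstrap straps.
	 */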
	ref_clk = of_clk_get(np, 0);
	if (!IS_ERR(ref_clk))
		clks[ATH79_CLK_REF] = ref_clk;

	pll_base = of_iomap(np, 0);
	if (!pll_base) {
		pr_err("%pOF: can't map pll registers\n", np);
		goto err_clk;
	}

	if (of_device_is_compatible(np, "qca,ar7100-pll"))
		ar71xx_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar7240-pll") ||
		 of_device_is_compatible(np, "qca,ar9130-pll"))
		ar724x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar9330-pll"))
		ar933x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar9340-pll"))
		ar934x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9530-pll"))
		qca953x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9550-pll"))
		qca955x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9560-pll"))
		qca956x_clocks_init(pll_base);

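	/* SoCs that did not register a dedicated MDIO clock reuse "ref". */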
	if (!clks[ATH79_CLK_MDIO])
		clks[ATH79_CLK_MDIO] = clks[ATH79_CLK_REF];

	if (of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data)) {
		pr_err("%pOF: could not register clk provider\n", np);
		goto err_iounmap;
	}

	return;

err_iounmap:
	iounmap(pll_base);

err_clk:
	clk_put(ref_clk);
}

CLK_OF_DECLARE(ar7100_clk, "qca,ar7100-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar7240_clk, "qca,ar7240-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9130_clk, "qca,ar9130-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9330_clk, "qca,ar9330-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9340_clk, "qca,ar9340-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9530_clk, "qca,qca9530-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9550_clk, "qca,qca9550-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9560_clk, "qca,qca9560-pll", ath79_clocks_init_dt);