xref: /openbmc/linux/arch/mips/ath79/clock.c (revision 5b628549)
/*
 *  Atheros AR71XX/AR724X/AR913X common routines
 *
 *  Copyright (C) 2010-2011 Jaiganesh Narayanan <jnarayanan@atheros.com>
 *  Copyright (C) 2011 Gabor Juhos <juhosg@openwrt.org>
 *
 *  Parts of this file are based on Atheros' 2.6.15/2.6.31 BSP
 *
 *  This program is free software; you can redistribute it and/or modify it
 *  under the terms of the GNU General Public License version 2 as published
 *  by the Free Software Foundation.
 */

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/clkdev.h>
#include <linux/clk-provider.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <dt-bindings/clock/ath79-clk.h>

#include <asm/div64.h>

#include <asm/mach-ath79/ath79.h>
#include <asm/mach-ath79/ar71xx_regs.h>
#include "common.h"

#define AR71XX_BASE_FREQ	40000000
#define AR724X_BASE_FREQ	40000000

static struct clk *clks[ATH79_CLK_END];
static struct clk_onecell_data clk_data = {
	.clks = clks,
	.clk_num = ARRAY_SIZE(clks),
};

static const char * const clk_names[ATH79_CLK_END] = {
	[ATH79_CLK_CPU] = "cpu",
	[ATH79_CLK_DDR] = "ddr",
	[ATH79_CLK_AHB] = "ahb",
	[ATH79_CLK_REF] = "ref",
	[ATH79_CLK_MDIO] = "mdio",
};

static const char * __init ath79_clk_name(int type)
{
	BUG_ON(type >= ARRAY_SIZE(clk_names) || !clk_names[type]);
	return clk_names[type];
}

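/*
 * The helpers below record each clock in the clks[] table consumed by the
 * DT onecell provider and also register a clkdev lookup under the canonical
 * name, so clk_get(NULL, <name>) lookups keep working.  A failed clock
 * registration is fatal: the system cannot run without the CPU/DDR/AHB
 * clocks, hence the panic().
 */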
static void __init __ath79_set_clk(int type, const char *name, struct clk *clk)
{
	if (IS_ERR(clk))
		panic("failed to allocate %s clock structure", clk_names[type]);

	clks[type] = clk;
	clk_register_clkdev(clk, name, NULL);
}

static struct clk * __init ath79_set_clk(int type, unsigned long rate)
{
	const char *name = ath79_clk_name(type);
	struct clk *clk;

	clk = clk_register_fixed_rate(NULL, name, NULL, 0, rate);
	__ath79_set_clk(type, name, clk);
	return clk;
}

static struct clk * __init ath79_set_ff_clk(int type, const char *parent,
					    unsigned int mult, unsigned int div)
{
	const char *name = ath79_clk_name(type);
	struct clk *clk;

	clk = clk_register_fixed_factor(NULL, name, parent, 0, mult, div);
	__ath79_set_clk(type, name, clk);
	return clk;
}

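/*
 * If a "ref" clock was already provided via the device tree it wins and its
 * rate is returned; otherwise a fixed-rate "ref" clock is registered at the
 * SoC-specific default rate passed in by the caller.
 */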
static unsigned long __init ath79_setup_ref_clk(unsigned long rate)
{
	struct clk *clk = clks[ATH79_CLK_REF];

	if (clk)
		rate = clk_get_rate(clk);
	else
		clk = ath79_set_clk(ATH79_CLK_REF, rate);

	return rate;
}

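/*
 * AR71xx: the CPU PLL runs at ref * (FB + 1); CPU and DDR are that PLL
 * frequency divided by (CPU_DIV + 1) and (DDR_DIV + 1) respectively, and
 * AHB is the CPU clock divided by 2 * (AHB_DIV + 1).  All three are
 * registered as fixed-rate clocks.
 */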
static void __init ar71xx_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll;
	u32 freq;
	u32 div;

	ref_rate = ath79_setup_ref_clk(AR71XX_BASE_FREQ);

	pll = __raw_readl(pll_base + AR71XX_PLL_REG_CPU_CONFIG);

	div = ((pll >> AR71XX_PLL_FB_SHIFT) & AR71XX_PLL_FB_MASK) + 1;
	freq = div * ref_rate;

	div = ((pll >> AR71XX_CPU_DIV_SHIFT) & AR71XX_CPU_DIV_MASK) + 1;
	cpu_rate = freq / div;

	div = ((pll >> AR71XX_DDR_DIV_SHIFT) & AR71XX_DDR_DIV_MASK) + 1;
	ddr_rate = freq / div;

	div = (((pll >> AR71XX_AHB_DIV_SHIFT) & AR71XX_AHB_DIV_MASK) + 1) * 2;
	ahb_rate = cpu_rate / div;

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

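/*
 * AR724x/AR913x: CPU, DDR and AHB are registered as fixed-factor children
 * of "ref": CPU = ref * FB / (2 * REF_DIV), DDR divides that further by
 * (DDR_DIV + 1) and AHB by 2 * (AHB_DIV + 1), so they track any change of
 * the reference clock automatically.
 */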
static void __init ar724x_clocks_init(void __iomem *pll_base)
{
	u32 mult, div, ddr_div, ahb_div;
	u32 pll;

	ath79_setup_ref_clk(AR71XX_BASE_FREQ);

	pll = __raw_readl(pll_base + AR724X_PLL_REG_CPU_CONFIG);

	mult = ((pll >> AR724X_PLL_FB_SHIFT) & AR724X_PLL_FB_MASK);
	div = ((pll >> AR724X_PLL_REF_DIV_SHIFT) & AR724X_PLL_REF_DIV_MASK) * 2;

	ddr_div = ((pll >> AR724X_DDR_DIV_SHIFT) & AR724X_DDR_DIV_MASK) + 1;
	ahb_div = (((pll >> AR724X_AHB_DIV_SHIFT) & AR724X_AHB_DIV_MASK) + 1) * 2;

	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", mult, div);
	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", mult, div * ddr_div);
	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", mult, div * ahb_div);
}

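/*
 * AR933x: the reference clock is 25 or 40 MHz depending on the
 * AR933X_BOOTSTRAP_REF_CLK_40 bootstrap bit.  When the PLL is bypassed all
 * dividers collapse to 1 and every clock follows "ref"; otherwise CPU, DDR
 * and AHB are registered as fixed-factor clocks of "ref" with the ratio
 * NINT / (REFDIV * 2^OUTDIV * per-clock divider), where an OUTDIV field of
 * zero is treated as one.
 */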
static void __init ar933x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	u32 clock_ctrl;
	u32 ref_div;
	u32 ninit_mul;
	u32 out_div;

	u32 cpu_div;
	u32 ddr_div;
	u32 ahb_div;
	u32 t;

	t = ath79_reset_rr(AR933X_RESET_REG_BOOTSTRAP);
	if (t & AR933X_BOOTSTRAP_REF_CLK_40)
		ref_rate = (40 * 1000 * 1000);
	else
		ref_rate = (25 * 1000 * 1000);

	ath79_setup_ref_clk(ref_rate);

	clock_ctrl = __raw_readl(pll_base + AR933X_PLL_CLOCK_CTRL_REG);
	if (clock_ctrl & AR933X_PLL_CLOCK_CTRL_BYPASS) {
		ref_div = 1;
		ninit_mul = 1;
		out_div = 1;

		cpu_div = 1;
		ddr_div = 1;
		ahb_div = 1;
	} else {
		u32 cpu_config;
		u32 t;

		cpu_config = __raw_readl(pll_base + AR933X_PLL_CPU_CONFIG_REG);

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_REFDIV_MASK;
		ref_div = t;

		ninit_mul = (cpu_config >> AR933X_PLL_CPU_CONFIG_NINT_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_NINT_MASK;

		t = (cpu_config >> AR933X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		    AR933X_PLL_CPU_CONFIG_OUTDIV_MASK;
		if (t == 0)
			t = 1;

		out_div = (1 << t);

		cpu_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_CPU_DIV_SHIFT) &
		     AR933X_PLL_CLOCK_CTRL_CPU_DIV_MASK) + 1;

		ddr_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_DDR_DIV_SHIFT) &
		      AR933X_PLL_CLOCK_CTRL_DDR_DIV_MASK) + 1;

		ahb_div = ((clock_ctrl >> AR933X_PLL_CLOCK_CTRL_AHB_DIV_SHIFT) &
		     AR933X_PLL_CLOCK_CTRL_AHB_DIV_MASK) + 1;
	}

	ath79_set_ff_clk(ATH79_CLK_CPU, "ref", ninit_mul,
			 ref_div * out_div * cpu_div);
	ath79_set_ff_clk(ATH79_CLK_DDR, "ref", ninit_mul,
			 ref_div * out_div * ddr_div);
	ath79_set_ff_clk(ATH79_CLK_AHB, "ref", ninit_mul,
			 ref_div * out_div * ahb_div);
}

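/*
 * Computes ref * (nint + nfrac / frac) / ref_div / 2^out_div.  The integer
 * and fractional contributions are evaluated separately with 64-bit
 * intermediates so the multiplications cannot overflow.
 */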
static u32 __init ar934x_get_pll_freq(u32 ref, u32 ref_div, u32 nint, u32 nfrac,
				      u32 frac, u32 out_div)
{
	u64 t;
	u32 ret;

	t = ref;
	t *= nint;
	do_div(t, ref_div);
	ret = t;

	t = ref;
	t *= nfrac;
	do_div(t, ref_div * frac);
	ret += t;

	ret /= (1 << out_div);
	return ret;
}

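/*
 * AR934x: when the SRIF DPLL2 register reports a local PLL, the CPU and DDR
 * PLL parameters are taken from the SRIF DPLL1/DPLL2 registers (18-bit
 * fraction); otherwise they come from the regular PLL config registers
 * (6-bit fraction for CPU, 10-bit for DDR).  CPU, DDR and AHB each select
 * either PLL or the bypassed reference via CPU_DDR_CLK_CTRL, with their own
 * post dividers.  A 100 MHz MDIO clock is registered when the switch clock
 * control selects it.
 */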
static void __init ar934x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, nfrac, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;
	void __iomem *dpll_base;

	dpll_base = ioremap(AR934X_SRIF_BASE, AR934X_SRIF_SIZE);

	bootstrap = ath79_reset_rr(AR934X_RESET_REG_BOOTSTRAP);
	if (bootstrap & AR934X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_CPU_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = __raw_readl(pll_base + AR934X_PLL_CPU_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
			  AR934X_PLL_CPU_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
			  AR934X_PLL_CPU_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_CPU_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_CPU_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_CPU_CONFIG_NFRAC_MASK;
		frac = 1 << 6;
	}

	cpu_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL2_REG);
	if (pll & AR934X_SRIF_DPLL2_LOCAL_PLL) {
		out_div = (pll >> AR934X_SRIF_DPLL2_OUTDIV_SHIFT) &
			  AR934X_SRIF_DPLL2_OUTDIV_MASK;
		pll = __raw_readl(dpll_base + AR934X_SRIF_DDR_DPLL1_REG);
		nint = (pll >> AR934X_SRIF_DPLL1_NINT_SHIFT) &
		       AR934X_SRIF_DPLL1_NINT_MASK;
		nfrac = pll & AR934X_SRIF_DPLL1_NFRAC_MASK;
		ref_div = (pll >> AR934X_SRIF_DPLL1_REFDIV_SHIFT) &
			  AR934X_SRIF_DPLL1_REFDIV_MASK;
		frac = 1 << 18;
	} else {
		pll = __raw_readl(pll_base + AR934X_PLL_DDR_CONFIG_REG);
		out_div = (pll >> AR934X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
			  AR934X_PLL_DDR_CONFIG_OUTDIV_MASK;
		ref_div = (pll >> AR934X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
			  AR934X_PLL_DDR_CONFIG_REFDIV_MASK;
		nint = (pll >> AR934X_PLL_DDR_CONFIG_NINT_SHIFT) &
		       AR934X_PLL_DDR_CONFIG_NINT_MASK;
		nfrac = (pll >> AR934X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
			AR934X_PLL_DDR_CONFIG_NFRAC_MASK;
		frac = 1 << 10;
	}

	ddr_pll = ar934x_get_pll_freq(ref_rate, ref_div, nint,
				      nfrac, frac, out_div);

	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_CPU_DDR_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = cpu_pll / (postdiv + 1);
	else
		cpu_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = ddr_pll / (postdiv + 1);
	else
		ddr_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & AR934X_PLL_CPU_DDR_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);

	clk_ctrl = __raw_readl(pll_base + AR934X_PLL_SWITCH_CLOCK_CONTROL_REG);
	if (clk_ctrl & AR934X_PLL_SWITCH_CLOCK_CONTROL_MDIO_CLK_SEL)
		ath79_set_clk(ATH79_CLK_MDIO, 100 * 1000 * 1000);

	iounmap(dpll_base);
}

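/*
 * QCA953x: the CPU and DDR PLL rates are computed inline as
 * ref * (NINT + NFRAC / 2^6) / REFDIV / 2^OUTDIV for the CPU PLL and with a
 * 2^10 fractional divisor for the DDR PLL; the per-clock bypass/selector
 * and post-divider handling mirrors AR934x.
 */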
static void __init qca953x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	bootstrap = ath79_reset_rr(QCA953X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA953X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA953X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA953X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA953X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA953X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA953X_PLL_CPU_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA953X_PLL_CPU_CONFIG_NINT_SHIFT) &
	       QCA953X_PLL_CPU_CONFIG_NINT_MASK;
	frac = (pll >> QCA953X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
	       QCA953X_PLL_CPU_CONFIG_NFRAC_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += frac * (ref_rate >> 6) / ref_div;
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA953X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA953X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA953X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA953X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA953X_PLL_DDR_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA953X_PLL_DDR_CONFIG_NINT_SHIFT) &
	       QCA953X_PLL_DDR_CONFIG_NINT_MASK;
	frac = (pll >> QCA953X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
	       QCA953X_PLL_DDR_CONFIG_NFRAC_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += frac * (ref_rate >> 6) / (ref_div << 4);
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA953X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = cpu_pll / (postdiv + 1);
	else
		cpu_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = ddr_pll / (postdiv + 1);
	else
		ddr_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA953X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA953X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

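/*
 * QCA955x: PLL rates are computed the same way as on QCA953x (2^6
 * fractional divisor for the CPU PLL, 2^10 for the DDR PLL).  Note that in
 * this code the CPUCLK_FROM_CPUPLL and DDRCLK_FROM_DDRPLL selector bits
 * pick the opposite PLL from the AR934x/QCA953x handling above.
 */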
static void __init qca955x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, frac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	bootstrap = ath79_reset_rr(QCA955X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA955X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA955X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_CPU_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_CPU_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_CPU_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_CPU_CONFIG_NFRAC_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += frac * ref_rate / (ref_div * (1 << 6));
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA955X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA955X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA955X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA955X_PLL_DDR_CONFIG_REFDIV_MASK;
	nint = (pll >> QCA955X_PLL_DDR_CONFIG_NINT_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NINT_MASK;
	frac = (pll >> QCA955X_PLL_DDR_CONFIG_NFRAC_SHIFT) &
	       QCA955X_PLL_DDR_CONFIG_NFRAC_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += frac * ref_rate / (ref_div * (1 << 10));
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA955X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_CPUCLK_FROM_CPUPLL)
		cpu_rate = ddr_pll / (postdiv + 1);
	else
		cpu_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_DDRCLK_FROM_DDRPLL)
		ddr_rate = cpu_pll / (postdiv + 1);
	else
		ddr_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA955X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA955X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

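/*
 * QCA956x: NINT and the high/low halves of the fractional multiplier live
 * in the separate CONFIG1 registers, so each PLL rate is assembled from
 * three terms: the integer part, the low fraction scaled by 1 / (25 * 2^13),
 * and the high fraction shifted down by 13 bits.
 */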
static void __init qca956x_clocks_init(void __iomem *pll_base)
{
	unsigned long ref_rate;
	unsigned long cpu_rate;
	unsigned long ddr_rate;
	unsigned long ahb_rate;
	u32 pll, out_div, ref_div, nint, hfrac, lfrac, clk_ctrl, postdiv;
	u32 cpu_pll, ddr_pll;
	u32 bootstrap;

	/*
	 * The QCA956x timer init workaround has to be applied right before
	 * setting up the clocks; otherwise there will be no jiffies.
	 */
	u32 misc;

	misc = ath79_reset_rr(AR71XX_RESET_REG_MISC_INT_ENABLE);
	misc |= MISC_INT_MIPS_SI_TIMERINT_MASK;
	ath79_reset_wr(AR71XX_RESET_REG_MISC_INT_ENABLE, misc);

	bootstrap = ath79_reset_rr(QCA956X_RESET_REG_BOOTSTRAP);
	if (bootstrap & QCA956X_BOOTSTRAP_REF_CLK_40)
		ref_rate = 40 * 1000 * 1000;
	else
		ref_rate = 25 * 1000 * 1000;

	ref_rate = ath79_setup_ref_clk(ref_rate);

	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG_REG);
	out_div = (pll >> QCA956X_PLL_CPU_CONFIG_OUTDIV_SHIFT) &
		  QCA956X_PLL_CPU_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA956X_PLL_CPU_CONFIG_REFDIV_SHIFT) &
		  QCA956X_PLL_CPU_CONFIG_REFDIV_MASK;

	pll = __raw_readl(pll_base + QCA956X_PLL_CPU_CONFIG1_REG);
	nint = (pll >> QCA956X_PLL_CPU_CONFIG1_NINT_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NINT_MASK;
	hfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_H_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NFRAC_H_MASK;
	lfrac = (pll >> QCA956X_PLL_CPU_CONFIG1_NFRAC_L_SHIFT) &
	       QCA956X_PLL_CPU_CONFIG1_NFRAC_L_MASK;

	cpu_pll = nint * ref_rate / ref_div;
	cpu_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
	cpu_pll += (hfrac >> 13) * ref_rate / ref_div;
	cpu_pll /= (1 << out_div);

	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG_REG);
	out_div = (pll >> QCA956X_PLL_DDR_CONFIG_OUTDIV_SHIFT) &
		  QCA956X_PLL_DDR_CONFIG_OUTDIV_MASK;
	ref_div = (pll >> QCA956X_PLL_DDR_CONFIG_REFDIV_SHIFT) &
		  QCA956X_PLL_DDR_CONFIG_REFDIV_MASK;
	pll = __raw_readl(pll_base + QCA956X_PLL_DDR_CONFIG1_REG);
	nint = (pll >> QCA956X_PLL_DDR_CONFIG1_NINT_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NINT_MASK;
	hfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_H_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NFRAC_H_MASK;
	lfrac = (pll >> QCA956X_PLL_DDR_CONFIG1_NFRAC_L_SHIFT) &
	       QCA956X_PLL_DDR_CONFIG1_NFRAC_L_MASK;

	ddr_pll = nint * ref_rate / ref_div;
	ddr_pll += (lfrac * ref_rate) / ((ref_div * 25) << 13);
	ddr_pll += (hfrac >> 13) * ref_rate / ref_div;
	ddr_pll /= (1 << out_div);

	clk_ctrl = __raw_readl(pll_base + QCA956X_PLL_CLK_CTRL_REG);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_CPU_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_PLL_BYPASS)
		cpu_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_CPUPLL)
		cpu_rate = ddr_pll / (postdiv + 1);
	else
		cpu_rate = cpu_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_DDR_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_DDR_PLL_BYPASS)
		ddr_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_CPU_DDRCLK_FROM_DDRPLL)
		ddr_rate = cpu_pll / (postdiv + 1);
	else
		ddr_rate = ddr_pll / (postdiv + 1);

	postdiv = (clk_ctrl >> QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_SHIFT) &
		  QCA956X_PLL_CLK_CTRL_AHB_POST_DIV_MASK;

	if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHB_PLL_BYPASS)
		ahb_rate = ref_rate;
	else if (clk_ctrl & QCA956X_PLL_CLK_CTRL_AHBCLK_FROM_DDRPLL)
		ahb_rate = ddr_pll / (postdiv + 1);
	else
		ahb_rate = cpu_pll / (postdiv + 1);

	ath79_set_clk(ATH79_CLK_CPU, cpu_rate);
	ath79_set_clk(ATH79_CLK_DDR, ddr_rate);
	ath79_set_clk(ATH79_CLK_AHB, ahb_rate);
}

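/*
 * DT entry point, invoked via CLK_OF_DECLARE for every supported PLL
 * compatible.  An external "ref" clock from the device tree is honoured if
 * present, the PLL register block is mapped, the SoC-specific init routine
 * is selected by compatible string, the MDIO clock falls back to "ref" when
 * the SoC code did not register one, and the clocks are finally exposed
 * through a onecell provider.
 */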
static void __init ath79_clocks_init_dt(struct device_node *np)
{
	struct clk *ref_clk;
	void __iomem *pll_base;

	ref_clk = of_clk_get(np, 0);
	if (!IS_ERR(ref_clk))
		clks[ATH79_CLK_REF] = ref_clk;

	pll_base = of_iomap(np, 0);
	if (!pll_base) {
		pr_err("%pOF: can't map pll registers\n", np);
		goto err_clk;
	}

	if (of_device_is_compatible(np, "qca,ar7100-pll"))
		ar71xx_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar7240-pll") ||
		 of_device_is_compatible(np, "qca,ar9130-pll"))
		ar724x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar9330-pll"))
		ar933x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,ar9340-pll"))
		ar934x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9530-pll"))
		qca953x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9550-pll"))
		qca955x_clocks_init(pll_base);
	else if (of_device_is_compatible(np, "qca,qca9560-pll"))
		qca956x_clocks_init(pll_base);

	if (!clks[ATH79_CLK_MDIO])
		clks[ATH79_CLK_MDIO] = clks[ATH79_CLK_REF];

	if (of_clk_add_provider(np, of_clk_src_onecell_get, &clk_data)) {
		pr_err("%pOF: could not register clk provider\n", np);
		goto err_iounmap;
	}

	return;

err_iounmap:
	iounmap(pll_base);

err_clk:
	clk_put(ref_clk);
}

CLK_OF_DECLARE(ar7100_clk, "qca,ar7100-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar7240_clk, "qca,ar7240-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9130_clk, "qca,ar9130-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9330_clk, "qca,ar9330-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9340_clk, "qca,ar9340-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9530_clk, "qca,qca9530-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9550_clk, "qca,qca9550-pll", ath79_clocks_init_dt);
CLK_OF_DECLARE(ar9560_clk, "qca,qca9560-pll", ath79_clocks_init_dt);