// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2017 Weidmüller Interface GmbH & Co. KG
 * Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
 *
 * Copyright (C) 2013 Soren Brinkmann <soren.brinkmann@xilinx.com>
 * Copyright (C) 2013 Xilinx, Inc. All rights reserved.
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dm/lists.h>
#include <errno.h>
#include <asm/io.h>
#include <asm/arch/clk.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>

/* Register bitfield defines */
#define PLLCTRL_FBDIV_MASK	0x7f000
#define PLLCTRL_FBDIV_SHIFT	12
#define PLLCTRL_BPFORCE_MASK	(1 << 4)
#define PLLCTRL_PWRDWN_MASK	2
#define PLLCTRL_PWRDWN_SHIFT	1
#define PLLCTRL_RESET_MASK	1
#define PLLCTRL_RESET_SHIFT	0

#define ZYNQ_CLK_MAXDIV		0x3f
#define CLK_CTRL_DIV1_SHIFT	20
#define CLK_CTRL_DIV1_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV1_SHIFT)
#define CLK_CTRL_DIV0_SHIFT	8
#define CLK_CTRL_DIV0_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV0_SHIFT)
#define CLK_CTRL_SRCSEL_SHIFT	4
#define CLK_CTRL_SRCSEL_MASK	(0x3 << CLK_CTRL_SRCSEL_SHIFT)

#define CLK_CTRL_DIV2X_SHIFT	26
#define CLK_CTRL_DIV2X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV2X_SHIFT)
#define CLK_CTRL_DIV3X_SHIFT	20
#define CLK_CTRL_DIV3X_MASK	(ZYNQ_CLK_MAXDIV << CLK_CTRL_DIV3X_SHIFT)

DECLARE_GLOBAL_DATA_PTR;

#ifndef CONFIG_SPL_BUILD
enum zynq_clk_rclk {mio_clk, emio_clk};
#endif

struct zynq_clk_priv {
	ulong ps_clk_freq;
#ifndef CONFIG_SPL_BUILD
	struct clk gem_emio_clk[2];
#endif
};

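/*
 * Map a clock ID to the address of its SLCR control register. Clocks that
 * share one control register (e.g. sdio0/sdio1, uart0/uart1, spi0/spi1,
 * can0/can1) resolve to the same address; the debug clocks and any unknown
 * ID fall back to dbg_clk_ctrl.
 */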
static void *zynq_clk_get_register(enum zynq_clk id)
{
	switch (id) {
	case armpll_clk:
		return &slcr_base->arm_pll_ctrl;
	case ddrpll_clk:
		return &slcr_base->ddr_pll_ctrl;
	case iopll_clk:
		return &slcr_base->io_pll_ctrl;
	case lqspi_clk:
		return &slcr_base->lqspi_clk_ctrl;
	case smc_clk:
		return &slcr_base->smc_clk_ctrl;
	case pcap_clk:
		return &slcr_base->pcap_clk_ctrl;
	case sdio0_clk ... sdio1_clk:
		return &slcr_base->sdio_clk_ctrl;
	case uart0_clk ... uart1_clk:
		return &slcr_base->uart_clk_ctrl;
	case spi0_clk ... spi1_clk:
		return &slcr_base->spi_clk_ctrl;
#ifndef CONFIG_SPL_BUILD
	case dci_clk:
		return &slcr_base->dci_clk_ctrl;
	case gem0_clk:
		return &slcr_base->gem0_clk_ctrl;
	case gem1_clk:
		return &slcr_base->gem1_clk_ctrl;
	case fclk0_clk:
		return &slcr_base->fpga0_clk_ctrl;
	case fclk1_clk:
		return &slcr_base->fpga1_clk_ctrl;
	case fclk2_clk:
		return &slcr_base->fpga2_clk_ctrl;
	case fclk3_clk:
		return &slcr_base->fpga3_clk_ctrl;
	case can0_clk ... can1_clk:
		return &slcr_base->can_clk_ctrl;
	case dbg_trc_clk ... dbg_apb_clk:
		/* fall through */
#endif
	default:
		return &slcr_base->dbg_clk_ctrl;
	}
}

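/*
 * Decode the SRCSEL field of the ARM clock control register: 0x2 selects the
 * DDR PLL, 0x3 the IO PLL, anything else the ARM PLL.
 */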
static enum zynq_clk zynq_clk_get_cpu_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return ddrpll_clk;
	case 3:
		return iopll_clk;
	case 0 ... 1:
	default:
		return armpll_clk;
	}
}

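/*
 * Decode the SRCSEL field of a peripheral clock control register: 0x2 selects
 * the ARM PLL, 0x3 the DDR PLL, anything else the IO PLL.
 */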
static enum zynq_clk zynq_clk_get_peripheral_pll(u32 clk_ctrl)
{
	u32 srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;

	switch (srcsel) {
	case 2:
		return armpll_clk;
	case 3:
		return ddrpll_clk;
	case 0 ... 1:
	default:
		return iopll_clk;
	}
}

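/*
 * Return the output rate of a PLL in Hz: 0 if the PLL is held in reset or
 * powered down, the PS reference clock if it is force-bypassed, otherwise
 * the reference clock multiplied by the feedback divider (FBDIV).
 */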
static ulong zynq_clk_get_pll_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_ctrl, reset, pwrdwn, mul, bypass;

	clk_ctrl = readl(zynq_clk_get_register(id));

	reset = (clk_ctrl & PLLCTRL_RESET_MASK) >> PLLCTRL_RESET_SHIFT;
	pwrdwn = (clk_ctrl & PLLCTRL_PWRDWN_MASK) >> PLLCTRL_PWRDWN_SHIFT;
	if (reset || pwrdwn)
		return 0;

	bypass = clk_ctrl & PLLCTRL_BPFORCE_MASK;
	if (bypass)
		mul = 1;
	else
		mul = (clk_ctrl & PLLCTRL_FBDIV_MASK) >> PLLCTRL_FBDIV_SHIFT;

	return priv->ps_clk_freq * mul;
}

#ifndef CONFIG_SPL_BUILD
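/* Report whether a GEM receive clock is sourced from MIO or EMIO. */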
static enum zynq_clk_rclk zynq_clk_get_gem_rclk(enum zynq_clk id)
{
	u32 clk_ctrl, srcsel;

	if (id == gem0_clk)
		clk_ctrl = readl(&slcr_base->gem0_rclk_ctrl);
	else
		clk_ctrl = readl(&slcr_base->gem1_rclk_ctrl);

	srcsel = (clk_ctrl & CLK_CTRL_SRCSEL_MASK) >> CLK_CTRL_SRCSEL_SHIFT;
	if (srcsel)
		return emio_clk;
	else
		return mio_clk;
}
#endif

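/*
 * Derive the CPU clock domain rates (6x/4x, 3x/2x, 2x, 1x) from the ARM
 * clock control register. The base divider is scaled according to the
 * requested domain and the CLK_621_TRUE setting (6:2:1 vs. 4:2:1 mode).
 */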
static ulong zynq_clk_get_cpu_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	u32 clk_621, clk_ctrl, div;
	enum zynq_clk pll;

	clk_ctrl = readl(&slcr_base->arm_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;

	switch (id) {
	case cpu_1x_clk:
		div *= 2;
		/* fall through */
	case cpu_2x_clk:
		clk_621 = readl(&slcr_base->clk_621_true) & 1;
		div *= 2 + clk_621;
		break;
	case cpu_3or2x_clk:
		div *= 2;
		/* fall through */
	case cpu_6or4x_clk:
		break;
	default:
		return 0;
	}

	pll = zynq_clk_get_cpu_pll(clk_ctrl);

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, pll), div);
}

#ifndef CONFIG_SPL_BUILD
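/* DDR_2X rate: DDR PLL output divided by the DIV2X field of ddr_clk_ctrl. */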
static ulong zynq_clk_get_ddr2x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV2X_MASK) >> CLK_CTRL_DIV2X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}
#endif

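/* DDR_3X rate: DDR PLL output divided by the DIV3X field of ddr_clk_ctrl. */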
static ulong zynq_clk_get_ddr3x_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div;

	clk_ctrl = readl(&slcr_base->ddr_clk_ctrl);

	div = (clk_ctrl & CLK_CTRL_DIV3X_MASK) >> CLK_CTRL_DIV3X_SHIFT;

	return DIV_ROUND_CLOSEST(zynq_clk_get_pll_rate(priv, ddrpll_clk), div);
}

#ifndef CONFIG_SPL_BUILD
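/* DCI rate: DDR PLL output divided by both divider fields of dci_clk_ctrl. */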
static ulong zynq_clk_get_dci_rate(struct zynq_clk_priv *priv)
{
	u32 clk_ctrl, div0, div1;

	clk_ctrl = readl(&slcr_base->dci_clk_ctrl);

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;

	return DIV_ROUND_CLOSEST(DIV_ROUND_CLOSEST(
		zynq_clk_get_pll_rate(priv, ddrpll_clk), div0), div1);
}
#endif

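/*
 * Generic peripheral rate: source PLL rate divided by DIV0 and, for clocks
 * with a second divider stage (two_divs), by DIV1 as well. Divider fields
 * that read as zero are treated as one.
 */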
static ulong zynq_clk_get_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0;
	u32 div1 = 1;

	clk_ctrl = readl(zynq_clk_get_register(id));

	div0 = (clk_ctrl & CLK_CTRL_DIV0_MASK) >> CLK_CTRL_DIV0_SHIFT;
	if (!div0)
		div0 = 1;

#ifndef CONFIG_SPL_BUILD
	if (two_divs) {
		div1 = (clk_ctrl & CLK_CTRL_DIV1_MASK) >> CLK_CTRL_DIV1_SHIFT;
		if (!div1)
			div1 = 1;
	}
#endif

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);

	return
		DIV_ROUND_CLOSEST(
			DIV_ROUND_CLOSEST(
				zynq_clk_get_pll_rate(priv, pll), div0),
			div1);
}

#ifndef CONFIG_SPL_BUILD
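/*
 * GEM reference clock rate: read from the SLCR dividers when the clock is
 * sourced from MIO, otherwise query the EMIO clock provider bound in probe().
 */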
static ulong zynq_clk_get_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_get_peripheral_rate(priv, id, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_get_rate(parent);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}

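/*
 * Exhaustively search the two divider stages for the combination whose
 * output is closest to the requested rate. The chosen dividers are returned
 * through div0/div1 and the achievable rate is the return value.
 */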
static unsigned long zynq_clk_calc_peripheral_two_divs(ulong rate,
						       ulong pll_rate,
						       u32 *div0, u32 *div1)
{
	long new_err, best_err = (long)(~0UL >> 1);
	ulong new_rate, best_rate = 0;
	u32 d0, d1;

	for (d0 = 1; d0 <= ZYNQ_CLK_MAXDIV; d0++) {
		for (d1 = 1; d1 <= ZYNQ_CLK_MAXDIV >> 1; d1++) {
			new_rate = DIV_ROUND_CLOSEST(
					DIV_ROUND_CLOSEST(pll_rate, d0), d1);
			new_err = abs(new_rate - rate);

			if (new_err < best_err) {
				*div0 = d0;
				*div1 = d1;
				best_err = new_err;
				best_rate = new_rate;
			}
		}
	}

	return best_rate;
}

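/*
 * Program the divider field(s) of a peripheral clock to approximate the
 * requested rate without changing the source PLL selection. The SLCR is
 * unlocked only around the register write. Returns the rate actually set.
 */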
static ulong zynq_clk_set_peripheral_rate(struct zynq_clk_priv *priv,
					  enum zynq_clk id, ulong rate,
					  bool two_divs)
{
	enum zynq_clk pll;
	u32 clk_ctrl, div0 = 0, div1 = 0;
	ulong pll_rate, new_rate;
	u32 *reg;

	reg = zynq_clk_get_register(id);
	clk_ctrl = readl(reg);

	pll = zynq_clk_get_peripheral_pll(clk_ctrl);
	pll_rate = zynq_clk_get_pll_rate(priv, pll);
	clk_ctrl &= ~CLK_CTRL_DIV0_MASK;
	if (two_divs) {
		clk_ctrl &= ~CLK_CTRL_DIV1_MASK;
		new_rate = zynq_clk_calc_peripheral_two_divs(rate, pll_rate,
				&div0, &div1);
		clk_ctrl |= div1 << CLK_CTRL_DIV1_SHIFT;
	} else {
		div0 = DIV_ROUND_CLOSEST(pll_rate, rate);
		if (div0 > ZYNQ_CLK_MAXDIV)
			div0 = ZYNQ_CLK_MAXDIV;
		/* report the rate achieved with the clamped divider */
		new_rate = DIV_ROUND_CLOSEST(pll_rate, div0);
	}
	clk_ctrl |= div0 << CLK_CTRL_DIV0_SHIFT;

	zynq_slcr_unlock();
	writel(clk_ctrl, reg);
	zynq_slcr_lock();

	return new_rate;
}

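/*
 * Set a GEM reference clock: program the SLCR dividers when sourced from
 * MIO, otherwise forward the request to the EMIO clock provider.
 */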
static ulong zynq_clk_set_gem_rate(struct zynq_clk_priv *priv, enum zynq_clk id,
				   ulong rate)
{
	struct clk *parent;

	if (zynq_clk_get_gem_rclk(id) == mio_clk)
		return zynq_clk_set_peripheral_rate(priv, id, rate, true);

	parent = &priv->gem_emio_clk[id - gem0_clk];
	if (parent->dev)
		return clk_set_rate(parent, rate);

	debug("%s: gem%d emio rx clock source unknown\n", __func__,
	      id - gem0_clk);

	return -ENOSYS;
}
#endif

#ifndef CONFIG_SPL_BUILD
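/*
 * clk_ops .get_rate: dispatch a clock ID to the matching rate calculation.
 * The AMBA peripheral (APER) and watchdog clocks run at CPU_1X, the DMA
 * clock at CPU_2X.
 */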
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case armpll_clk ... iopll_clk:
		return zynq_clk_get_pll_rate(priv, id);
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr2x_clk:
		return zynq_clk_get_ddr2x_rate(priv);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case dci_clk:
		return zynq_clk_get_dci_rate(priv);
	case gem0_clk ... gem1_clk:
		return zynq_clk_get_gem_rate(priv, id);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case dbg_trc_clk ... dbg_apb_clk:
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, two_divs);
	case dma_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_2x_clk);
	case usb0_aper_clk ... swdt_clk:
		return zynq_clk_get_cpu_rate(priv, cpu_1x_clk);
	default:
		return -ENXIO;
	}
}

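/*
 * clk_ops .set_rate: only the GEM and divider-programmable peripheral clocks
 * can be changed; everything else returns -ENXIO.
 */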
static ulong zynq_clk_set_rate(struct clk *clk, ulong rate)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;
	bool two_divs = false;

	switch (id) {
	case gem0_clk ... gem1_clk:
		return zynq_clk_set_gem_rate(priv, id, rate);
	case fclk0_clk ... can1_clk:
		two_divs = true;
		/* fall through */
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
	case dbg_trc_clk ... dbg_apb_clk:
		return zynq_clk_set_peripheral_rate(priv, id, rate, two_divs);
	default:
		return -ENXIO;
	}
}
#else
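/*
 * Reduced .get_rate for SPL builds: only the CPU, DDR_3X and simple
 * peripheral clocks are supported.
 */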
static ulong zynq_clk_get_rate(struct clk *clk)
{
	struct zynq_clk_priv *priv = dev_get_priv(clk->dev);
	enum zynq_clk id = clk->id;

	switch (id) {
	case cpu_6or4x_clk ... cpu_1x_clk:
		return zynq_clk_get_cpu_rate(priv, id);
	case ddr3x_clk:
		return zynq_clk_get_ddr3x_rate(priv);
	case lqspi_clk ... pcap_clk:
	case sdio0_clk ... spi1_clk:
		return zynq_clk_get_peripheral_rate(priv, id, 0);
	default:
		return -ENXIO;
	}
}
#endif

static struct clk_ops zynq_clk_ops = {
	.get_rate = zynq_clk_get_rate,
#ifndef CONFIG_SPL_BUILD
	.set_rate = zynq_clk_set_rate,
#endif
};

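/*
 * Probe: look up the optional gem0/gem1 EMIO clock inputs (missing entries
 * are tolerated) and read the PS reference clock frequency from the device
 * tree, defaulting to 33.333333 MHz.
 */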
static int zynq_clk_probe(struct udevice *dev)
{
	struct zynq_clk_priv *priv = dev_get_priv(dev);
#ifndef CONFIG_SPL_BUILD
	unsigned int i;
	char name[16];
	int ret;

	for (i = 0; i < 2; i++) {
		sprintf(name, "gem%d_emio_clk", i);
		ret = clk_get_by_name(dev, name, &priv->gem_emio_clk[i]);
		if (ret < 0 && ret != -ENODATA) {
			dev_err(dev, "failed to get %s clock\n", name);
			return ret;
		}
	}
#endif

	priv->ps_clk_freq = fdtdec_get_uint(gd->fdt_blob, dev_of_offset(dev),
					    "ps-clk-frequency", 33333333UL);

	return 0;
}

static const struct udevice_id zynq_clk_ids[] = {
	{ .compatible = "xlnx,ps7-clkc"},
	{}
};

U_BOOT_DRIVER(zynq_clk) = {
	.name		= "zynq_clk",
	.id		= UCLASS_CLK,
	.of_match	= zynq_clk_ids,
	.ops		= &zynq_clk_ops,
	.priv_auto_alloc_size = sizeof(struct zynq_clk_priv),
	.probe		= zynq_clk_probe,
};