xref: /openbmc/u-boot/arch/arm/mach-keystone/clock.c (revision 8502f9f6)
/*
 * Keystone2: pll initialization
 *
 * (C) Copyright 2012-2014
 *     Texas Instruments Incorporated, <www.ti.com>
 *
 * SPDX-License-Identifier:     GPL-2.0+
 */

#include <common.h>
#include <asm/arch/clock.h>
#include <asm/arch/clock_defs.h>

/* DEV and ARM speed definitions as specified in DEVSPEED register */
int __weak speeds[DEVSPEED_NUMSPDS] = {
	SPD1000,
	SPD1200,
	SPD1350,
	SPD1400,
	SPD1500,
	SPD1400,
	SPD1350,
	SPD1200,
	SPD1000,
	SPD800,
};

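/* Per-PLL control register pair (PLLCTL0, PLLCTL1), indexed by PLL id */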
const struct keystone_pll_regs keystone_pll_regs[] = {
	[CORE_PLL]	= {KS2_MAINPLLCTL0, KS2_MAINPLLCTL1},
	[PASS_PLL]	= {KS2_PASSPLLCTL0, KS2_PASSPLLCTL1},
	[TETRIS_PLL]	= {KS2_ARMPLLCTL0, KS2_ARMPLLCTL1},
	[DDR3A_PLL]	= {KS2_DDR3APLLCTL0, KS2_DDR3APLLCTL1},
	[DDR3B_PLL]	= {KS2_DDR3BPLLCTL0, KS2_DDR3BPLLCTL1},
	[UART_PLL]	= {KS2_UARTPLLCTL0, KS2_UARTPLLCTL1},
};

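/* Select the PASS PLL output as the input clock to the PASS subsystem */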
inline void pll_pa_clk_sel(void)
{
	setbits_le32(keystone_pll_regs[PASS_PLL].reg1, CFG_PLLCTL1_PAPLL_MASK);
}

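/*
 * Poll PLLSTAT until the pending GO operation completes; give up after a
 * bounded number of polls so we never hang here.
 */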
static void wait_for_completion(const struct pll_init_data *data)
{
	int i;

	for (i = 0; i < 100; i++) {
		sdelay(450);
		if (!(pllctl_reg_read(data->pll, stat) & PLLSTAT_GOSTAT_MASK))
			break;
	}
}

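/* Put the main PLL in bypass by clearing PLLENSRC and PLLEN */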
static inline void bypass_main_pll(const struct pll_init_data *data)
{
	pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLENSRC_MASK |
			   PLLCTL_PLLEN_MASK);

	/* Wait for 4 cycles of the reference clock CLKIN */
	sdelay(340);
}

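/*
 * Program the PLL multiplier, bandwidth adjustment (BWADJ) and pre-divider
 * from the supplied configuration.
 */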
static void configure_mult_div(const struct pll_init_data *data)
{
	u32 pllm, plld, bwadj;

	pllm = data->pll_m - 1;
	plld = (data->pll_d - 1) & CFG_PLLCTL0_PLLD_MASK;

	/* Program Multiplier */
	if (data->pll == MAIN_PLL)
		pllctl_reg_write(data->pll, mult, pllm & PLLM_MULT_LO_MASK);

	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_PLLM_MASK,
			pllm << CFG_PLLCTL0_PLLM_SHIFT);

	/* Program BWADJ */
	bwadj = (data->pll_m - 1) >> 1; /* Divide pllm by 2 */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_BWADJ_MASK,
			(bwadj << CFG_PLLCTL0_BWADJ_SHIFT) &
			CFG_PLLCTL0_BWADJ_MASK);
	bwadj = bwadj >> CFG_PLLCTL0_BWADJ_BITS;
	clrsetbits_le32(keystone_pll_regs[data->pll].reg1,
			CFG_PLLCTL1_BWADJ_MASK, bwadj);

	/* Program Divider */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_PLLD_MASK, plld);
}

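/*
 * Sequence the main (core) PLL: drop into bypass, power-cycle it if needed,
 * program the multiplier, dividers and PLLDIVn registers, then take the PLL
 * out of reset and enable it.
 */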
void configure_main_pll(const struct pll_init_data *data)
{
	u32 tmp, pllod, i, alnctl_val = 0;
	u32 *offset;

	pllod = data->pll_od - 1;

	/* Wait 100 microseconds for stabilization */
	sdelay(210000);

	tmp = pllctl_reg_read(data->pll, secctl);

	/* Check for Bypass */
	if (tmp & SECCTL_BYPASS_MASK) {
		setbits_le32(keystone_pll_regs[data->pll].reg1,
			     CFG_PLLCTL1_ENSAT_MASK);

		bypass_main_pll(data);

		/* Power down and power up the main PLL */
		pllctl_reg_setbits(data->pll, secctl, SECCTL_BYPASS_MASK);
		pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLPWRDN_MASK);
		/* Wait for 5 microseconds */
		sdelay(21000);

		pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLPWRDN_MASK);
	} else {
		bypass_main_pll(data);
	}

	configure_mult_div(data);

	/* Program Output Divider */
	pllctl_reg_rmw(data->pll, secctl, SECCTL_OP_DIV_MASK,
		       ((pllod << SECCTL_OP_DIV_SHIFT) & SECCTL_OP_DIV_MASK));

	/* Program PLLDIVn */
	wait_for_completion(data);
	for (i = 0; i < PLLDIV_MAX; i++) {
		if (i < 3)
			offset = pllctl_reg(data->pll, div1) + i;
		else
			offset = pllctl_reg(data->pll, div4) + (i - 3);

		if (divn_val[i] != -1) {
			__raw_writel(divn_val[i] | PLLDIV_ENABLE_MASK, offset);
			alnctl_val |= BIT(i);
		}
	}

	if (alnctl_val) {
		pllctl_reg_setbits(data->pll, alnctl, alnctl_val);
		/*
		 * Set the GOSET bit in PLLCMD to initiate the GO operation
		 * that changes the dividers
		 */
		pllctl_reg_setbits(data->pll, cmd, PLLSTAT_GOSTAT_MASK);
		wait_for_completion(data);
	}

	/* Reset PLL */
	pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLRST_MASK);
	sdelay(21000);	/* Wait for a minimum of 7 us */
	pllctl_reg_clrbits(data->pll, ctl, PLLCTL_PLLRST_MASK);
	sdelay(105000);	/* Wait for PLL lock time (min 50 us) */

	/* Enable PLL */
	pllctl_reg_clrbits(data->pll, secctl, SECCTL_BYPASS_MASK);
	pllctl_reg_setbits(data->pll, ctl, PLLCTL_PLLEN_MASK);
}

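/*
 * Sequence a secondary PLL (PASS, ARM/Tetris, DDR3 or UART): enter bypass,
 * program the multiplier, dividers and output divider, then pulse reset and
 * switch back to PLL mode.
 */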
void configure_secondary_pll(const struct pll_init_data *data)
{
	int pllod = data->pll_od - 1;

	/* Enable glitch-free bypass for the ARM PLL */
	if (cpu_is_k2hk() && data->pll == TETRIS_PLL)
		clrbits_le32(KS2_MISC_CTRL, MISC_CTL1_ARM_PLL_EN);

	/* Enable Bypass mode */
	setbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_ENSAT_MASK);
	setbits_le32(keystone_pll_regs[data->pll].reg0,
		     CFG_PLLCTL0_BYPASS_MASK);

	configure_mult_div(data);

	/* Program Output Divider */
	clrsetbits_le32(keystone_pll_regs[data->pll].reg0,
			CFG_PLLCTL0_CLKOD_MASK,
			(pllod << CFG_PLLCTL0_CLKOD_SHIFT) &
			CFG_PLLCTL0_CLKOD_MASK);

	/* Reset PLL */
	setbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_RST_MASK);
	/* Wait for 5 microseconds */
	sdelay(21000);

	/* Select the output of the PASS PLL as input to PASS */
	if (data->pll == PASS_PLL && cpu_is_k2hk())
		pll_pa_clk_sel();

	clrbits_le32(keystone_pll_regs[data->pll].reg1, CFG_PLLCTL1_RST_MASK);
	/* Wait for 500 * REFCLK cycles * (PLLD + 1) */
	sdelay(105000);

	/* Switch to PLL mode */
	clrbits_le32(keystone_pll_regs[data->pll].reg0,
		     CFG_PLLCTL0_BYPASS_MASK);

	/* Select the output of the ARM PLL as input to the ARM subsystem */
	if (cpu_is_k2hk() && data->pll == TETRIS_PLL)
		setbits_le32(KS2_MISC_CTRL, MISC_CTL1_ARM_PLL_EN);
}

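/*
 * init_pll() - configure the single PLL described by @data, then delay so
 * that back-to-back PLL configurations do not interfere with each other.
 */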
void init_pll(const struct pll_init_data *data)
{
	if (data->pll == MAIN_PLL)
		configure_main_pll(data);
	else
		configure_secondary_pll(data);

	/*
	 * This delay is required between multiple consecutive
	 * PLL configurations
	 */
	sdelay(210000);
}

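/*
 * init_plls() - configure every PLL for which the board supplies init data
 * through get_pll_init_data().
 *
 * Illustration only (the real tables are board and reference-clock specific
 * and live in the board files): an entry returned by get_pll_init_data()
 * has the shape
 *
 *	static struct pll_init_data main_pll_config = {
 *		.pll	= MAIN_PLL,
 *		.pll_m	= 100,	multiplier, example value
 *		.pll_d	= 1,	pre-divider, example value
 *		.pll_od	= 2,	output divider, example value
 *	};
 */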
void init_plls(void)
{
	struct pll_init_data *data;
	int pll;

	for (pll = MAIN_PLL; pll < MAX_PLL_COUNT; pll++) {
		data = get_pll_init_data(pll);
		if (data)
			init_pll(data);
	}
}

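/*
 * Map a DEVSPEED/ARMSPEED efuse bit-field onto a speed grade from @spds,
 * falling back to the minimum supported grade when no bit is set.
 */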
static int get_max_speed(u32 val, u32 speed_supported, int *spds)
{
	int speed;

	/* The leftmost set bit gives the speed */
	for (speed = DEVSPEED_NUMSPDS; speed >= 0; speed--) {
		if ((val & BIT(speed)) & speed_supported)
			return spds[speed];
	}

	/* If no bit is set, return the minimum speed */
	if (cpu_is_k2g())
		return SPD200;
	else
		return SPD800;
}

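/*
 * Read the raw speed efuse: K2HK parts with cpu_revision() <= 1 expose it
 * in KS2_REV1_DEVSPEED, everything else in KS2_EFUSE_BOOTROM.
 */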
static inline u32 read_efuse_bootrom(void)
{
	if (cpu_is_k2hk() && (cpu_revision() <= 1))
		return __raw_readl(KS2_REV1_DEVSPEED);
	else
		return __raw_readl(KS2_EFUSE_BOOTROM);
}

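/* Return the maximum rated ARM speed grade for this device from @spds */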
int get_max_arm_speed(int *spds)
{
	u32 armspeed = read_efuse_bootrom();

	armspeed = (armspeed & DEVSPEED_ARMSPEED_MASK) >>
		    DEVSPEED_ARMSPEED_SHIFT;

	return get_max_speed(armspeed, ARM_SUPPORTED_SPEEDS, spds);
}

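/* Return the maximum rated device (DEV) speed grade from @spds */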
int get_max_dev_speed(int *spds)
{
	u32 devspeed = read_efuse_bootrom();

	devspeed = (devspeed & DEVSPEED_DEVSPEED_MASK) >>
		    DEVSPEED_DEVSPEED_SHIFT;

	return get_max_speed(devspeed, DEV_SUPPORTED_SPEEDS, spds);
}

/**
 * pll_freq_get - get pll frequency
 * @pll:	pll identifier
 *
 * Return: the PLL output frequency derived from its external reference
 * clock, or 0 if @pll is not a known PLL identifier.
 */
static unsigned long pll_freq_get(int pll)
{
	unsigned long mult = 1, prediv = 1, output_div = 2;
	unsigned long ret;
	u32 tmp, reg;

	if (pll == MAIN_PLL) {
		ret = get_external_clk(sys_clk);
		if (pllctl_reg_read(pll, ctl) & PLLCTL_PLLEN_MASK) {
			/* PLL mode */
			tmp = __raw_readl(KS2_MAINPLLCTL0);
			prediv = (tmp & CFG_PLLCTL0_PLLD_MASK) + 1;
			mult = ((tmp & CFG_PLLCTL0_PLLM_HI_MASK) >>
				CFG_PLLCTL0_PLLM_SHIFT |
				(pllctl_reg_read(pll, mult) &
				 PLLM_MULT_LO_MASK)) + 1;
			output_div = ((pllctl_reg_read(pll, secctl) &
				       SECCTL_OP_DIV_MASK) >>
				       SECCTL_OP_DIV_SHIFT) + 1;

			ret = ret / prediv / output_div * mult;
		}
	} else {
		switch (pll) {
		case PASS_PLL:
			ret = get_external_clk(pa_clk);
			reg = KS2_PASSPLLCTL0;
			break;
		case TETRIS_PLL:
			ret = get_external_clk(tetris_clk);
			reg = KS2_ARMPLLCTL0;
			break;
		case DDR3A_PLL:
			ret = get_external_clk(ddr3a_clk);
			reg = KS2_DDR3APLLCTL0;
			break;
		case DDR3B_PLL:
			ret = get_external_clk(ddr3b_clk);
			reg = KS2_DDR3BPLLCTL0;
			break;
		case UART_PLL:
			ret = get_external_clk(uart_clk);
			reg = KS2_UARTPLLCTL0;
			break;
		default:
			return 0;
		}

		tmp = __raw_readl(reg);

		if (!(tmp & CFG_PLLCTL0_BYPASS_MASK)) {
			/* Bypass disabled */
			prediv = (tmp & CFG_PLLCTL0_PLLD_MASK) + 1;
			mult = ((tmp & CFG_PLLCTL0_PLLM_MASK) >>
				CFG_PLLCTL0_PLLM_SHIFT) + 1;
			output_div = ((tmp & CFG_PLLCTL0_CLKOD_MASK) >>
				      CFG_PLLCTL0_CLKOD_SHIFT) + 1;
			ret = ((ret / prediv) * mult) / output_div;
		}
	}

	return ret;
}

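/*
 * ks_clk_get_rate() - return the rate of a Keystone clock id, deriving the
 * fixed-ratio sys_clk* outputs from the core PLL and its PLLDIV dividers.
 */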
unsigned long ks_clk_get_rate(unsigned int clk)
{
	unsigned long freq = 0;

	switch (clk) {
	case core_pll_clk:
		freq = pll_freq_get(CORE_PLL);
		break;
	case pass_pll_clk:
		freq = pll_freq_get(PASS_PLL);
		break;
	case tetris_pll_clk:
		if (!cpu_is_k2e())
			freq = pll_freq_get(TETRIS_PLL);
		break;
	case ddr3a_pll_clk:
		freq = pll_freq_get(DDR3A_PLL);
		break;
	case ddr3b_pll_clk:
		if (cpu_is_k2hk())
			freq = pll_freq_get(DDR3B_PLL);
		break;
	case uart_pll_clk:
		if (cpu_is_k2g())
			freq = pll_freq_get(UART_PLL);
		break;
	case sys_clk0_1_clk:
	case sys_clk0_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(1);
		break;
	case sys_clk1_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(2);
		break;
	case sys_clk2_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(3);
		break;
	case sys_clk3_clk:
		freq = pll_freq_get(CORE_PLL) / pll0div_read(4);
		break;
	case sys_clk0_2_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 2;
		break;
	case sys_clk0_3_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 3;
		break;
	case sys_clk0_4_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 4;
		break;
	case sys_clk0_6_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 6;
		break;
	case sys_clk0_8_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 8;
		break;
	case sys_clk0_12_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 12;
		break;
	case sys_clk0_24_clk:
		freq = ks_clk_get_rate(sys_clk0_clk) / 24;
		break;
	case sys_clk1_3_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 3;
		break;
	case sys_clk1_4_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 4;
		break;
	case sys_clk1_6_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 6;
		break;
	case sys_clk1_12_clk:
		freq = ks_clk_get_rate(sys_clk1_clk) / 12;
		break;
	default:
		break;
	}

	return freq;
}