/*
 * SPDX-License-Identifier: GPL-2.0
 * Copyright (c) 2018, The Linux Foundation
 */

#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/iopoll.h>

#include "dsi_phy.h"
#include "dsi.xml.h"
#include "dsi_phy_7nm.xml.h"

/*
 * DSI PLL 7nm - clock diagram (eg: DSI0): TODO: updated CPHY diagram
 *
 *           dsi0_pll_out_div_clk  dsi0_pll_bit_clk
 *                              |                |
 *                              |                |
 *                 +---------+  |  +----------+  |  +----+
 *  dsi0vco_clk ---| out_div |--o--| divl_3_0 |--o--| /8 |-- dsi0_phy_pll_out_byteclk
 *                 +---------+  |  +----------+  |  +----+
 *                              |                |
 *                              |                |  dsi0_pll_by_2_bit_clk
 *                              |                |   |
 *                              |                |   |    +----+  |\  dsi0_pclk_mux
 *                              |                |---| /2 |--o--|  \   |
 *                              |                |   +----+     |   \  |  +---------+
 *                              |                ---------------|    |--o--| div_7_4 |-- dsi0_phy_pll_out_dsiclk
 *                              |-------------------------------|   /     +---------+
 *                              |          +-----+              |  /
 *                              -----------| /4? |--o-----------|/
 *                                         +-----+  |           |
 *                                                  |           |dsiclk_sel
 *                                                  |
 *                                                  dsi0_pll_post_out_div_clk
 */

/* Fixed 19.2 MHz reference clock feeding the VCO. */
#define VCO_REF_CLK_RATE 19200000
/* Number of fractional bits in the PLL's dec/frac divider programming. */
#define FRAC_BITS 18

/* Hardware is pre V4.1 */
#define DSI_PHY_7NM_QUIRK_PRE_V4_1	BIT(0)
/* Hardware is V4.1 */
#define DSI_PHY_7NM_QUIRK_V4_1		BIT(1)
/* Hardware is V4.2 */
#define DSI_PHY_7NM_QUIRK_V4_2		BIT(2)
/* Hardware is V4.3 */
#define DSI_PHY_7NM_QUIRK_V4_3		BIT(3)
/* Hardware is V5.2 */
#define DSI_PHY_7NM_QUIRK_V5_2		BIT(4)

/*
 * Per-set_rate PLL configuration: SSC (spread-spectrum clocking) inputs
 * plus the derived divider/inverter values that get written to hardware.
 */
struct dsi_pll_config {
	bool enable_ssc;	/* whether SSC registers are programmed at all */
	bool ssc_center;	/* center-spread (vs down-spread) SSC */
	u32 ssc_freq;		/* modulation frequency, Hz */
	u32 ssc_offset;		/* spread amount (ppm-scaled, divided by 10^6 below) */
	u32 ssc_adj_per;

	/* out */
	u32 decimal_div_start;	/* integer part of the feedback divider */
	u32 frac_div_start;	/* FRAC_BITS-wide fractional part */
	u32 pll_clock_inverters;
	u32 ssc_stepsize;
	u32 ssc_div_per;
};

/* Register state saved across PHY power collapse (see save/restore below). */
struct pll_7nm_cached_state {
	unsigned long vco_rate;
	u8 bit_clk_div;
	u8 pix_clk_div;
	u8 pll_out_div;
	u8 pll_mux;
};

/* Driver-private state for one 7nm DSI PLL instance. */
struct dsi_pll_7nm {
	struct clk_hw clk_hw;

	struct msm_dsi_phy *phy;

	/* last VCO rate requested via set_rate (or read back via recalc) */
	u64 vco_current_rate;

	/* protects REG_DSI_7nm_PHY_CMN_CLK_CFG0 register */
	spinlock_t postdiv_lock;

	struct pll_7nm_cached_state cached_state;

	/* set when this PLL is the master in bonded-DSI mode */
	struct dsi_pll_7nm *slave;
};

#define to_pll_7nm(x)	container_of(x, struct dsi_pll_7nm, clk_hw)

/*
 * Global list of private DSI PLL struct pointers. We need this for bonded DSI
 * mode, where the master PLL's clk_ops needs access the slave's private data
 */
static struct dsi_pll_7nm *pll_7nm_list[DSI_MAX];

/* Fill in the (currently fixed) SSC parameters; SSC itself stays disabled. */
static void dsi_pll_setup_config(struct dsi_pll_config *config)
{
	config->ssc_freq = 31500;
	config->ssc_offset = 4800;
	config->ssc_adj_per = 2;

	/* TODO: ssc enable */
	config->enable_ssc = false;
	config->ssc_center = 0;
}

/*
 * Derive the integer/fractional feedback divider from the requested VCO rate
 * and pick the clock-inverter setting for the hardware revision and rate.
 */
static void dsi_pll_calc_dec_frac(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u64 fref = VCO_REF_CLK_RATE;
	u64 pll_freq;
	u64 divider;
	u64 dec, dec_multiple;
	u32 frac;
	u64 multiplier;

	pll_freq = pll->vco_current_rate;

	/* effective comparison frequency is 2 * fref */
	divider = fref * 2;

	/* dec.frac = pll_freq / (2 * fref), with FRAC_BITS fractional bits */
	multiplier = 1 << FRAC_BITS;
	dec_multiple = div_u64(pll_freq * multiplier, divider);
	dec = div_u64_rem(dec_multiple, multiplier, &frac);

	/* inverter value depends on hw revision and frequency band */
	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)
		config->pll_clock_inverters = 0x28;
	else if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll_freq <= 1300000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 4000000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3020000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	} else {
		/* 4.2, 4.3 */
		if (pll_freq <= 1000000000ULL)
			config->pll_clock_inverters = 0xa0;
		else if (pll_freq <= 2500000000ULL)
			config->pll_clock_inverters = 0x20;
		else if (pll_freq <= 3500000000ULL)
			config->pll_clock_inverters = 0x00;
		else
			config->pll_clock_inverters = 0x40;
	}

	config->decimal_div_start = dec;
	config->frac_div_start = frac;
}

#define SSC_CENTER	BIT(0)
#define SSC_EN		BIT(1)

/*
 * Compute the SSC step size and divider period from the dec/frac values
 * produced by dsi_pll_calc_dec_frac(). No-op unless SSC is enabled.
 */
static void dsi_pll_calc_ssc(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	u32 ssc_per;
	u32 ssc_mod;
	u64 ssc_step_size;
	u64 frac;

	if (!config->enable_ssc) {
		DBG("SSC not enabled\n");
		return;
	}

	ssc_per = DIV_ROUND_CLOSEST(VCO_REF_CLK_RATE, config->ssc_freq) / 2 - 1;
	/* round the period down to a multiple of (adj_per + 1) */
	ssc_mod = (ssc_per + 1) % (config->ssc_adj_per + 1);
	ssc_per -= ssc_mod;

	frac = config->frac_div_start;
	/* step = (dec.frac * ssc_offset * (adj_per+1)) / ((per+1) * 10^6) */
	ssc_step_size = config->decimal_div_start;
	ssc_step_size *= (1 << FRAC_BITS);
	ssc_step_size += frac;
	ssc_step_size *= config->ssc_offset;
	ssc_step_size *= (config->ssc_adj_per + 1);
	ssc_step_size = div_u64(ssc_step_size, (ssc_per + 1));
	ssc_step_size = DIV_ROUND_CLOSEST_ULL(ssc_step_size, 1000000);

	config->ssc_div_per = ssc_per;
	config->ssc_stepsize = ssc_step_size;

	pr_debug("SCC: Dec:%d, frac:%llu, frac_bits:%d\n",
		 config->decimal_div_start, frac, FRAC_BITS);
	pr_debug("SSC: div_per:0x%X, stepsize:0x%X, adjper:0x%X\n",
		 ssc_per, (u32)ssc_step_size, config->ssc_adj_per);
}

/* Write the SSC configuration computed above to the PLL registers. */
static void dsi_pll_ssc_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	if (config->enable_ssc) {
		pr_debug("SSC is enabled\n");

		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_LOW_1,
			      config->ssc_stepsize & 0xff);
		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_SSC_STEPSIZE_HIGH_1,
			      config->ssc_stepsize >> 8);
		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_LOW_1,
			      config->ssc_div_per & 0xff);
		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_SSC_DIV_PER_HIGH_1,
			      config->ssc_div_per >> 8);
		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_LOW_1,
			      config->ssc_adj_per & 0xff);
		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_SSC_ADJPER_HIGH_1,
			      config->ssc_adj_per >> 8);
		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_SSC_CONTROL,
			      SSC_EN | (config->ssc_center ? SSC_CENTER : 0));
	}
}

/*
 * Program the frequency-independent PLL registers (analog controls, loop
 * filter, etc.). A few values vary by hardware revision and VCO band; the
 * register write order follows the vendor init sequence and should not be
 * reordered.
 */
static void dsi_pll_config_hzindep_reg(struct dsi_pll_7nm *pll)
{
	void __iomem *base = pll->phy->pll_base;
	u8 analog_controls_five_1 = 0x01, vco_config_1 = 0x00;

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		if (pll->vco_current_rate >= 3100000000ULL)
			analog_controls_five_1 = 0x03;

	if (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate < 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) ||
	    (pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (pll->vco_current_rate < 1520000000ULL)
			vco_config_1 = 0x08;
		else if (pll->vco_current_rate >= 2990000000ULL)
			vco_config_1 = 0x01;
	}

	if ((pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (pll->vco_current_rate < 1557000000ULL)
			vco_config_1 = 0x08;
		else
			vco_config_1 = 0x01;
	}

	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE_1,
		      analog_controls_five_1);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_VCO_CONFIG_1, vco_config_1);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_FIVE, 0x01);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_TWO, 0x03);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_ANALOG_CONTROLS_THREE, 0x00);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_DSM_DIVIDER, 0x00);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_FEEDBACK_DIVIDER, 0x4e);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_CALIBRATION_SETTINGS, 0x40);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_BAND_SEL_CAL_SETTINGS_THREE, 0xba);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_FREQ_DETECT_SETTINGS_ONE, 0x0c);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_OUTDIV, 0x00);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_CORE_OVERRIDE, 0x00);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_DIGITAL_TIMERS_TWO, 0x08);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_PROP_GAIN_RATE_1, 0x0a);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_BAND_SEL_RATE_1, 0xc0);
	/* NOTE(review): IFILT_BAND_1 and PFILT/IFILT below are intentionally
	 * written twice in sequence — presumably a hw settling requirement;
	 * do not collapse into a single write.
	 */
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1, 0x84);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_INT_GAIN_IFILT_BAND_1, 0x82);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_FL_INT_GAIN_PFILT_BAND_1, 0x4c);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_OVERRIDE, 0x80);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PFILT, 0x29);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PFILT, 0x2f);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_IFILT, 0x2a);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_IFILT,
		      !(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1) ? 0x3f : 0x22);

	if (!(pll->phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1)) {
		dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE, 0x22);
		if (pll->slave)
			dsi_phy_write(pll->slave->phy->pll_base + REG_DSI_7nm_PHY_PLL_PERF_OPTIMIZE, 0x22);
	}
}

/* Write the rate-dependent divider values computed in dsi_pll_calc_dec_frac(). */
static void dsi_pll_commit(struct dsi_pll_7nm *pll, struct dsi_pll_config *config)
{
	void __iomem *base = pll->phy->pll_base;

	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_CORE_INPUT_OVERRIDE, 0x12);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1,
		      config->decimal_div_start);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1,
		      config->frac_div_start & 0xff);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1,
		      (config->frac_div_start & 0xff00) >> 8);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1,
		      (config->frac_div_start & 0x30000) >> 16);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_LOCKDET_RATE_1, 0x40);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_PLL_LOCK_DELAY, 0x06);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_CMODE_1,
		      pll->phy->cphy_mode ? 0x00 : 0x10);
	dsi_phy_write(base + REG_DSI_7nm_PHY_PLL_CLOCK_INVERTERS,
		      config->pll_clock_inverters);
}

/*
 * clk_ops.set_rate: compute and program the full PLL configuration for the
 * requested VCO rate. Always returns 0.
 */
static int dsi_pll_7nm_vco_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	struct dsi_pll_config config;

	DBG("DSI PLL%d rate=%lu, parent's=%lu", pll_7nm->phy->id, rate,
	    parent_rate);

	pll_7nm->vco_current_rate = rate;

	dsi_pll_setup_config(&config);

	dsi_pll_calc_dec_frac(pll_7nm, &config);

	dsi_pll_calc_ssc(pll_7nm, &config);

	dsi_pll_commit(pll_7nm, &config);

	dsi_pll_config_hzindep_reg(pll_7nm);

	dsi_pll_ssc_commit(pll_7nm, &config);

	/* flush, ensure all register writes are done*/
	wmb();

	return 0;
}

/*
 * Poll the PLL lock bit (COMMON_STATUS_ONE bit 0) until set, or time out
 * after 5 ms. Returns 0 on lock, -ETIMEDOUT-style error from iopoll on
 * failure.
 */
static int dsi_pll_7nm_lock_status(struct dsi_pll_7nm *pll)
{
	int rc;
	u32 status = 0;
	u32 const delay_us = 100;
	u32 const timeout_us = 5000;

	rc = readl_poll_timeout_atomic(pll->phy->pll_base +
				       REG_DSI_7nm_PHY_PLL_COMMON_STATUS_ONE,
				       status,
				       ((status & BIT(0)) > 0),
				       delay_us,
				       timeout_us);
	if (rc)
		pr_err("DSI PLL(%d) lock failed, status=0x%08x\n",
		       pll->phy->id, status);

	return rc;
}

/* Power down the PLL bias (CMN_CTRL_0 bit 5) and clear the system muxes. */
static void dsi_pll_disable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = dsi_phy_read(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	dsi_phy_write(pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES, 0);
	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0, data & ~BIT(5));
	ndelay(250);	/* allow bias to settle */
}

/* Power up the PLL bias (CMN_CTRL_0 bit 5) and set the system muxes. */
static void dsi_pll_enable_pll_bias(struct dsi_pll_7nm *pll)
{
	u32 data = dsi_phy_read(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0);

	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_0, data | BIT(5));
	dsi_phy_write(pll->phy->pll_base + REG_DSI_7nm_PHY_PLL_SYSTEM_MUXES, 0xc0);
	ndelay(250);	/* allow bias to settle */
}

/* Gate the PLL output clock (CMN_CLK_CFG1 bit 5). */
static void dsi_pll_disable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 data;

	data = dsi_phy_read(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1, data & ~BIT(5));
}

/* Ungate the PLL output clock (CMN_CLK_CFG1 bits 5|4). */
static void dsi_pll_enable_global_clk(struct dsi_pll_7nm *pll)
{
	u32 data;

	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_CTRL_3, 0x04);

	data = dsi_phy_read(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1,
		      data | BIT(5) | BIT(4));
}

static void dsi_pll_phy_dig_reset(struct dsi_pll_7nm *pll)
{
	/*
	 * Reset the PHY digital domain. This would be needed when
	 * coming out of a CX or analog rail power collapse while
	 * ensuring that the pads maintain LP00 or LP11 state
	 */
	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4, BIT(0));
	wmb(); /* Ensure that the reset is deasserted */
	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE4, 0x0);
	wmb(); /* Ensure that the reset is deasserted */
}

/*
 * clk_ops.prepare: power up the PLL (and its bonded slave, if any), start it,
 * wait for lock, then reset the PHY digital domain and enable the output
 * clocks. Returns 0 on success or the lock-status error.
 */
static int dsi_pll_7nm_vco_prepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	int rc;

	dsi_pll_enable_pll_bias(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_pll_bias(pll_7nm->slave);

	/* Start PLL */
	dsi_phy_write(pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL, 0x01);

	/*
	 * ensure all PLL configurations are written prior to checking
	 * for PLL lock.
	 */
	wmb();

	/* Check for PLL lock */
	rc = dsi_pll_7nm_lock_status(pll_7nm);
	if (rc) {
		pr_err("PLL(%d) lock failed\n", pll_7nm->phy->id);
		goto error;
	}

	pll_7nm->phy->pll_on = true;

	/*
	 * assert power on reset for PHY digital in case the PLL is
	 * enabled after CX of analog domain power collapse. This needs
	 * to be done before enabling the global clk.
	 */
	dsi_pll_phy_dig_reset(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_phy_dig_reset(pll_7nm->slave);

	dsi_pll_enable_global_clk(pll_7nm);
	if (pll_7nm->slave)
		dsi_pll_enable_global_clk(pll_7nm->slave);

error:
	return rc;
}

/* Disable the resync buffer and remove the PLL bias for one PLL instance. */
static void dsi_pll_disable_sub(struct dsi_pll_7nm *pll)
{
	dsi_phy_write(pll->phy->base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL, 0);
	dsi_pll_disable_pll_bias(pll);
}

/* clk_ops.unprepare: gate the outputs, stop the PLL, then power it down. */
static void dsi_pll_7nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	/*
	 * To avoid any stray glitches while abruptly powering down the PLL
	 * make sure to gate the clock using the clock enable bit before
	 * powering down the PLL
	 */
	dsi_pll_disable_global_clk(pll_7nm);
	dsi_phy_write(pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL, 0);
	dsi_pll_disable_sub(pll_7nm);
	if (pll_7nm->slave) {
		dsi_pll_disable_global_clk(pll_7nm->slave);
		dsi_pll_disable_sub(pll_7nm->slave);
	}
	/* flush, ensure all register writes are done */
	wmb();
	pll_7nm->phy->pll_on = false;
}

/*
 * clk_ops.recalc_rate: read the dec/frac divider registers back and
 * reconstruct the VCO rate (inverse of dsi_pll_calc_dec_frac). Also caches
 * the result in vco_current_rate so restore_state can reprogram it.
 */
static unsigned long dsi_pll_7nm_vco_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);
	void __iomem *base = pll_7nm->phy->pll_base;
	u64 ref_clk = VCO_REF_CLK_RATE;
	u64 vco_rate = 0x0;
	u64 multiplier;
	u32 frac;
	u32 dec;
	u64 pll_freq, tmp64;

	dec = dsi_phy_read(base + REG_DSI_7nm_PHY_PLL_DECIMAL_DIV_START_1);
	dec &= 0xff;

	frac = dsi_phy_read(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_LOW_1);
	frac |= ((dsi_phy_read(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_MID_1) &
		  0xff) << 8);
	frac |= ((dsi_phy_read(base + REG_DSI_7nm_PHY_PLL_FRAC_DIV_START_HIGH_1) &
		  0x3) << 16);

	/*
	 * TODO:
	 * 1. Assumes prescaler is disabled
	 */
	multiplier = 1 << FRAC_BITS;
	pll_freq = dec * (ref_clk * 2);
	tmp64 = (ref_clk * 2 * frac);
	pll_freq += div_u64(tmp64, multiplier);

	vco_rate = pll_freq;
	pll_7nm->vco_current_rate = vco_rate;

	DBG("DSI PLL%d returning vco rate = %lu, dec = %x, frac = %x",
	    pll_7nm->phy->id, (unsigned long)vco_rate, dec, frac);

	return (unsigned long)vco_rate;
}

/* clk_ops.round_rate: clamp the requested rate to the PHY's PLL range. */
static long dsi_pll_7nm_clk_round_rate(struct clk_hw *hw,
				       unsigned long rate, unsigned long *parent_rate)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(hw);

	if (rate < pll_7nm->phy->cfg->min_pll_rate)
		return pll_7nm->phy->cfg->min_pll_rate;
	else if (rate > pll_7nm->phy->cfg->max_pll_rate)
		return pll_7nm->phy->cfg->max_pll_rate;
	else
		return rate;
}

static const struct clk_ops clk_ops_dsi_pll_7nm_vco = {
	.round_rate = dsi_pll_7nm_clk_round_rate,
	.set_rate = dsi_pll_7nm_vco_set_rate,
	.recalc_rate = dsi_pll_7nm_vco_recalc_rate,
	.prepare = dsi_pll_7nm_vco_prepare,
	.unprepare = dsi_pll_7nm_vco_unprepare,
};

/*
 * PLL Callbacks
 */

/* Snapshot the divider/mux register fields before a PHY power collapse. */
static void dsi_7nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 cmn_clk_cfg0, cmn_clk_cfg1;

	cached->pll_out_div = dsi_phy_read(pll_7nm->phy->pll_base +
					   REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	cached->pll_out_div &= 0x3;

	cmn_clk_cfg0 = dsi_phy_read(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0);
	cached->bit_clk_div = cmn_clk_cfg0 & 0xf;
	cached->pix_clk_div = (cmn_clk_cfg0 & 0xf0) >> 4;

	cmn_clk_cfg1 = dsi_phy_read(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	cached->pll_mux = cmn_clk_cfg1 & 0x3;

	DBG("DSI PLL%d outdiv %x bit_clk_div %x pix_clk_div %x pll_mux %x",
	    pll_7nm->phy->id, cached->pll_out_div, cached->bit_clk_div,
	    cached->pix_clk_div, cached->pll_mux);
}

/*
 * Restore the divider/mux fields saved by dsi_7nm_pll_save_state() and
 * reprogram the VCO to the cached rate. Returns 0 on success.
 */
static int dsi_7nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;
	void __iomem *phy_base = pll_7nm->phy->base;
	u32 val;
	int ret;

	val = dsi_phy_read(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE);
	val &= ~0x3;
	val |= cached->pll_out_div;
	dsi_phy_write(pll_7nm->phy->pll_base + REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE, val);

	dsi_phy_write(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
		      cached->bit_clk_div | (cached->pix_clk_div << 4));

	val = dsi_phy_read(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
	val &= ~0x3;
	val |= cached->pll_mux;
	dsi_phy_write(phy_base + REG_DSI_7nm_PHY_CMN_CLK_CFG1, val);

	ret = dsi_pll_7nm_vco_set_rate(phy->vco_hw,
				       pll_7nm->vco_current_rate,
				       VCO_REF_CLK_RATE);
	if (ret) {
		DRM_DEV_ERROR(&pll_7nm->phy->pdev->dev,
			      "restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	DBG("DSI PLL%d", pll_7nm->phy->id);

	return 0;
}

/*
 * Configure the PLL source mux for the PHY's usecase. In MASTER mode, link
 * this PLL to the other PHY's PLL as its slave; in SLAVE mode, select the
 * external PLL as bit-clock source. Returns -EINVAL for unknown usecases.
 */
static int dsi_7nm_set_usecase(struct msm_dsi_phy *phy)
{
	struct dsi_pll_7nm *pll_7nm = to_pll_7nm(phy->vco_hw);
	void __iomem *base = phy->base;
	u32 data = 0x0; /* internal PLL */

	DBG("DSI PLL%d", pll_7nm->phy->id);

	switch (phy->usecase) {
	case MSM_DSI_PHY_STANDALONE:
		break;
	case MSM_DSI_PHY_MASTER:
		pll_7nm->slave = pll_7nm_list[(pll_7nm->phy->id + 1) % DSI_MAX];
		break;
	case MSM_DSI_PHY_SLAVE:
		data = 0x1; /* external PLL */
		break;
	default:
		return -EINVAL;
	}

	/* set PLL src */
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CLK_CFG1, (data << 2));

	return 0;
}

/*
 * The post dividers and mux clocks are created using the standard divider and
 * mux API.
 * Unlike the 14nm PHY, the slave PLL doesn't need its dividers/mux
 * state to follow the master PLL's divider/mux state. Therefore, we don't
 * require special clock ops that also configure the slave PLL registers
 */
/*
 * Register the VCO and its post-divider/mux clock tree with the common clock
 * framework, filling provided_clocks[DSI_BYTE_PLL_CLK] and
 * provided_clocks[DSI_PIXEL_PLL_CLK]. All clocks are devm-managed, so the
 * error path only propagates the error code. Returns 0 on success.
 */
static int pll_7nm_register(struct dsi_pll_7nm *pll_7nm, struct clk_hw **provided_clocks)
{
	char clk_name[32];
	struct clk_init_data vco_init = {
		.parent_data = &(const struct clk_parent_data) {
			.fw_name = "ref",
		},
		.num_parents = 1,
		.name = clk_name,
		.flags = CLK_IGNORE_UNUSED,
		.ops = &clk_ops_dsi_pll_7nm_vco,
	};
	struct device *dev = &pll_7nm->phy->pdev->dev;
	struct clk_hw *hw, *pll_out_div, *pll_bit, *pll_by_2_bit;
	struct clk_hw *pll_post_out_div, *phy_pll_out_dsi_parent;
	int ret;

	DBG("DSI%d", pll_7nm->phy->id);

	snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_7nm->phy->id);
	pll_7nm->clk_hw.init = &vco_init;

	ret = devm_clk_hw_register(dev, &pll_7nm->clk_hw);
	if (ret)
		return ret;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_out_div_clk", pll_7nm->phy->id);

	/* OUT_DIV: power-of-two divider in the PLL's OUTDIV_RATE register */
	pll_out_div = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
							     &pll_7nm->clk_hw, CLK_SET_RATE_PARENT,
							     pll_7nm->phy->pll_base +
							     REG_DSI_7nm_PHY_PLL_PLL_OUTDIV_RATE,
							     0, 2, CLK_DIVIDER_POWER_OF_TWO, NULL);
	if (IS_ERR(pll_out_div)) {
		ret = PTR_ERR(pll_out_div);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_bit_clk", pll_7nm->phy->id);

	/* BIT CLK: DIV_CTRL_3_0 */
	pll_bit = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
							 pll_out_div, CLK_SET_RATE_PARENT,
							 pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
							 0, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(pll_bit)) {
		ret = PTR_ERR(pll_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_byteclk", pll_7nm->phy->id);

	/* DSI Byte clock = VCO_CLK / OUT_DIV / BIT_DIV / 8 */
	/* (CPHY uses a /7 byte clock instead of /8) */
	hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
							 pll_bit, CLK_SET_RATE_PARENT, 1,
							 pll_7nm->phy->cphy_mode ? 7 : 8);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_BYTE_PLL_CLK] = hw;

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_by_2_bit_clk", pll_7nm->phy->id);

	pll_by_2_bit = devm_clk_hw_register_fixed_factor_parent_hw(dev,
								   clk_name, pll_bit, 0, 1, 2);
	if (IS_ERR(pll_by_2_bit)) {
		ret = PTR_ERR(pll_by_2_bit);
		goto fail;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_pll_post_out_div_clk", pll_7nm->phy->id);

	/* post-out-div: *2/7 in CPHY mode, /4 in DPHY mode */
	if (pll_7nm->phy->cphy_mode)
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
			dev, clk_name, pll_out_div, 0, 2, 7);
	else
		pll_post_out_div = devm_clk_hw_register_fixed_factor_parent_hw(
			dev, clk_name, pll_out_div, 0, 1, 4);
	if (IS_ERR(pll_post_out_div)) {
		ret = PTR_ERR(pll_post_out_div);
		goto fail;
	}

	/* in CPHY mode, pclk_mux will always have post_out_div as parent
	 * don't register a pclk_mux clock and just use post_out_div instead
	 */
	if (pll_7nm->phy->cphy_mode) {
		u32 data;

		/* hard-wire the mux select bits in CLK_CFG1 */
		data = dsi_phy_read(pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1);
		dsi_phy_write(pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG1, data | 3);

		phy_pll_out_dsi_parent = pll_post_out_div;
	} else {
		snprintf(clk_name, sizeof(clk_name), "dsi%d_pclk_mux", pll_7nm->phy->id);

		hw = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
							 ((const struct clk_hw *[]){
								 pll_bit,
								 pll_by_2_bit,
							 }), 2, 0, pll_7nm->phy->base +
							 REG_DSI_7nm_PHY_CMN_CLK_CFG1,
							 0, 1, 0, NULL);
		if (IS_ERR(hw)) {
			ret = PTR_ERR(hw);
			goto fail;
		}

		phy_pll_out_dsi_parent = hw;
	}

	snprintf(clk_name, sizeof(clk_name), "dsi%d_phy_pll_out_dsiclk", pll_7nm->phy->id);

	/* PIX CLK DIV : DIV_CTRL_7_4*/
	hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
						    phy_pll_out_dsi_parent, 0,
						    pll_7nm->phy->base + REG_DSI_7nm_PHY_CMN_CLK_CFG0,
						    4, 4, CLK_DIVIDER_ONE_BASED, &pll_7nm->postdiv_lock);
	if (IS_ERR(hw)) {
		ret = PTR_ERR(hw);
		goto fail;
	}

	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

	return 0;

fail:
	/* all registrations are devm-managed; nothing to undo here */
	return ret;
}

/*
 * PHY-init hook: allocate the PLL instance, register its clock tree and
 * publish the vco clk_hw to the core PHY code. Returns 0 on success or a
 * negative errno.
 */
static int dsi_pll_7nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_7nm *pll_7nm;
	int ret;

	pll_7nm = devm_kzalloc(&pdev->dev, sizeof(*pll_7nm), GFP_KERNEL);
	if (!pll_7nm)
		return -ENOMEM;

	DBG("DSI PLL%d", phy->id);

	/* record ourselves so a bonded master PHY can find its slave */
	pll_7nm_list[phy->id] = pll_7nm;

	spin_lock_init(&pll_7nm->postdiv_lock);

	pll_7nm->phy = phy;

	ret = pll_7nm_register(pll_7nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_7nm->clk_hw;

	/* TODO: Remove this when we have proper display handover support */
	msm_dsi_phy_pll_save_state(phy);

	return 0;
}

/* Return non-zero if the PLL start bit (CMN_PLL_CNTRL bit 0) is set. */
static int dsi_phy_hw_v4_0_is_pll_on(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->base;
	u32 data = 0;

	data = dsi_phy_read(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL);
	mb(); /* make sure read happened */

	return (data & BIT(0));
}

/* Enable/disable LPRX+CDRX on the physical lane mapped to logical lane 0. */
static void dsi_phy_hw_v4_0_config_lpcdrx(struct msm_dsi_phy *phy, bool enable)
{
	void __iomem *lane_base = phy->lane_base;
	int phy_lane_0 = 0;	/* TODO: Support all lane swap configs */

	/*
	 * LPRX and CDRX need to enabled only for physical data lane
	 * corresponding to the logical data lane 0
	 */
	if (enable)
		dsi_phy_write(lane_base +
			      REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0), 0x3);
	else
		dsi_phy_write(lane_base +
			      REG_DSI_7nm_PHY_LN_LPRX_CTRL(phy_lane_0), 0);
}

/*
 * Program the per-lane registers (4 data lanes + clock lane). The TX_DCTRL
 * table differs between pre-V4.1 and newer hardware.
 */
static void dsi_phy_hw_v4_0_lane_settings(struct msm_dsi_phy *phy)
{
	int i;
	const u8 tx_dctrl_0[] = { 0x00, 0x00, 0x00, 0x04, 0x01 };
	const u8 tx_dctrl_1[] = { 0x40, 0x40, 0x40, 0x46, 0x41 };
	const u8 *tx_dctrl = tx_dctrl_0;
	void __iomem *lane_base = phy->lane_base;

	if (!(phy->cfg->quirks & DSI_PHY_7NM_QUIRK_PRE_V4_1))
		tx_dctrl = tx_dctrl_1;

	/* Strength ctrl settings */
	for (i = 0; i < 5; i++) {
		/*
		 * Disable LPRX and CDRX for all lanes. And later on, it will
		 * be only enabled for the physical data lane corresponding
		 * to the logical data lane 0
		 */
		dsi_phy_write(lane_base + REG_DSI_7nm_PHY_LN_LPRX_CTRL(i), 0);
		dsi_phy_write(lane_base + REG_DSI_7nm_PHY_LN_PIN_SWAP(i), 0x0);
	}

	dsi_phy_hw_v4_0_config_lpcdrx(phy, true);

	/* other settings */
	for (i = 0; i < 5; i++) {
		dsi_phy_write(lane_base + REG_DSI_7nm_PHY_LN_CFG0(i), 0x0);
		dsi_phy_write(lane_base + REG_DSI_7nm_PHY_LN_CFG1(i), 0x0);
		/* lane 4 is the clock lane and gets a different CFG2 value */
		dsi_phy_write(lane_base + REG_DSI_7nm_PHY_LN_CFG2(i), i == 4 ? 0x8a : 0xa);
		dsi_phy_write(lane_base + REG_DSI_7nm_PHY_LN_TX_DCTRL(i), tx_dctrl[i]);
	}
}

/*
 * Full PHY enable sequence: compute timings, wait for the reference
 * generator, pick per-revision analog settings, then bring the PHY blocks
 * out of power-down and program the timing registers.
 * NOTE(review): this function is truncated at the end of this source chunk.
 */
static int dsi_7nm_phy_enable(struct msm_dsi_phy *phy,
			      struct msm_dsi_phy_clk_request *clk_req)
{
	int ret;
	u32 status;
	u32 const delay_us = 5;
	u32 const timeout_us = 1000;
	struct msm_dsi_dphy_timing *timing = &phy->timing;
	void __iomem *base = phy->base;
	bool less_than_1500_mhz;
	u32 vreg_ctrl_0, vreg_ctrl_1, lane_ctrl0;
	u32 glbl_pemph_ctrl_0;
	u32 glbl_str_swi_cal_sel_ctrl, glbl_hstx_str_ctrl_0;
	u32 glbl_rescode_top_ctrl, glbl_rescode_bot_ctrl;
	u32 data;

	DBG("");

	if (phy->cphy_mode)
		ret = msm_dsi_cphy_timing_calc_v4(timing, clk_req);
	else
		ret = msm_dsi_dphy_timing_calc_v4(timing, clk_req);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev,
			      "%s: PHY timing calculation failed\n", __func__);
		return -EINVAL;
	}

	if (dsi_phy_hw_v4_0_is_pll_on(phy))
		pr_warn("PLL turned on before configuring PHY\n");

	/* Request for REFGEN READY */
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) ||
	    (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		dsi_phy_write(phy->base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10, 0x1);
		udelay(500);
	}

	/* wait for REFGEN READY */
	ret = readl_poll_timeout_atomic(base + REG_DSI_7nm_PHY_CMN_PHY_STATUS,
					status, (status & BIT(0)),
					delay_us, timeout_us);
	if (ret) {
		pr_err("Ref gen not ready. Aborting\n");
		return -EINVAL;
	}

	/* TODO: CPHY enable path (this is for DPHY only) */

	/* Alter PHY configurations if data rate less than 1.5GHZ*/
	less_than_1500_mhz = (clk_req->bitclk_rate <= 1500000000);

	/* baseline analog settings; revision-specific overrides follow */
	glbl_str_swi_cal_sel_ctrl = 0x00;
	if (phy->cphy_mode) {
		vreg_ctrl_0 = 0x51;
		vreg_ctrl_1 = 0x55;
		glbl_hstx_str_ctrl_0 = 0x00;
		glbl_pemph_ctrl_0 = 0x11;
		lane_ctrl0 = 0x17;
	} else {
		vreg_ctrl_0 = less_than_1500_mhz ? 0x53 : 0x52;
		vreg_ctrl_1 = 0x5c;
		glbl_hstx_str_ctrl_0 = 0x88;
		glbl_pemph_ctrl_0 = 0x00;
		lane_ctrl0 = 0x1f;
	}

	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) {
		if (phy->cphy_mode) {
			vreg_ctrl_0 = 0x45;
			vreg_ctrl_1 = 0x41;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x00;
		} else {
			vreg_ctrl_0 = 0x44;
			vreg_ctrl_1 = 0x19;
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x03;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3c;
		}
	} else if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3)) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_2) {
		if (phy->cphy_mode) {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x01;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x3b;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3c : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x38 : 0x39;
		}
	} else if (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_1) {
		if (phy->cphy_mode) {
			glbl_hstx_str_ctrl_0 = 0x88;
			glbl_rescode_top_ctrl = 0x00;
			glbl_rescode_bot_ctrl = 0x3c;
		} else {
			glbl_rescode_top_ctrl = less_than_1500_mhz ? 0x3d : 0x00;
			glbl_rescode_bot_ctrl = less_than_1500_mhz ? 0x39 : 0x3c;
		}
	} else {
		if (phy->cphy_mode) {
			glbl_str_swi_cal_sel_ctrl = 0x03;
			glbl_hstx_str_ctrl_0 = 0x66;
		} else {
			vreg_ctrl_0 = less_than_1500_mhz ? 0x5B : 0x59;
			glbl_str_swi_cal_sel_ctrl = less_than_1500_mhz ? 0x03 : 0x00;
			glbl_hstx_str_ctrl_0 = less_than_1500_mhz ? 0x66 : 0x88;
		}
		glbl_rescode_top_ctrl = 0x03;
		glbl_rescode_bot_ctrl = 0x3c;
	}

	/* de-assert digital and pll power down */
	data = BIT(6) | BIT(5);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CTRL_0, data);

	/* Assert PLL core reset */
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_PLL_CNTRL, 0x00);

	/* turn off resync FIFO */
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_RBUF_CTRL, 0x00);

	/* program CMN_CTRL_4 for minor_ver 2 chipsets*/
	if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2) ||
	    (dsi_phy_read(base + REG_DSI_7nm_PHY_CMN_REVISION_ID0) & (0xf0)) == 0x20)
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CTRL_4, 0x04);

	/* Configure PHY lane swap (TODO: we need to calculate this) */
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_LANE_CFG0, 0x21);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_LANE_CFG1, 0x84);

	if (phy->cphy_mode)
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_CTRL, BIT(6));

	/* Enable LDO */
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_0, vreg_ctrl_0);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_VREG_CTRL_1, vreg_ctrl_1);

	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CTRL_3, 0x00);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_STR_SWI_CAL_SEL_CTRL,
		      glbl_str_swi_cal_sel_ctrl);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_HSTX_STR_CTRL_0,
		      glbl_hstx_str_ctrl_0);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_0,
		      glbl_pemph_ctrl_0);
	if (phy->cphy_mode)
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_PEMPH_CTRL_1, 0x01);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_TOP_CTRL,
		      glbl_rescode_top_ctrl);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_RESCODE_OFFSET_BOT_CTRL,
		      glbl_rescode_bot_ctrl);
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_LPTX_STR_CTRL, 0x55);

	/* Remove power down from all blocks */
	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CTRL_0, 0x7f);

	dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0, lane_ctrl0);

	/* Select full-rate mode */
	if (!phy->cphy_mode)
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CTRL_2, 0x40);

	ret = dsi_7nm_set_usecase(phy);
	if (ret) {
		DRM_DEV_ERROR(&phy->pdev->dev, "%s: set pll usecase failed, %d\n",
			      __func__, ret);
		return ret;
	}

	/* DSI PHY timings */
	if (phy->cphy_mode) {
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0, 0x00);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4, timing->hs_exit);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5,
			      timing->shared_timings.clk_pre);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6, timing->clk_prepare);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7,
			      timing->shared_timings.clk_post);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8, timing->hs_rqst);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9, 0x02);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10, 0x04);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11, 0x00);
	} else {
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_0, 0x00);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_1, timing->clk_zero);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_2, timing->clk_prepare);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_3, timing->clk_trail);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_4, timing->hs_exit);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_5, timing->hs_zero);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_6, timing->hs_prepare);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_7, timing->hs_trail);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_8, timing->hs_rqst);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_9, 0x02);
		dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_10,
0x04); 1060 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_11, 0x00); 1061 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_12, 1062 timing->shared_timings.clk_pre); 1063 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_TIMING_CTRL_13, 1064 timing->shared_timings.clk_post); 1065 } 1066 1067 /* DSI lane settings */ 1068 dsi_phy_hw_v4_0_lane_settings(phy); 1069 1070 DBG("DSI%d PHY enabled", phy->id); 1071 1072 return 0; 1073 } 1074 1075 static bool dsi_7nm_set_continuous_clock(struct msm_dsi_phy *phy, bool enable) 1076 { 1077 void __iomem *base = phy->base; 1078 u32 data; 1079 1080 data = dsi_phy_read(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1); 1081 if (enable) 1082 data |= BIT(5) | BIT(6); 1083 else 1084 data &= ~(BIT(5) | BIT(6)); 1085 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL1, data); 1086 1087 return enable; 1088 } 1089 1090 static void dsi_7nm_phy_disable(struct msm_dsi_phy *phy) 1091 { 1092 void __iomem *base = phy->base; 1093 u32 data; 1094 1095 DBG(""); 1096 1097 if (dsi_phy_hw_v4_0_is_pll_on(phy)) 1098 pr_warn("Turning OFF PHY while PLL is on\n"); 1099 1100 dsi_phy_hw_v4_0_config_lpcdrx(phy, false); 1101 1102 /* Turn off REFGEN Vote */ 1103 if ((phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V4_3) || 1104 (phy->cfg->quirks & DSI_PHY_7NM_QUIRK_V5_2)) { 1105 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_GLBL_DIGTOP_SPARE10, 0x0); 1106 wmb(); 1107 /* Delay to ensure HW removes vote before PHY shut down */ 1108 udelay(2); 1109 } 1110 1111 data = dsi_phy_read(base + REG_DSI_7nm_PHY_CMN_CTRL_0); 1112 1113 /* disable all lanes */ 1114 data &= ~0x1F; 1115 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CTRL_0, data); 1116 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_LANE_CTRL0, 0); 1117 1118 /* Turn off all PHY blocks */ 1119 dsi_phy_write(base + REG_DSI_7nm_PHY_CMN_CTRL_0, 0x00); 1120 /* make sure phy is turned off */ 1121 wmb(); 1122 1123 DBG("DSI%d PHY disabled", phy->id); 1124 } 1125 1126 static const struct regulator_bulk_data dsi_phy_7nm_36mA_regulators[] = { 1127 { .supply = 
"vdds", .init_load_uA = 36000 }, 1128 }; 1129 1130 static const struct regulator_bulk_data dsi_phy_7nm_37750uA_regulators[] = { 1131 { .supply = "vdds", .init_load_uA = 37550 }, 1132 }; 1133 1134 static const struct regulator_bulk_data dsi_phy_7nm_97800uA_regulators[] = { 1135 { .supply = "vdds", .init_load_uA = 97800 }, 1136 }; 1137 1138 static const struct regulator_bulk_data dsi_phy_7nm_98400uA_regulators[] = { 1139 { .supply = "vdds", .init_load_uA = 98400 }, 1140 }; 1141 1142 const struct msm_dsi_phy_cfg dsi_phy_7nm_cfgs = { 1143 .has_phy_lane = true, 1144 .regulator_data = dsi_phy_7nm_36mA_regulators, 1145 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_36mA_regulators), 1146 .ops = { 1147 .enable = dsi_7nm_phy_enable, 1148 .disable = dsi_7nm_phy_disable, 1149 .pll_init = dsi_pll_7nm_init, 1150 .save_pll_state = dsi_7nm_pll_save_state, 1151 .restore_pll_state = dsi_7nm_pll_restore_state, 1152 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1153 }, 1154 .min_pll_rate = 600000000UL, 1155 #ifdef CONFIG_64BIT 1156 .max_pll_rate = 5000000000UL, 1157 #else 1158 .max_pll_rate = ULONG_MAX, 1159 #endif 1160 .io_start = { 0xae94400, 0xae96400 }, 1161 .num_dsi_phy = 2, 1162 .quirks = DSI_PHY_7NM_QUIRK_V4_1, 1163 }; 1164 1165 const struct msm_dsi_phy_cfg dsi_phy_7nm_6375_cfgs = { 1166 .has_phy_lane = true, 1167 .ops = { 1168 .enable = dsi_7nm_phy_enable, 1169 .disable = dsi_7nm_phy_disable, 1170 .pll_init = dsi_pll_7nm_init, 1171 .save_pll_state = dsi_7nm_pll_save_state, 1172 .restore_pll_state = dsi_7nm_pll_restore_state, 1173 }, 1174 .min_pll_rate = 600000000UL, 1175 #ifdef CONFIG_64BIT 1176 .max_pll_rate = 5000000000ULL, 1177 #else 1178 .max_pll_rate = ULONG_MAX, 1179 #endif 1180 .io_start = { 0x5e94400 }, 1181 .num_dsi_phy = 1, 1182 .quirks = DSI_PHY_7NM_QUIRK_V4_1, 1183 }; 1184 1185 const struct msm_dsi_phy_cfg dsi_phy_7nm_8150_cfgs = { 1186 .has_phy_lane = true, 1187 .regulator_data = dsi_phy_7nm_36mA_regulators, 1188 .num_regulators = 
ARRAY_SIZE(dsi_phy_7nm_36mA_regulators), 1189 .ops = { 1190 .enable = dsi_7nm_phy_enable, 1191 .disable = dsi_7nm_phy_disable, 1192 .pll_init = dsi_pll_7nm_init, 1193 .save_pll_state = dsi_7nm_pll_save_state, 1194 .restore_pll_state = dsi_7nm_pll_restore_state, 1195 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1196 }, 1197 .min_pll_rate = 1000000000UL, 1198 .max_pll_rate = 3500000000UL, 1199 .io_start = { 0xae94400, 0xae96400 }, 1200 .num_dsi_phy = 2, 1201 .quirks = DSI_PHY_7NM_QUIRK_PRE_V4_1, 1202 }; 1203 1204 const struct msm_dsi_phy_cfg dsi_phy_7nm_7280_cfgs = { 1205 .has_phy_lane = true, 1206 .regulator_data = dsi_phy_7nm_37750uA_regulators, 1207 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators), 1208 .ops = { 1209 .enable = dsi_7nm_phy_enable, 1210 .disable = dsi_7nm_phy_disable, 1211 .pll_init = dsi_pll_7nm_init, 1212 .save_pll_state = dsi_7nm_pll_save_state, 1213 .restore_pll_state = dsi_7nm_pll_restore_state, 1214 }, 1215 .min_pll_rate = 600000000UL, 1216 #ifdef CONFIG_64BIT 1217 .max_pll_rate = 5000000000ULL, 1218 #else 1219 .max_pll_rate = ULONG_MAX, 1220 #endif 1221 .io_start = { 0xae94400 }, 1222 .num_dsi_phy = 1, 1223 .quirks = DSI_PHY_7NM_QUIRK_V4_1, 1224 }; 1225 1226 const struct msm_dsi_phy_cfg dsi_phy_5nm_8350_cfgs = { 1227 .has_phy_lane = true, 1228 .regulator_data = dsi_phy_7nm_37750uA_regulators, 1229 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_37750uA_regulators), 1230 .ops = { 1231 .enable = dsi_7nm_phy_enable, 1232 .disable = dsi_7nm_phy_disable, 1233 .pll_init = dsi_pll_7nm_init, 1234 .save_pll_state = dsi_7nm_pll_save_state, 1235 .restore_pll_state = dsi_7nm_pll_restore_state, 1236 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1237 }, 1238 .min_pll_rate = 600000000UL, 1239 #ifdef CONFIG_64BIT 1240 .max_pll_rate = 5000000000UL, 1241 #else 1242 .max_pll_rate = ULONG_MAX, 1243 #endif 1244 .io_start = { 0xae94400, 0xae96400 }, 1245 .num_dsi_phy = 2, 1246 .quirks = DSI_PHY_7NM_QUIRK_V4_2, 1247 }; 1248 1249 const struct 
msm_dsi_phy_cfg dsi_phy_5nm_8450_cfgs = { 1250 .has_phy_lane = true, 1251 .regulator_data = dsi_phy_7nm_97800uA_regulators, 1252 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_97800uA_regulators), 1253 .ops = { 1254 .enable = dsi_7nm_phy_enable, 1255 .disable = dsi_7nm_phy_disable, 1256 .pll_init = dsi_pll_7nm_init, 1257 .save_pll_state = dsi_7nm_pll_save_state, 1258 .restore_pll_state = dsi_7nm_pll_restore_state, 1259 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1260 }, 1261 .min_pll_rate = 600000000UL, 1262 #ifdef CONFIG_64BIT 1263 .max_pll_rate = 5000000000UL, 1264 #else 1265 .max_pll_rate = ULONG_MAX, 1266 #endif 1267 .io_start = { 0xae94400, 0xae96400 }, 1268 .num_dsi_phy = 2, 1269 .quirks = DSI_PHY_7NM_QUIRK_V4_3, 1270 }; 1271 1272 const struct msm_dsi_phy_cfg dsi_phy_4nm_8550_cfgs = { 1273 .has_phy_lane = true, 1274 .regulator_data = dsi_phy_7nm_98400uA_regulators, 1275 .num_regulators = ARRAY_SIZE(dsi_phy_7nm_98400uA_regulators), 1276 .ops = { 1277 .enable = dsi_7nm_phy_enable, 1278 .disable = dsi_7nm_phy_disable, 1279 .pll_init = dsi_pll_7nm_init, 1280 .save_pll_state = dsi_7nm_pll_save_state, 1281 .restore_pll_state = dsi_7nm_pll_restore_state, 1282 .set_continuous_clock = dsi_7nm_set_continuous_clock, 1283 }, 1284 .min_pll_rate = 600000000UL, 1285 #ifdef CONFIG_64BIT 1286 .max_pll_rate = 5000000000UL, 1287 #else 1288 .max_pll_rate = ULONG_MAX, 1289 #endif 1290 .io_start = { 0xae95000, 0xae97000 }, 1291 .num_dsi_phy = 2, 1292 .quirks = DSI_PHY_7NM_QUIRK_V5_2, 1293 }; 1294