/*
 * Copyright 2011 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "rv770d.h"
#include "r600_dpm.h"
#include "rv770_dpm.h"
#include "cypress_dpm.h"
#include "atom.h"
#include <linux/seq_file.h>

#define MC_CG_ARB_FREQ_F0	0x0a
#define MC_CG_ARB_FREQ_F1	0x0b
#define MC_CG_ARB_FREQ_F2	0x0c
#define MC_CG_ARB_FREQ_F3	0x0d

#define MC_CG_SEQ_DRAMCONF_S0	0x05
#define MC_CG_SEQ_DRAMCONF_S1	0x06

#define PCIE_BUS_CLK		10000
#define TCLK			(PCIE_BUS_CLK / 10)

#define SMC_RAM_END		0xC000

struct rv7xx_ps *rv770_get_ps(struct radeon_ps *rps)
{
	struct rv7xx_ps *ps = rps->ps_priv;

	return ps;
}

struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rdev->pm.dpm.priv;

	return pi;
}

struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev)
{
	struct evergreen_power_info *pi = rdev->pm.dpm.priv;

	return pi;
}

static void rv770_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
					       bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (enable) {
		tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
		tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
		tmp |= LC_GEN2_EN_STRAP;
	} else {
		if (!pi->boot_in_gen2) {
			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
			tmp &= ~LC_GEN2_EN_STRAP;
		}
	}
	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
}

static void rv770_enable_l0s(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L0S_INACTIVITY_MASK;
	tmp |= LC_L0S_INACTIVITY(3);
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
}

static void rv770_enable_l1(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	tmp &= ~LC_L1_INACTIVITY_MASK;
	tmp |= LC_L1_INACTIVITY(4);
	tmp &= ~LC_PMI_TO_L1_DIS;
	tmp &= ~LC_ASPM_TO_L1_DIS;
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);
}

static void rv770_enable_pll_sleep_in_l1(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL) & ~LC_L1_INACTIVITY_MASK;
	tmp |= LC_L1_INACTIVITY(8);
	WREG32_PCIE_PORT(PCIE_LC_CNTL, tmp);

	/* NOTE, this is a PCIE indirect reg, not PCIE PORT */
	tmp = RREG32_PCIE(PCIE_P_CNTL);
	tmp |= P_PLL_PWRDN_IN_L1L23;
	tmp &= ~P_PLL_BUF_PDNB;
	tmp &= ~P_PLL_PDNB;
	tmp |= P_ALLOW_PRX_FRONTEND_SHUTOFF;
	WREG32_PCIE(PCIE_P_CNTL, tmp);
}

static void rv770_gfx_clock_gating_enable(struct radeon_device *rdev,
					  bool enable)
{
	if (enable)
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
	else {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_TILING_CONFIG);
	}
}

static void rv770_mg_clock_gating_enable(struct radeon_device *rdev,
					 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		u32 mgcg_cgtt_local0;

		if (rdev->family == CHIP_RV770)
			mgcg_cgtt_local0 = RV770_MGCGTTLOCAL0_DFLT;
		else
			mgcg_cgtt_local0 = RV7XX_MGCGTTLOCAL0_DFLT;

		WREG32(CG_CGTT_LOCAL_0, mgcg_cgtt_local0);
		WREG32(CG_CGTT_LOCAL_1, (RV770_MGCGTTLOCAL1_DFLT & 0xFFFFCFFF));

		if (pi->mgcgtssm)
			WREG32(CGTS_SM_CTRL_REG, RV770_MGCGCGTSSMCTRL_DFLT);
	} else {
		WREG32(CG_CGTT_LOCAL_0, 0xFFFFFFFF);
		WREG32(CG_CGTT_LOCAL_1, 0xFFFFCFFF);
	}
}

void rv770_restore_cgcg(struct radeon_device *rdev)
{
	bool dpm_en = false, cg_en = false;

	if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
		dpm_en = true;
	if (RREG32(SCLK_PWRMGT_CNTL) & DYN_GFX_CLK_OFF_EN)
		cg_en = true;

	if (dpm_en && !cg_en)
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);
}

static void rv770_start_dpm(struct radeon_device *rdev)
{
	WREG32_P(SCLK_PWRMGT_CNTL, 0, ~SCLK_PWRMGT_OFF);

	WREG32_P(MCLK_PWRMGT_CNTL, 0, ~MPLL_PWRMGT_OFF);

	WREG32_P(GENERAL_PWRMGT, GLOBAL_PWRMGT_EN, ~GLOBAL_PWRMGT_EN);
}

void rv770_stop_dpm(struct radeon_device *rdev)
{
	PPSMC_Result result;

	result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_TwoLevelsDisabled);

	if (result != PPSMC_Result_OK)
		DRM_ERROR("Could not force DPM to low.\n");

	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);

	WREG32_P(SCLK_PWRMGT_CNTL, SCLK_PWRMGT_OFF, ~SCLK_PWRMGT_OFF);

	WREG32_P(MCLK_PWRMGT_CNTL, MPLL_PWRMGT_OFF, ~MPLL_PWRMGT_OFF);
}

bool rv770_dpm_enabled(struct radeon_device *rdev)
{
	if (RREG32(GENERAL_PWRMGT) & GLOBAL_PWRMGT_EN)
		return true;
	else
		return false;
}

void rv770_enable_thermal_protection(struct radeon_device *rdev,
				     bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
	else
		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
}

void rv770_enable_acpi_pm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, STATIC_PM_EN, ~STATIC_PM_EN);
}

u8 rv770_get_seq_value(struct radeon_device *rdev,
		       struct rv7xx_pl *pl)
{
	return (pl->flags & ATOM_PPLIB_R600_FLAGS_LOWPOWER) ?
		MC_CG_SEQ_DRAMCONF_S0 : MC_CG_SEQ_DRAMCONF_S1;
}
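
/*
 * The SMC "soft registers" live in SMC SRAM starting at pi->soft_regs_start;
 * the two helpers below read and write a single dword at the given offset
 * within that block.
 */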
int rv770_read_smc_soft_register(struct radeon_device *rdev,
				 u16 reg_offset, u32 *value)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	return rv770_read_smc_sram_dword(rdev,
					 pi->soft_regs_start + reg_offset,
					 value, pi->sram_end);
}

int rv770_write_smc_soft_register(struct radeon_device *rdev,
				  u16 reg_offset, u32 value)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	return rv770_write_smc_sram_dword(rdev,
					  pi->soft_regs_start + reg_offset,
					  value, pi->sram_end);
}

int rv770_populate_smc_t(struct radeon_device *rdev,
			 struct radeon_ps *radeon_state,
			 RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;
	int a_n;
	int a_d;
	u8 l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
	u8 r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE];
	u32 a_t;

	l[0] = 0;
	r[2] = 100;

	a_n = (int)state->medium.sclk * pi->lmp +
		(int)state->low.sclk * (R600_AH_DFLT - pi->rlp);
	a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +
		(int)state->medium.sclk * pi->lmp;

	l[1] = (u8)(pi->lmp - (int)pi->lmp * a_n / a_d);
	r[0] = (u8)(pi->rlp + (100 - (int)pi->rlp) * a_n / a_d);

	a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *
		(R600_AH_DFLT - pi->rmp);
	a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +
		(int)state->high.sclk * pi->lhp;

	l[2] = (u8)(pi->lhp - (int)pi->lhp * a_n / a_d);
	r[1] = (u8)(pi->rmp + (100 - (int)pi->rmp) * a_n / a_d);

	for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++) {
		a_t = CG_R(r[i] * pi->bsp / 200) | CG_L(l[i] * pi->bsp / 200);
		smc_state->levels[i].aT = cpu_to_be32(a_t);
	}

	a_t = CG_R(r[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200) |
		CG_L(l[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1] * pi->pbsp / 200);

	smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].aT =
		cpu_to_be32(a_t);

	return 0;
}

int rv770_populate_smc_sp(struct radeon_device *rdev,
			  struct radeon_ps *radeon_state,
			  RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	for (i = 0; i < (RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1); i++)
		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);

	smc_state->levels[RV770_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1].bSP =
		cpu_to_be32(pi->psp);

	return 0;
}

static void rv770_calculate_fractional_mpll_feedback_divider(u32 memory_clock,
							      u32 reference_clock,
							      bool gddr5,
							      struct atom_clock_dividers *dividers,
							      u32 *clkf,
							      u32 *clkfrac)
{
	u32 post_divider, reference_divider, feedback_divider8;
	u32 fyclk;

	if (gddr5)
		fyclk = (memory_clock * 8) / 2;
	else
		fyclk = (memory_clock * 4) / 2;

	post_divider = dividers->post_div;
	reference_divider = dividers->ref_div;

	feedback_divider8 =
		(8 * fyclk * reference_divider * post_divider) / reference_clock;

	*clkf = feedback_divider8 / 8;
	*clkfrac = feedback_divider8 % 8;
}

static int rv770_encode_yclk_post_div(u32 postdiv, u32 *encoded_postdiv)
{
	int ret = 0;

	switch (postdiv) {
	case 1:
		*encoded_postdiv = 0;
		break;
	case 2:
		*encoded_postdiv = 1;
		break;
	case 4:
		*encoded_postdiv = 2;
		break;
	case 8:
		*encoded_postdiv = 3;
		break;
	case 16:
		*encoded_postdiv = 4;
		break;
	default:
		ret = -EINVAL;
		break;
	}

	return ret;
}

u32 rv770_map_clkf_to_ibias(struct radeon_device *rdev, u32 clkf)
{
	if (clkf <= 0x10)
		return 0x4B;
	if (clkf <= 0x19)
		return 0x5B;
	if (clkf <= 0x21)
		return 0x2B;
	if (clkf <= 0x27)
		return 0x6C;
	if (clkf <= 0x31)
		return 0x9D;
	return 0xC6;
}

static int rv770_populate_mclk_value(struct radeon_device *rdev,
				     u32 engine_clock, u32 memory_clock,
				     RV7XX_SMC_MCLK_VALUE *mclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 encoded_reference_dividers[] = { 0, 16, 17, 20, 21 };
	u32 mpll_ad_func_cntl = pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl = pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl = pi->clk_regs.rv770.mclk_pwrmgt_cntl;
	u32 dll_cntl = pi->clk_regs.rv770.dll_cntl;
	struct atom_clock_dividers dividers;
	u32 reference_clock = rdev->clock.mpll.reference_freq;
	u32 clkf, clkfrac;
	u32 postdiv_yclk;
	u32 ibias;
	int ret;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, false, &dividers);
	if (ret)
		return ret;

	if ((dividers.ref_div < 1) || (dividers.ref_div > 5))
		return -EINVAL;

	rv770_calculate_fractional_mpll_feedback_divider(memory_clock, reference_clock,
							 pi->mem_gddr5,
							 &dividers, &clkf, &clkfrac);

	ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
	if (ret)
		return ret;

	ibias = rv770_map_clkf_to_ibias(rdev, clkf);

	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
	mpll_ad_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
	mpll_ad_func_cntl |= CLKF(clkf);
	mpll_ad_func_cntl |= CLKFRAC(clkfrac);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	if (pi->mem_gddr5) {
		rv770_calculate_fractional_mpll_feedback_divider(memory_clock,
								 reference_clock,
								 pi->mem_gddr5,
								 &dividers, &clkf, &clkfrac);

		ibias = rv770_map_clkf_to_ibias(rdev, clkf);

		ret = rv770_encode_yclk_post_div(dividers.post_div, &postdiv_yclk);
		if (ret)
			return ret;

		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(encoded_reference_dividers[dividers.ref_div - 1]);
		mpll_dq_func_cntl |= YCLK_POST_DIV(postdiv_yclk);
		mpll_dq_func_cntl |= CLKF(clkf);
		mpll_dq_func_cntl |= CLKFRAC(clkfrac);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	mclk->mclk770.mclk_value = cpu_to_be32(memory_clock);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);

	return 0;
}
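
/*
 * Engine clock (SPLL) programming.  The feedback divider below is computed
 * with a 16384x scale factor so the fractional part of the divider is
 * preserved (SPLL_DITHEN is set along with it); spread spectrum is only
 * programmed when the ASIC SS table has an entry for the resulting VCO
 * frequency.
 */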
static int rv770_populate_sclk_value(struct radeon_device *rdev,
				     u32 engine_clock,
				     RV770_SMC_SCLK_VALUE *sclk)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct atom_clock_dividers dividers;
	u32 spll_func_cntl = pi->clk_regs.rv770.cg_spll_func_cntl;
	u32 spll_func_cntl_2 = pi->clk_regs.rv770.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 = pi->clk_regs.rv770.cg_spll_func_cntl_3;
	u32 cg_spll_spread_spectrum = pi->clk_regs.rv770.cg_spll_spread_spectrum;
	u32 cg_spll_spread_spectrum_2 = pi->clk_regs.rv770.cg_spll_spread_spectrum_2;
	u64 tmp;
	u32 reference_clock = rdev->clock.spll.reference_freq;
	u32 reference_divider, post_divider;
	u32 fbdiv;
	int ret;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     engine_clock, false, &dividers);
	if (ret)
		return ret;

	reference_divider = 1 + dividers.ref_div;

	if (dividers.enable_post_div)
		post_divider = (0x0f & (dividers.post_div >> 4)) +
			(0x0f & dividers.post_div) + 2;
	else
		post_divider = 1;

	tmp = (u64) engine_clock * reference_divider * post_divider * 16384;
	do_div(tmp, reference_clock);
	fbdiv = (u32) tmp;

	if (dividers.enable_post_div)
		spll_func_cntl |= SPLL_DIVEN;
	else
		spll_func_cntl &= ~SPLL_DIVEN;
	spll_func_cntl &= ~(SPLL_HILEN_MASK | SPLL_LOLEN_MASK | SPLL_REF_DIV_MASK);
	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
	spll_func_cntl |= SPLL_HILEN((dividers.post_div >> 4) & 0xf);
	spll_func_cntl |= SPLL_LOLEN(dividers.post_div & 0xf);

	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(2);

	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
	spll_func_cntl_3 |= SPLL_DITHEN;

	if (pi->sclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = engine_clock * post_divider;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
			u32 clk_v = ss.percentage * fbdiv / (clk_s * 10000);

			cg_spll_spread_spectrum &= ~CLKS_MASK;
			cg_spll_spread_spectrum |= CLKS(clk_s);
			cg_spll_spread_spectrum |= SSEN;

			cg_spll_spread_spectrum_2 &= ~CLKV_MASK;
			cg_spll_spread_spectrum_2 |= CLKV(clk_v);
		}
	}

	sclk->sclk_value = cpu_to_be32(engine_clock);
	sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
	sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);
	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);

	return 0;
}

int rv770_populate_vddc_value(struct radeon_device *rdev, u16 vddc,
			      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	if (!pi->voltage_control) {
		voltage->index = 0;
		voltage->value = 0;
		return 0;
	}

	for (i = 0; i < pi->valid_vddc_entries; i++) {
		if (vddc <= pi->vddc_table[i].vddc) {
			voltage->index = pi->vddc_table[i].vddc_index;
			voltage->value = cpu_to_be16(vddc);
			break;
		}
	}

	if (i == pi->valid_vddc_entries)
		return -EINVAL;

	return 0;
}
int rv770_populate_mvdd_value(struct radeon_device *rdev, u32 mclk,
			      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (!pi->mvdd_control) {
		voltage->index = MVDD_HIGH_INDEX;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
		return 0;
	}

	if (mclk <= pi->mvdd_split_frequency) {
		voltage->index = MVDD_LOW_INDEX;
		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
	} else {
		voltage->index = MVDD_HIGH_INDEX;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
	}

	return 0;
}

static int rv770_convert_power_level_to_smc(struct radeon_device *rdev,
					    struct rv7xx_pl *pl,
					    RV770_SMC_HW_PERFORMANCE_LEVEL *level,
					    u8 watermark_level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int ret;

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
	level->gen2XSP = (pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0;
	level->backbias = (pl->flags & ATOM_PPLIB_R600_FLAGS_BACKBIASENABLE) ? 1 : 0;
	level->displayWatermark = watermark_level;

	if (rdev->family == CHIP_RV740)
		ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	else
		ret = rv770_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	if (rdev->family == CHIP_RV740) {
		if (pi->mem_gddr5) {
			if (pl->mclk <= pi->mclk_strobe_mode_threshold)
				level->strobeMode =
					rv740_get_mclk_frequency_ratio(pl->mclk) | 0x10;
			else
				level->strobeMode = 0;

			if (pl->mclk > pi->mclk_edc_enable_threshold)
				level->mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
			else
				level->mcFlags = 0;
		}
		ret = rv740_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	} else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	else
		ret = rv770_populate_mclk_value(rdev, pl->sclk,
						pl->mclk, &level->mclk);
	if (ret)
		return ret;

	ret = rv770_populate_vddc_value(rdev, pl->vddc, &level->vddc);
	if (ret)
		return ret;

	ret = rv770_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}

static int rv770_convert_power_state_to_smc(struct radeon_device *rdev,
					    struct radeon_ps *radeon_state,
					    RV770_SMC_SWSTATE *smc_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	int ret;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->low,
					       &smc_state->levels[0],
					       PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->medium,
					       &smc_state->levels[1],
					       PPSMC_DISPLAY_WATERMARK_LOW);
	if (ret)
		return ret;

	ret = rv770_convert_power_level_to_smc(rdev,
					       &state->high,
					       &smc_state->levels[2],
					       PPSMC_DISPLAY_WATERMARK_HIGH);
	if (ret)
		return ret;

	smc_state->levels[0].arbValue = MC_CG_ARB_FREQ_F1;
	smc_state->levels[1].arbValue = MC_CG_ARB_FREQ_F2;
	smc_state->levels[2].arbValue = MC_CG_ARB_FREQ_F3;

	smc_state->levels[0].seqValue = rv770_get_seq_value(rdev, &state->low);
	smc_state->levels[1].seqValue = rv770_get_seq_value(rdev, &state->medium);
	smc_state->levels[2].seqValue = rv770_get_seq_value(rdev, &state->high);

	rv770_populate_smc_sp(rdev, radeon_state, smc_state);

	return rv770_populate_smc_t(rdev, radeon_state, smc_state);
}
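
/*
 * The MC arbiter refresh rate is derived from the engine clock together
 * with the DRAM row count (from MC_ARB_RAMCFG) and the DRAM refresh rate
 * field in MC_SEQ_MISC0:  ((sclk * 10) * refresh_rate / rows - 32) / 64.
 */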
u32 rv770_calculate_memory_refresh_rate(struct radeon_device *rdev,
					u32 engine_clock)
{
	u32 dram_rows;
	u32 dram_refresh_rate;
	u32 mc_arb_rfsh_rate;
	u32 tmp;

	tmp = (RREG32(MC_ARB_RAMCFG) & NOOFROWS_MASK) >> NOOFROWS_SHIFT;
	dram_rows = 1 << (tmp + 10);
	tmp = RREG32(MC_SEQ_MISC0) & 3;
	dram_refresh_rate = 1 << (tmp + 3);
	mc_arb_rfsh_rate = ((engine_clock * 10) * dram_refresh_rate / dram_rows - 32) / 64;

	return mc_arb_rfsh_rate;
}

static void rv770_program_memory_timing_parameters(struct radeon_device *rdev,
						    struct radeon_ps *radeon_state)
{
	struct rv7xx_ps *state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 sqm_ratio;
	u32 arb_refresh_rate;
	u32 high_clock;

	if (state->high.sclk < (state->low.sclk * 0xFF / 0x40))
		high_clock = state->high.sclk;
	else
		high_clock = (state->low.sclk * 0xFF / 0x40);

	radeon_atom_set_engine_dram_timings(rdev, high_clock,
					    state->high.mclk);

	sqm_ratio =
		STATE0(64 * high_clock / pi->boot_sclk) |
		STATE1(64 * high_clock / state->low.sclk) |
		STATE2(64 * high_clock / state->medium.sclk) |
		STATE3(64 * high_clock / state->high.sclk);
	WREG32(MC_ARB_SQM_RATIO, sqm_ratio);

	arb_refresh_rate =
		POWERMODE0(rv770_calculate_memory_refresh_rate(rdev, pi->boot_sclk)) |
		POWERMODE1(rv770_calculate_memory_refresh_rate(rdev, state->low.sclk)) |
		POWERMODE2(rv770_calculate_memory_refresh_rate(rdev, state->medium.sclk)) |
		POWERMODE3(rv770_calculate_memory_refresh_rate(rdev, state->high.sclk));
	WREG32(MC_ARB_RFSH_RATE, arb_refresh_rate);
}

void rv770_enable_backbias(struct radeon_device *rdev,
			   bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, BACKBIAS_PAD_EN, ~BACKBIAS_PAD_EN);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~(BACKBIAS_VALUE | BACKBIAS_PAD_EN));
}

static void rv770_enable_spread_spectrum(struct radeon_device *rdev,
					 bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		if (pi->sclk_ss)
			WREG32_P(GENERAL_PWRMGT, DYN_SPREAD_SPECTRUM_EN, ~DYN_SPREAD_SPECTRUM_EN);

		if (pi->mclk_ss) {
			if (rdev->family == CHIP_RV740)
				rv740_enable_mclk_spread_spectrum(rdev, true);
		}
	} else {
		WREG32_P(CG_SPLL_SPREAD_SPECTRUM, 0, ~SSEN);

		WREG32_P(GENERAL_PWRMGT, 0, ~DYN_SPREAD_SPECTRUM_EN);

		WREG32_P(CG_MPLL_SPREAD_SPECTRUM, 0, ~SSEN);

		if (rdev->family == CHIP_RV740)
			rv740_enable_mclk_spread_spectrum(rdev, false);
	}
}

static void rv770_program_mpll_timing_parameters(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if ((rdev->family == CHIP_RV770) && !pi->mem_gddr5) {
		WREG32(MPLL_TIME,
		       (MPLL_LOCK_TIME(R600_MPLLLOCKTIME_DFLT * pi->ref_div) |
			MPLL_RESET_TIME(R600_MPLLRESETTIME_DFLT)));
	}
}

void rv770_setup_bsp(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 xclk = radeon_get_xclk(rdev);

	r600_calculate_u_and_p(pi->asi,
			       xclk,
			       16,
			       &pi->bsp,
			       &pi->bsu);

	r600_calculate_u_and_p(pi->pasi,
			       xclk,
			       16,
			       &pi->pbsp,
			       &pi->pbsu);

	pi->dsp = BSP(pi->bsp) | BSU(pi->bsu);
	pi->psp = BSP(pi->pbsp) | BSU(pi->pbsu);

	WREG32(CG_BSP, pi->dsp);
}

void rv770_program_git(struct radeon_device *rdev)
{
	WREG32_P(CG_GIT, CG_GICST(R600_GICST_DFLT), ~CG_GICST_MASK);
}

void rv770_program_tp(struct radeon_device *rdev)
{
	int i;
	enum r600_td td = R600_TD_DFLT;

	for (i = 0; i < R600_PM_NUMBER_OF_TC; i++)
		WREG32(CG_FFCT_0 + (i * 4), (UTC_0(r600_utc[i]) | DTC_0(r600_dtc[i])));

	if (td == R600_TD_AUTO)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_FORCE_TREND_SEL);
	else
		WREG32_P(SCLK_PWRMGT_CNTL, FIR_FORCE_TREND_SEL, ~FIR_FORCE_TREND_SEL);
	if (td == R600_TD_UP)
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~FIR_TREND_MODE);
	if (td == R600_TD_DOWN)
		WREG32_P(SCLK_PWRMGT_CNTL, FIR_TREND_MODE, ~FIR_TREND_MODE);
}

void rv770_program_tpp(struct radeon_device *rdev)
{
	WREG32(CG_TPC, R600_TPC_DFLT);
}

void rv770_program_sstp(struct radeon_device *rdev)
{
	WREG32(CG_SSP, (SSTU(R600_SSTU_DFLT) | SST(R600_SST_DFLT)));
}

void rv770_program_engine_speed_parameters(struct radeon_device *rdev)
{
	WREG32_P(SPLL_CNTL_MODE, SPLL_DIV_SYNC, ~SPLL_DIV_SYNC);
}

static void rv770_enable_display_gap(struct radeon_device *rdev)
{
	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);

	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
	tmp |= (DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE) |
		DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE));
	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
}

void rv770_program_vc(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	WREG32(CG_FTV, pi->vrc);
}

void rv770_clear_vc(struct radeon_device *rdev)
{
	WREG32(CG_FTV, 0);
}

int rv770_upload_firmware(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int ret;

	rv770_reset_smc(rdev);
	rv770_stop_smc_clock(rdev);

	ret = rv770_load_smc_ucode(rdev, pi->sram_end);
	if (ret)
		return ret;

	return 0;
}

static int rv770_populate_smc_acpi_state(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	u32 mpll_ad_func_cntl = pi->clk_regs.rv770.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = pi->clk_regs.rv770.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl = pi->clk_regs.rv770.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = pi->clk_regs.rv770.mpll_dq_func_cntl_2;
	u32 spll_func_cntl = pi->clk_regs.rv770.cg_spll_func_cntl;
	u32 spll_func_cntl_2 = pi->clk_regs.rv770.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3 = pi->clk_regs.rv770.cg_spll_func_cntl_3;
	u32 mclk_pwrmgt_cntl;
	u32 dll_cntl;

	table->ACPIState = table->initialState;

	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	if (pi->acpi_vddc) {
		rv770_populate_vddc_value(rdev, pi->acpi_vddc,
					  &table->ACPIState.levels[0].vddc);
		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else
			table->ACPIState.levels[0].gen2PCIE = 0;
		if (pi->acpi_pcie_gen2)
			table->ACPIState.levels[0].gen2XSP = 1;
		else
			table->ACPIState.levels[0].gen2XSP = 0;
	} else {
		rv770_populate_vddc_value(rdev, pi->min_vddc_in_table,
					  &table->ACPIState.levels[0].vddc);
		table->ACPIState.levels[0].gen2PCIE = 0;
	}
	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

	mclk_pwrmgt_cntl = (MRDCKA0_RESET |
			    MRDCKA1_RESET |
			    MRDCKB0_RESET |
			    MRDCKB1_RESET |
			    MRDCKC0_RESET |
			    MRDCKC1_RESET |
			    MRDCKD0_RESET |
			    MRDCKD1_RESET);

	dll_cntl = 0xff000000;

	spll_func_cntl |= SPLL_RESET | SPLL_SLEEP | SPLL_BYPASS_EN;

	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);

	table->ACPIState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.mclk770.vDLL_CNTL = cpu_to_be32(dll_cntl);

	table->ACPIState.levels[0].mclk.mclk770.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	rv770_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	table->ACPIState.levels[1] = table->ACPIState.levels[0];
	table->ACPIState.levels[2] = table->ACPIState.levels[0];

	return 0;
}

int rv770_populate_initial_mvdd_value(struct radeon_device *rdev,
				      RV770_SMC_VOLTAGE_VALUE *voltage)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if ((pi->s0_vid_lower_smio_cntl & pi->mvdd_mask_low) ==
	    (pi->mvdd_low_smio[MVDD_LOW_INDEX] & pi->mvdd_mask_low)) {
		voltage->index = MVDD_LOW_INDEX;
		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
	} else {
		voltage->index = MVDD_HIGH_INDEX;
		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
	}

	return 0;
}

static int rv770_populate_smc_initial_state(struct radeon_device *rdev,
					    struct radeon_ps *radeon_state,
					    RV770_SMC_STATETABLE *table)
{
	struct rv7xx_ps *initial_state = rv770_get_ps(radeon_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 a_t;

	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.mclk770.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.mclk770.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.mclk770.vDLL_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.dll_cntl);

	table->initialState.levels[0].mclk.mclk770.vMPLL_SS =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss1);
	table->initialState.levels[0].mclk.mclk770.vMPLL_SS2 =
		cpu_to_be32(pi->clk_regs.rv770.mpll_ss2);
	table->initialState.levels[0].mclk.mclk770.mclk_value =
		cpu_to_be32(initial_state->low.mclk);

	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(pi->clk_regs.rv770.cg_spll_spread_spectrum_2);

	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->low.sclk);

	table->initialState.levels[0].arbValue = MC_CG_ARB_FREQ_F0;

	table->initialState.levels[0].seqValue =
		rv770_get_seq_value(rdev, &initial_state->low);

	rv770_populate_vddc_value(rdev,
				  initial_state->low.vddc,
				  &table->initialState.levels[0].vddc);
	rv770_populate_initial_mvdd_value(rdev,
					  &table->initialState.levels[0].mvdd);

	a_t = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(a_t);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;
	if (initial_state->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
		table->initialState.levels[0].gen2XSP = 1;
	else
		table->initialState.levels[0].gen2XSP = 0;

	if (rdev->family == CHIP_RV740) {
		if (pi->mem_gddr5) {
			if (initial_state->low.mclk <= pi->mclk_strobe_mode_threshold)
				table->initialState.levels[0].strobeMode =
					rv740_get_mclk_frequency_ratio(initial_state->low.mclk) | 0x10;
			else
				table->initialState.levels[0].strobeMode = 0;

			if (initial_state->low.mclk >= pi->mclk_edc_enable_threshold)
				table->initialState.levels[0].mcFlags = SMC_MC_EDC_RD_FLAG | SMC_MC_EDC_WR_FLAG;
			else
				table->initialState.levels[0].mcFlags = 0;
		}
	}

	table->initialState.levels[1] = table->initialState.levels[0];
	table->initialState.levels[2] = table->initialState.levels[0];

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	return 0;
}

static int rv770_populate_smc_vddc_table(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	for (i = 0; i < pi->valid_vddc_entries; i++) {
		table->highSMIO[pi->vddc_table[i].vddc_index] =
			pi->vddc_table[i].high_smio;
		table->lowSMIO[pi->vddc_table[i].vddc_index] =
			cpu_to_be32(pi->vddc_table[i].low_smio);
	}

	table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_VDDC] = 0;
	table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_VDDC] =
		cpu_to_be32(pi->vddc_mask_low);

	for (i = 0;
	     ((i < pi->valid_vddc_entries) &&
	      (pi->max_vddc_in_table >
	       pi->vddc_table[i].vddc));
	     i++);

	table->maxVDDCIndexInPPTable =
		pi->vddc_table[i].vddc_index;

	return 0;
}

static int rv770_populate_smc_mvdd_table(struct radeon_device *rdev,
					 RV770_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->mvdd_control) {
		table->lowSMIO[MVDD_HIGH_INDEX] |=
			cpu_to_be32(pi->mvdd_low_smio[MVDD_HIGH_INDEX]);
		table->lowSMIO[MVDD_LOW_INDEX] |=
			cpu_to_be32(pi->mvdd_low_smio[MVDD_LOW_INDEX]);

		table->voltageMaskTable.highMask[RV770_SMC_VOLTAGEMASK_MVDD] = 0;
		table->voltageMaskTable.lowMask[RV770_SMC_VOLTAGEMASK_MVDD] =
			cpu_to_be32(pi->mvdd_mask_low);
	}

	return 0;
}

static int rv770_init_smc_table(struct radeon_device *rdev,
				struct radeon_ps *radeon_boot_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct rv7xx_ps *boot_state = rv770_get_ps(radeon_boot_state);
	RV770_SMC_STATETABLE *table = &pi->smc_statetable;
	int ret;

	memset(table, 0, sizeof(RV770_SMC_STATETABLE));

	pi->boot_sclk = boot_state->low.sclk;

	rv770_populate_smc_vddc_table(rdev, table);
	rv770_populate_smc_mvdd_table(rdev, table);

	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_RV770:
	case THERMAL_TYPE_ADT7473_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	case THERMAL_TYPE_EXTERNAL_GPIO:
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC) {
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_DONT_WAIT_FOR_VBLANK_ON_ALERT)
			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_DONT_WAIT_FOR_VBLANK;

		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_GOTO_BOOT_ON_ALERT)
			table->extraFlags |= PPSMC_EXTRAFLAGS_AC2DC_ACTION_GOTOINITIALSTATE;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_smc_initial_state(rdev, radeon_boot_state, table);
	else
		ret = rv770_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	if (rdev->family == CHIP_RV740)
		ret = rv740_populate_smc_acpi_state(rdev, table);
	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		ret = rv730_populate_smc_acpi_state(rdev, table);
	else
		ret = rv770_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	table->driverState = table->initialState;

	return rv770_copy_bytes_to_smc(rdev,
				       pi->state_table_start,
				       (const u8 *)table,
				       sizeof(RV770_SMC_STATETABLE),
				       pi->sram_end);
}

static int rv770_construct_vddc_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u16 min, max, step;
	u32 steps = 0;
	u8 vddc_index = 0;
	u32 i;

	radeon_atom_get_min_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &min);
	radeon_atom_get_max_voltage(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &max);
	radeon_atom_get_voltage_step(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, &step);

	steps = (max - min) / step + 1;

	if (steps > MAX_NO_VREG_STEPS)
		return -EINVAL;

	for (i = 0; i < steps; i++) {
		u32 gpio_pins, gpio_mask;

		pi->vddc_table[i].vddc = (u16)(min + i * step);
		radeon_atom_get_voltage_gpio_settings(rdev,
						      pi->vddc_table[i].vddc,
						      SET_VOLTAGE_TYPE_ASIC_VDDC,
						      &gpio_pins, &gpio_mask);
		pi->vddc_table[i].low_smio = gpio_pins & gpio_mask;
		pi->vddc_table[i].high_smio = 0;
		pi->vddc_mask_low = gpio_mask;
		if (i > 0) {
			if ((pi->vddc_table[i].low_smio !=
			     pi->vddc_table[i - 1].low_smio) ||
			    (pi->vddc_table[i].high_smio !=
			     pi->vddc_table[i - 1].high_smio))
				vddc_index++;
		}
		pi->vddc_table[i].vddc_index = vddc_index;
	}

	pi->valid_vddc_entries = (u8)steps;

	return 0;
}

static u32 rv770_get_mclk_split_point(struct atom_memory_info *memory_info)
{
	if (memory_info->mem_type == MEM_TYPE_GDDR3)
		return 30000;

	return 0;
}

static int rv770_get_mvdd_pin_configuration(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 gpio_pins, gpio_mask;

	radeon_atom_get_voltage_gpio_settings(rdev,
					      MVDD_HIGH_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
					      &gpio_pins, &gpio_mask);
	pi->mvdd_mask_low = gpio_mask;
	pi->mvdd_low_smio[MVDD_HIGH_INDEX] =
		gpio_pins & gpio_mask;

	radeon_atom_get_voltage_gpio_settings(rdev,
					      MVDD_LOW_VALUE, SET_VOLTAGE_TYPE_ASIC_MVDDC,
					      &gpio_pins, &gpio_mask);
	pi->mvdd_low_smio[MVDD_LOW_INDEX] =
		gpio_pins & gpio_mask;

	return 0;
}

u8 rv770_get_memory_module_index(struct radeon_device *rdev)
{
	return (u8) ((RREG32(BIOS_SCRATCH_4) >> 16) & 0xff);
}

static int rv770_get_mvdd_configuration(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 memory_module_index;
	struct atom_memory_info memory_info;

	memory_module_index = rv770_get_memory_module_index(rdev);

	if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info)) {
		pi->mvdd_control = false;
		return 0;
	}

	pi->mvdd_split_frequency =
		rv770_get_mclk_split_point(&memory_info);

	if (pi->mvdd_split_frequency == 0) {
		pi->mvdd_control = false;
		return 0;
	}

	return rv770_get_mvdd_pin_configuration(rdev);
}

void rv770_enable_voltage_control(struct radeon_device *rdev,
				  bool enable)
{
	if (enable)
		WREG32_P(GENERAL_PWRMGT, VOLT_PWRMGT_EN, ~VOLT_PWRMGT_EN);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~VOLT_PWRMGT_EN);
}

static void rv770_program_display_gap(struct radeon_device *rdev)
{
	u32 tmp = RREG32(CG_DISPLAY_GAP_CNTL);

	tmp &= ~(DISP1_GAP_MCHG_MASK | DISP2_GAP_MCHG_MASK);
	if (rdev->pm.dpm.new_active_crtcs & 1) {
		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
	} else if (rdev->pm.dpm.new_active_crtcs & 2) {
		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_VBLANK);
	} else {
		tmp |= DISP1_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
		tmp |= DISP2_GAP_MCHG(R600_PM_DISPLAY_GAP_IGNORE);
	}
	WREG32(CG_DISPLAY_GAP_CNTL, tmp);
}

static void rv770_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
					   bool enable)
{
	rv770_enable_bif_dynamic_pcie_gen2(rdev, enable);

	if (enable)
		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
	else
		WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
}

static void r7xx_program_memory_timing_parameters(struct radeon_device *rdev,
						  struct radeon_ps *radeon_new_state)
{
	if ((rdev->family == CHIP_RV730) ||
	    (rdev->family == CHIP_RV710) ||
	    (rdev->family == CHIP_RV740))
		rv730_program_memory_timing_parameters(rdev, radeon_new_state);
	else
		rv770_program_memory_timing_parameters(rdev, radeon_new_state);
}

static int rv770_upload_sw_state(struct radeon_device *rdev,
				 struct radeon_ps *radeon_new_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u16 address = pi->state_table_start +
		offsetof(RV770_SMC_STATETABLE, driverState);
	RV770_SMC_SWSTATE state = { 0 };
	int ret;

	ret = rv770_convert_power_state_to_smc(rdev, radeon_new_state, &state);
	if (ret)
		return ret;

	return rv770_copy_bytes_to_smc(rdev, address, (const u8 *)&state,
				       sizeof(RV770_SMC_SWSTATE),
				       pi->sram_end);
}

int rv770_halt_smc(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Halt) != PPSMC_Result_OK)
		return -EINVAL;

	if (rv770_wait_for_smc_inactive(rdev) != PPSMC_Result_OK)
		return -EINVAL;

	return 0;
}

int rv770_resume_smc(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_Resume) != PPSMC_Result_OK)
		return -EINVAL;
	return 0;
}

int rv770_set_sw_state(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToSwState) != PPSMC_Result_OK)
		return -EINVAL;
	return 0;
}

int rv770_set_boot_state(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_SwitchToInitialState) != PPSMC_Result_OK)
		return -EINVAL;
	return 0;
}

void rv770_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
					      struct radeon_ps *new_ps,
					      struct radeon_ps *old_ps)
{
	struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
	struct rv7xx_ps *current_state = rv770_get_ps(old_ps);

	if ((new_ps->vclk == old_ps->vclk) &&
	    (new_ps->dclk == old_ps->dclk))
		return;

	if (new_state->high.sclk >= current_state->high.sclk)
		return;

	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
}

void rv770_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
					     struct radeon_ps *new_ps,
					     struct radeon_ps *old_ps)
{
	struct rv7xx_ps *new_state = rv770_get_ps(new_ps);
	struct rv7xx_ps *current_state = rv770_get_ps(old_ps);

	if ((new_ps->vclk == old_ps->vclk) &&
	    (new_ps->dclk == old_ps->dclk))
		return;

	if (new_state->high.sclk < current_state->high.sclk)
		return;

	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
}

int rv770_restrict_performance_levels_before_switch(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_NoForcedLevel)) != PPSMC_Result_OK)
		return -EINVAL;

	if (rv770_send_msg_to_smc(rdev, (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled)) != PPSMC_Result_OK)
		return -EINVAL;

	return 0;
}

int rv770_dpm_force_performance_level(struct radeon_device *rdev,
				      enum radeon_dpm_forced_level level)
{
	PPSMC_Msg msg;

	if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_ZeroLevelsDisabled) != PPSMC_Result_OK)
			return -EINVAL;
		msg = PPSMC_MSG_ForceHigh;
	} else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
			return -EINVAL;
		msg = (PPSMC_Msg)(PPSMC_MSG_TwoLevelsDisabled);
	} else {
		if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
			return -EINVAL;
		msg = (PPSMC_Msg)(PPSMC_MSG_ZeroLevelsDisabled);
	}

	if (rv770_send_msg_to_smc(rdev, msg) != PPSMC_Result_OK)
		return -EINVAL;

	rdev->pm.dpm.forced_level = level;

	return 0;
}

void r7xx_start_smc(struct radeon_device *rdev)
{
	rv770_start_smc(rdev);
	rv770_start_smc_clock(rdev);
}

void r7xx_stop_smc(struct radeon_device *rdev)
{
	rv770_reset_smc(rdev);
	rv770_stop_smc_clock(rdev);
}

static void rv770_read_clock_registers(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	pi->clk_regs.rv770.cg_spll_func_cntl =
		RREG32(CG_SPLL_FUNC_CNTL);
	pi->clk_regs.rv770.cg_spll_func_cntl_2 =
		RREG32(CG_SPLL_FUNC_CNTL_2);
	pi->clk_regs.rv770.cg_spll_func_cntl_3 =
		RREG32(CG_SPLL_FUNC_CNTL_3);
	pi->clk_regs.rv770.cg_spll_spread_spectrum =
		RREG32(CG_SPLL_SPREAD_SPECTRUM);
	pi->clk_regs.rv770.cg_spll_spread_spectrum_2 =
		RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	pi->clk_regs.rv770.mpll_ad_func_cntl =
		RREG32(MPLL_AD_FUNC_CNTL);
	pi->clk_regs.rv770.mpll_ad_func_cntl_2 =
		RREG32(MPLL_AD_FUNC_CNTL_2);
	pi->clk_regs.rv770.mpll_dq_func_cntl =
		RREG32(MPLL_DQ_FUNC_CNTL);
	pi->clk_regs.rv770.mpll_dq_func_cntl_2 =
		RREG32(MPLL_DQ_FUNC_CNTL_2);
	pi->clk_regs.rv770.mclk_pwrmgt_cntl =
		RREG32(MCLK_PWRMGT_CNTL);
	pi->clk_regs.rv770.dll_cntl = RREG32(DLL_CNTL);
}

static void r7xx_read_clock_registers(struct radeon_device *rdev)
{
	if (rdev->family == CHIP_RV740)
		rv740_read_clock_registers(rdev);
	else if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_read_clock_registers(rdev);
	else
		rv770_read_clock_registers(rdev);
}

void rv770_read_voltage_smio_registers(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	pi->s0_vid_lower_smio_cntl =
		RREG32(S0_VID_LOWER_SMIO_CNTL);
}

void rv770_reset_smio_status(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 sw_smio_index, vid_smio_cntl;

	sw_smio_index =
		(RREG32(GENERAL_PWRMGT) & SW_SMIO_INDEX_MASK) >> SW_SMIO_INDEX_SHIFT;
	switch (sw_smio_index) {
	case 3:
		vid_smio_cntl = RREG32(S3_VID_LOWER_SMIO_CNTL);
		break;
	case 2:
		vid_smio_cntl = RREG32(S2_VID_LOWER_SMIO_CNTL);
		break;
	case 1:
		vid_smio_cntl = RREG32(S1_VID_LOWER_SMIO_CNTL);
		break;
	case 0:
		return;
	default:
		vid_smio_cntl = pi->s0_vid_lower_smio_cntl;
		break;
	}

	WREG32(S0_VID_LOWER_SMIO_CNTL, vid_smio_cntl);
	WREG32_P(GENERAL_PWRMGT, SW_SMIO_INDEX(0), ~SW_SMIO_INDEX_MASK);
}

void rv770_get_memory_type(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

	tmp = RREG32(MC_SEQ_MISC0);

	if (((tmp & MC_SEQ_MISC0_GDDR5_MASK) >> MC_SEQ_MISC0_GDDR5_SHIFT) ==
	    MC_SEQ_MISC0_GDDR5_VALUE)
		pi->mem_gddr5 = true;
	else
		pi->mem_gddr5 = false;
}

void rv770_get_pcie_gen2_status(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u32 tmp;

	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);

	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
		pi->pcie_gen2 = true;
	else
		pi->pcie_gen2 = false;

	if (pi->pcie_gen2) {
		if (tmp & LC_CURRENT_DATA_RATE)
			pi->boot_in_gen2 = true;
		else
			pi->boot_in_gen2 = false;
	} else
		pi->boot_in_gen2 = false;
}

#if 0
static int rv770_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
		WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_TILING_CONFIG);
	}

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(7000);

	return 0;
}

static int rv770_exit_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	int i;

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_ResumeFromMinimumPower),
		 ~HOST_SMC_MSG_MASK);

	udelay(7000);

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (((RREG32(SMC_MSG) & HOST_SMC_RESP_MASK) >> HOST_SMC_RESP_SHIFT) == 1)
			break;
		udelay(1000);
	}

	if (pi->gfx_clock_gating)
		WREG32_P(SCLK_PWRMGT_CNTL, DYN_GFX_CLK_OFF_EN, ~DYN_GFX_CLK_OFF_EN);

	return 0;
}
#endif

static void rv770_get_mclk_odt_threshold(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u8 memory_module_index;
	struct atom_memory_info memory_info;

	pi->mclk_odt_threshold = 0;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710)) {
		memory_module_index = rv770_get_memory_module_index(rdev);

		if (radeon_atom_get_memory_info(rdev, memory_module_index, &memory_info))
			return;

		if (memory_info.mem_type == MEM_TYPE_DDR2 ||
		    memory_info.mem_type == MEM_TYPE_DDR3)
			pi->mclk_odt_threshold = 30000;
	}
}

void rv770_get_max_vddc(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	u16 vddc;

	if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc))
		pi->max_vddc = 0;
	else
		pi->max_vddc = vddc;
}

void rv770_program_response_times(struct radeon_device *rdev)
{
	u32 voltage_response_time, backbias_response_time;
	u32 acpi_delay_time, vbi_time_out;
	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly;
	u32 reference_clock;

	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;

	if (voltage_response_time == 0)
		voltage_response_time = 1000;

	if (backbias_response_time == 0)
		backbias_response_time = 1000;

	acpi_delay_time = 15000;
	vbi_time_out = 100000;

	reference_clock = radeon_get_xclk(rdev);

	vddc_dly = (voltage_response_time * reference_clock) / 1600;
	bb_dly = (backbias_response_time * reference_clock) / 1600;
	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
	vbi_dly = (vbi_time_out * reference_clock) / 1600;

	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_vreg, vddc_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_delay_acpi, acpi_dly);
	rv770_write_smc_soft_register(rdev,
				      RV770_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
#if 0
	/* XXX look up hw revision */
	if (WEKIVA_A21)
		rv770_write_smc_soft_register(rdev,
					      RV770_SMC_SOFT_REGISTER_baby_step_timer,
					      0x10);
#endif
}

static void rv770_program_dcodt_before_state_switch(struct radeon_device *rdev,
						    struct radeon_ps *radeon_new_state,
						    struct radeon_ps *radeon_current_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
	struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
	bool current_use_dc = false;
	bool new_use_dc = false;

	if (pi->mclk_odt_threshold == 0)
		return;

	if (current_state->high.mclk <= pi->mclk_odt_threshold)
		current_use_dc = true;

	if (new_state->high.mclk <= pi->mclk_odt_threshold)
		new_use_dc = true;

	if (current_use_dc == new_use_dc)
		return;

	if (!current_use_dc && new_use_dc)
		return;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_program_dcodt(rdev, new_use_dc);
}

static void rv770_program_dcodt_after_state_switch(struct radeon_device *rdev,
						   struct radeon_ps *radeon_new_state,
						   struct radeon_ps *radeon_current_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct rv7xx_ps *new_state = rv770_get_ps(radeon_new_state);
	struct rv7xx_ps *current_state = rv770_get_ps(radeon_current_state);
	bool current_use_dc = false;
	bool new_use_dc = false;

	if (pi->mclk_odt_threshold == 0)
		return;

	if (current_state->high.mclk <= pi->mclk_odt_threshold)
		current_use_dc = true;

	if (new_state->high.mclk <= pi->mclk_odt_threshold)
		new_use_dc = true;

	if (current_use_dc == new_use_dc)
		return;

	if (current_use_dc && !new_use_dc)
		return;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_program_dcodt(rdev, new_use_dc);
}

static void rv770_retrieve_odt_values(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->mclk_odt_threshold == 0)
		return;

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_get_odt_values(rdev);
}

static void rv770_set_dpm_event_sources(struct radeon_device *rdev, u32 sources)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	bool want_thermal_protection;
	enum radeon_dpm_event_src dpm_event_src;

	switch (sources) {
	case 0:
	default:
		want_thermal_protection = false;
		break;
	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGITAL;
		break;

	case (1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_EXTERNAL;
		break;

	case ((1 << RADEON_DPM_AUTO_THROTTLE_SRC_EXTERNAL) |
	      (1 << RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL)):
		want_thermal_protection = true;
		dpm_event_src = RADEON_DPM_EVENT_SRC_DIGIAL_OR_EXTERNAL;
		break;
	}

	if (want_thermal_protection) {
		WREG32_P(CG_THERMAL_CTRL, DPM_EVENT_SRC(dpm_event_src), ~DPM_EVENT_SRC_MASK);
		if (pi->thermal_protection)
			WREG32_P(GENERAL_PWRMGT, 0, ~THERMAL_PROTECTION_DIS);
	} else {
		WREG32_P(GENERAL_PWRMGT, THERMAL_PROTECTION_DIS, ~THERMAL_PROTECTION_DIS);
	}
}

void rv770_enable_auto_throttle_source(struct radeon_device *rdev,
				       enum radeon_dpm_auto_throttle_src source,
				       bool enable)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (enable) {
		if (!(pi->active_auto_throttle_sources & (1 << source))) {
			pi->active_auto_throttle_sources |= 1 << source;
			rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
		}
	} else {
		if (pi->active_auto_throttle_sources & (1 << source)) {
			pi->active_auto_throttle_sources &= ~(1 << source);
			rv770_set_dpm_event_sources(rdev, pi->active_auto_throttle_sources);
		}
	}
}

static int rv770_set_thermal_temperature_range(struct radeon_device *rdev,
					       int min_temp, int max_temp)
{
	int low_temp = 0 * 1000;
	int high_temp = 255 * 1000;

	if (low_temp < min_temp)
		low_temp = min_temp;
	if (high_temp > max_temp)
		high_temp = max_temp;
	if (high_temp < low_temp) {
		DRM_ERROR("invalid thermal range: %d - %d\n", low_temp, high_temp);
		return -EINVAL;
	}

	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTH(high_temp / 1000), ~DIG_THERM_INTH_MASK);
	WREG32_P(CG_THERMAL_INT, DIG_THERM_INTL(low_temp / 1000), ~DIG_THERM_INTL_MASK);
	WREG32_P(CG_THERMAL_CTRL, DIG_THERM_DPM(high_temp / 1000), ~DIG_THERM_DPM_MASK);

	rdev->pm.dpm.thermal.min_temp = low_temp;
	rdev->pm.dpm.thermal.max_temp = high_temp;

	return 0;
}

int rv770_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	if (pi->gfx_clock_gating)
		rv770_restore_cgcg(rdev);

	if (rv770_dpm_enabled(rdev))
		return -EINVAL;

	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = rv770_construct_vddc_table(rdev);
		if (ret) {
			DRM_ERROR("rv770_construct_vddc_table failed\n");
			return ret;
		}
	}

	if (pi->dcodt)
		rv770_retrieve_odt_values(rdev);

	if (pi->mvdd_control) {
		ret = rv770_get_mvdd_configuration(rdev);
		if (ret) {
			DRM_ERROR("rv770_get_mvdd_configuration failed\n");
			return ret;
		}
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_BACKBIAS)
		rv770_enable_backbias(rdev, true);

	rv770_enable_spread_spectrum(rdev, true);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);

	rv770_program_mpll_timing_parameters(rdev);
	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	rv770_program_engine_speed_parameters(rdev);
	rv770_enable_display_gap(rdev);
	rv770_program_vc(rdev);

	if (pi->dynamic_pcie_gen2)
		rv770_enable_dynamic_pcie_gen2(rdev, true);

	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}
	ret = rv770_init_smc_table(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("rv770_init_smc_table failed\n");
		return ret;
	}

	rv770_program_response_times(rdev);
	r7xx_start_smc(rdev);

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_start_dpm(rdev);
	else
		rv770_start_dpm(rdev);

	if (pi->gfx_clock_gating)
		rv770_gfx_clock_gating_enable(rdev, true);

	if (pi->mg_clock_gating)
		rv770_mg_clock_gating_enable(rdev, true);

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	return 0;
}

int rv770_dpm_late_enable(struct radeon_device *rdev)
{
	int ret;

	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		PPSMC_Result result;

		ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, R600_TEMP_RANGE_MAX);
		if (ret)
			return ret;
		rdev->irq.dpm_thermal = true;
		radeon_irq_set(rdev);
		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);

		if (result != PPSMC_Result_OK)
			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
	}

	return 0;
}

void rv770_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (!rv770_dpm_enabled(rdev))
		return;

	rv770_clear_vc(rdev);

	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);

	rv770_enable_spread_spectrum(rdev, false);

	if (pi->dynamic_pcie_gen2)
		rv770_enable_dynamic_pcie_gen2(rdev, false);

	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		rv770_gfx_clock_gating_enable(rdev, false);

	if (pi->mg_clock_gating)
		rv770_mg_clock_gating_enable(rdev, false);

	if ((rdev->family == CHIP_RV730) || (rdev->family == CHIP_RV710))
		rv730_stop_dpm(rdev);
	else
		rv770_stop_dpm(rdev);

	r7xx_stop_smc(rdev);
	rv770_reset_smio_status(rdev);
}

int rv770_dpm_set_power_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
	struct radeon_ps *old_ps = rdev->pm.dpm.current_ps;
	int ret;

	ret = rv770_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("rv770_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	rv770_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	ret = rv770_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("rv770_upload_sw_state failed\n");
		return ret;
	}
	r7xx_program_memory_timing_parameters(rdev, new_ps);
	if (pi->dcodt)
		rv770_program_dcodt_before_state_switch(rdev, new_ps, old_ps);
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	if (pi->dcodt)
		rv770_program_dcodt_after_state_switch(rdev, new_ps, old_ps);
	rv770_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);

	return 0;
}

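/*
 * Force the lowest performance level and reprogram the boot state,
 * returning the hardware to a known power state.
 */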
void rv770_dpm_reset_asic(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	rv770_restrict_performance_levels_before_switch(rdev);
	if (pi->dcodt)
		rv770_program_dcodt_before_state_switch(rdev, boot_ps, boot_ps);
	rv770_set_boot_state(rdev);
	if (pi->dcodt)
		rv770_program_dcodt_after_state_switch(rdev, boot_ps, boot_ps);
}

void rv770_dpm_setup_asic(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	r7xx_read_clock_registers(rdev);
	rv770_read_voltage_smio_registers(rdev);
	rv770_get_memory_type(rdev);
	if (pi->dcodt)
		rv770_get_mclk_odt_threshold(rdev);
	rv770_get_pcie_gen2_status(rdev);

	rv770_enable_acpi_pm(rdev);

	if (radeon_aspm != 0) {
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L0s)
			rv770_enable_l0s(rdev);
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_ASPM_L1)
			rv770_enable_l1(rdev);
		if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_TURNOFFPLL_ASPML1)
			rv770_enable_pll_sleep_in_l1(rdev);
	}
}

void rv770_dpm_display_configuration_changed(struct radeon_device *rdev)
{
	rv770_program_display_gap(rdev);
}

union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};

union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};

union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};

static void rv7xx_parse_pplib_non_clock_info(struct radeon_device *rdev,
					     struct radeon_ps *rps,
					     struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
					     u8 table_rev)
{
	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
	rps->class = le16_to_cpu(non_clock_info->usClassification);
	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);

	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
	} else {
		rps->vclk = 0;
		rps->dclk = 0;
	}

	if (r600_is_uvd_state(rps->class, rps->class2)) {
		if ((rps->vclk == 0) || (rps->dclk == 0)) {
			rps->vclk = RV770_DEFAULT_VCLK_FREQ;
			rps->dclk = RV770_DEFAULT_DCLK_FREQ;
		}
	}

	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
		rdev->pm.dpm.boot_ps = rps;
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		rdev->pm.dpm.uvd_ps = rps;
}

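/*
 * Fill in one performance level (low/medium/high) of a power state from a
 * PPLib clock info entry.  Evergreen and newer parts use a different table
 * layout and also carry a VDDCI value.
 */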
static void rv7xx_parse_pplib_clock_info(struct radeon_device *rdev,
					 struct radeon_ps *rps, int index,
					 union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	u32 sclk, mclk;
	struct rv7xx_pl *pl;

	switch (index) {
	case 0:
		pl = &ps->low;
		break;
	case 1:
		pl = &ps->medium;
		break;
	case 2:
	default:
		pl = &ps->high;
		break;
	}

	if (rdev->family >= CHIP_CEDAR) {
		sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
		sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
		mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
		mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

		pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
		pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
		pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
	} else {
		sclk = le16_to_cpu(clock_info->r600.usEngineClockLow);
		sclk |= clock_info->r600.ucEngineClockHigh << 16;
		mclk = le16_to_cpu(clock_info->r600.usMemoryClockLow);
		mclk |= clock_info->r600.ucMemoryClockHigh << 16;

		pl->vddc = le16_to_cpu(clock_info->r600.usVDDC);
		pl->flags = le32_to_cpu(clock_info->r600.ulFlags);
	}

	pl->mclk = mclk;
	pl->sclk = sclk;

	/* patch up vddc if necessary */
	if (pl->vddc == 0xff01) {
		if (pi->max_vddc)
			pl->vddc = pi->max_vddc;
	}

	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		if (rdev->family >= CHIP_CEDAR)
			eg_pi->acpi_vddci = pl->vddci;
		if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		if (rdev->family >= CHIP_BARTS) {
			eg_pi->ulv.supported = true;
			eg_pi->ulv.pl = pl;
		}
	}

	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}

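/*
 * Walk the PPLib power state array in the vbios and build the driver's list
 * of radeon_ps entries; each state references up to three clock info levels.
 */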
int rv7xx_parse_power_table(struct radeon_device *rdev)
{
	struct radeon_mode_info *mode_info = &rdev->mode_info;
	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
	union pplib_power_state *power_state;
	int i, j;
	union pplib_clock_info *clock_info;
	union power_info *power_info;
	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
	u16 data_offset;
	u8 frev, crev;
	struct rv7xx_ps *ps;

	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
				    &frev, &crev, &data_offset))
		return -EINVAL;
	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);

	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
				  power_info->pplib.ucNumStates, GFP_KERNEL);
	if (!rdev->pm.dpm.ps)
		return -ENOMEM;

	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
		power_state = (union pplib_power_state *)
			(mode_info->atom_context->bios + data_offset +
			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
			 i * power_info->pplib.ucStateEntrySize);
		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
			(mode_info->atom_context->bios + data_offset +
			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
			 (power_state->v1.ucNonClockStateIndex *
			  power_info->pplib.ucNonClockSize));
		if (power_info->pplib.ucStateEntrySize - 1) {
			u8 *idx;
			ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL);
			if (ps == NULL) {
				kfree(rdev->pm.dpm.ps);
				return -ENOMEM;
			}
			rdev->pm.dpm.ps[i].ps_priv = ps;
			rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
							 non_clock_info,
							 power_info->pplib.ucNonClockSize);
			idx = (u8 *)&power_state->v1.ucClockStateIndices[0];
			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
				clock_info = (union pplib_clock_info *)
					(mode_info->atom_context->bios + data_offset +
					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
					 (idx[j] * power_info->pplib.ucClockInfoSize));
				rv7xx_parse_pplib_clock_info(rdev,
							     &rdev->pm.dpm.ps[i], j,
							     clock_info);
			}
		}
	}
	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
	return 0;
}

void rv770_get_engine_memory_ss(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct radeon_atom_ss ss;

	pi->sclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
						       ASIC_INTERNAL_ENGINE_SS, 0);
	pi->mclk_ss = radeon_atombios_get_asic_ss_info(rdev, &ss,
						       ASIC_INTERNAL_MEMORY_SS, 0);

	if (pi->sclk_ss || pi->mclk_ss)
		pi->dynamic_ss = true;
	else
		pi->dynamic_ss = false;
}

int rv770_dpm_init(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi;
	struct atom_clock_dividers dividers;
	int ret;

	pi = kzalloc(sizeof(struct rv7xx_power_info), GFP_KERNEL);
	if (pi == NULL)
		return -ENOMEM;
	rdev->pm.dpm.priv = pi;

	rv770_get_max_vddc(rdev);

	pi->acpi_vddc = 0;
	pi->min_vddc_in_table = 0;
	pi->max_vddc_in_table = 0;

	ret = r600_get_platform_caps(rdev);
	if (ret)
		return ret;

	ret = rv7xx_parse_power_table(rdev);
	if (ret)
		return ret;

	if (rdev->pm.dpm.voltage_response_time == 0)
		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
	if (rdev->pm.dpm.backbias_response_time == 0)
		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     0, false, &dividers);
	if (ret)
		pi->ref_div = dividers.ref_div + 1;
	else
		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;

	pi->mclk_strobe_mode_threshold = 30000;
	pi->mclk_edc_enable_threshold = 30000;

	pi->rlp = RV770_RLP_DFLT;
	pi->rmp = RV770_RMP_DFLT;
	pi->lhp = RV770_LHP_DFLT;
	pi->lmp = RV770_LMP_DFLT;

	pi->voltage_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);

	pi->mvdd_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);

	rv770_get_engine_memory_ss(rdev);

	pi->asi = RV770_ASI_DFLT;
	pi->pasi = RV770_HASI_DFLT;
	pi->vrc = RV770_VRC_DFLT;

	pi->power_gating = false;

	pi->gfx_clock_gating = true;

	pi->mg_clock_gating = true;
	pi->mgcgtssm = true;

	pi->dynamic_pcie_gen2 = true;

	if (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE)
		pi->thermal_protection = true;
	else
		pi->thermal_protection = false;

	pi->display_gap = true;

	if (rdev->flags & RADEON_IS_MOBILITY)
		pi->dcodt = true;
	else
		pi->dcodt = false;

	pi->ulps = true;

	pi->mclk_stutter_mode_threshold = 0;

	pi->sram_end = SMC_RAM_END;
	pi->state_table_start = RV770_SMC_TABLE_ADDRESS;
	pi->soft_regs_start = RV770_SMC_SOFT_REGISTERS_START;

	return 0;
}

void rv770_dpm_print_power_state(struct radeon_device *rdev,
				 struct radeon_ps *rps)
{
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	struct rv7xx_pl *pl;

	r600_dpm_print_class_info(rps->class, rps->class2);
	r600_dpm_print_cap_info(rps->caps);
	printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
	if (rdev->family >= CHIP_CEDAR) {
		pl = &ps->low;
		printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		pl = &ps->medium;
		printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		pl = &ps->high;
		printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u vddci: %u\n",
		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
	} else {
		pl = &ps->low;
		printk("\t\tpower level 0 sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
		pl = &ps->medium;
		printk("\t\tpower level 1 sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
		pl = &ps->high;
		printk("\t\tpower level 2 sclk: %u mclk: %u vddc: %u\n",
		       pl->sclk, pl->mclk, pl->vddc);
	}
	r600_dpm_print_ps_status(rdev, rps);
}

void rv770_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
						       struct seq_file *m)
{
	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
	struct rv7xx_ps *ps = rv770_get_ps(rps);
	struct rv7xx_pl *pl;
	u32 current_index =
		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_PROFILE_INDEX_MASK) >>
		CURRENT_PROFILE_INDEX_SHIFT;

	if (current_index > 2) {
		seq_printf(m, "invalid dpm profile %d\n", current_index);
	} else {
		if (current_index == 0)
			pl = &ps->low;
		else if (current_index == 1)
			pl = &ps->medium;
		else /* current_index == 2 */
			pl = &ps->high;
		seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
		if (rdev->family >= CHIP_CEDAR) {
			seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u vddci: %u\n",
				   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
		} else {
			seq_printf(m, "power level %d sclk: %u mclk: %u vddc: %u\n",
				   current_index, pl->sclk, pl->mclk, pl->vddc);
		}
	}
}

void rv770_dpm_fini(struct radeon_device *rdev)
{
	int i;

	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
		kfree(rdev->pm.dpm.ps[i].ps_priv);
	}
	kfree(rdev->pm.dpm.ps);
	kfree(rdev->pm.dpm.priv);
}

u32 rv770_dpm_get_sclk(struct radeon_device *rdev, bool low)
{
	struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);

	if (low)
		return requested_state->low.sclk;
	else
		return requested_state->high.sclk;
}

u32 rv770_dpm_get_mclk(struct radeon_device *rdev, bool low)
{
	struct rv7xx_ps *requested_state = rv770_get_ps(rdev->pm.dpm.requested_ps);

	if (low)
		return requested_state->low.mclk;
	else
		return requested_state->high.mclk;
}

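/*
 * Memory clock changes are timed to the display vblank; report whether the
 * current vblank period is too short to hide an mclk switch.
 */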
bool rv770_dpm_vblank_too_short(struct radeon_device *rdev)
{
	u32 vblank_time = r600_dpm_get_vblank_time(rdev);
	u32 switch_limit = 200; /* 300 */

	/* RV770 */
	/* mclk switching doesn't seem to work reliably on desktop RV770s */
	if ((rdev->family == CHIP_RV770) &&
	    !(rdev->flags & RADEON_IS_MOBILITY))
		switch_limit = 0xffffffff; /* disable mclk switching */

	if (vblank_time < switch_limit)
		return true;
	else
		return false;

}