1 /* 2 * Copyright 2007-8 Advanced Micro Devices, Inc. 3 * Copyright 2008 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice shall be included in 13 * all copies or substantial portions of the Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 21 * OTHER DEALINGS IN THE SOFTWARE. 22 * 23 * Authors: Dave Airlie 24 * Alex Deucher 25 */ 26 #include <drm/drmP.h> 27 #include <drm/drm_crtc_helper.h> 28 #include <drm/radeon_drm.h> 29 #include <drm/drm_fixed.h> 30 #include "radeon.h" 31 #include "atom.h" 32 #include "atom-bits.h" 33 34 static void atombios_overscan_setup(struct drm_crtc *crtc, 35 struct drm_display_mode *mode, 36 struct drm_display_mode *adjusted_mode) 37 { 38 struct drm_device *dev = crtc->dev; 39 struct radeon_device *rdev = dev->dev_private; 40 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 41 SET_CRTC_OVERSCAN_PS_ALLOCATION args; 42 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan); 43 int a1, a2; 44 45 memset(&args, 0, sizeof(args)); 46 47 args.ucCRTC = radeon_crtc->crtc_id; 48 49 switch (radeon_crtc->rmx_type) { 50 case RMX_CENTER: 51 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2); 52 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2); 53 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2); 54 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2); 55 break; 56 case RMX_ASPECT: 57 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay; 58 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay; 59 60 if (a1 > a2) { 61 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2); 62 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2); 63 } else if (a2 > a1) { 64 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2); 65 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2); 66 } 67 break; 68 case RMX_FULL: 69 default: 70 args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border); 71 args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border); 72 args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border); 73 args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border); 74 break; 75 } 76 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 77 } 78 79 static void atombios_scaler_setup(struct drm_crtc *crtc) 80 { 81 struct drm_device *dev = crtc->dev; 82 struct radeon_device *rdev = dev->dev_private; 83 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 84 ENABLE_SCALER_PS_ALLOCATION args; 85 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler); 86 struct radeon_encoder *radeon_encoder = 87 to_radeon_encoder(radeon_crtc->encoder); 88 /* fixme - fill in enc_priv for atom dac */ 89 enum radeon_tv_std tv_std = TV_STD_NTSC; 90 bool is_tv = false, is_cv = false; 91 92 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id) 93 return; 94 95 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) { 96 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv; 97 tv_std = tv_dac->tv_std; 98 is_tv = true; 99 } 100 101 memset(&args, 0, sizeof(args)); 102 103 args.ucScaler = radeon_crtc->crtc_id; 104 105 if (is_tv) { 106 switch (tv_std) { 107 case TV_STD_NTSC: 108 default: 109 args.ucTVStandard = ATOM_TV_NTSC; 110 break; 111 case TV_STD_PAL: 112 args.ucTVStandard = ATOM_TV_PAL; 113 break; 114 case TV_STD_PAL_M: 115 args.ucTVStandard = ATOM_TV_PALM; 116 break; 117 case TV_STD_PAL_60: 118 args.ucTVStandard = ATOM_TV_PAL60; 119 break; 120 case TV_STD_NTSC_J: 121 args.ucTVStandard = ATOM_TV_NTSCJ; 122 break; 123 case TV_STD_SCART_PAL: 124 args.ucTVStandard = ATOM_TV_PAL; /* ??? */ 125 break; 126 case TV_STD_SECAM: 127 args.ucTVStandard = ATOM_TV_SECAM; 128 break; 129 case TV_STD_PAL_CN: 130 args.ucTVStandard = ATOM_TV_PALCN; 131 break; 132 } 133 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE; 134 } else if (is_cv) { 135 args.ucTVStandard = ATOM_TV_CV; 136 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE; 137 } else { 138 switch (radeon_crtc->rmx_type) { 139 case RMX_FULL: 140 args.ucEnable = ATOM_SCALER_EXPANSION; 141 break; 142 case RMX_CENTER: 143 args.ucEnable = ATOM_SCALER_CENTER; 144 break; 145 case RMX_ASPECT: 146 args.ucEnable = ATOM_SCALER_EXPANSION; 147 break; 148 default: 149 if (ASIC_IS_AVIVO(rdev)) 150 args.ucEnable = ATOM_SCALER_DISABLE; 151 else 152 args.ucEnable = ATOM_SCALER_CENTER; 153 break; 154 } 155 } 156 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 157 if ((is_tv || is_cv) 158 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) { 159 atom_rv515_force_tv_scaler(rdev, radeon_crtc); 160 } 161 } 162 163 static void atombios_lock_crtc(struct drm_crtc *crtc, int lock) 164 { 165 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 166 struct drm_device *dev = crtc->dev; 167 struct radeon_device *rdev = dev->dev_private; 168 int index = 169 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters); 170 ENABLE_CRTC_PS_ALLOCATION args; 171 172 memset(&args, 0, sizeof(args)); 173 174 args.ucCRTC = radeon_crtc->crtc_id; 175 args.ucEnable = lock; 176 177 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 178 } 179 180 static void atombios_enable_crtc(struct drm_crtc *crtc, int state) 181 { 182 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 183 struct drm_device *dev = crtc->dev; 184 struct radeon_device *rdev = dev->dev_private; 185 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC); 186 ENABLE_CRTC_PS_ALLOCATION args; 187 188 memset(&args, 0, sizeof(args)); 189 190 args.ucCRTC = radeon_crtc->crtc_id; 191 args.ucEnable = state; 192 193 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 194 } 195 196 static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state) 197 { 198 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 199 struct drm_device *dev = crtc->dev; 200 struct radeon_device *rdev = dev->dev_private; 201 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq); 202 ENABLE_CRTC_PS_ALLOCATION args; 203 204 memset(&args, 0, sizeof(args)); 205 206 args.ucCRTC = radeon_crtc->crtc_id; 207 args.ucEnable = state; 208 209 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 210 } 211 212 static void atombios_blank_crtc(struct drm_crtc *crtc, int state) 213 { 214 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 215 struct drm_device *dev = crtc->dev; 216 struct radeon_device *rdev = dev->dev_private; 217 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC); 218 BLANK_CRTC_PS_ALLOCATION args; 219 220 memset(&args, 0, sizeof(args)); 221 222 args.ucCRTC = radeon_crtc->crtc_id; 223 args.ucBlanking = state; 224 225 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 226 } 227 228 static void atombios_powergate_crtc(struct drm_crtc *crtc, int state) 229 { 230 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 231 struct drm_device *dev = crtc->dev; 232 struct radeon_device *rdev = dev->dev_private; 233 int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating); 234 ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args; 235 236 memset(&args, 0, sizeof(args)); 237 238 args.ucDispPipeId = radeon_crtc->crtc_id; 239 args.ucEnable = state; 240 241 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 242 } 243 244 void atombios_crtc_dpms(struct drm_crtc *crtc, int mode) 245 { 246 struct drm_device *dev = crtc->dev; 247 struct radeon_device *rdev = dev->dev_private; 248 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 249 250 switch (mode) { 251 case DRM_MODE_DPMS_ON: 252 radeon_crtc->enabled = true; 253 /* adjust pm to dpms changes BEFORE enabling crtcs */ 254 radeon_pm_compute_clocks(rdev); 255 atombios_enable_crtc(crtc, ATOM_ENABLE); 256 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev)) 257 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE); 258 atombios_blank_crtc(crtc, ATOM_DISABLE); 259 drm_vblank_post_modeset(dev, radeon_crtc->crtc_id); 260 radeon_crtc_load_lut(crtc); 261 break; 262 case DRM_MODE_DPMS_STANDBY: 263 case DRM_MODE_DPMS_SUSPEND: 264 case DRM_MODE_DPMS_OFF: 265 drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id); 266 if (radeon_crtc->enabled) 267 atombios_blank_crtc(crtc, ATOM_ENABLE); 268 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev)) 269 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE); 270 atombios_enable_crtc(crtc, ATOM_DISABLE); 271 radeon_crtc->enabled = false; 272 /* adjust pm to dpms changes AFTER disabling crtcs */ 273 radeon_pm_compute_clocks(rdev); 274 break; 275 } 276 } 277 278 static void 279 atombios_set_crtc_dtd_timing(struct drm_crtc *crtc, 280 struct drm_display_mode *mode) 281 { 282 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 283 struct drm_device *dev = crtc->dev; 284 struct radeon_device *rdev = dev->dev_private; 285 SET_CRTC_USING_DTD_TIMING_PARAMETERS args; 286 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming); 287 u16 misc = 0; 288 289 memset(&args, 0, sizeof(args)); 290 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2)); 291 args.usH_Blanking_Time = 292 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2)); 293 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2)); 294 args.usV_Blanking_Time = 295 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2)); 296 args.usH_SyncOffset = 297 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border); 298 args.usH_SyncWidth = 299 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start); 300 args.usV_SyncOffset = 301 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border); 302 args.usV_SyncWidth = 303 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start); 304 args.ucH_Border = radeon_crtc->h_border; 305 args.ucV_Border = radeon_crtc->v_border; 306 307 if (mode->flags & DRM_MODE_FLAG_NVSYNC) 308 misc |= ATOM_VSYNC_POLARITY; 309 if (mode->flags & DRM_MODE_FLAG_NHSYNC) 310 misc |= ATOM_HSYNC_POLARITY; 311 if (mode->flags & DRM_MODE_FLAG_CSYNC) 312 misc |= ATOM_COMPOSITESYNC; 313 if (mode->flags & DRM_MODE_FLAG_INTERLACE) 314 misc |= ATOM_INTERLACE; 315 if (mode->flags & DRM_MODE_FLAG_DBLSCAN) 316 misc |= ATOM_DOUBLE_CLOCK_MODE; 317 318 args.susModeMiscInfo.usAccess = cpu_to_le16(misc); 319 args.ucCRTC = radeon_crtc->crtc_id; 320 321 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 322 } 323 324 static void atombios_crtc_set_timing(struct drm_crtc *crtc, 325 struct drm_display_mode *mode) 326 { 327 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 328 struct drm_device *dev = crtc->dev; 329 struct radeon_device *rdev = dev->dev_private; 330 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args; 331 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing); 332 u16 misc = 0; 333 334 memset(&args, 0, sizeof(args)); 335 args.usH_Total = cpu_to_le16(mode->crtc_htotal); 336 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay); 337 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start); 338 args.usH_SyncWidth = 339 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start); 340 args.usV_Total = cpu_to_le16(mode->crtc_vtotal); 341 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay); 342 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start); 343 args.usV_SyncWidth = 344 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start); 345 346 args.ucOverscanRight = radeon_crtc->h_border; 347 args.ucOverscanLeft = radeon_crtc->h_border; 348 args.ucOverscanBottom = radeon_crtc->v_border; 349 args.ucOverscanTop = radeon_crtc->v_border; 350 351 if (mode->flags & DRM_MODE_FLAG_NVSYNC) 352 misc |= ATOM_VSYNC_POLARITY; 353 if (mode->flags & DRM_MODE_FLAG_NHSYNC) 354 misc |= ATOM_HSYNC_POLARITY; 355 if (mode->flags & DRM_MODE_FLAG_CSYNC) 356 misc |= ATOM_COMPOSITESYNC; 357 if (mode->flags & DRM_MODE_FLAG_INTERLACE) 358 misc |= ATOM_INTERLACE; 359 if (mode->flags & DRM_MODE_FLAG_DBLSCAN) 360 misc |= ATOM_DOUBLE_CLOCK_MODE; 361 362 args.susModeMiscInfo.usAccess = cpu_to_le16(misc); 363 args.ucCRTC = radeon_crtc->crtc_id; 364 365 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 366 } 367 368 static void atombios_disable_ss(struct radeon_device *rdev, int pll_id) 369 { 370 u32 ss_cntl; 371 372 if (ASIC_IS_DCE4(rdev)) { 373 switch (pll_id) { 374 case ATOM_PPLL1: 375 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL); 376 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN; 377 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl); 378 break; 379 case ATOM_PPLL2: 380 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL); 381 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN; 382 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl); 383 break; 384 case ATOM_DCPLL: 385 case ATOM_PPLL_INVALID: 386 return; 387 } 388 } else if (ASIC_IS_AVIVO(rdev)) { 389 switch (pll_id) { 390 case ATOM_PPLL1: 391 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL); 392 ss_cntl &= ~1; 393 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl); 394 break; 395 case ATOM_PPLL2: 396 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL); 397 ss_cntl &= ~1; 398 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl); 399 break; 400 case ATOM_DCPLL: 401 case ATOM_PPLL_INVALID: 402 return; 403 } 404 } 405 } 406 407 408 union atom_enable_ss { 409 ENABLE_LVDS_SS_PARAMETERS lvds_ss; 410 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2; 411 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1; 412 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2; 413 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3; 414 }; 415 416 static void atombios_crtc_program_ss(struct radeon_device *rdev, 417 int enable, 418 int pll_id, 419 int crtc_id, 420 struct radeon_atom_ss *ss) 421 { 422 unsigned i; 423 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL); 424 union atom_enable_ss args; 425 426 if (!enable) { 427 for (i = 0; i < rdev->num_crtc; i++) { 428 if (rdev->mode_info.crtcs[i] && 429 rdev->mode_info.crtcs[i]->enabled && 430 i != crtc_id && 431 pll_id == rdev->mode_info.crtcs[i]->pll_id) { 432 /* one other crtc is using this pll don't turn 433 * off spread spectrum as it might turn off 434 * display on active crtc 435 */ 436 return; 437 } 438 } 439 } 440 441 memset(&args, 0, sizeof(args)); 442 443 if (ASIC_IS_DCE5(rdev)) { 444 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0); 445 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK; 446 switch (pll_id) { 447 case ATOM_PPLL1: 448 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL; 449 break; 450 case ATOM_PPLL2: 451 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL; 452 break; 453 case ATOM_DCPLL: 454 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL; 455 break; 456 case ATOM_PPLL_INVALID: 457 return; 458 } 459 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount); 460 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step); 461 args.v3.ucEnable = enable; 462 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE61(rdev)) 463 args.v3.ucEnable = ATOM_DISABLE; 464 } else if (ASIC_IS_DCE4(rdev)) { 465 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage); 466 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK; 467 switch (pll_id) { 468 case ATOM_PPLL1: 469 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL; 470 break; 471 case ATOM_PPLL2: 472 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL; 473 break; 474 case ATOM_DCPLL: 475 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL; 476 break; 477 case ATOM_PPLL_INVALID: 478 return; 479 } 480 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount); 481 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step); 482 args.v2.ucEnable = enable; 483 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE41(rdev)) 484 args.v2.ucEnable = ATOM_DISABLE; 485 } else if (ASIC_IS_DCE3(rdev)) { 486 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage); 487 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK; 488 args.v1.ucSpreadSpectrumStep = ss->step; 489 args.v1.ucSpreadSpectrumDelay = ss->delay; 490 args.v1.ucSpreadSpectrumRange = ss->range; 491 args.v1.ucPpll = pll_id; 492 args.v1.ucEnable = enable; 493 } else if (ASIC_IS_AVIVO(rdev)) { 494 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) || 495 (ss->type & ATOM_EXTERNAL_SS_MASK)) { 496 atombios_disable_ss(rdev, pll_id); 497 return; 498 } 499 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage); 500 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK; 501 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step; 502 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay; 503 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range; 504 args.lvds_ss_2.ucEnable = enable; 505 } else { 506 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) || 507 (ss->type & ATOM_EXTERNAL_SS_MASK)) { 508 atombios_disable_ss(rdev, pll_id); 509 return; 510 } 511 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage); 512 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK; 513 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2; 514 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4; 515 args.lvds_ss.ucEnable = enable; 516 } 517 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 518 } 519 520 union adjust_pixel_clock { 521 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1; 522 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3; 523 }; 524 525 static u32 atombios_adjust_pll(struct drm_crtc *crtc, 526 struct drm_display_mode *mode) 527 { 528 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 529 struct drm_device *dev = crtc->dev; 530 struct radeon_device *rdev = dev->dev_private; 531 struct drm_encoder *encoder = radeon_crtc->encoder; 532 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 533 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder); 534 u32 adjusted_clock = mode->clock; 535 int encoder_mode = atombios_get_encoder_mode(encoder); 536 u32 dp_clock = mode->clock; 537 int bpc = radeon_get_monitor_bpc(connector); 538 bool is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock); 539 540 /* reset the pll flags */ 541 radeon_crtc->pll_flags = 0; 542 543 if (ASIC_IS_AVIVO(rdev)) { 544 if ((rdev->family == CHIP_RS600) || 545 (rdev->family == CHIP_RS690) || 546 (rdev->family == CHIP_RS740)) 547 radeon_crtc->pll_flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/ 548 RADEON_PLL_PREFER_CLOSEST_LOWER); 549 550 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */ 551 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV; 552 else 553 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV; 554 555 if (rdev->family < CHIP_RV770) 556 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP; 557 /* use frac fb div on APUs */ 558 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev)) 559 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV; 560 /* use frac fb div on RS780/RS880 */ 561 if ((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880)) 562 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV; 563 if (ASIC_IS_DCE32(rdev) && mode->clock > 165000) 564 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV; 565 } else { 566 radeon_crtc->pll_flags |= RADEON_PLL_LEGACY; 567 568 if (mode->clock > 200000) /* range limits??? */ 569 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV; 570 else 571 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV; 572 } 573 574 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) || 575 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) { 576 if (connector) { 577 struct radeon_connector *radeon_connector = to_radeon_connector(connector); 578 struct radeon_connector_atom_dig *dig_connector = 579 radeon_connector->con_priv; 580 581 dp_clock = dig_connector->dp_clock; 582 } 583 } 584 585 /* use recommended ref_div for ss */ 586 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 587 if (radeon_crtc->ss_enabled) { 588 if (radeon_crtc->ss.refdiv) { 589 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV; 590 radeon_crtc->pll_reference_div = radeon_crtc->ss.refdiv; 591 if (ASIC_IS_AVIVO(rdev)) 592 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV; 593 } 594 } 595 } 596 597 if (ASIC_IS_AVIVO(rdev)) { 598 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */ 599 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1) 600 adjusted_clock = mode->clock * 2; 601 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 602 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER; 603 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 604 radeon_crtc->pll_flags |= RADEON_PLL_IS_LCD; 605 } else { 606 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC) 607 radeon_crtc->pll_flags |= RADEON_PLL_NO_ODD_POST_DIV; 608 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS) 609 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV; 610 } 611 612 /* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock 613 * accordingly based on the encoder/transmitter to work around 614 * special hw requirements. 615 */ 616 if (ASIC_IS_DCE3(rdev)) { 617 union adjust_pixel_clock args; 618 u8 frev, crev; 619 int index; 620 621 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll); 622 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, 623 &crev)) 624 return adjusted_clock; 625 626 memset(&args, 0, sizeof(args)); 627 628 switch (frev) { 629 case 1: 630 switch (crev) { 631 case 1: 632 case 2: 633 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10); 634 args.v1.ucTransmitterID = radeon_encoder->encoder_id; 635 args.v1.ucEncodeMode = encoder_mode; 636 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage) 637 args.v1.ucConfig |= 638 ADJUST_DISPLAY_CONFIG_SS_ENABLE; 639 640 atom_execute_table(rdev->mode_info.atom_context, 641 index, (uint32_t *)&args); 642 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10; 643 break; 644 case 3: 645 args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10); 646 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id; 647 args.v3.sInput.ucEncodeMode = encoder_mode; 648 args.v3.sInput.ucDispPllConfig = 0; 649 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage) 650 args.v3.sInput.ucDispPllConfig |= 651 DISPPLL_CONFIG_SS_ENABLE; 652 if (ENCODER_MODE_IS_DP(encoder_mode)) { 653 args.v3.sInput.ucDispPllConfig |= 654 DISPPLL_CONFIG_COHERENT_MODE; 655 /* 16200 or 27000 */ 656 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10); 657 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 658 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv; 659 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) 660 /* deep color support */ 661 args.v3.sInput.usPixelClock = 662 cpu_to_le16((mode->clock * bpc / 8) / 10); 663 if (dig->coherent_mode) 664 args.v3.sInput.ucDispPllConfig |= 665 DISPPLL_CONFIG_COHERENT_MODE; 666 if (is_duallink) 667 args.v3.sInput.ucDispPllConfig |= 668 DISPPLL_CONFIG_DUAL_LINK; 669 } 670 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) != 671 ENCODER_OBJECT_ID_NONE) 672 args.v3.sInput.ucExtTransmitterID = 673 radeon_encoder_get_dp_bridge_encoder_id(encoder); 674 else 675 args.v3.sInput.ucExtTransmitterID = 0; 676 677 atom_execute_table(rdev->mode_info.atom_context, 678 index, (uint32_t *)&args); 679 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10; 680 if (args.v3.sOutput.ucRefDiv) { 681 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV; 682 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV; 683 radeon_crtc->pll_reference_div = args.v3.sOutput.ucRefDiv; 684 } 685 if (args.v3.sOutput.ucPostDiv) { 686 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV; 687 radeon_crtc->pll_flags |= RADEON_PLL_USE_POST_DIV; 688 radeon_crtc->pll_post_div = args.v3.sOutput.ucPostDiv; 689 } 690 break; 691 default: 692 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 693 return adjusted_clock; 694 } 695 break; 696 default: 697 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 698 return adjusted_clock; 699 } 700 } 701 return adjusted_clock; 702 } 703 704 union set_pixel_clock { 705 SET_PIXEL_CLOCK_PS_ALLOCATION base; 706 PIXEL_CLOCK_PARAMETERS v1; 707 PIXEL_CLOCK_PARAMETERS_V2 v2; 708 PIXEL_CLOCK_PARAMETERS_V3 v3; 709 PIXEL_CLOCK_PARAMETERS_V5 v5; 710 PIXEL_CLOCK_PARAMETERS_V6 v6; 711 }; 712 713 /* on DCE5, make sure the voltage is high enough to support the 714 * required disp clk. 715 */ 716 static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev, 717 u32 dispclk) 718 { 719 u8 frev, crev; 720 int index; 721 union set_pixel_clock args; 722 723 memset(&args, 0, sizeof(args)); 724 725 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock); 726 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, 727 &crev)) 728 return; 729 730 switch (frev) { 731 case 1: 732 switch (crev) { 733 case 5: 734 /* if the default dcpll clock is specified, 735 * SetPixelClock provides the dividers 736 */ 737 args.v5.ucCRTC = ATOM_CRTC_INVALID; 738 args.v5.usPixelClock = cpu_to_le16(dispclk); 739 args.v5.ucPpll = ATOM_DCPLL; 740 break; 741 case 6: 742 /* if the default dcpll clock is specified, 743 * SetPixelClock provides the dividers 744 */ 745 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk); 746 if (ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev)) 747 args.v6.ucPpll = ATOM_EXT_PLL1; 748 else if (ASIC_IS_DCE6(rdev)) 749 args.v6.ucPpll = ATOM_PPLL0; 750 else 751 args.v6.ucPpll = ATOM_DCPLL; 752 break; 753 default: 754 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 755 return; 756 } 757 break; 758 default: 759 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 760 return; 761 } 762 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 763 } 764 765 static void atombios_crtc_program_pll(struct drm_crtc *crtc, 766 u32 crtc_id, 767 int pll_id, 768 u32 encoder_mode, 769 u32 encoder_id, 770 u32 clock, 771 u32 ref_div, 772 u32 fb_div, 773 u32 frac_fb_div, 774 u32 post_div, 775 int bpc, 776 bool ss_enabled, 777 struct radeon_atom_ss *ss) 778 { 779 struct drm_device *dev = crtc->dev; 780 struct radeon_device *rdev = dev->dev_private; 781 u8 frev, crev; 782 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock); 783 union set_pixel_clock args; 784 785 memset(&args, 0, sizeof(args)); 786 787 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, 788 &crev)) 789 return; 790 791 switch (frev) { 792 case 1: 793 switch (crev) { 794 case 1: 795 if (clock == ATOM_DISABLE) 796 return; 797 args.v1.usPixelClock = cpu_to_le16(clock / 10); 798 args.v1.usRefDiv = cpu_to_le16(ref_div); 799 args.v1.usFbDiv = cpu_to_le16(fb_div); 800 args.v1.ucFracFbDiv = frac_fb_div; 801 args.v1.ucPostDiv = post_div; 802 args.v1.ucPpll = pll_id; 803 args.v1.ucCRTC = crtc_id; 804 args.v1.ucRefDivSrc = 1; 805 break; 806 case 2: 807 args.v2.usPixelClock = cpu_to_le16(clock / 10); 808 args.v2.usRefDiv = cpu_to_le16(ref_div); 809 args.v2.usFbDiv = cpu_to_le16(fb_div); 810 args.v2.ucFracFbDiv = frac_fb_div; 811 args.v2.ucPostDiv = post_div; 812 args.v2.ucPpll = pll_id; 813 args.v2.ucCRTC = crtc_id; 814 args.v2.ucRefDivSrc = 1; 815 break; 816 case 3: 817 args.v3.usPixelClock = cpu_to_le16(clock / 10); 818 args.v3.usRefDiv = cpu_to_le16(ref_div); 819 args.v3.usFbDiv = cpu_to_le16(fb_div); 820 args.v3.ucFracFbDiv = frac_fb_div; 821 args.v3.ucPostDiv = post_div; 822 args.v3.ucPpll = pll_id; 823 if (crtc_id == ATOM_CRTC2) 824 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2; 825 else 826 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1; 827 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK)) 828 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC; 829 args.v3.ucTransmitterId = encoder_id; 830 args.v3.ucEncoderMode = encoder_mode; 831 break; 832 case 5: 833 args.v5.ucCRTC = crtc_id; 834 args.v5.usPixelClock = cpu_to_le16(clock / 10); 835 args.v5.ucRefDiv = ref_div; 836 args.v5.usFbDiv = cpu_to_le16(fb_div); 837 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000); 838 args.v5.ucPostDiv = post_div; 839 args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */ 840 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK)) 841 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC; 842 switch (bpc) { 843 case 8: 844 default: 845 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP; 846 break; 847 case 10: 848 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP; 849 break; 850 } 851 args.v5.ucTransmitterID = encoder_id; 852 args.v5.ucEncoderMode = encoder_mode; 853 args.v5.ucPpll = pll_id; 854 break; 855 case 6: 856 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10); 857 args.v6.ucRefDiv = ref_div; 858 args.v6.usFbDiv = cpu_to_le16(fb_div); 859 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000); 860 args.v6.ucPostDiv = post_div; 861 args.v6.ucMiscInfo = 0; /* HDMI depth, etc. */ 862 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK)) 863 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC; 864 switch (bpc) { 865 case 8: 866 default: 867 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP; 868 break; 869 case 10: 870 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP; 871 break; 872 case 12: 873 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP; 874 break; 875 case 16: 876 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP; 877 break; 878 } 879 args.v6.ucTransmitterID = encoder_id; 880 args.v6.ucEncoderMode = encoder_mode; 881 args.v6.ucPpll = pll_id; 882 break; 883 default: 884 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 885 return; 886 } 887 break; 888 default: 889 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 890 return; 891 } 892 893 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 894 } 895 896 static bool atombios_crtc_prepare_pll(struct drm_crtc *crtc, struct drm_display_mode *mode) 897 { 898 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 899 struct drm_device *dev = crtc->dev; 900 struct radeon_device *rdev = dev->dev_private; 901 struct radeon_encoder *radeon_encoder = 902 to_radeon_encoder(radeon_crtc->encoder); 903 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder); 904 905 radeon_crtc->bpc = 8; 906 radeon_crtc->ss_enabled = false; 907 908 if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) || 909 (radeon_encoder_get_dp_bridge_encoder_id(radeon_crtc->encoder) != ENCODER_OBJECT_ID_NONE)) { 910 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv; 911 struct drm_connector *connector = 912 radeon_get_connector_for_encoder(radeon_crtc->encoder); 913 struct radeon_connector *radeon_connector = 914 to_radeon_connector(connector); 915 struct radeon_connector_atom_dig *dig_connector = 916 radeon_connector->con_priv; 917 int dp_clock; 918 radeon_crtc->bpc = radeon_get_monitor_bpc(connector); 919 920 switch (encoder_mode) { 921 case ATOM_ENCODER_MODE_DP_MST: 922 case ATOM_ENCODER_MODE_DP: 923 /* DP/eDP */ 924 dp_clock = dig_connector->dp_clock / 10; 925 if (ASIC_IS_DCE4(rdev)) 926 radeon_crtc->ss_enabled = 927 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss, 928 ASIC_INTERNAL_SS_ON_DP, 929 dp_clock); 930 else { 931 if (dp_clock == 16200) { 932 radeon_crtc->ss_enabled = 933 radeon_atombios_get_ppll_ss_info(rdev, 934 &radeon_crtc->ss, 935 ATOM_DP_SS_ID2); 936 if (!radeon_crtc->ss_enabled) 937 radeon_crtc->ss_enabled = 938 radeon_atombios_get_ppll_ss_info(rdev, 939 &radeon_crtc->ss, 940 ATOM_DP_SS_ID1); 941 } else 942 radeon_crtc->ss_enabled = 943 radeon_atombios_get_ppll_ss_info(rdev, 944 &radeon_crtc->ss, 945 ATOM_DP_SS_ID1); 946 } 947 break; 948 case ATOM_ENCODER_MODE_LVDS: 949 if (ASIC_IS_DCE4(rdev)) 950 radeon_crtc->ss_enabled = 951 radeon_atombios_get_asic_ss_info(rdev, 952 &radeon_crtc->ss, 953 dig->lcd_ss_id, 954 mode->clock / 10); 955 else 956 radeon_crtc->ss_enabled = 957 radeon_atombios_get_ppll_ss_info(rdev, 958 &radeon_crtc->ss, 959 dig->lcd_ss_id); 960 break; 961 case ATOM_ENCODER_MODE_DVI: 962 if (ASIC_IS_DCE4(rdev)) 963 radeon_crtc->ss_enabled = 964 radeon_atombios_get_asic_ss_info(rdev, 965 &radeon_crtc->ss, 966 ASIC_INTERNAL_SS_ON_TMDS, 967 mode->clock / 10); 968 break; 969 case ATOM_ENCODER_MODE_HDMI: 970 if (ASIC_IS_DCE4(rdev)) 971 radeon_crtc->ss_enabled = 972 radeon_atombios_get_asic_ss_info(rdev, 973 &radeon_crtc->ss, 974 ASIC_INTERNAL_SS_ON_HDMI, 975 mode->clock / 10); 976 break; 977 default: 978 break; 979 } 980 } 981 982 /* adjust pixel clock as needed */ 983 radeon_crtc->adjusted_clock = atombios_adjust_pll(crtc, mode); 984 985 return true; 986 } 987 988 static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode) 989 { 990 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 991 struct drm_device *dev = crtc->dev; 992 struct radeon_device *rdev = dev->dev_private; 993 struct radeon_encoder *radeon_encoder = 994 to_radeon_encoder(radeon_crtc->encoder); 995 u32 pll_clock = mode->clock; 996 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0; 997 struct radeon_pll *pll; 998 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder); 999 1000 switch (radeon_crtc->pll_id) { 1001 case ATOM_PPLL1: 1002 pll = &rdev->clock.p1pll; 1003 break; 1004 case ATOM_PPLL2: 1005 pll = &rdev->clock.p2pll; 1006 break; 1007 case ATOM_DCPLL: 1008 case ATOM_PPLL_INVALID: 1009 default: 1010 pll = &rdev->clock.dcpll; 1011 break; 1012 } 1013 1014 /* update pll params */ 1015 pll->flags = radeon_crtc->pll_flags; 1016 pll->reference_div = radeon_crtc->pll_reference_div; 1017 pll->post_div = radeon_crtc->pll_post_div; 1018 1019 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1020 /* TV seems to prefer the legacy algo on some boards */ 1021 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock, 1022 &fb_div, &frac_fb_div, &ref_div, &post_div); 1023 else if (ASIC_IS_AVIVO(rdev)) 1024 radeon_compute_pll_avivo(pll, radeon_crtc->adjusted_clock, &pll_clock, 1025 &fb_div, &frac_fb_div, &ref_div, &post_div); 1026 else 1027 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock, 1028 &fb_div, &frac_fb_div, &ref_div, &post_div); 1029 1030 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id, 1031 radeon_crtc->crtc_id, &radeon_crtc->ss); 1032 1033 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id, 1034 encoder_mode, radeon_encoder->encoder_id, mode->clock, 1035 ref_div, fb_div, frac_fb_div, post_div, 1036 radeon_crtc->bpc, radeon_crtc->ss_enabled, &radeon_crtc->ss); 1037 1038 if (radeon_crtc->ss_enabled) { 1039 /* calculate ss amount and step size */ 1040 if (ASIC_IS_DCE4(rdev)) { 1041 u32 step_size; 1042 u32 amount = (((fb_div * 10) + frac_fb_div) * radeon_crtc->ss.percentage) / 10000; 1043 radeon_crtc->ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK; 1044 radeon_crtc->ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) & 1045 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK; 1046 if (radeon_crtc->ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD) 1047 step_size = (4 * amount * ref_div * (radeon_crtc->ss.rate * 2048)) / 1048 (125 * 25 * pll->reference_freq / 100); 1049 else 1050 step_size = (2 * amount * ref_div * (radeon_crtc->ss.rate * 2048)) / 1051 (125 * 25 * pll->reference_freq / 100); 1052 radeon_crtc->ss.step = step_size; 1053 } 1054 1055 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id, 1056 radeon_crtc->crtc_id, &radeon_crtc->ss); 1057 } 1058 } 1059 1060 static int dce4_crtc_do_set_base(struct drm_crtc *crtc, 1061 struct drm_framebuffer *fb, 1062 int x, int y, int atomic) 1063 { 1064 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1065 struct drm_device *dev = crtc->dev; 1066 struct radeon_device *rdev = dev->dev_private; 1067 struct radeon_framebuffer *radeon_fb; 1068 struct drm_framebuffer *target_fb; 1069 struct drm_gem_object *obj; 1070 struct radeon_bo *rbo; 1071 uint64_t fb_location; 1072 uint32_t fb_format, fb_pitch_pixels, tiling_flags; 1073 unsigned bankw, bankh, mtaspect, tile_split; 1074 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE); 1075 u32 tmp, viewport_w, viewport_h; 1076 int r; 1077 1078 /* no fb bound */ 1079 if (!atomic && !crtc->fb) { 1080 DRM_DEBUG_KMS("No FB bound\n"); 1081 return 0; 1082 } 1083 1084 if (atomic) { 1085 radeon_fb = to_radeon_framebuffer(fb); 1086 target_fb = fb; 1087 } 1088 else { 1089 radeon_fb = to_radeon_framebuffer(crtc->fb); 1090 target_fb = crtc->fb; 1091 } 1092 1093 /* If atomic, assume fb object is pinned & idle & fenced and 1094 * just update base pointers 1095 */ 1096 obj = radeon_fb->obj; 1097 rbo = gem_to_radeon_bo(obj); 1098 r = radeon_bo_reserve(rbo, false); 1099 if (unlikely(r != 0)) 1100 return r; 1101 1102 if (atomic) 1103 fb_location = radeon_bo_gpu_offset(rbo); 1104 else { 1105 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location); 1106 if (unlikely(r != 0)) { 1107 radeon_bo_unreserve(rbo); 1108 return -EINVAL; 1109 } 1110 } 1111 1112 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL); 1113 radeon_bo_unreserve(rbo); 1114 1115 switch (target_fb->bits_per_pixel) { 1116 case 8: 1117 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) | 1118 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED)); 1119 break; 1120 case 15: 1121 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) | 1122 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555)); 1123 break; 1124 case 16: 1125 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) | 1126 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565)); 1127 #ifdef __BIG_ENDIAN 1128 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16); 1129 #endif 1130 break; 1131 case 24: 1132 case 32: 1133 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) | 1134 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888)); 1135 #ifdef __BIG_ENDIAN 1136 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32); 1137 #endif 1138 break; 1139 default: 1140 DRM_ERROR("Unsupported screen depth %d\n", 1141 target_fb->bits_per_pixel); 1142 return -EINVAL; 1143 } 1144 1145 if (tiling_flags & RADEON_TILING_MACRO) { 1146 if (rdev->family >= CHIP_BONAIRE) 1147 tmp = rdev->config.cik.tile_config; 1148 else if (rdev->family >= CHIP_TAHITI) 1149 tmp = rdev->config.si.tile_config; 1150 else if (rdev->family >= CHIP_CAYMAN) 1151 tmp = rdev->config.cayman.tile_config; 1152 else 1153 tmp = rdev->config.evergreen.tile_config; 1154 1155 switch ((tmp & 0xf0) >> 4) { 1156 case 0: /* 4 banks */ 1157 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK); 1158 break; 1159 case 1: /* 8 banks */ 1160 default: 1161 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK); 1162 break; 1163 case 2: /* 16 banks */ 1164 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK); 1165 break; 1166 } 1167 1168 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1); 1169 1170 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split); 1171 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split); 1172 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw); 1173 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh); 1174 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect); 1175 if (rdev->family >= CHIP_BONAIRE) { 1176 /* XXX need to know more about the surface tiling mode */ 1177 fb_format |= CIK_GRPH_MICRO_TILE_MODE(CIK_DISPLAY_MICRO_TILING); 1178 } 1179 } else if (tiling_flags & RADEON_TILING_MICRO) 1180 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1); 1181 1182 if (rdev->family >= CHIP_BONAIRE) { 1183 u32 num_pipe_configs = rdev->config.cik.max_tile_pipes; 1184 u32 num_rb = rdev->config.cik.max_backends_per_se; 1185 if (num_pipe_configs > 8) 1186 num_pipe_configs = 8; 1187 if (num_pipe_configs == 8) 1188 fb_format |= CIK_GRPH_PIPE_CONFIG(CIK_ADDR_SURF_P8_32x32_16x16); 1189 else if (num_pipe_configs == 4) { 1190 if (num_rb == 4) 1191 fb_format |= CIK_GRPH_PIPE_CONFIG(CIK_ADDR_SURF_P4_16x16); 1192 else if (num_rb < 4) 1193 fb_format |= CIK_GRPH_PIPE_CONFIG(CIK_ADDR_SURF_P4_8x16); 1194 } else if (num_pipe_configs == 2) 1195 fb_format |= CIK_GRPH_PIPE_CONFIG(CIK_ADDR_SURF_P2); 1196 } else if ((rdev->family == CHIP_TAHITI) || 1197 (rdev->family == CHIP_PITCAIRN)) 1198 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16); 1199 else if (rdev->family == CHIP_VERDE) 1200 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16); 1201 1202 switch (radeon_crtc->crtc_id) { 1203 case 0: 1204 WREG32(AVIVO_D1VGA_CONTROL, 0); 1205 break; 1206 case 1: 1207 WREG32(AVIVO_D2VGA_CONTROL, 0); 1208 break; 1209 case 2: 1210 WREG32(EVERGREEN_D3VGA_CONTROL, 0); 1211 break; 1212 case 3: 1213 WREG32(EVERGREEN_D4VGA_CONTROL, 0); 1214 break; 1215 case 4: 1216 WREG32(EVERGREEN_D5VGA_CONTROL, 0); 1217 break; 1218 case 5: 1219 WREG32(EVERGREEN_D6VGA_CONTROL, 0); 1220 break; 1221 default: 1222 break; 1223 } 1224 1225 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset, 1226 upper_32_bits(fb_location)); 1227 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset, 1228 upper_32_bits(fb_location)); 1229 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset, 1230 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK); 1231 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset, 1232 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK); 1233 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format); 1234 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap); 1235 1236 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0); 1237 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0); 1238 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0); 1239 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0); 1240 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width); 1241 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height); 1242 1243 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8); 1244 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels); 1245 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1); 1246 1247 if (rdev->family >= CHIP_BONAIRE) 1248 WREG32(CIK_LB_DESKTOP_HEIGHT + radeon_crtc->crtc_offset, 1249 target_fb->height); 1250 else 1251 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset, 1252 target_fb->height); 1253 x &= ~3; 1254 y &= ~1; 1255 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset, 1256 (x << 16) | y); 1257 viewport_w = crtc->mode.hdisplay; 1258 viewport_h = (crtc->mode.vdisplay + 1) & ~1; 1259 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset, 1260 (viewport_w << 16) | viewport_h); 1261 1262 /* pageflip setup */ 1263 /* make sure flip is at vb rather than hb */ 1264 tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset); 1265 tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN; 1266 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp); 1267 1268 /* set pageflip to happen anywhere in vblank interval */ 1269 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0); 1270 1271 if (!atomic && fb && fb != crtc->fb) { 1272 radeon_fb = to_radeon_framebuffer(fb); 1273 rbo = gem_to_radeon_bo(radeon_fb->obj); 1274 r = radeon_bo_reserve(rbo, false); 1275 if (unlikely(r != 0)) 1276 return r; 1277 radeon_bo_unpin(rbo); 1278 radeon_bo_unreserve(rbo); 1279 } 1280 1281 /* Bytes per pixel may have changed */ 1282 radeon_bandwidth_update(rdev); 1283 1284 return 0; 1285 } 1286 1287 static int avivo_crtc_do_set_base(struct drm_crtc *crtc, 1288 struct drm_framebuffer *fb, 1289 int x, int y, int atomic) 1290 { 1291 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1292 struct drm_device *dev = crtc->dev; 1293 struct radeon_device *rdev = dev->dev_private; 1294 struct radeon_framebuffer *radeon_fb; 1295 struct drm_gem_object *obj; 1296 struct radeon_bo *rbo; 1297 struct drm_framebuffer *target_fb; 1298 uint64_t fb_location; 1299 uint32_t fb_format, fb_pitch_pixels, tiling_flags; 1300 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE; 1301 u32 tmp, viewport_w, viewport_h; 1302 int r; 1303 1304 /* no fb bound */ 1305 if (!atomic && !crtc->fb) { 1306 DRM_DEBUG_KMS("No FB bound\n"); 1307 return 0; 1308 } 1309 1310 if (atomic) { 1311 radeon_fb = to_radeon_framebuffer(fb); 1312 target_fb = fb; 1313 } 1314 else { 1315 radeon_fb = to_radeon_framebuffer(crtc->fb); 1316 target_fb = crtc->fb; 1317 } 1318 1319 obj = radeon_fb->obj; 1320 rbo = gem_to_radeon_bo(obj); 1321 r = radeon_bo_reserve(rbo, false); 1322 if (unlikely(r != 0)) 1323 return r; 1324 1325 /* If atomic, assume fb object is pinned & idle & fenced and 1326 * just update base pointers 1327 */ 1328 if (atomic) 1329 fb_location = radeon_bo_gpu_offset(rbo); 1330 else { 1331 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location); 1332 if (unlikely(r != 0)) { 1333 radeon_bo_unreserve(rbo); 1334 return -EINVAL; 1335 } 1336 } 1337 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL); 1338 radeon_bo_unreserve(rbo); 1339 1340 switch (target_fb->bits_per_pixel) { 1341 case 8: 1342 fb_format = 1343 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP | 1344 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED; 1345 break; 1346 case 15: 1347 fb_format = 1348 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP | 1349 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555; 1350 break; 1351 case 16: 1352 fb_format = 1353 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP | 1354 AVIVO_D1GRPH_CONTROL_16BPP_RGB565; 1355 #ifdef __BIG_ENDIAN 1356 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT; 1357 #endif 1358 break; 1359 case 24: 1360 case 32: 1361 fb_format = 1362 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP | 1363 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888; 1364 #ifdef __BIG_ENDIAN 1365 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT; 1366 #endif 1367 break; 1368 default: 1369 DRM_ERROR("Unsupported screen depth %d\n", 1370 target_fb->bits_per_pixel); 1371 return -EINVAL; 1372 } 1373 1374 if (rdev->family >= CHIP_R600) { 1375 if (tiling_flags & RADEON_TILING_MACRO) 1376 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1; 1377 else if (tiling_flags & RADEON_TILING_MICRO) 1378 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1; 1379 } else { 1380 if (tiling_flags & RADEON_TILING_MACRO) 1381 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE; 1382 1383 if (tiling_flags & RADEON_TILING_MICRO) 1384 fb_format |= AVIVO_D1GRPH_TILED; 1385 } 1386 1387 if (radeon_crtc->crtc_id == 0) 1388 WREG32(AVIVO_D1VGA_CONTROL, 0); 1389 else 1390 WREG32(AVIVO_D2VGA_CONTROL, 0); 1391 1392 if (rdev->family >= CHIP_RV770) { 1393 if (radeon_crtc->crtc_id) { 1394 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location)); 1395 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location)); 1396 } else { 1397 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location)); 1398 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location)); 1399 } 1400 } 1401 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset, 1402 (u32) fb_location); 1403 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS + 1404 radeon_crtc->crtc_offset, (u32) fb_location); 1405 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format); 1406 if (rdev->family >= CHIP_R600) 1407 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap); 1408 1409 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0); 1410 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0); 1411 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0); 1412 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0); 1413 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width); 1414 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height); 1415 1416 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8); 1417 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels); 1418 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1); 1419 1420 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset, 1421 target_fb->height); 1422 x &= ~3; 1423 y &= ~1; 1424 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset, 1425 (x << 16) | y); 1426 viewport_w = crtc->mode.hdisplay; 1427 viewport_h = (crtc->mode.vdisplay + 1) & ~1; 1428 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset, 1429 (viewport_w << 16) | viewport_h); 1430 1431 /* pageflip setup */ 1432 /* make sure flip is at vb rather than hb */ 1433 tmp = RREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset); 1434 tmp &= ~AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN; 1435 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp); 1436 1437 /* set pageflip to happen anywhere in vblank interval */ 1438 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0); 1439 1440 if (!atomic && fb && fb != crtc->fb) { 1441 radeon_fb = to_radeon_framebuffer(fb); 1442 rbo = gem_to_radeon_bo(radeon_fb->obj); 1443 r = radeon_bo_reserve(rbo, false); 1444 if (unlikely(r != 0)) 1445 return r; 1446 radeon_bo_unpin(rbo); 1447 radeon_bo_unreserve(rbo); 1448 } 1449 1450 /* Bytes per pixel may have changed */ 1451 radeon_bandwidth_update(rdev); 1452 1453 return 0; 1454 } 1455 1456 int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y, 1457 struct drm_framebuffer *old_fb) 1458 { 1459 struct drm_device *dev = crtc->dev; 1460 struct radeon_device *rdev = dev->dev_private; 1461 1462 if (ASIC_IS_DCE4(rdev)) 1463 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0); 1464 else if (ASIC_IS_AVIVO(rdev)) 1465 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0); 1466 else 1467 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0); 1468 } 1469 1470 int atombios_crtc_set_base_atomic(struct drm_crtc *crtc, 1471 struct drm_framebuffer *fb, 1472 int x, int y, enum mode_set_atomic state) 1473 { 1474 struct drm_device *dev = crtc->dev; 1475 struct radeon_device *rdev = dev->dev_private; 1476 1477 if (ASIC_IS_DCE4(rdev)) 1478 return dce4_crtc_do_set_base(crtc, fb, x, y, 1); 1479 else if (ASIC_IS_AVIVO(rdev)) 1480 return avivo_crtc_do_set_base(crtc, fb, x, y, 1); 1481 else 1482 return radeon_crtc_do_set_base(crtc, fb, x, y, 1); 1483 } 1484 1485 /* properly set additional regs when using atombios */ 1486 static void radeon_legacy_atom_fixup(struct drm_crtc *crtc) 1487 { 1488 struct drm_device *dev = crtc->dev; 1489 struct radeon_device *rdev = dev->dev_private; 1490 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1491 u32 disp_merge_cntl; 1492 1493 switch (radeon_crtc->crtc_id) { 1494 case 0: 1495 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL); 1496 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN; 1497 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl); 1498 break; 1499 case 1: 1500 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL); 1501 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN; 1502 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl); 1503 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID)); 1504 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID)); 1505 break; 1506 } 1507 } 1508 1509 /** 1510 * radeon_get_pll_use_mask - look up a mask of which pplls are in use 1511 * 1512 * @crtc: drm crtc 1513 * 1514 * Returns the mask of which PPLLs (Pixel PLLs) are in use. 1515 */ 1516 static u32 radeon_get_pll_use_mask(struct drm_crtc *crtc) 1517 { 1518 struct drm_device *dev = crtc->dev; 1519 struct drm_crtc *test_crtc; 1520 struct radeon_crtc *test_radeon_crtc; 1521 u32 pll_in_use = 0; 1522 1523 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) { 1524 if (crtc == test_crtc) 1525 continue; 1526 1527 test_radeon_crtc = to_radeon_crtc(test_crtc); 1528 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID) 1529 pll_in_use |= (1 << test_radeon_crtc->pll_id); 1530 } 1531 return pll_in_use; 1532 } 1533 1534 /** 1535 * radeon_get_shared_dp_ppll - return the PPLL used by another crtc for DP 1536 * 1537 * @crtc: drm crtc 1538 * 1539 * Returns the PPLL (Pixel PLL) used by another crtc/encoder which is 1540 * also in DP mode. For DP, a single PPLL can be used for all DP 1541 * crtcs/encoders. 1542 */ 1543 static int radeon_get_shared_dp_ppll(struct drm_crtc *crtc) 1544 { 1545 struct drm_device *dev = crtc->dev; 1546 struct drm_crtc *test_crtc; 1547 struct radeon_crtc *test_radeon_crtc; 1548 1549 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) { 1550 if (crtc == test_crtc) 1551 continue; 1552 test_radeon_crtc = to_radeon_crtc(test_crtc); 1553 if (test_radeon_crtc->encoder && 1554 ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) { 1555 /* for DP use the same PLL for all */ 1556 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID) 1557 return test_radeon_crtc->pll_id; 1558 } 1559 } 1560 return ATOM_PPLL_INVALID; 1561 } 1562 1563 /** 1564 * radeon_get_shared_nondp_ppll - return the PPLL used by another non-DP crtc 1565 * 1566 * @crtc: drm crtc 1567 * @encoder: drm encoder 1568 * 1569 * Returns the PPLL (Pixel PLL) used by another non-DP crtc/encoder which can 1570 * be shared (i.e., same clock). 1571 */ 1572 static int radeon_get_shared_nondp_ppll(struct drm_crtc *crtc) 1573 { 1574 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1575 struct drm_device *dev = crtc->dev; 1576 struct drm_crtc *test_crtc; 1577 struct radeon_crtc *test_radeon_crtc; 1578 u32 adjusted_clock, test_adjusted_clock; 1579 1580 adjusted_clock = radeon_crtc->adjusted_clock; 1581 1582 if (adjusted_clock == 0) 1583 return ATOM_PPLL_INVALID; 1584 1585 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) { 1586 if (crtc == test_crtc) 1587 continue; 1588 test_radeon_crtc = to_radeon_crtc(test_crtc); 1589 if (test_radeon_crtc->encoder && 1590 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) { 1591 /* check if we are already driving this connector with another crtc */ 1592 if (test_radeon_crtc->connector == radeon_crtc->connector) { 1593 /* if we are, return that pll */ 1594 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID) 1595 return test_radeon_crtc->pll_id; 1596 } 1597 /* for non-DP check the clock */ 1598 test_adjusted_clock = test_radeon_crtc->adjusted_clock; 1599 if ((crtc->mode.clock == test_crtc->mode.clock) && 1600 (adjusted_clock == test_adjusted_clock) && 1601 (radeon_crtc->ss_enabled == test_radeon_crtc->ss_enabled) && 1602 (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)) 1603 return test_radeon_crtc->pll_id; 1604 } 1605 } 1606 return ATOM_PPLL_INVALID; 1607 } 1608 1609 /** 1610 * radeon_atom_pick_pll - Allocate a PPLL for use by the crtc. 1611 * 1612 * @crtc: drm crtc 1613 * 1614 * Returns the PPLL (Pixel PLL) to be used by the crtc. For DP monitors 1615 * a single PPLL can be used for all DP crtcs/encoders. For non-DP 1616 * monitors a dedicated PPLL must be used. If a particular board has 1617 * an external DP PLL, return ATOM_PPLL_INVALID to skip PLL programming 1618 * as there is no need to program the PLL itself. If we are not able to 1619 * allocate a PLL, return ATOM_PPLL_INVALID to skip PLL programming to 1620 * avoid messing up an existing monitor. 1621 * 1622 * Asic specific PLL information 1623 * 1624 * DCE 8.x 1625 * KB/KV 1626 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) 1627 * CI 1628 * - PPLL0, PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC 1629 * 1630 * DCE 6.1 1631 * - PPLL2 is only available to UNIPHYA (both DP and non-DP) 1632 * - PPLL0, PPLL1 are available for UNIPHYB/C/D/E/F (both DP and non-DP) 1633 * 1634 * DCE 6.0 1635 * - PPLL0 is available to all UNIPHY (DP only) 1636 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC 1637 * 1638 * DCE 5.0 1639 * - DCPLL is available to all UNIPHY (DP only) 1640 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC 1641 * 1642 * DCE 3.0/4.0/4.1 1643 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC 1644 * 1645 */ 1646 static int radeon_atom_pick_pll(struct drm_crtc *crtc) 1647 { 1648 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1649 struct drm_device *dev = crtc->dev; 1650 struct radeon_device *rdev = dev->dev_private; 1651 struct radeon_encoder *radeon_encoder = 1652 to_radeon_encoder(radeon_crtc->encoder); 1653 u32 pll_in_use; 1654 int pll; 1655 1656 if (ASIC_IS_DCE8(rdev)) { 1657 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) { 1658 if (rdev->clock.dp_extclk) 1659 /* skip PPLL programming if using ext clock */ 1660 return ATOM_PPLL_INVALID; 1661 else { 1662 /* use the same PPLL for all DP monitors */ 1663 pll = radeon_get_shared_dp_ppll(crtc); 1664 if (pll != ATOM_PPLL_INVALID) 1665 return pll; 1666 } 1667 } else { 1668 /* use the same PPLL for all monitors with the same clock */ 1669 pll = radeon_get_shared_nondp_ppll(crtc); 1670 if (pll != ATOM_PPLL_INVALID) 1671 return pll; 1672 } 1673 /* otherwise, pick one of the plls */ 1674 if ((rdev->family == CHIP_KAVERI) || 1675 (rdev->family == CHIP_KABINI)) { 1676 /* KB/KV has PPLL1 and PPLL2 */ 1677 pll_in_use = radeon_get_pll_use_mask(crtc); 1678 if (!(pll_in_use & (1 << ATOM_PPLL2))) 1679 return ATOM_PPLL2; 1680 if (!(pll_in_use & (1 << ATOM_PPLL1))) 1681 return ATOM_PPLL1; 1682 DRM_ERROR("unable to allocate a PPLL\n"); 1683 return ATOM_PPLL_INVALID; 1684 } else { 1685 /* CI has PPLL0, PPLL1, and PPLL2 */ 1686 pll_in_use = radeon_get_pll_use_mask(crtc); 1687 if (!(pll_in_use & (1 << ATOM_PPLL2))) 1688 return ATOM_PPLL2; 1689 if (!(pll_in_use & (1 << ATOM_PPLL1))) 1690 return ATOM_PPLL1; 1691 if (!(pll_in_use & (1 << ATOM_PPLL0))) 1692 return ATOM_PPLL0; 1693 DRM_ERROR("unable to allocate a PPLL\n"); 1694 return ATOM_PPLL_INVALID; 1695 } 1696 } else if (ASIC_IS_DCE61(rdev)) { 1697 struct radeon_encoder_atom_dig *dig = 1698 radeon_encoder->enc_priv; 1699 1700 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY) && 1701 (dig->linkb == false)) 1702 /* UNIPHY A uses PPLL2 */ 1703 return ATOM_PPLL2; 1704 else if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) { 1705 /* UNIPHY B/C/D/E/F */ 1706 if (rdev->clock.dp_extclk) 1707 /* skip PPLL programming if using ext clock */ 1708 return ATOM_PPLL_INVALID; 1709 else { 1710 /* use the same PPLL for all DP monitors */ 1711 pll = radeon_get_shared_dp_ppll(crtc); 1712 if (pll != ATOM_PPLL_INVALID) 1713 return pll; 1714 } 1715 } else { 1716 /* use the same PPLL for all monitors with the same clock */ 1717 pll = radeon_get_shared_nondp_ppll(crtc); 1718 if (pll != ATOM_PPLL_INVALID) 1719 return pll; 1720 } 1721 /* UNIPHY B/C/D/E/F */ 1722 pll_in_use = radeon_get_pll_use_mask(crtc); 1723 if (!(pll_in_use & (1 << ATOM_PPLL0))) 1724 return ATOM_PPLL0; 1725 if (!(pll_in_use & (1 << ATOM_PPLL1))) 1726 return ATOM_PPLL1; 1727 DRM_ERROR("unable to allocate a PPLL\n"); 1728 return ATOM_PPLL_INVALID; 1729 } else if (ASIC_IS_DCE4(rdev)) { 1730 /* in DP mode, the DP ref clock can come from PPLL, DCPLL, or ext clock, 1731 * depending on the asic: 1732 * DCE4: PPLL or ext clock 1733 * DCE5: PPLL, DCPLL, or ext clock 1734 * DCE6: PPLL, PPLL0, or ext clock 1735 * 1736 * Setting ATOM_PPLL_INVALID will cause SetPixelClock to skip 1737 * PPLL/DCPLL programming and only program the DP DTO for the 1738 * crtc virtual pixel clock. 1739 */ 1740 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) { 1741 if (rdev->clock.dp_extclk) 1742 /* skip PPLL programming if using ext clock */ 1743 return ATOM_PPLL_INVALID; 1744 else if (ASIC_IS_DCE6(rdev)) 1745 /* use PPLL0 for all DP */ 1746 return ATOM_PPLL0; 1747 else if (ASIC_IS_DCE5(rdev)) 1748 /* use DCPLL for all DP */ 1749 return ATOM_DCPLL; 1750 else { 1751 /* use the same PPLL for all DP monitors */ 1752 pll = radeon_get_shared_dp_ppll(crtc); 1753 if (pll != ATOM_PPLL_INVALID) 1754 return pll; 1755 } 1756 } else if (!ASIC_IS_DCE41(rdev)) { /* Don't share PLLs on DCE4.1 chips */ 1757 /* use the same PPLL for all monitors with the same clock */ 1758 pll = radeon_get_shared_nondp_ppll(crtc); 1759 if (pll != ATOM_PPLL_INVALID) 1760 return pll; 1761 } 1762 /* all other cases */ 1763 pll_in_use = radeon_get_pll_use_mask(crtc); 1764 if (!(pll_in_use & (1 << ATOM_PPLL1))) 1765 return ATOM_PPLL1; 1766 if (!(pll_in_use & (1 << ATOM_PPLL2))) 1767 return ATOM_PPLL2; 1768 DRM_ERROR("unable to allocate a PPLL\n"); 1769 return ATOM_PPLL_INVALID; 1770 } else { 1771 /* on pre-R5xx asics, the crtc to pll mapping is hardcoded */ 1772 /* some atombios (observed in some DCE2/DCE3) code have a bug, 1773 * the matching btw pll and crtc is done through 1774 * PCLK_CRTC[1|2]_CNTL (0x480/0x484) but atombios code use the 1775 * pll (1 or 2) to select which register to write. ie if using 1776 * pll1 it will use PCLK_CRTC1_CNTL (0x480) and if using pll2 1777 * it will use PCLK_CRTC2_CNTL (0x484), it then use crtc id to 1778 * choose which value to write. Which is reverse order from 1779 * register logic. So only case that works is when pllid is 1780 * same as crtcid or when both pll and crtc are enabled and 1781 * both use same clock. 1782 * 1783 * So just return crtc id as if crtc and pll were hard linked 1784 * together even if they aren't 1785 */ 1786 return radeon_crtc->crtc_id; 1787 } 1788 } 1789 1790 void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev) 1791 { 1792 /* always set DCPLL */ 1793 if (ASIC_IS_DCE6(rdev)) 1794 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk); 1795 else if (ASIC_IS_DCE4(rdev)) { 1796 struct radeon_atom_ss ss; 1797 bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss, 1798 ASIC_INTERNAL_SS_ON_DCPLL, 1799 rdev->clock.default_dispclk); 1800 if (ss_enabled) 1801 atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss); 1802 /* XXX: DCE5, make sure voltage, dispclk is high enough */ 1803 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk); 1804 if (ss_enabled) 1805 atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss); 1806 } 1807 1808 } 1809 1810 int atombios_crtc_mode_set(struct drm_crtc *crtc, 1811 struct drm_display_mode *mode, 1812 struct drm_display_mode *adjusted_mode, 1813 int x, int y, struct drm_framebuffer *old_fb) 1814 { 1815 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1816 struct drm_device *dev = crtc->dev; 1817 struct radeon_device *rdev = dev->dev_private; 1818 struct radeon_encoder *radeon_encoder = 1819 to_radeon_encoder(radeon_crtc->encoder); 1820 bool is_tvcv = false; 1821 1822 if (radeon_encoder->active_device & 1823 (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT)) 1824 is_tvcv = true; 1825 1826 atombios_crtc_set_pll(crtc, adjusted_mode); 1827 1828 if (ASIC_IS_DCE4(rdev)) 1829 atombios_set_crtc_dtd_timing(crtc, adjusted_mode); 1830 else if (ASIC_IS_AVIVO(rdev)) { 1831 if (is_tvcv) 1832 atombios_crtc_set_timing(crtc, adjusted_mode); 1833 else 1834 atombios_set_crtc_dtd_timing(crtc, adjusted_mode); 1835 } else { 1836 atombios_crtc_set_timing(crtc, adjusted_mode); 1837 if (radeon_crtc->crtc_id == 0) 1838 atombios_set_crtc_dtd_timing(crtc, adjusted_mode); 1839 radeon_legacy_atom_fixup(crtc); 1840 } 1841 atombios_crtc_set_base(crtc, x, y, old_fb); 1842 atombios_overscan_setup(crtc, mode, adjusted_mode); 1843 atombios_scaler_setup(crtc); 1844 /* update the hw version fpr dpm */ 1845 radeon_crtc->hw_mode = *adjusted_mode; 1846 1847 return 0; 1848 } 1849 1850 static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc, 1851 const struct drm_display_mode *mode, 1852 struct drm_display_mode *adjusted_mode) 1853 { 1854 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1855 struct drm_device *dev = crtc->dev; 1856 struct drm_encoder *encoder; 1857 1858 /* assign the encoder to the radeon crtc to avoid repeated lookups later */ 1859 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1860 if (encoder->crtc == crtc) { 1861 radeon_crtc->encoder = encoder; 1862 radeon_crtc->connector = radeon_get_connector_for_encoder(encoder); 1863 break; 1864 } 1865 } 1866 if ((radeon_crtc->encoder == NULL) || (radeon_crtc->connector == NULL)) { 1867 radeon_crtc->encoder = NULL; 1868 radeon_crtc->connector = NULL; 1869 return false; 1870 } 1871 if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode)) 1872 return false; 1873 if (!atombios_crtc_prepare_pll(crtc, adjusted_mode)) 1874 return false; 1875 /* pick pll */ 1876 radeon_crtc->pll_id = radeon_atom_pick_pll(crtc); 1877 /* if we can't get a PPLL for a non-DP encoder, fail */ 1878 if ((radeon_crtc->pll_id == ATOM_PPLL_INVALID) && 1879 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) 1880 return false; 1881 1882 return true; 1883 } 1884 1885 static void atombios_crtc_prepare(struct drm_crtc *crtc) 1886 { 1887 struct drm_device *dev = crtc->dev; 1888 struct radeon_device *rdev = dev->dev_private; 1889 1890 /* disable crtc pair power gating before programming */ 1891 if (ASIC_IS_DCE6(rdev)) 1892 atombios_powergate_crtc(crtc, ATOM_DISABLE); 1893 1894 atombios_lock_crtc(crtc, ATOM_ENABLE); 1895 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF); 1896 } 1897 1898 static void atombios_crtc_commit(struct drm_crtc *crtc) 1899 { 1900 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON); 1901 atombios_lock_crtc(crtc, ATOM_DISABLE); 1902 } 1903 1904 static void atombios_crtc_disable(struct drm_crtc *crtc) 1905 { 1906 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 1907 struct drm_device *dev = crtc->dev; 1908 struct radeon_device *rdev = dev->dev_private; 1909 struct radeon_atom_ss ss; 1910 int i; 1911 1912 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF); 1913 if (crtc->fb) { 1914 int r; 1915 struct radeon_framebuffer *radeon_fb; 1916 struct radeon_bo *rbo; 1917 1918 radeon_fb = to_radeon_framebuffer(crtc->fb); 1919 rbo = gem_to_radeon_bo(radeon_fb->obj); 1920 r = radeon_bo_reserve(rbo, false); 1921 if (unlikely(r)) 1922 DRM_ERROR("failed to reserve rbo before unpin\n"); 1923 else { 1924 radeon_bo_unpin(rbo); 1925 radeon_bo_unreserve(rbo); 1926 } 1927 } 1928 /* disable the GRPH */ 1929 if (ASIC_IS_DCE4(rdev)) 1930 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 0); 1931 else if (ASIC_IS_AVIVO(rdev)) 1932 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 0); 1933 1934 if (ASIC_IS_DCE6(rdev)) 1935 atombios_powergate_crtc(crtc, ATOM_ENABLE); 1936 1937 for (i = 0; i < rdev->num_crtc; i++) { 1938 if (rdev->mode_info.crtcs[i] && 1939 rdev->mode_info.crtcs[i]->enabled && 1940 i != radeon_crtc->crtc_id && 1941 radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) { 1942 /* one other crtc is using this pll don't turn 1943 * off the pll 1944 */ 1945 goto done; 1946 } 1947 } 1948 1949 switch (radeon_crtc->pll_id) { 1950 case ATOM_PPLL1: 1951 case ATOM_PPLL2: 1952 /* disable the ppll */ 1953 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id, 1954 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss); 1955 break; 1956 case ATOM_PPLL0: 1957 /* disable the ppll */ 1958 if ((rdev->family == CHIP_ARUBA) || 1959 (rdev->family == CHIP_BONAIRE) || 1960 (rdev->family == CHIP_HAWAII)) 1961 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id, 1962 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss); 1963 break; 1964 default: 1965 break; 1966 } 1967 done: 1968 radeon_crtc->pll_id = ATOM_PPLL_INVALID; 1969 radeon_crtc->adjusted_clock = 0; 1970 radeon_crtc->encoder = NULL; 1971 radeon_crtc->connector = NULL; 1972 } 1973 1974 static const struct drm_crtc_helper_funcs atombios_helper_funcs = { 1975 .dpms = atombios_crtc_dpms, 1976 .mode_fixup = atombios_crtc_mode_fixup, 1977 .mode_set = atombios_crtc_mode_set, 1978 .mode_set_base = atombios_crtc_set_base, 1979 .mode_set_base_atomic = atombios_crtc_set_base_atomic, 1980 .prepare = atombios_crtc_prepare, 1981 .commit = atombios_crtc_commit, 1982 .load_lut = radeon_crtc_load_lut, 1983 .disable = atombios_crtc_disable, 1984 }; 1985 1986 void radeon_atombios_init_crtc(struct drm_device *dev, 1987 struct radeon_crtc *radeon_crtc) 1988 { 1989 struct radeon_device *rdev = dev->dev_private; 1990 1991 if (ASIC_IS_DCE4(rdev)) { 1992 switch (radeon_crtc->crtc_id) { 1993 case 0: 1994 default: 1995 radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET; 1996 break; 1997 case 1: 1998 radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET; 1999 break; 2000 case 2: 2001 radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET; 2002 break; 2003 case 3: 2004 radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET; 2005 break; 2006 case 4: 2007 radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET; 2008 break; 2009 case 5: 2010 radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET; 2011 break; 2012 } 2013 } else { 2014 if (radeon_crtc->crtc_id == 1) 2015 radeon_crtc->crtc_offset = 2016 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL; 2017 else 2018 radeon_crtc->crtc_offset = 0; 2019 } 2020 radeon_crtc->pll_id = ATOM_PPLL_INVALID; 2021 radeon_crtc->adjusted_clock = 0; 2022 radeon_crtc->encoder = NULL; 2023 radeon_crtc->connector = NULL; 2024 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs); 2025 } 2026