1 /* 2 * Copyright 2007-8 Advanced Micro Devices, Inc. 3 * Copyright 2008 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice shall be included in 13 * all copies or substantial portions of the Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 21 * OTHER DEALINGS IN THE SOFTWARE. 
22 * 23 * Authors: Dave Airlie 24 * Alex Deucher 25 */ 26 #include <drm/drmP.h> 27 #include <drm/drm_crtc_helper.h> 28 #include <drm/radeon_drm.h> 29 #include "radeon_fixed.h" 30 #include "radeon.h" 31 #include "atom.h" 32 #include "atom-bits.h" 33 34 static void atombios_overscan_setup(struct drm_crtc *crtc, 35 struct drm_display_mode *mode, 36 struct drm_display_mode *adjusted_mode) 37 { 38 struct drm_device *dev = crtc->dev; 39 struct radeon_device *rdev = dev->dev_private; 40 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 41 SET_CRTC_OVERSCAN_PS_ALLOCATION args; 42 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan); 43 int a1, a2; 44 45 memset(&args, 0, sizeof(args)); 46 47 args.usOverscanRight = 0; 48 args.usOverscanLeft = 0; 49 args.usOverscanBottom = 0; 50 args.usOverscanTop = 0; 51 args.ucCRTC = radeon_crtc->crtc_id; 52 53 switch (radeon_crtc->rmx_type) { 54 case RMX_CENTER: 55 args.usOverscanTop = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2; 56 args.usOverscanBottom = (adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2; 57 args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2; 58 args.usOverscanRight = (adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2; 59 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 60 break; 61 case RMX_ASPECT: 62 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay; 63 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay; 64 65 if (a1 > a2) { 66 args.usOverscanLeft = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2; 67 args.usOverscanRight = (adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2; 68 } else if (a2 > a1) { 69 args.usOverscanLeft = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2; 70 args.usOverscanRight = (adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2; 71 } 72 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 73 break; 74 case RMX_FULL: 
75 default: 76 args.usOverscanRight = 0; 77 args.usOverscanLeft = 0; 78 args.usOverscanBottom = 0; 79 args.usOverscanTop = 0; 80 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 81 break; 82 } 83 } 84 85 static void atombios_scaler_setup(struct drm_crtc *crtc) 86 { 87 struct drm_device *dev = crtc->dev; 88 struct radeon_device *rdev = dev->dev_private; 89 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 90 ENABLE_SCALER_PS_ALLOCATION args; 91 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler); 92 93 /* fixme - fill in enc_priv for atom dac */ 94 enum radeon_tv_std tv_std = TV_STD_NTSC; 95 bool is_tv = false, is_cv = false; 96 struct drm_encoder *encoder; 97 98 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id) 99 return; 100 101 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 102 /* find tv std */ 103 if (encoder->crtc == crtc) { 104 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder); 105 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) { 106 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv; 107 tv_std = tv_dac->tv_std; 108 is_tv = true; 109 } 110 } 111 } 112 113 memset(&args, 0, sizeof(args)); 114 115 args.ucScaler = radeon_crtc->crtc_id; 116 117 if (is_tv) { 118 switch (tv_std) { 119 case TV_STD_NTSC: 120 default: 121 args.ucTVStandard = ATOM_TV_NTSC; 122 break; 123 case TV_STD_PAL: 124 args.ucTVStandard = ATOM_TV_PAL; 125 break; 126 case TV_STD_PAL_M: 127 args.ucTVStandard = ATOM_TV_PALM; 128 break; 129 case TV_STD_PAL_60: 130 args.ucTVStandard = ATOM_TV_PAL60; 131 break; 132 case TV_STD_NTSC_J: 133 args.ucTVStandard = ATOM_TV_NTSCJ; 134 break; 135 case TV_STD_SCART_PAL: 136 args.ucTVStandard = ATOM_TV_PAL; /* ??? 
*/ 137 break; 138 case TV_STD_SECAM: 139 args.ucTVStandard = ATOM_TV_SECAM; 140 break; 141 case TV_STD_PAL_CN: 142 args.ucTVStandard = ATOM_TV_PALCN; 143 break; 144 } 145 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE; 146 } else if (is_cv) { 147 args.ucTVStandard = ATOM_TV_CV; 148 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE; 149 } else { 150 switch (radeon_crtc->rmx_type) { 151 case RMX_FULL: 152 args.ucEnable = ATOM_SCALER_EXPANSION; 153 break; 154 case RMX_CENTER: 155 args.ucEnable = ATOM_SCALER_CENTER; 156 break; 157 case RMX_ASPECT: 158 args.ucEnable = ATOM_SCALER_EXPANSION; 159 break; 160 default: 161 if (ASIC_IS_AVIVO(rdev)) 162 args.ucEnable = ATOM_SCALER_DISABLE; 163 else 164 args.ucEnable = ATOM_SCALER_CENTER; 165 break; 166 } 167 } 168 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 169 if ((is_tv || is_cv) 170 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) { 171 atom_rv515_force_tv_scaler(rdev, radeon_crtc); 172 } 173 } 174 175 static void atombios_lock_crtc(struct drm_crtc *crtc, int lock) 176 { 177 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 178 struct drm_device *dev = crtc->dev; 179 struct radeon_device *rdev = dev->dev_private; 180 int index = 181 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters); 182 ENABLE_CRTC_PS_ALLOCATION args; 183 184 memset(&args, 0, sizeof(args)); 185 186 args.ucCRTC = radeon_crtc->crtc_id; 187 args.ucEnable = lock; 188 189 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 190 } 191 192 static void atombios_enable_crtc(struct drm_crtc *crtc, int state) 193 { 194 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 195 struct drm_device *dev = crtc->dev; 196 struct radeon_device *rdev = dev->dev_private; 197 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC); 198 ENABLE_CRTC_PS_ALLOCATION args; 199 200 memset(&args, 0, sizeof(args)); 201 202 args.ucCRTC = radeon_crtc->crtc_id; 203 args.ucEnable = state; 204 205 
atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 206 } 207 208 static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state) 209 { 210 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 211 struct drm_device *dev = crtc->dev; 212 struct radeon_device *rdev = dev->dev_private; 213 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq); 214 ENABLE_CRTC_PS_ALLOCATION args; 215 216 memset(&args, 0, sizeof(args)); 217 218 args.ucCRTC = radeon_crtc->crtc_id; 219 args.ucEnable = state; 220 221 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 222 } 223 224 static void atombios_blank_crtc(struct drm_crtc *crtc, int state) 225 { 226 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 227 struct drm_device *dev = crtc->dev; 228 struct radeon_device *rdev = dev->dev_private; 229 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC); 230 BLANK_CRTC_PS_ALLOCATION args; 231 232 memset(&args, 0, sizeof(args)); 233 234 args.ucCRTC = radeon_crtc->crtc_id; 235 args.ucBlanking = state; 236 237 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 238 } 239 240 void atombios_crtc_dpms(struct drm_crtc *crtc, int mode) 241 { 242 struct drm_device *dev = crtc->dev; 243 struct radeon_device *rdev = dev->dev_private; 244 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 245 246 switch (mode) { 247 case DRM_MODE_DPMS_ON: 248 atombios_enable_crtc(crtc, ATOM_ENABLE); 249 if (ASIC_IS_DCE3(rdev)) 250 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE); 251 atombios_blank_crtc(crtc, ATOM_DISABLE); 252 /* XXX re-enable when interrupt support is added */ 253 if (!ASIC_IS_DCE4(rdev)) 254 drm_vblank_post_modeset(dev, radeon_crtc->crtc_id); 255 radeon_crtc_load_lut(crtc); 256 break; 257 case DRM_MODE_DPMS_STANDBY: 258 case DRM_MODE_DPMS_SUSPEND: 259 case DRM_MODE_DPMS_OFF: 260 /* XXX re-enable when interrupt support is added */ 261 if (!ASIC_IS_DCE4(rdev)) 262 drm_vblank_pre_modeset(dev, 
radeon_crtc->crtc_id); 263 atombios_blank_crtc(crtc, ATOM_ENABLE); 264 if (ASIC_IS_DCE3(rdev)) 265 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE); 266 atombios_enable_crtc(crtc, ATOM_DISABLE); 267 break; 268 } 269 } 270 271 static void 272 atombios_set_crtc_dtd_timing(struct drm_crtc *crtc, 273 struct drm_display_mode *mode) 274 { 275 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 276 struct drm_device *dev = crtc->dev; 277 struct radeon_device *rdev = dev->dev_private; 278 SET_CRTC_USING_DTD_TIMING_PARAMETERS args; 279 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming); 280 u16 misc = 0; 281 282 memset(&args, 0, sizeof(args)); 283 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay); 284 args.usH_Blanking_Time = 285 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay); 286 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay); 287 args.usV_Blanking_Time = 288 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay); 289 args.usH_SyncOffset = 290 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay); 291 args.usH_SyncWidth = 292 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start); 293 args.usV_SyncOffset = 294 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay); 295 args.usV_SyncWidth = 296 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start); 297 /*args.ucH_Border = mode->hborder;*/ 298 /*args.ucV_Border = mode->vborder;*/ 299 300 if (mode->flags & DRM_MODE_FLAG_NVSYNC) 301 misc |= ATOM_VSYNC_POLARITY; 302 if (mode->flags & DRM_MODE_FLAG_NHSYNC) 303 misc |= ATOM_HSYNC_POLARITY; 304 if (mode->flags & DRM_MODE_FLAG_CSYNC) 305 misc |= ATOM_COMPOSITESYNC; 306 if (mode->flags & DRM_MODE_FLAG_INTERLACE) 307 misc |= ATOM_INTERLACE; 308 if (mode->flags & DRM_MODE_FLAG_DBLSCAN) 309 misc |= ATOM_DOUBLE_CLOCK_MODE; 310 311 args.susModeMiscInfo.usAccess = cpu_to_le16(misc); 312 args.ucCRTC = radeon_crtc->crtc_id; 313 314 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 315 } 316 317 static void 
atombios_crtc_set_timing(struct drm_crtc *crtc, 318 struct drm_display_mode *mode) 319 { 320 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 321 struct drm_device *dev = crtc->dev; 322 struct radeon_device *rdev = dev->dev_private; 323 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args; 324 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing); 325 u16 misc = 0; 326 327 memset(&args, 0, sizeof(args)); 328 args.usH_Total = cpu_to_le16(mode->crtc_htotal); 329 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay); 330 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start); 331 args.usH_SyncWidth = 332 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start); 333 args.usV_Total = cpu_to_le16(mode->crtc_vtotal); 334 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay); 335 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start); 336 args.usV_SyncWidth = 337 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start); 338 339 if (mode->flags & DRM_MODE_FLAG_NVSYNC) 340 misc |= ATOM_VSYNC_POLARITY; 341 if (mode->flags & DRM_MODE_FLAG_NHSYNC) 342 misc |= ATOM_HSYNC_POLARITY; 343 if (mode->flags & DRM_MODE_FLAG_CSYNC) 344 misc |= ATOM_COMPOSITESYNC; 345 if (mode->flags & DRM_MODE_FLAG_INTERLACE) 346 misc |= ATOM_INTERLACE; 347 if (mode->flags & DRM_MODE_FLAG_DBLSCAN) 348 misc |= ATOM_DOUBLE_CLOCK_MODE; 349 350 args.susModeMiscInfo.usAccess = cpu_to_le16(misc); 351 args.ucCRTC = radeon_crtc->crtc_id; 352 353 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 354 } 355 356 static void atombios_disable_ss(struct drm_crtc *crtc) 357 { 358 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 359 struct drm_device *dev = crtc->dev; 360 struct radeon_device *rdev = dev->dev_private; 361 u32 ss_cntl; 362 363 if (ASIC_IS_DCE4(rdev)) { 364 switch (radeon_crtc->pll_id) { 365 case ATOM_PPLL1: 366 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL); 367 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN; 368 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl); 369 break; 370 case ATOM_PPLL2: 
371 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL); 372 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN; 373 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl); 374 break; 375 case ATOM_DCPLL: 376 case ATOM_PPLL_INVALID: 377 return; 378 } 379 } else if (ASIC_IS_AVIVO(rdev)) { 380 switch (radeon_crtc->pll_id) { 381 case ATOM_PPLL1: 382 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL); 383 ss_cntl &= ~1; 384 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl); 385 break; 386 case ATOM_PPLL2: 387 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL); 388 ss_cntl &= ~1; 389 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl); 390 break; 391 case ATOM_DCPLL: 392 case ATOM_PPLL_INVALID: 393 return; 394 } 395 } 396 } 397 398 399 union atom_enable_ss { 400 ENABLE_LVDS_SS_PARAMETERS legacy; 401 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1; 402 }; 403 404 static void atombios_enable_ss(struct drm_crtc *crtc) 405 { 406 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 407 struct drm_device *dev = crtc->dev; 408 struct radeon_device *rdev = dev->dev_private; 409 struct drm_encoder *encoder = NULL; 410 struct radeon_encoder *radeon_encoder = NULL; 411 struct radeon_encoder_atom_dig *dig = NULL; 412 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL); 413 union atom_enable_ss args; 414 uint16_t percentage = 0; 415 uint8_t type = 0, step = 0, delay = 0, range = 0; 416 417 /* XXX add ss support for DCE4 */ 418 if (ASIC_IS_DCE4(rdev)) 419 return; 420 421 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 422 if (encoder->crtc == crtc) { 423 radeon_encoder = to_radeon_encoder(encoder); 424 /* only enable spread spectrum on LVDS */ 425 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 426 dig = radeon_encoder->enc_priv; 427 if (dig && dig->ss) { 428 percentage = dig->ss->percentage; 429 type = dig->ss->type; 430 step = dig->ss->step; 431 delay = dig->ss->delay; 432 range = dig->ss->range; 433 } else 434 return; 435 } else 436 return; 437 break; 438 } 439 } 440 441 if (!radeon_encoder) 442 return; 
443 444 memset(&args, 0, sizeof(args)); 445 if (ASIC_IS_AVIVO(rdev)) { 446 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(percentage); 447 args.v1.ucSpreadSpectrumType = type; 448 args.v1.ucSpreadSpectrumStep = step; 449 args.v1.ucSpreadSpectrumDelay = delay; 450 args.v1.ucSpreadSpectrumRange = range; 451 args.v1.ucPpll = radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1; 452 args.v1.ucEnable = ATOM_ENABLE; 453 } else { 454 args.legacy.usSpreadSpectrumPercentage = cpu_to_le16(percentage); 455 args.legacy.ucSpreadSpectrumType = type; 456 args.legacy.ucSpreadSpectrumStepSize_Delay = (step & 3) << 2; 457 args.legacy.ucSpreadSpectrumStepSize_Delay |= (delay & 7) << 4; 458 args.legacy.ucEnable = ATOM_ENABLE; 459 } 460 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 461 } 462 463 union adjust_pixel_clock { 464 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1; 465 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3; 466 }; 467 468 static u32 atombios_adjust_pll(struct drm_crtc *crtc, 469 struct drm_display_mode *mode, 470 struct radeon_pll *pll) 471 { 472 struct drm_device *dev = crtc->dev; 473 struct radeon_device *rdev = dev->dev_private; 474 struct drm_encoder *encoder = NULL; 475 struct radeon_encoder *radeon_encoder = NULL; 476 u32 adjusted_clock = mode->clock; 477 int encoder_mode = 0; 478 479 /* reset the pll flags */ 480 pll->flags = 0; 481 482 /* select the PLL algo */ 483 if (ASIC_IS_AVIVO(rdev)) { 484 if (radeon_new_pll == 0) 485 pll->algo = PLL_ALGO_LEGACY; 486 else 487 pll->algo = PLL_ALGO_NEW; 488 } else { 489 if (radeon_new_pll == 1) 490 pll->algo = PLL_ALGO_NEW; 491 else 492 pll->algo = PLL_ALGO_LEGACY; 493 } 494 495 if (ASIC_IS_AVIVO(rdev)) { 496 if ((rdev->family == CHIP_RS600) || 497 (rdev->family == CHIP_RS690) || 498 (rdev->family == CHIP_RS740)) 499 pll->flags |= (RADEON_PLL_USE_FRAC_FB_DIV | 500 RADEON_PLL_PREFER_CLOSEST_LOWER); 501 502 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? 
*/ 503 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV; 504 else 505 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV; 506 } else { 507 pll->flags |= RADEON_PLL_LEGACY; 508 509 if (mode->clock > 200000) /* range limits??? */ 510 pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV; 511 else 512 pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV; 513 514 } 515 516 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 517 if (encoder->crtc == crtc) { 518 radeon_encoder = to_radeon_encoder(encoder); 519 encoder_mode = atombios_get_encoder_mode(encoder); 520 if (ASIC_IS_AVIVO(rdev)) { 521 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */ 522 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1) 523 adjusted_clock = mode->clock * 2; 524 } else { 525 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC) 526 pll->flags |= RADEON_PLL_NO_ODD_POST_DIV; 527 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS) 528 pll->flags |= RADEON_PLL_USE_REF_DIV; 529 } 530 break; 531 } 532 } 533 534 /* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock 535 * accordingly based on the encoder/transmitter to work around 536 * special hw requirements. 
537 */ 538 if (ASIC_IS_DCE3(rdev)) { 539 union adjust_pixel_clock args; 540 u8 frev, crev; 541 int index; 542 543 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll); 544 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, 545 &crev)) 546 return adjusted_clock; 547 548 memset(&args, 0, sizeof(args)); 549 550 switch (frev) { 551 case 1: 552 switch (crev) { 553 case 1: 554 case 2: 555 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10); 556 args.v1.ucTransmitterID = radeon_encoder->encoder_id; 557 args.v1.ucEncodeMode = encoder_mode; 558 559 atom_execute_table(rdev->mode_info.atom_context, 560 index, (uint32_t *)&args); 561 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10; 562 break; 563 case 3: 564 args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10); 565 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id; 566 args.v3.sInput.ucEncodeMode = encoder_mode; 567 args.v3.sInput.ucDispPllConfig = 0; 568 if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 569 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv; 570 571 if (encoder_mode == ATOM_ENCODER_MODE_DP) 572 args.v3.sInput.ucDispPllConfig |= 573 DISPPLL_CONFIG_COHERENT_MODE; 574 else { 575 if (dig->coherent_mode) 576 args.v3.sInput.ucDispPllConfig |= 577 DISPPLL_CONFIG_COHERENT_MODE; 578 if (mode->clock > 165000) 579 args.v3.sInput.ucDispPllConfig |= 580 DISPPLL_CONFIG_DUAL_LINK; 581 } 582 } else if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 583 /* may want to enable SS on DP/eDP eventually */ 584 /*args.v3.sInput.ucDispPllConfig |= 585 DISPPLL_CONFIG_SS_ENABLE;*/ 586 if (encoder_mode == ATOM_ENCODER_MODE_DP) 587 args.v3.sInput.ucDispPllConfig |= 588 DISPPLL_CONFIG_COHERENT_MODE; 589 else { 590 if (mode->clock > 165000) 591 args.v3.sInput.ucDispPllConfig |= 592 DISPPLL_CONFIG_DUAL_LINK; 593 } 594 } 595 atom_execute_table(rdev->mode_info.atom_context, 596 index, (uint32_t *)&args); 597 adjusted_clock = 
le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10; 598 if (args.v3.sOutput.ucRefDiv) { 599 pll->flags |= RADEON_PLL_USE_REF_DIV; 600 pll->reference_div = args.v3.sOutput.ucRefDiv; 601 } 602 if (args.v3.sOutput.ucPostDiv) { 603 pll->flags |= RADEON_PLL_USE_POST_DIV; 604 pll->post_div = args.v3.sOutput.ucPostDiv; 605 } 606 break; 607 default: 608 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 609 return adjusted_clock; 610 } 611 break; 612 default: 613 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 614 return adjusted_clock; 615 } 616 } 617 return adjusted_clock; 618 } 619 620 union set_pixel_clock { 621 SET_PIXEL_CLOCK_PS_ALLOCATION base; 622 PIXEL_CLOCK_PARAMETERS v1; 623 PIXEL_CLOCK_PARAMETERS_V2 v2; 624 PIXEL_CLOCK_PARAMETERS_V3 v3; 625 PIXEL_CLOCK_PARAMETERS_V5 v5; 626 }; 627 628 static void atombios_crtc_set_dcpll(struct drm_crtc *crtc) 629 { 630 struct drm_device *dev = crtc->dev; 631 struct radeon_device *rdev = dev->dev_private; 632 u8 frev, crev; 633 int index; 634 union set_pixel_clock args; 635 636 memset(&args, 0, sizeof(args)); 637 638 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock); 639 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, 640 &crev)) 641 return; 642 643 switch (frev) { 644 case 1: 645 switch (crev) { 646 case 5: 647 /* if the default dcpll clock is specified, 648 * SetPixelClock provides the dividers 649 */ 650 args.v5.ucCRTC = ATOM_CRTC_INVALID; 651 args.v5.usPixelClock = rdev->clock.default_dispclk; 652 args.v5.ucPpll = ATOM_DCPLL; 653 break; 654 default: 655 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 656 return; 657 } 658 break; 659 default: 660 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 661 return; 662 } 663 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 664 } 665 666 static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode) 667 { 668 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 669 struct 
drm_device *dev = crtc->dev; 670 struct radeon_device *rdev = dev->dev_private; 671 struct drm_encoder *encoder = NULL; 672 struct radeon_encoder *radeon_encoder = NULL; 673 u8 frev, crev; 674 int index; 675 union set_pixel_clock args; 676 u32 pll_clock = mode->clock; 677 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0; 678 struct radeon_pll *pll; 679 u32 adjusted_clock; 680 int encoder_mode = 0; 681 682 memset(&args, 0, sizeof(args)); 683 684 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 685 if (encoder->crtc == crtc) { 686 radeon_encoder = to_radeon_encoder(encoder); 687 encoder_mode = atombios_get_encoder_mode(encoder); 688 break; 689 } 690 } 691 692 if (!radeon_encoder) 693 return; 694 695 switch (radeon_crtc->pll_id) { 696 case ATOM_PPLL1: 697 pll = &rdev->clock.p1pll; 698 break; 699 case ATOM_PPLL2: 700 pll = &rdev->clock.p2pll; 701 break; 702 case ATOM_DCPLL: 703 case ATOM_PPLL_INVALID: 704 pll = &rdev->clock.dcpll; 705 break; 706 } 707 708 /* adjust pixel clock as needed */ 709 adjusted_clock = atombios_adjust_pll(crtc, mode, pll); 710 711 radeon_compute_pll(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div, 712 &ref_div, &post_div); 713 714 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock); 715 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, 716 &crev)) 717 return; 718 719 switch (frev) { 720 case 1: 721 switch (crev) { 722 case 1: 723 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10); 724 args.v1.usRefDiv = cpu_to_le16(ref_div); 725 args.v1.usFbDiv = cpu_to_le16(fb_div); 726 args.v1.ucFracFbDiv = frac_fb_div; 727 args.v1.ucPostDiv = post_div; 728 args.v1.ucPpll = radeon_crtc->pll_id; 729 args.v1.ucCRTC = radeon_crtc->crtc_id; 730 args.v1.ucRefDivSrc = 1; 731 break; 732 case 2: 733 args.v2.usPixelClock = cpu_to_le16(mode->clock / 10); 734 args.v2.usRefDiv = cpu_to_le16(ref_div); 735 args.v2.usFbDiv = cpu_to_le16(fb_div); 736 args.v2.ucFracFbDiv = frac_fb_div; 737 args.v2.ucPostDiv = 
post_div; 738 args.v2.ucPpll = radeon_crtc->pll_id; 739 args.v2.ucCRTC = radeon_crtc->crtc_id; 740 args.v2.ucRefDivSrc = 1; 741 break; 742 case 3: 743 args.v3.usPixelClock = cpu_to_le16(mode->clock / 10); 744 args.v3.usRefDiv = cpu_to_le16(ref_div); 745 args.v3.usFbDiv = cpu_to_le16(fb_div); 746 args.v3.ucFracFbDiv = frac_fb_div; 747 args.v3.ucPostDiv = post_div; 748 args.v3.ucPpll = radeon_crtc->pll_id; 749 args.v3.ucMiscInfo = (radeon_crtc->pll_id << 2); 750 args.v3.ucTransmitterId = radeon_encoder->encoder_id; 751 args.v3.ucEncoderMode = encoder_mode; 752 break; 753 case 5: 754 args.v5.ucCRTC = radeon_crtc->crtc_id; 755 args.v5.usPixelClock = cpu_to_le16(mode->clock / 10); 756 args.v5.ucRefDiv = ref_div; 757 args.v5.usFbDiv = cpu_to_le16(fb_div); 758 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000); 759 args.v5.ucPostDiv = post_div; 760 args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */ 761 args.v5.ucTransmitterID = radeon_encoder->encoder_id; 762 args.v5.ucEncoderMode = encoder_mode; 763 args.v5.ucPpll = radeon_crtc->pll_id; 764 break; 765 default: 766 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 767 return; 768 } 769 break; 770 default: 771 DRM_ERROR("Unknown table version %d %d\n", frev, crev); 772 return; 773 } 774 775 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args); 776 } 777 778 static int evergreen_crtc_set_base(struct drm_crtc *crtc, int x, int y, 779 struct drm_framebuffer *old_fb) 780 { 781 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 782 struct drm_device *dev = crtc->dev; 783 struct radeon_device *rdev = dev->dev_private; 784 struct radeon_framebuffer *radeon_fb; 785 struct drm_gem_object *obj; 786 struct radeon_bo *rbo; 787 uint64_t fb_location; 788 uint32_t fb_format, fb_pitch_pixels, tiling_flags; 789 int r; 790 791 /* no fb bound */ 792 if (!crtc->fb) { 793 DRM_DEBUG("No FB bound\n"); 794 return 0; 795 } 796 797 radeon_fb = to_radeon_framebuffer(crtc->fb); 798 799 /* Pin framebuffer & get 
tilling informations */ 800 obj = radeon_fb->obj; 801 rbo = obj->driver_private; 802 r = radeon_bo_reserve(rbo, false); 803 if (unlikely(r != 0)) 804 return r; 805 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location); 806 if (unlikely(r != 0)) { 807 radeon_bo_unreserve(rbo); 808 return -EINVAL; 809 } 810 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL); 811 radeon_bo_unreserve(rbo); 812 813 switch (crtc->fb->bits_per_pixel) { 814 case 8: 815 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) | 816 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED)); 817 break; 818 case 15: 819 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) | 820 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555)); 821 break; 822 case 16: 823 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) | 824 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565)); 825 break; 826 case 24: 827 case 32: 828 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) | 829 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888)); 830 break; 831 default: 832 DRM_ERROR("Unsupported screen depth %d\n", 833 crtc->fb->bits_per_pixel); 834 return -EINVAL; 835 } 836 837 switch (radeon_crtc->crtc_id) { 838 case 0: 839 WREG32(AVIVO_D1VGA_CONTROL, 0); 840 break; 841 case 1: 842 WREG32(AVIVO_D2VGA_CONTROL, 0); 843 break; 844 case 2: 845 WREG32(EVERGREEN_D3VGA_CONTROL, 0); 846 break; 847 case 3: 848 WREG32(EVERGREEN_D4VGA_CONTROL, 0); 849 break; 850 case 4: 851 WREG32(EVERGREEN_D5VGA_CONTROL, 0); 852 break; 853 case 5: 854 WREG32(EVERGREEN_D6VGA_CONTROL, 0); 855 break; 856 default: 857 break; 858 } 859 860 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset, 861 upper_32_bits(fb_location)); 862 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset, 863 upper_32_bits(fb_location)); 864 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset, 865 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK); 866 
WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset, 867 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK); 868 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format); 869 870 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0); 871 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0); 872 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0); 873 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0); 874 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width); 875 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height); 876 877 fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8); 878 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels); 879 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1); 880 881 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset, 882 crtc->mode.vdisplay); 883 x &= ~3; 884 y &= ~1; 885 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset, 886 (x << 16) | y); 887 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset, 888 (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay); 889 890 if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE) 891 WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset, 892 EVERGREEN_INTERLEAVE_EN); 893 else 894 WREG32(EVERGREEN_DATA_FORMAT + radeon_crtc->crtc_offset, 0); 895 896 if (old_fb && old_fb != crtc->fb) { 897 radeon_fb = to_radeon_framebuffer(old_fb); 898 rbo = radeon_fb->obj->driver_private; 899 r = radeon_bo_reserve(rbo, false); 900 if (unlikely(r != 0)) 901 return r; 902 radeon_bo_unpin(rbo); 903 radeon_bo_unreserve(rbo); 904 } 905 906 /* Bytes per pixel may have changed */ 907 radeon_bandwidth_update(rdev); 908 909 return 0; 910 } 911 912 static int avivo_crtc_set_base(struct drm_crtc *crtc, int x, int y, 913 struct drm_framebuffer *old_fb) 914 { 915 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc); 916 struct drm_device 
*dev = crtc->dev;	/* NOTE(review): declaration torn at chunk boundary; 'struct drm_device' is on the preceding line */
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_framebuffer *radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	uint64_t fb_location;
	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
	int r;

	/* no fb bound */
	if (!crtc->fb) {
		DRM_DEBUG("No FB bound\n");
		return 0;
	}

	radeon_fb = to_radeon_framebuffer(crtc->fb);

	/* Pin framebuffer in VRAM & get tiling information */
	obj = radeon_fb->obj;
	rbo = obj->driver_private;
	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(rbo);
		/* NOTE(review): the actual pin error code is discarded here */
		return -EINVAL;
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	/* Translate fb depth into the AVIVO graphics surface format bits */
	switch (crtc->fb->bits_per_pixel) {
	case 8:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
		    AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
		break;
	case 15:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
		break;
	case 16:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
		    AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
		break;
	case 24:
	case 32:
		fb_format =
		    AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
		    AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
		break;
	default:
		DRM_ERROR("Unsupported screen depth %d\n",
			  crtc->fb->bits_per_pixel);
		/* NOTE(review): returns with the BO still pinned above --
		 * looks like a pin leak on unsupported depths; confirm
		 * against later upstream fixes */
		return -EINVAL;
	}

	if (tiling_flags & RADEON_TILING_MACRO)
		fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;

	if (tiling_flags & RADEON_TILING_MICRO)
		fb_format |= AVIVO_D1GRPH_TILED;

	/* disable the VGA path on the CRTC being programmed */
	if (radeon_crtc->crtc_id == 0)
		WREG32(AVIVO_D1VGA_CONTROL, 0);
	else
		WREG32(AVIVO_D2VGA_CONTROL, 0);

	/* RV770+ add high-address surface registers; zero them since
	 * fb_location is programmed as a 32-bit value below */
	if (rdev->family >= CHIP_RV770) {
		if (radeon_crtc->crtc_id) {
			WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0);
			WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0);
		} else {
			WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, 0);
			WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, 0);
		}
	}
	WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32) fb_location);
	WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
	       radeon_crtc->crtc_offset, (u32) fb_location);
	WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);

	/* scan out the whole framebuffer: origin (0,0) to (width,height) */
	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
	WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);

	/* hardware pitch is in pixels; fb->pitch is in bytes */
	fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
	WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
	WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

	WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
	       crtc->mode.vdisplay);
	/* clamp viewport start: x to a multiple of 4, y to a multiple of 2 */
	x &= ~3;
	y &= ~1;
	WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
	       (x << 16) | y);
	WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
	       (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);

	if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
		WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset,
		       AVIVO_D1MODE_INTERLEAVE_EN);
	else
		WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset, 0);

	/* unpin the framebuffer that was previously being scanned out */
	if (old_fb && old_fb != crtc->fb) {
		radeon_fb = to_radeon_framebuffer(old_fb);
		rbo = radeon_fb->obj->driver_private;
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r != 0))
			return r;
		radeon_bo_unpin(rbo);
		radeon_bo_unreserve(rbo);
	}

	/* Bytes per pixel may have changed */
radeon_bandwidth_update(rdev);	/* recompute display bandwidth for the new format */

	return 0;
}

/*
 * atombios_crtc_set_base() - program the scanout base for @crtc at (@x, @y),
 * unpinning @old_fb where applicable. Dispatches to the per-ASIC
 * implementation: DCE4 (evergreen), AVIVO (r5xx+), or legacy.
 * Returns 0 on success or a negative error code.
 */
int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
			   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev))
		return evergreen_crtc_set_base(crtc, x, y, old_fb);
	else if (ASIC_IS_AVIVO(rdev))
		return avivo_crtc_set_base(crtc, x, y, old_fb);
	else
		return radeon_crtc_set_base(crtc, x, y, old_fb);
}

/* properly set additional regs when using atombios */
static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;	/* used implicitly by RREG32/WREG32 */
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	u32 disp_merge_cntl;

	switch (radeon_crtc->crtc_id) {
	case 0:
		/* clear the RGB offset enable on the primary merge path */
		disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
		WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
		break;
	case 1:
		/* same for the secondary path, plus mirror CRTC2 sync
		 * start/width into the FP2 registers */
		disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
		WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
		WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
		WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
		break;
	}
}

/*
 * radeon_atom_pick_pll() - choose which PPLL should drive @crtc.
 * Pre-DCE4 the mapping is fixed (PLL id == crtc id); on DCE4 a free
 * PPLL is selected, or ATOM_PPLL_INVALID for DP with an external clock.
 */
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *test_encoder;
	struct drm_crtc *test_crtc;
	uint32_t pll_in_use = 0;

	if (ASIC_IS_DCE4(rdev)) {
		/* if crtc is driving DP and we have an ext clock, use that */
		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
if (atombios_get_encoder_mode(test_encoder) == ATOM_ENCODER_MODE_DP) {
					/* DP fed by an external reference clock
					 * needs no PPLL at all */
					if (rdev->clock.dp_extclk)
						return ATOM_PPLL_INVALID;
				}
			}
		}

		/* otherwise, pick one of the plls */
		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
			struct radeon_crtc *radeon_test_crtc;

			if (crtc == test_crtc)
				continue;

			/* collect PPLLs already claimed by other CRTCs */
			radeon_test_crtc = to_radeon_crtc(test_crtc);
			if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
			    (radeon_test_crtc->pll_id <= ATOM_PPLL2))
				pll_in_use |= (1 << radeon_test_crtc->pll_id);
		}
		/* NOTE(review): the bit-0 test assumes ATOM_PPLL1 == 0 -- confirm */
		if (!(pll_in_use & 1))
			return ATOM_PPLL1;
		return ATOM_PPLL2;
	} else
		return radeon_crtc->crtc_id;

}

/*
 * atombios_crtc_mode_set() - perform a full modeset on an AtomBIOS CRTC:
 * spread spectrum off, PLL programming, SS back on, CRTC timing (per ASIC
 * generation), scanout base, overscan and scaler setup. Returns 0.
 */
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* TODO color tiling */

	/* disable spread spectrum, program the PLL(s), then re-enable it */
	atombios_disable_ss(crtc);
	/* always set DCPLL */
	if (ASIC_IS_DCE4(rdev))
		atombios_crtc_set_dcpll(crtc);
	atombios_crtc_set_pll(crtc, adjusted_mode);
	atombios_enable_ss(crtc);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev))
		atombios_crtc_set_timing(crtc, adjusted_mode);
	else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		/* pre-AVIVO parts need extra legacy register fixups */
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	return 0;
}

static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
				     struct drm_display_mode *mode,
				     struct drm_display_mode
*adjusted_mode)
{
	/* delegate mode adjustment to the shared scaler fixup helper */
	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
		return false;
	return true;
}

/* helper .prepare: pick a PLL, then lock the CRTC and turn it off
 * ahead of the modeset */
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

/* helper .commit: turn the CRTC back on and release the lock */
static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}

/* drm_crtc_helper callbacks used for AtomBIOS-driven CRTCs */
static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,
};

/*
 * radeon_atombios_init_crtc() - set the per-CRTC register block offset
 * and attach the AtomBIOS helper callbacks to @radeon_crtc.
 */
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		/* DCE4 exposes up to six CRTCs, each with its own
		 * register block offset */
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		/* pre-DCE4: CRTC2 registers sit at a fixed delta from CRTC1 */
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}

	/* no PLL assigned yet; chosen later in atombios_crtc_prepare() */
	radeon_crtc->pll_id = -1;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}