/*
 * Copyright 2007-11 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
22 * 23 * Authors: Dave Airlie 24 * Alex Deucher 25 */ 26 #include <drm/drmP.h> 27 #include <drm/drm_crtc_helper.h> 28 #include <drm/amdgpu_drm.h> 29 #include "amdgpu.h" 30 #include "amdgpu_connectors.h" 31 #include "amdgpu_display.h" 32 #include "atom.h" 33 #include "atombios_encoders.h" 34 #include "atombios_dp.h" 35 #include <linux/backlight.h> 36 #include "bif/bif_4_1_d.h" 37 38 u8 39 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev) 40 { 41 u8 backlight_level; 42 u32 bios_2_scratch; 43 44 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2); 45 46 backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >> 47 ATOM_S2_CURRENT_BL_LEVEL_SHIFT); 48 49 return backlight_level; 50 } 51 52 void 53 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev, 54 u8 backlight_level) 55 { 56 u32 bios_2_scratch; 57 58 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2); 59 60 bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK; 61 bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) & 62 ATOM_S2_CURRENT_BL_LEVEL_MASK); 63 64 WREG32(mmBIOS_SCRATCH_2, bios_2_scratch); 65 } 66 67 u8 68 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder) 69 { 70 struct drm_device *dev = amdgpu_encoder->base.dev; 71 struct amdgpu_device *adev = dev->dev_private; 72 73 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 74 return 0; 75 76 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 77 } 78 79 void 80 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder, 81 u8 level) 82 { 83 struct drm_encoder *encoder = &amdgpu_encoder->base; 84 struct drm_device *dev = amdgpu_encoder->base.dev; 85 struct amdgpu_device *adev = dev->dev_private; 86 struct amdgpu_encoder_atom_dig *dig; 87 88 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 89 return; 90 91 if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) && 92 
amdgpu_encoder->enc_priv) { 93 dig = amdgpu_encoder->enc_priv; 94 dig->backlight_level = level; 95 amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level); 96 97 switch (amdgpu_encoder->encoder_id) { 98 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 99 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 100 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 101 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 102 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 103 if (dig->backlight_level == 0) 104 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 105 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0); 106 else { 107 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 108 ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0); 109 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 110 ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0); 111 } 112 break; 113 default: 114 break; 115 } 116 } 117 } 118 119 #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE) 120 121 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd) 122 { 123 u8 level; 124 125 /* Convert brightness to hardware level */ 126 if (bd->props.brightness < 0) 127 level = 0; 128 else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL) 129 level = AMDGPU_MAX_BL_LEVEL; 130 else 131 level = bd->props.brightness; 132 133 return level; 134 } 135 136 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd) 137 { 138 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd); 139 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder; 140 141 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, 142 amdgpu_atombios_encoder_backlight_level(bd)); 143 144 return 0; 145 } 146 147 static int 148 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd) 149 { 150 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd); 151 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder; 152 struct drm_device *dev = 
amdgpu_encoder->base.dev; 153 struct amdgpu_device *adev = dev->dev_private; 154 155 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 156 } 157 158 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = { 159 .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness, 160 .update_status = amdgpu_atombios_encoder_update_backlight_status, 161 }; 162 163 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder, 164 struct drm_connector *drm_connector) 165 { 166 struct drm_device *dev = amdgpu_encoder->base.dev; 167 struct amdgpu_device *adev = dev->dev_private; 168 struct backlight_device *bd; 169 struct backlight_properties props; 170 struct amdgpu_backlight_privdata *pdata; 171 struct amdgpu_encoder_atom_dig *dig; 172 u8 backlight_level; 173 char bl_name[16]; 174 175 /* Mac laptops with multiple GPUs use the gmux driver for backlight 176 * so don't register a backlight device 177 */ 178 if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) && 179 (adev->pdev->device == 0x6741)) 180 return; 181 182 if (!amdgpu_encoder->enc_priv) 183 return; 184 185 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 186 return; 187 188 pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL); 189 if (!pdata) { 190 DRM_ERROR("Memory allocation failed\n"); 191 goto error; 192 } 193 194 memset(&props, 0, sizeof(props)); 195 props.max_brightness = AMDGPU_MAX_BL_LEVEL; 196 props.type = BACKLIGHT_RAW; 197 snprintf(bl_name, sizeof(bl_name), 198 "amdgpu_bl%d", dev->primary->index); 199 bd = backlight_device_register(bl_name, drm_connector->kdev, 200 pdata, &amdgpu_atombios_encoder_backlight_ops, &props); 201 if (IS_ERR(bd)) { 202 DRM_ERROR("Backlight registration failed\n"); 203 goto error; 204 } 205 206 pdata->encoder = amdgpu_encoder; 207 208 backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 209 210 dig = amdgpu_encoder->enc_priv; 211 dig->bl_dev = bd; 
212 213 bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd); 214 bd->props.power = FB_BLANK_UNBLANK; 215 backlight_update_status(bd); 216 217 DRM_INFO("amdgpu atom DIG backlight initialized\n"); 218 219 return; 220 221 error: 222 kfree(pdata); 223 return; 224 } 225 226 void 227 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder) 228 { 229 struct drm_device *dev = amdgpu_encoder->base.dev; 230 struct amdgpu_device *adev = dev->dev_private; 231 struct backlight_device *bd = NULL; 232 struct amdgpu_encoder_atom_dig *dig; 233 234 if (!amdgpu_encoder->enc_priv) 235 return; 236 237 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 238 return; 239 240 dig = amdgpu_encoder->enc_priv; 241 bd = dig->bl_dev; 242 dig->bl_dev = NULL; 243 244 if (bd) { 245 struct amdgpu_legacy_backlight_privdata *pdata; 246 247 pdata = bl_get_data(bd); 248 backlight_device_unregister(bd); 249 kfree(pdata); 250 251 DRM_INFO("amdgpu atom LVDS backlight unloaded\n"); 252 } 253 } 254 255 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */ 256 257 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder) 258 { 259 } 260 261 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder) 262 { 263 } 264 265 #endif 266 267 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder) 268 { 269 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 270 switch (amdgpu_encoder->encoder_id) { 271 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 272 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 273 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 274 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 275 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 276 return true; 277 default: 278 return false; 279 } 280 } 281 282 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder, 283 const struct drm_display_mode *mode, 284 struct drm_display_mode *adjusted_mode) 285 { 286 struct amdgpu_encoder *amdgpu_encoder = 
to_amdgpu_encoder(encoder); 287 288 /* set the active encoder to connector routing */ 289 amdgpu_encoder_set_active_device(encoder); 290 drm_mode_set_crtcinfo(adjusted_mode, 0); 291 292 /* hw bug */ 293 if ((mode->flags & DRM_MODE_FLAG_INTERLACE) 294 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2))) 295 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2; 296 297 /* vertical FP must be at least 1 */ 298 if (mode->crtc_vsync_start == mode->crtc_vdisplay) 299 adjusted_mode->crtc_vsync_start++; 300 301 /* get the native mode for scaling */ 302 if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT)) 303 amdgpu_panel_mode_fixup(encoder, adjusted_mode); 304 else if (amdgpu_encoder->rmx_type != RMX_OFF) 305 amdgpu_panel_mode_fixup(encoder, adjusted_mode); 306 307 if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) || 308 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) { 309 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 310 amdgpu_atombios_dp_set_link_config(connector, adjusted_mode); 311 } 312 313 return true; 314 } 315 316 static void 317 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action) 318 { 319 struct drm_device *dev = encoder->dev; 320 struct amdgpu_device *adev = dev->dev_private; 321 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 322 DAC_ENCODER_CONTROL_PS_ALLOCATION args; 323 int index = 0; 324 325 memset(&args, 0, sizeof(args)); 326 327 switch (amdgpu_encoder->encoder_id) { 328 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 329 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 330 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl); 331 break; 332 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 333 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 334 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl); 335 break; 336 } 337 338 args.ucAction = action; 339 args.ucDacStandard = ATOM_DAC1_PS2; 340 
args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 341 342 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 343 344 } 345 346 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder) 347 { 348 int bpc = 8; 349 350 if (encoder->crtc) { 351 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 352 bpc = amdgpu_crtc->bpc; 353 } 354 355 switch (bpc) { 356 case 0: 357 return PANEL_BPC_UNDEFINE; 358 case 6: 359 return PANEL_6BIT_PER_COLOR; 360 case 8: 361 default: 362 return PANEL_8BIT_PER_COLOR; 363 case 10: 364 return PANEL_10BIT_PER_COLOR; 365 case 12: 366 return PANEL_12BIT_PER_COLOR; 367 case 16: 368 return PANEL_16BIT_PER_COLOR; 369 } 370 } 371 372 union dvo_encoder_control { 373 ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds; 374 DVO_ENCODER_CONTROL_PS_ALLOCATION dvo; 375 DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3; 376 DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4; 377 }; 378 379 static void 380 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action) 381 { 382 struct drm_device *dev = encoder->dev; 383 struct amdgpu_device *adev = dev->dev_private; 384 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 385 union dvo_encoder_control args; 386 int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl); 387 uint8_t frev, crev; 388 389 memset(&args, 0, sizeof(args)); 390 391 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 392 return; 393 394 switch (frev) { 395 case 1: 396 switch (crev) { 397 case 1: 398 /* R4xx, R5xx */ 399 args.ext_tmds.sXTmdsEncoder.ucEnable = action; 400 401 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 402 args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL; 403 404 args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB; 405 break; 406 case 2: 407 /* RS600/690/740 */ 408 args.dvo.sDVOEncoder.ucAction = action; 409 
args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 410 /* DFP1, CRT1, TV1 depending on the type of port */ 411 args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX; 412 413 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 414 args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL; 415 break; 416 case 3: 417 /* R6xx */ 418 args.dvo_v3.ucAction = action; 419 args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 420 args.dvo_v3.ucDVOConfig = 0; /* XXX */ 421 break; 422 case 4: 423 /* DCE8 */ 424 args.dvo_v4.ucAction = action; 425 args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 426 args.dvo_v4.ucDVOConfig = 0; /* XXX */ 427 args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 428 break; 429 default: 430 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 431 break; 432 } 433 break; 434 default: 435 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 436 break; 437 } 438 439 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 440 } 441 442 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder) 443 { 444 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 445 struct drm_connector *connector; 446 struct amdgpu_connector *amdgpu_connector; 447 struct amdgpu_connector_atom_dig *dig_connector; 448 449 /* dp bridges are always DP */ 450 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) 451 return ATOM_ENCODER_MODE_DP; 452 453 /* DVO is always DVO */ 454 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) || 455 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)) 456 return ATOM_ENCODER_MODE_DVO; 457 458 connector = amdgpu_get_connector_for_encoder(encoder); 459 /* if we don't have an active device yet, just use one of 460 * the connectors tied to the encoder. 
461 */ 462 if (!connector) 463 connector = amdgpu_get_connector_for_encoder_init(encoder); 464 amdgpu_connector = to_amdgpu_connector(connector); 465 466 switch (connector->connector_type) { 467 case DRM_MODE_CONNECTOR_DVII: 468 case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */ 469 if (amdgpu_audio != 0) { 470 if (amdgpu_connector->use_digital && 471 (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)) 472 return ATOM_ENCODER_MODE_HDMI; 473 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 474 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 475 return ATOM_ENCODER_MODE_HDMI; 476 else if (amdgpu_connector->use_digital) 477 return ATOM_ENCODER_MODE_DVI; 478 else 479 return ATOM_ENCODER_MODE_CRT; 480 } else if (amdgpu_connector->use_digital) { 481 return ATOM_ENCODER_MODE_DVI; 482 } else { 483 return ATOM_ENCODER_MODE_CRT; 484 } 485 break; 486 case DRM_MODE_CONNECTOR_DVID: 487 case DRM_MODE_CONNECTOR_HDMIA: 488 default: 489 if (amdgpu_audio != 0) { 490 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE) 491 return ATOM_ENCODER_MODE_HDMI; 492 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 493 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 494 return ATOM_ENCODER_MODE_HDMI; 495 else 496 return ATOM_ENCODER_MODE_DVI; 497 } else { 498 return ATOM_ENCODER_MODE_DVI; 499 } 500 break; 501 case DRM_MODE_CONNECTOR_LVDS: 502 return ATOM_ENCODER_MODE_LVDS; 503 break; 504 case DRM_MODE_CONNECTOR_DisplayPort: 505 dig_connector = amdgpu_connector->con_priv; 506 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) || 507 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) { 508 return ATOM_ENCODER_MODE_DP; 509 } else if (amdgpu_audio != 0) { 510 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE) 511 return ATOM_ENCODER_MODE_HDMI; 512 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 513 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 514 return ATOM_ENCODER_MODE_HDMI; 
515 else 516 return ATOM_ENCODER_MODE_DVI; 517 } else { 518 return ATOM_ENCODER_MODE_DVI; 519 } 520 break; 521 case DRM_MODE_CONNECTOR_eDP: 522 return ATOM_ENCODER_MODE_DP; 523 case DRM_MODE_CONNECTOR_DVIA: 524 case DRM_MODE_CONNECTOR_VGA: 525 return ATOM_ENCODER_MODE_CRT; 526 break; 527 case DRM_MODE_CONNECTOR_Composite: 528 case DRM_MODE_CONNECTOR_SVIDEO: 529 case DRM_MODE_CONNECTOR_9PinDIN: 530 /* fix me */ 531 return ATOM_ENCODER_MODE_TV; 532 /*return ATOM_ENCODER_MODE_CV;*/ 533 break; 534 } 535 } 536 537 /* 538 * DIG Encoder/Transmitter Setup 539 * 540 * DCE 6.0 541 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B). 542 * Supports up to 6 digital outputs 543 * - 6 DIG encoder blocks. 544 * - DIG to PHY mapping is hardcoded 545 * DIG1 drives UNIPHY0 link A, A+B 546 * DIG2 drives UNIPHY0 link B 547 * DIG3 drives UNIPHY1 link A, A+B 548 * DIG4 drives UNIPHY1 link B 549 * DIG5 drives UNIPHY2 link A, A+B 550 * DIG6 drives UNIPHY2 link B 551 * 552 * Routing 553 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links) 554 * Examples: 555 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI 556 * crtc1 -> dig1 -> UNIPHY0 link B -> DP 557 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS 558 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI 559 */ 560 561 union dig_encoder_control { 562 DIG_ENCODER_CONTROL_PS_ALLOCATION v1; 563 DIG_ENCODER_CONTROL_PARAMETERS_V2 v2; 564 DIG_ENCODER_CONTROL_PARAMETERS_V3 v3; 565 DIG_ENCODER_CONTROL_PARAMETERS_V4 v4; 566 DIG_ENCODER_CONTROL_PARAMETERS_V5 v5; 567 }; 568 569 void 570 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder, 571 int action, int panel_mode) 572 { 573 struct drm_device *dev = encoder->dev; 574 struct amdgpu_device *adev = dev->dev_private; 575 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 576 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 577 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 578 union dig_encoder_control args; 579 int index = 
GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl); 580 uint8_t frev, crev; 581 int dp_clock = 0; 582 int dp_lane_count = 0; 583 int hpd_id = AMDGPU_HPD_NONE; 584 585 if (connector) { 586 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 587 struct amdgpu_connector_atom_dig *dig_connector = 588 amdgpu_connector->con_priv; 589 590 dp_clock = dig_connector->dp_clock; 591 dp_lane_count = dig_connector->dp_lane_count; 592 hpd_id = amdgpu_connector->hpd.hpd; 593 } 594 595 /* no dig encoder assigned */ 596 if (dig->dig_encoder == -1) 597 return; 598 599 memset(&args, 0, sizeof(args)); 600 601 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 602 return; 603 604 switch (frev) { 605 case 1: 606 switch (crev) { 607 case 1: 608 args.v1.ucAction = action; 609 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 610 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE) 611 args.v3.ucPanelMode = panel_mode; 612 else 613 args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 614 615 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode)) 616 args.v1.ucLaneNum = dp_lane_count; 617 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 618 args.v1.ucLaneNum = 8; 619 else 620 args.v1.ucLaneNum = 4; 621 622 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000)) 623 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ; 624 switch (amdgpu_encoder->encoder_id) { 625 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 626 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1; 627 break; 628 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 629 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 630 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2; 631 break; 632 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 633 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3; 634 break; 635 } 636 if (dig->linkb) 637 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB; 638 else 639 
args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA; 640 break; 641 case 2: 642 case 3: 643 args.v3.ucAction = action; 644 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 645 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE) 646 args.v3.ucPanelMode = panel_mode; 647 else 648 args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 649 650 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode)) 651 args.v3.ucLaneNum = dp_lane_count; 652 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 653 args.v3.ucLaneNum = 8; 654 else 655 args.v3.ucLaneNum = 4; 656 657 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000)) 658 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ; 659 args.v3.acConfig.ucDigSel = dig->dig_encoder; 660 args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 661 break; 662 case 4: 663 args.v4.ucAction = action; 664 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 665 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE) 666 args.v4.ucPanelMode = panel_mode; 667 else 668 args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 669 670 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) 671 args.v4.ucLaneNum = dp_lane_count; 672 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 673 args.v4.ucLaneNum = 8; 674 else 675 args.v4.ucLaneNum = 4; 676 677 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) { 678 if (dp_clock == 540000) 679 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ; 680 else if (dp_clock == 324000) 681 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ; 682 else if (dp_clock == 270000) 683 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ; 684 else 685 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ; 686 } 687 args.v4.acConfig.ucDigSel = dig->dig_encoder; 688 args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 689 if (hpd_id == 
AMDGPU_HPD_NONE) 690 args.v4.ucHPD_ID = 0; 691 else 692 args.v4.ucHPD_ID = hpd_id + 1; 693 break; 694 case 5: 695 switch (action) { 696 case ATOM_ENCODER_CMD_SETUP_PANEL_MODE: 697 args.v5.asDPPanelModeParam.ucAction = action; 698 args.v5.asDPPanelModeParam.ucPanelMode = panel_mode; 699 args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder; 700 break; 701 case ATOM_ENCODER_CMD_STREAM_SETUP: 702 args.v5.asStreamParam.ucAction = action; 703 args.v5.asStreamParam.ucDigId = dig->dig_encoder; 704 args.v5.asStreamParam.ucDigMode = 705 amdgpu_atombios_encoder_get_encoder_mode(encoder); 706 if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode)) 707 args.v5.asStreamParam.ucLaneNum = dp_lane_count; 708 else if (amdgpu_dig_monitor_is_duallink(encoder, 709 amdgpu_encoder->pixel_clock)) 710 args.v5.asStreamParam.ucLaneNum = 8; 711 else 712 args.v5.asStreamParam.ucLaneNum = 4; 713 args.v5.asStreamParam.ulPixelClock = 714 cpu_to_le32(amdgpu_encoder->pixel_clock / 10); 715 args.v5.asStreamParam.ucBitPerColor = 716 amdgpu_atombios_encoder_get_bpc(encoder); 717 args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000; 718 break; 719 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START: 720 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1: 721 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2: 722 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3: 723 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4: 724 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE: 725 case ATOM_ENCODER_CMD_DP_VIDEO_OFF: 726 case ATOM_ENCODER_CMD_DP_VIDEO_ON: 727 args.v5.asCmdParam.ucAction = action; 728 args.v5.asCmdParam.ucDigId = dig->dig_encoder; 729 break; 730 default: 731 DRM_ERROR("Unsupported action 0x%x\n", action); 732 break; 733 } 734 break; 735 default: 736 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 737 break; 738 } 739 break; 740 default: 741 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 742 break; 743 } 744 745 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t 
*)&args); 746 747 } 748 749 union dig_transmitter_control { 750 DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1; 751 DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2; 752 DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3; 753 DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4; 754 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5; 755 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6; 756 }; 757 758 void 759 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action, 760 uint8_t lane_num, uint8_t lane_set) 761 { 762 struct drm_device *dev = encoder->dev; 763 struct amdgpu_device *adev = dev->dev_private; 764 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 765 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 766 struct drm_connector *connector; 767 union dig_transmitter_control args; 768 int index = 0; 769 uint8_t frev, crev; 770 bool is_dp = false; 771 int pll_id = 0; 772 int dp_clock = 0; 773 int dp_lane_count = 0; 774 int connector_object_id = 0; 775 int igp_lane_info = 0; 776 int dig_encoder = dig->dig_encoder; 777 int hpd_id = AMDGPU_HPD_NONE; 778 779 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 780 connector = amdgpu_get_connector_for_encoder_init(encoder); 781 /* just needed to avoid bailing in the encoder check. 
the encoder 782 * isn't used for init 783 */ 784 dig_encoder = 0; 785 } else 786 connector = amdgpu_get_connector_for_encoder(encoder); 787 788 if (connector) { 789 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 790 struct amdgpu_connector_atom_dig *dig_connector = 791 amdgpu_connector->con_priv; 792 793 hpd_id = amdgpu_connector->hpd.hpd; 794 dp_clock = dig_connector->dp_clock; 795 dp_lane_count = dig_connector->dp_lane_count; 796 connector_object_id = 797 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; 798 } 799 800 if (encoder->crtc) { 801 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 802 pll_id = amdgpu_crtc->pll_id; 803 } 804 805 /* no dig encoder assigned */ 806 if (dig_encoder == -1) 807 return; 808 809 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))) 810 is_dp = true; 811 812 memset(&args, 0, sizeof(args)); 813 814 switch (amdgpu_encoder->encoder_id) { 815 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 816 index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl); 817 break; 818 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 819 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 820 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 821 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 822 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 823 break; 824 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 825 index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl); 826 break; 827 } 828 829 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 830 return; 831 832 switch (frev) { 833 case 1: 834 switch (crev) { 835 case 1: 836 args.v1.ucAction = action; 837 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 838 args.v1.usInitInfo = cpu_to_le16(connector_object_id); 839 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 840 args.v1.asMode.ucLaneSel = lane_num; 841 args.v1.asMode.ucLaneSet = lane_set; 842 } else { 843 if (is_dp) 844 
args.v1.usPixelClock = cpu_to_le16(dp_clock / 10); 845 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 846 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 847 else 848 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 849 } 850 851 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL; 852 853 if (dig_encoder) 854 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER; 855 else 856 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER; 857 858 if ((adev->flags & AMD_IS_APU) && 859 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) { 860 if (is_dp || 861 !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) { 862 if (igp_lane_info & 0x1) 863 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3; 864 else if (igp_lane_info & 0x2) 865 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7; 866 else if (igp_lane_info & 0x4) 867 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11; 868 else if (igp_lane_info & 0x8) 869 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15; 870 } else { 871 if (igp_lane_info & 0x3) 872 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7; 873 else if (igp_lane_info & 0xc) 874 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15; 875 } 876 } 877 878 if (dig->linkb) 879 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB; 880 else 881 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA; 882 883 if (is_dp) 884 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 885 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 886 if (dig->coherent_mode) 887 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 888 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 889 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK; 890 } 891 break; 892 case 2: 893 args.v2.ucAction = action; 894 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 895 args.v2.usInitInfo = cpu_to_le16(connector_object_id); 896 } else if 
(action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 897 args.v2.asMode.ucLaneSel = lane_num; 898 args.v2.asMode.ucLaneSet = lane_set; 899 } else { 900 if (is_dp) 901 args.v2.usPixelClock = cpu_to_le16(dp_clock / 10); 902 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 903 args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 904 else 905 args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 906 } 907 908 args.v2.acConfig.ucEncoderSel = dig_encoder; 909 if (dig->linkb) 910 args.v2.acConfig.ucLinkSel = 1; 911 912 switch (amdgpu_encoder->encoder_id) { 913 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 914 args.v2.acConfig.ucTransmitterSel = 0; 915 break; 916 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 917 args.v2.acConfig.ucTransmitterSel = 1; 918 break; 919 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 920 args.v2.acConfig.ucTransmitterSel = 2; 921 break; 922 } 923 924 if (is_dp) { 925 args.v2.acConfig.fCoherentMode = 1; 926 args.v2.acConfig.fDPConnector = 1; 927 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 928 if (dig->coherent_mode) 929 args.v2.acConfig.fCoherentMode = 1; 930 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 931 args.v2.acConfig.fDualLinkConnector = 1; 932 } 933 break; 934 case 3: 935 args.v3.ucAction = action; 936 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 937 args.v3.usInitInfo = cpu_to_le16(connector_object_id); 938 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 939 args.v3.asMode.ucLaneSel = lane_num; 940 args.v3.asMode.ucLaneSet = lane_set; 941 } else { 942 if (is_dp) 943 args.v3.usPixelClock = cpu_to_le16(dp_clock / 10); 944 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 945 args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 946 else 947 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 948 } 949 950 if (is_dp) 951 args.v3.ucLaneNum = dp_lane_count; 952 
else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 953 args.v3.ucLaneNum = 8; 954 else 955 args.v3.ucLaneNum = 4; 956 957 if (dig->linkb) 958 args.v3.acConfig.ucLinkSel = 1; 959 if (dig_encoder & 1) 960 args.v3.acConfig.ucEncoderSel = 1; 961 962 /* Select the PLL for the PHY 963 * DP PHY should be clocked from external src if there is 964 * one. 965 */ 966 /* On DCE4, if there is an external clock, it generates the DP ref clock */ 967 if (is_dp && adev->clock.dp_extclk) 968 args.v3.acConfig.ucRefClkSource = 2; /* external src */ 969 else 970 args.v3.acConfig.ucRefClkSource = pll_id; 971 972 switch (amdgpu_encoder->encoder_id) { 973 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 974 args.v3.acConfig.ucTransmitterSel = 0; 975 break; 976 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 977 args.v3.acConfig.ucTransmitterSel = 1; 978 break; 979 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 980 args.v3.acConfig.ucTransmitterSel = 2; 981 break; 982 } 983 984 if (is_dp) 985 args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */ 986 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 987 if (dig->coherent_mode) 988 args.v3.acConfig.fCoherentMode = 1; 989 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 990 args.v3.acConfig.fDualLinkConnector = 1; 991 } 992 break; 993 case 4: 994 args.v4.ucAction = action; 995 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 996 args.v4.usInitInfo = cpu_to_le16(connector_object_id); 997 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 998 args.v4.asMode.ucLaneSel = lane_num; 999 args.v4.asMode.ucLaneSet = lane_set; 1000 } else { 1001 if (is_dp) 1002 args.v4.usPixelClock = cpu_to_le16(dp_clock / 10); 1003 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1004 args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 1005 else 1006 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1007 } 1008 1009 if (is_dp) 1010 
args.v4.ucLaneNum = dp_lane_count; 1011 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1012 args.v4.ucLaneNum = 8; 1013 else 1014 args.v4.ucLaneNum = 4; 1015 1016 if (dig->linkb) 1017 args.v4.acConfig.ucLinkSel = 1; 1018 if (dig_encoder & 1) 1019 args.v4.acConfig.ucEncoderSel = 1; 1020 1021 /* Select the PLL for the PHY 1022 * DP PHY should be clocked from external src if there is 1023 * one. 1024 */ 1025 /* On DCE5 DCPLL usually generates the DP ref clock */ 1026 if (is_dp) { 1027 if (adev->clock.dp_extclk) 1028 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK; 1029 else 1030 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL; 1031 } else 1032 args.v4.acConfig.ucRefClkSource = pll_id; 1033 1034 switch (amdgpu_encoder->encoder_id) { 1035 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1036 args.v4.acConfig.ucTransmitterSel = 0; 1037 break; 1038 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1039 args.v4.acConfig.ucTransmitterSel = 1; 1040 break; 1041 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1042 args.v4.acConfig.ucTransmitterSel = 2; 1043 break; 1044 } 1045 1046 if (is_dp) 1047 args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */ 1048 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1049 if (dig->coherent_mode) 1050 args.v4.acConfig.fCoherentMode = 1; 1051 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1052 args.v4.acConfig.fDualLinkConnector = 1; 1053 } 1054 break; 1055 case 5: 1056 args.v5.ucAction = action; 1057 if (is_dp) 1058 args.v5.usSymClock = cpu_to_le16(dp_clock / 10); 1059 else 1060 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1061 1062 switch (amdgpu_encoder->encoder_id) { 1063 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1064 if (dig->linkb) 1065 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1066 else 1067 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1068 break; 1069 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1070 if (dig->linkb) 1071 args.v5.ucPhyId = 
ATOM_PHY_ID_UNIPHYD; 1072 else 1073 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1074 break; 1075 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1076 if (dig->linkb) 1077 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1078 else 1079 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1080 break; 1081 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1082 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1083 break; 1084 } 1085 if (is_dp) 1086 args.v5.ucLaneNum = dp_lane_count; 1087 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1088 args.v5.ucLaneNum = 8; 1089 else 1090 args.v5.ucLaneNum = 4; 1091 args.v5.ucConnObjId = connector_object_id; 1092 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1093 1094 if (is_dp && adev->clock.dp_extclk) 1095 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK; 1096 else 1097 args.v5.asConfig.ucPhyClkSrcId = pll_id; 1098 1099 if (is_dp) 1100 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */ 1101 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1102 if (dig->coherent_mode) 1103 args.v5.asConfig.ucCoherentMode = 1; 1104 } 1105 if (hpd_id == AMDGPU_HPD_NONE) 1106 args.v5.asConfig.ucHPDSel = 0; 1107 else 1108 args.v5.asConfig.ucHPDSel = hpd_id + 1; 1109 args.v5.ucDigEncoderSel = 1 << dig_encoder; 1110 args.v5.ucDPLaneSet = lane_set; 1111 break; 1112 case 6: 1113 args.v6.ucAction = action; 1114 if (is_dp) 1115 args.v6.ulSymClock = cpu_to_le32(dp_clock / 10); 1116 else 1117 args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10); 1118 1119 switch (amdgpu_encoder->encoder_id) { 1120 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1121 if (dig->linkb) 1122 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1123 else 1124 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1125 break; 1126 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1127 if (dig->linkb) 1128 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD; 1129 else 1130 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1131 break; 1132 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1133 if (dig->linkb) 
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
			else
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
			break;
		}
		/* DP uses the negotiated lane count; dual-link TMDS needs 8 lanes,
		 * single link 4 */
		if (is_dp)
			args.v6.ucLaneNum = dp_lane_count;
		else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
			args.v6.ucLaneNum = 8;
		else
			args.v6.ucLaneNum = 4;
		args.v6.ucConnObjId = connector_object_id;
		/* v6 overlays the DP voltage-swing/pre-emphasis selection and the
		 * encoder mode: only one is meaningful per action */
		if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
			args.v6.ucDPLaneSet = lane_set;
		else
			args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

		/* HPD pin select: 0 means no HPD pin, otherwise pin index + 1 */
		if (hpd_id == AMDGPU_HPD_NONE)
			args.v6.ucHPDSel = 0;
		else
			args.v6.ucHPDSel = hpd_id + 1;
		args.v6.ucDigEncoderSel = 1 << dig_encoder;
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}
	break;
default:
	DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
	break;
}

amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * Power the eDP panel up or down via the UNIPHYTransmitterControl table.
 *
 * Only acts on eDP connectors and only for the POWER_ON/POWER_OFF actions;
 * everything else is a successful no-op.  On power-up, polls HPD for up to
 * 300 ms so callers can rely on the panel being ready.
 *
 * Returns true on success (or no-op), false if the panel never reported
 * presence after power-on.
 */
bool
amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
					    int action)
{
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_device *dev = amdgpu_connector->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	union dig_transmitter_control args;
	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
	uint8_t frev, crev;

	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
		goto done;

	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
		goto done;

	/* bail if the VBIOS does not provide this command table */
	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		goto done;

	memset(&args, 0, sizeof(args));

	args.v1.ucAction = action;

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

	/* wait for the panel to power up */
	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
		int i;

		for (i = 0; i < 300; i++) {
			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
				return true;
			mdelay(1);
		}
		return false;
	}
done:
	return true;
}

/* union over the ExternalEncoderControl parameter layouts per table revision */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};

/*
 * Program an external encoder (e.g. a DP bridge) through the
 * ExternalEncoderControl ATOM command table.  DP clock/lane parameters are
 * pulled from the connector's dig info when a connector is attached.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* during init the encoder may not yet be routed to a connector */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if
	    (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				/* v1/v2 only distinguish the 2.70 GHz link rate */
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* INIT passes the connector id instead of a pixel clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			/* select which of up to three external encoders to address */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * Enable or disable (action = ATOM_ENABLE/ATOM_DISABLE) the full DIG path:
 * digital encoder, transmitter, optional external encoder, eDP panel power,
 * DP link training and LCD backlight.  The call ordering below follows the
 * ATOM BIOS programming sequence and should not be rearranged.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels must be powered before the transmitter is enabled */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* disable path: video off, external encoder off, backlight off,
		 * RX to D3, transmitter off, finally eDP panel power off */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);

		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}

/*
 * DPMS entry point: map the DRM DPMS mode onto enable/disable of the
 * appropriate encoder type (DIG, DVO or DAC).  STANDBY/SUSPEND/OFF are all
 * treated as disable.
 */
void
amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
		  amdgpu_encoder->active_device);
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
			break;
		}
		break;
	default:
		return;
	}
}

/* union over the SelectCRTC_Source parameter layouts per table revision */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};

void
amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) 1465 { 1466 struct drm_device *dev = encoder->dev; 1467 struct amdgpu_device *adev = dev->dev_private; 1468 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1469 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1470 union crtc_source_param args; 1471 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1472 uint8_t frev, crev; 1473 struct amdgpu_encoder_atom_dig *dig; 1474 1475 memset(&args, 0, sizeof(args)); 1476 1477 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1478 return; 1479 1480 switch (frev) { 1481 case 1: 1482 switch (crev) { 1483 case 1: 1484 default: 1485 args.v1.ucCRTC = amdgpu_crtc->crtc_id; 1486 switch (amdgpu_encoder->encoder_id) { 1487 case ENCODER_OBJECT_ID_INTERNAL_TMDS1: 1488 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1: 1489 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX; 1490 break; 1491 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1492 case ENCODER_OBJECT_ID_INTERNAL_LVTM1: 1493 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) 1494 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX; 1495 else 1496 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX; 1497 break; 1498 case ENCODER_OBJECT_ID_INTERNAL_DVO1: 1499 case ENCODER_OBJECT_ID_INTERNAL_DDI: 1500 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1501 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX; 1502 break; 1503 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1504 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1505 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1506 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1507 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1508 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1509 else 1510 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX; 1511 break; 1512 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1513 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1514 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1515 
args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1516 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1517 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1518 else 1519 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX; 1520 break; 1521 } 1522 break; 1523 case 2: 1524 args.v2.ucCRTC = amdgpu_crtc->crtc_id; 1525 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1526 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1527 1528 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1529 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1530 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1531 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1532 else 1533 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1534 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1535 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1536 } else { 1537 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1538 } 1539 switch (amdgpu_encoder->encoder_id) { 1540 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1541 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1542 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1543 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1544 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1545 dig = amdgpu_encoder->enc_priv; 1546 switch (dig->dig_encoder) { 1547 case 0: 1548 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1549 break; 1550 case 1: 1551 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1552 break; 1553 case 2: 1554 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1555 break; 1556 case 3: 1557 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1558 break; 1559 case 4: 1560 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1561 break; 1562 case 5: 1563 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1564 break; 1565 case 6: 1566 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1567 break; 1568 } 1569 break; 1570 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1571 
args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1572 break; 1573 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1574 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1575 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1576 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1577 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1578 else 1579 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1580 break; 1581 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1582 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1583 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1584 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1585 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1586 else 1587 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1588 break; 1589 } 1590 break; 1591 case 3: 1592 args.v3.ucCRTC = amdgpu_crtc->crtc_id; 1593 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1594 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1595 1596 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1597 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1598 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1599 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1600 else 1601 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1602 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1603 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1604 } else { 1605 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1606 } 1607 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); 1608 switch (amdgpu_encoder->encoder_id) { 1609 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1610 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1611 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1612 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1613 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1614 dig = amdgpu_encoder->enc_priv; 1615 switch (dig->dig_encoder) { 
1616 case 0: 1617 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1618 break; 1619 case 1: 1620 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1621 break; 1622 case 2: 1623 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1624 break; 1625 case 3: 1626 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1627 break; 1628 case 4: 1629 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1630 break; 1631 case 5: 1632 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1633 break; 1634 case 6: 1635 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1636 break; 1637 } 1638 break; 1639 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1640 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1641 break; 1642 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1643 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1644 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1645 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1646 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1647 else 1648 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1649 break; 1650 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1651 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1652 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1653 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1654 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1655 else 1656 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1657 break; 1658 } 1659 break; 1660 } 1661 break; 1662 default: 1663 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1664 return; 1665 } 1666 1667 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1668 } 1669 1670 /* This only needs to be called once at startup */ 1671 void 1672 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) 1673 { 1674 struct drm_device *dev = adev->ddev; 1675 struct drm_encoder *encoder; 1676 1677 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1678 struct amdgpu_encoder *amdgpu_encoder = 
to_amdgpu_encoder(encoder); 1679 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1680 1681 switch (amdgpu_encoder->encoder_id) { 1682 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1683 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1684 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1685 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1686 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT, 1687 0, 0); 1688 break; 1689 } 1690 1691 if (ext_encoder) 1692 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1693 EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT); 1694 } 1695 } 1696 1697 static bool 1698 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder, 1699 struct drm_connector *connector) 1700 { 1701 struct drm_device *dev = encoder->dev; 1702 struct amdgpu_device *adev = dev->dev_private; 1703 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1704 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1705 1706 if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT | 1707 ATOM_DEVICE_CV_SUPPORT | 1708 ATOM_DEVICE_CRT_SUPPORT)) { 1709 DAC_LOAD_DETECTION_PS_ALLOCATION args; 1710 int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection); 1711 uint8_t frev, crev; 1712 1713 memset(&args, 0, sizeof(args)); 1714 1715 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1716 return false; 1717 1718 args.sDacload.ucMisc = 0; 1719 1720 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) || 1721 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1)) 1722 args.sDacload.ucDacType = ATOM_DAC_A; 1723 else 1724 args.sDacload.ucDacType = ATOM_DAC_B; 1725 1726 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) 1727 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT); 1728 else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) 1729 args.sDacload.usDeviceID = 
cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT); 1730 else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1731 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT); 1732 if (crev >= 3) 1733 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1734 } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1735 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT); 1736 if (crev >= 3) 1737 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1738 } 1739 1740 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1741 1742 return true; 1743 } else 1744 return false; 1745 } 1746 1747 enum drm_connector_status 1748 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder, 1749 struct drm_connector *connector) 1750 { 1751 struct drm_device *dev = encoder->dev; 1752 struct amdgpu_device *adev = dev->dev_private; 1753 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1754 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1755 uint32_t bios_0_scratch; 1756 1757 if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) { 1758 DRM_DEBUG_KMS("detect returned false \n"); 1759 return connector_status_unknown; 1760 } 1761 1762 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1763 1764 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1765 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1766 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1767 return connector_status_connected; 1768 } 1769 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1770 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1771 return connector_status_connected; 1772 } 1773 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1774 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1775 return connector_status_connected; 1776 } 1777 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1778 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1779 
return connector_status_connected; /* CTV */ 1780 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1781 return connector_status_connected; /* STV */ 1782 } 1783 return connector_status_disconnected; 1784 } 1785 1786 enum drm_connector_status 1787 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder, 1788 struct drm_connector *connector) 1789 { 1790 struct drm_device *dev = encoder->dev; 1791 struct amdgpu_device *adev = dev->dev_private; 1792 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1793 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1794 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1795 u32 bios_0_scratch; 1796 1797 if (!ext_encoder) 1798 return connector_status_unknown; 1799 1800 if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0) 1801 return connector_status_unknown; 1802 1803 /* load detect on the dp bridge */ 1804 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1805 EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION); 1806 1807 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1808 1809 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1810 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1811 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1812 return connector_status_connected; 1813 } 1814 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1815 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1816 return connector_status_connected; 1817 } 1818 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1819 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1820 return connector_status_connected; 1821 } 1822 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1823 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1824 return connector_status_connected; /* CTV */ 1825 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1826 return 
connector_status_connected; /* STV */ 1827 } 1828 return connector_status_disconnected; 1829 } 1830 1831 void 1832 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder) 1833 { 1834 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1835 1836 if (ext_encoder) 1837 /* ddc_setup on the dp bridge */ 1838 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1839 EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP); 1840 1841 } 1842 1843 void 1844 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector, 1845 struct drm_encoder *encoder, 1846 bool connected) 1847 { 1848 struct drm_device *dev = connector->dev; 1849 struct amdgpu_device *adev = dev->dev_private; 1850 struct amdgpu_connector *amdgpu_connector = 1851 to_amdgpu_connector(connector); 1852 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1853 uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch; 1854 1855 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1856 bios_3_scratch = RREG32(mmBIOS_SCRATCH_3); 1857 bios_6_scratch = RREG32(mmBIOS_SCRATCH_6); 1858 1859 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) && 1860 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) { 1861 if (connected) { 1862 DRM_DEBUG_KMS("LCD1 connected\n"); 1863 bios_0_scratch |= ATOM_S0_LCD1; 1864 bios_3_scratch |= ATOM_S3_LCD1_ACTIVE; 1865 bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1; 1866 } else { 1867 DRM_DEBUG_KMS("LCD1 disconnected\n"); 1868 bios_0_scratch &= ~ATOM_S0_LCD1; 1869 bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE; 1870 bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1; 1871 } 1872 } 1873 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) && 1874 (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) { 1875 if (connected) { 1876 DRM_DEBUG_KMS("CRT1 connected\n"); 1877 bios_0_scratch |= ATOM_S0_CRT1_COLOR; 1878 bios_3_scratch |= ATOM_S3_CRT1_ACTIVE; 1879 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1; 1880 } else { 1881 DRM_DEBUG_KMS("CRT1 disconnected\n"); 
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	/* write the updated state back to the scratch registers */
	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}

/*
 * LVDS_Info atom data table layouts; which member applies depends on the
 * table revision (frev/crev) reported by the VBIOS data header.
 */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};

/*
 * Parse the VBIOS LVDS_Info data table and build the digital-encoder
 * state for an LVDS/eDP panel: native mode timings, panel power delay,
 * misc flags, spread-spectrum id and link selection.  Also walks the
 * optional LCD record (patch) table, which may override the physical
 * panel size and may carry a fake EDID that is stashed in
 * adev->mode_info.bios_hardcoded_edid.
 *
 * Returns a kzalloc'd amdgpu_encoder_atom_dig (caller owns/frees it),
 * or NULL if the table could not be parsed or allocation failed.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
					  &frev, &crev, &data_offset)) {
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
			kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		/* table values are little-endian; clock is in 10 kHz units */
		lvds->native_mode.clock =
			le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		/* blanking/offset/width fields are relative; convert to the
		 * absolute total/start/end values drm_display_mode expects */
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		lvds->panel_pwr_delay =
			le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);

		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

		/* enum id 2 selects the second (B) link of the DIG */
		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			/* record table offset semantics changed with v1.2 */
			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			/* each record starts with a type byte; records are
			 * variable length, so advance by each record's size */
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						/* buffer is at least one full EDID block */
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						edid = kmalloc(edid_size, GFP_KERNEL);
						if (edid) {
							/* NOTE(review): if ucFakeEDIDLength < EDID_LENGTH the
							 * tail of the buffer is left uninitialized yet is
							 * passed to drm_edid_is_valid() — confirm/kzalloc? */
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								/* adev takes ownership of the edid buffer */
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					/* skip the payload plus the 2 header bytes;
					 * a zero length means a fixed-size record */
					record += fake_edid_record->ucFakeEDIDLength ?
						fake_edid_record->ucFakeEDIDLength + 2 :
						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					/* NOTE(review): unlike usImageH/VSize above, these are
					 * used without le16_to_cpu — verify on big-endian */
					lvds->native_mode.width_mm = panel_res_record->usHSize;
					lvds->native_mode.height_mm = panel_res_record->usVSize;
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					/* unknown record: size unknown, so stop walking */
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}

/*
 * Allocate and initialize the digital-encoder state for a DIG encoder.
 * Defaults to coherent mode with no DIG block assigned yet (-1); link B
 * is selected when the encoder enum id is 2.
 *
 * Returns a kzalloc'd amdgpu_encoder_atom_dig (caller owns/frees it),
 * or NULL on allocation failure.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
{
	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

	if (!dig)
		return NULL;

	/* coherent mode by default */
	dig->coherent_mode = true;
	dig->dig_encoder = -1;

	if (encoder_enum == 2)
		dig->linkb = true;
	else
		dig->linkb = false;

	return dig;
}