1 /* 2 * Copyright 2007-11 Advanced Micro Devices, Inc. 3 * Copyright 2008 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice shall be included in 13 * all copies or substantial portions of the Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 21 * OTHER DEALINGS IN THE SOFTWARE. 
22 * 23 * Authors: Dave Airlie 24 * Alex Deucher 25 */ 26 27 #include <linux/pci.h> 28 29 #include <drm/drm_crtc_helper.h> 30 #include <drm/amdgpu_drm.h> 31 #include "amdgpu.h" 32 #include "amdgpu_connectors.h" 33 #include "amdgpu_display.h" 34 #include "atom.h" 35 #include "atombios_encoders.h" 36 #include "atombios_dp.h" 37 #include <linux/backlight.h> 38 #include "bif/bif_4_1_d.h" 39 40 u8 41 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev) 42 { 43 u8 backlight_level; 44 u32 bios_2_scratch; 45 46 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2); 47 48 backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >> 49 ATOM_S2_CURRENT_BL_LEVEL_SHIFT); 50 51 return backlight_level; 52 } 53 54 void 55 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev, 56 u8 backlight_level) 57 { 58 u32 bios_2_scratch; 59 60 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2); 61 62 bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK; 63 bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) & 64 ATOM_S2_CURRENT_BL_LEVEL_MASK); 65 66 WREG32(mmBIOS_SCRATCH_2, bios_2_scratch); 67 } 68 69 u8 70 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder) 71 { 72 struct drm_device *dev = amdgpu_encoder->base.dev; 73 struct amdgpu_device *adev = drm_to_adev(dev); 74 75 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 76 return 0; 77 78 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 79 } 80 81 void 82 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder, 83 u8 level) 84 { 85 struct drm_encoder *encoder = &amdgpu_encoder->base; 86 struct drm_device *dev = amdgpu_encoder->base.dev; 87 struct amdgpu_device *adev = drm_to_adev(dev); 88 struct amdgpu_encoder_atom_dig *dig; 89 90 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 91 return; 92 93 if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) && 94 
amdgpu_encoder->enc_priv) { 95 dig = amdgpu_encoder->enc_priv; 96 dig->backlight_level = level; 97 amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level); 98 99 switch (amdgpu_encoder->encoder_id) { 100 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 101 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 102 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 103 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 104 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 105 if (dig->backlight_level == 0) 106 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 107 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0); 108 else { 109 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 110 ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0); 111 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 112 ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0); 113 } 114 break; 115 default: 116 break; 117 } 118 } 119 } 120 121 #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE) 122 123 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd) 124 { 125 u8 level; 126 127 /* Convert brightness to hardware level */ 128 if (bd->props.brightness < 0) 129 level = 0; 130 else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL) 131 level = AMDGPU_MAX_BL_LEVEL; 132 else 133 level = bd->props.brightness; 134 135 return level; 136 } 137 138 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd) 139 { 140 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd); 141 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder; 142 143 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, 144 amdgpu_atombios_encoder_backlight_level(bd)); 145 146 return 0; 147 } 148 149 static int 150 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd) 151 { 152 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd); 153 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder; 154 struct drm_device *dev = 
amdgpu_encoder->base.dev; 155 struct amdgpu_device *adev = drm_to_adev(dev); 156 157 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 158 } 159 160 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = { 161 .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness, 162 .update_status = amdgpu_atombios_encoder_update_backlight_status, 163 }; 164 165 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder, 166 struct drm_connector *drm_connector) 167 { 168 struct drm_device *dev = amdgpu_encoder->base.dev; 169 struct amdgpu_device *adev = drm_to_adev(dev); 170 struct backlight_device *bd; 171 struct backlight_properties props; 172 struct amdgpu_backlight_privdata *pdata; 173 struct amdgpu_encoder_atom_dig *dig; 174 u8 backlight_level; 175 char bl_name[16]; 176 177 /* Mac laptops with multiple GPUs use the gmux driver for backlight 178 * so don't register a backlight device 179 */ 180 if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) && 181 (adev->pdev->device == 0x6741)) 182 return; 183 184 if (!amdgpu_encoder->enc_priv) 185 return; 186 187 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 188 return; 189 190 pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL); 191 if (!pdata) { 192 DRM_ERROR("Memory allocation failed\n"); 193 goto error; 194 } 195 196 memset(&props, 0, sizeof(props)); 197 props.max_brightness = AMDGPU_MAX_BL_LEVEL; 198 props.type = BACKLIGHT_RAW; 199 snprintf(bl_name, sizeof(bl_name), 200 "amdgpu_bl%d", dev->primary->index); 201 bd = backlight_device_register(bl_name, drm_connector->kdev, 202 pdata, &amdgpu_atombios_encoder_backlight_ops, &props); 203 if (IS_ERR(bd)) { 204 DRM_ERROR("Backlight registration failed\n"); 205 goto error; 206 } 207 208 pdata->encoder = amdgpu_encoder; 209 210 backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 211 212 dig = amdgpu_encoder->enc_priv; 213 dig->bl_dev = bd; 
214 215 bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd); 216 bd->props.power = FB_BLANK_UNBLANK; 217 backlight_update_status(bd); 218 219 DRM_INFO("amdgpu atom DIG backlight initialized\n"); 220 221 return; 222 223 error: 224 kfree(pdata); 225 return; 226 } 227 228 void 229 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder) 230 { 231 struct drm_device *dev = amdgpu_encoder->base.dev; 232 struct amdgpu_device *adev = drm_to_adev(dev); 233 struct backlight_device *bd = NULL; 234 struct amdgpu_encoder_atom_dig *dig; 235 236 if (!amdgpu_encoder->enc_priv) 237 return; 238 239 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 240 return; 241 242 dig = amdgpu_encoder->enc_priv; 243 bd = dig->bl_dev; 244 dig->bl_dev = NULL; 245 246 if (bd) { 247 struct amdgpu_legacy_backlight_privdata *pdata; 248 249 pdata = bl_get_data(bd); 250 backlight_device_unregister(bd); 251 kfree(pdata); 252 253 DRM_INFO("amdgpu atom LVDS backlight unloaded\n"); 254 } 255 } 256 257 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */ 258 259 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder) 260 { 261 } 262 263 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder) 264 { 265 } 266 267 #endif 268 269 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder) 270 { 271 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 272 switch (amdgpu_encoder->encoder_id) { 273 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 274 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 275 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 276 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 277 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 278 return true; 279 default: 280 return false; 281 } 282 } 283 284 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder, 285 const struct drm_display_mode *mode, 286 struct drm_display_mode *adjusted_mode) 287 { 288 struct amdgpu_encoder *amdgpu_encoder = 
to_amdgpu_encoder(encoder); 289 290 /* set the active encoder to connector routing */ 291 amdgpu_encoder_set_active_device(encoder); 292 drm_mode_set_crtcinfo(adjusted_mode, 0); 293 294 /* hw bug */ 295 if ((mode->flags & DRM_MODE_FLAG_INTERLACE) 296 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2))) 297 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2; 298 299 /* vertical FP must be at least 1 */ 300 if (mode->crtc_vsync_start == mode->crtc_vdisplay) 301 adjusted_mode->crtc_vsync_start++; 302 303 /* get the native mode for scaling */ 304 if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT)) 305 amdgpu_panel_mode_fixup(encoder, adjusted_mode); 306 else if (amdgpu_encoder->rmx_type != RMX_OFF) 307 amdgpu_panel_mode_fixup(encoder, adjusted_mode); 308 309 if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) || 310 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) { 311 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 312 amdgpu_atombios_dp_set_link_config(connector, adjusted_mode); 313 } 314 315 return true; 316 } 317 318 static void 319 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action) 320 { 321 struct drm_device *dev = encoder->dev; 322 struct amdgpu_device *adev = drm_to_adev(dev); 323 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 324 DAC_ENCODER_CONTROL_PS_ALLOCATION args; 325 int index = 0; 326 327 memset(&args, 0, sizeof(args)); 328 329 switch (amdgpu_encoder->encoder_id) { 330 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 331 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 332 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl); 333 break; 334 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 335 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 336 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl); 337 break; 338 } 339 340 args.ucAction = action; 341 args.ucDacStandard = ATOM_DAC1_PS2; 342 
args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 343 344 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 345 346 } 347 348 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder) 349 { 350 int bpc = 8; 351 352 if (encoder->crtc) { 353 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 354 bpc = amdgpu_crtc->bpc; 355 } 356 357 switch (bpc) { 358 case 0: 359 return PANEL_BPC_UNDEFINE; 360 case 6: 361 return PANEL_6BIT_PER_COLOR; 362 case 8: 363 default: 364 return PANEL_8BIT_PER_COLOR; 365 case 10: 366 return PANEL_10BIT_PER_COLOR; 367 case 12: 368 return PANEL_12BIT_PER_COLOR; 369 case 16: 370 return PANEL_16BIT_PER_COLOR; 371 } 372 } 373 374 union dvo_encoder_control { 375 ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds; 376 DVO_ENCODER_CONTROL_PS_ALLOCATION dvo; 377 DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3; 378 DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4; 379 }; 380 381 static void 382 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action) 383 { 384 struct drm_device *dev = encoder->dev; 385 struct amdgpu_device *adev = drm_to_adev(dev); 386 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 387 union dvo_encoder_control args; 388 int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl); 389 uint8_t frev, crev; 390 391 memset(&args, 0, sizeof(args)); 392 393 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 394 return; 395 396 switch (frev) { 397 case 1: 398 switch (crev) { 399 case 1: 400 /* R4xx, R5xx */ 401 args.ext_tmds.sXTmdsEncoder.ucEnable = action; 402 403 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 404 args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL; 405 406 args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB; 407 break; 408 case 2: 409 /* RS600/690/740 */ 410 args.dvo.sDVOEncoder.ucAction = action; 411 
args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 412 /* DFP1, CRT1, TV1 depending on the type of port */ 413 args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX; 414 415 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 416 args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL; 417 break; 418 case 3: 419 /* R6xx */ 420 args.dvo_v3.ucAction = action; 421 args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 422 args.dvo_v3.ucDVOConfig = 0; /* XXX */ 423 break; 424 case 4: 425 /* DCE8 */ 426 args.dvo_v4.ucAction = action; 427 args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 428 args.dvo_v4.ucDVOConfig = 0; /* XXX */ 429 args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 430 break; 431 default: 432 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 433 break; 434 } 435 break; 436 default: 437 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 438 break; 439 } 440 441 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 442 } 443 444 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder) 445 { 446 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 447 struct drm_connector *connector; 448 struct amdgpu_connector *amdgpu_connector; 449 struct amdgpu_connector_atom_dig *dig_connector; 450 451 /* dp bridges are always DP */ 452 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) 453 return ATOM_ENCODER_MODE_DP; 454 455 /* DVO is always DVO */ 456 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) || 457 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)) 458 return ATOM_ENCODER_MODE_DVO; 459 460 connector = amdgpu_get_connector_for_encoder(encoder); 461 /* if we don't have an active device yet, just use one of 462 * the connectors tied to the encoder. 
463 */ 464 if (!connector) 465 connector = amdgpu_get_connector_for_encoder_init(encoder); 466 amdgpu_connector = to_amdgpu_connector(connector); 467 468 switch (connector->connector_type) { 469 case DRM_MODE_CONNECTOR_DVII: 470 case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */ 471 if (amdgpu_audio != 0) { 472 if (amdgpu_connector->use_digital && 473 (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)) 474 return ATOM_ENCODER_MODE_HDMI; 475 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 476 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 477 return ATOM_ENCODER_MODE_HDMI; 478 else if (amdgpu_connector->use_digital) 479 return ATOM_ENCODER_MODE_DVI; 480 else 481 return ATOM_ENCODER_MODE_CRT; 482 } else if (amdgpu_connector->use_digital) { 483 return ATOM_ENCODER_MODE_DVI; 484 } else { 485 return ATOM_ENCODER_MODE_CRT; 486 } 487 break; 488 case DRM_MODE_CONNECTOR_DVID: 489 case DRM_MODE_CONNECTOR_HDMIA: 490 default: 491 if (amdgpu_audio != 0) { 492 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE) 493 return ATOM_ENCODER_MODE_HDMI; 494 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 495 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 496 return ATOM_ENCODER_MODE_HDMI; 497 else 498 return ATOM_ENCODER_MODE_DVI; 499 } else { 500 return ATOM_ENCODER_MODE_DVI; 501 } 502 case DRM_MODE_CONNECTOR_LVDS: 503 return ATOM_ENCODER_MODE_LVDS; 504 case DRM_MODE_CONNECTOR_DisplayPort: 505 dig_connector = amdgpu_connector->con_priv; 506 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) || 507 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) { 508 return ATOM_ENCODER_MODE_DP; 509 } else if (amdgpu_audio != 0) { 510 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE) 511 return ATOM_ENCODER_MODE_HDMI; 512 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) && 513 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO)) 514 return ATOM_ENCODER_MODE_HDMI; 515 else 516 return 
ATOM_ENCODER_MODE_DVI; 517 } else { 518 return ATOM_ENCODER_MODE_DVI; 519 } 520 case DRM_MODE_CONNECTOR_eDP: 521 return ATOM_ENCODER_MODE_DP; 522 case DRM_MODE_CONNECTOR_DVIA: 523 case DRM_MODE_CONNECTOR_VGA: 524 return ATOM_ENCODER_MODE_CRT; 525 case DRM_MODE_CONNECTOR_Composite: 526 case DRM_MODE_CONNECTOR_SVIDEO: 527 case DRM_MODE_CONNECTOR_9PinDIN: 528 /* fix me */ 529 return ATOM_ENCODER_MODE_TV; 530 } 531 } 532 533 /* 534 * DIG Encoder/Transmitter Setup 535 * 536 * DCE 6.0 537 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B). 538 * Supports up to 6 digital outputs 539 * - 6 DIG encoder blocks. 540 * - DIG to PHY mapping is hardcoded 541 * DIG1 drives UNIPHY0 link A, A+B 542 * DIG2 drives UNIPHY0 link B 543 * DIG3 drives UNIPHY1 link A, A+B 544 * DIG4 drives UNIPHY1 link B 545 * DIG5 drives UNIPHY2 link A, A+B 546 * DIG6 drives UNIPHY2 link B 547 * 548 * Routing 549 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links) 550 * Examples: 551 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI 552 * crtc1 -> dig1 -> UNIPHY0 link B -> DP 553 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS 554 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI 555 */ 556 557 union dig_encoder_control { 558 DIG_ENCODER_CONTROL_PS_ALLOCATION v1; 559 DIG_ENCODER_CONTROL_PARAMETERS_V2 v2; 560 DIG_ENCODER_CONTROL_PARAMETERS_V3 v3; 561 DIG_ENCODER_CONTROL_PARAMETERS_V4 v4; 562 DIG_ENCODER_CONTROL_PARAMETERS_V5 v5; 563 }; 564 565 void 566 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder, 567 int action, int panel_mode) 568 { 569 struct drm_device *dev = encoder->dev; 570 struct amdgpu_device *adev = drm_to_adev(dev); 571 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 572 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 573 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 574 union dig_encoder_control args; 575 int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl); 576 uint8_t frev, crev; 577 int 
dp_clock = 0; 578 int dp_lane_count = 0; 579 int hpd_id = AMDGPU_HPD_NONE; 580 581 if (connector) { 582 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 583 struct amdgpu_connector_atom_dig *dig_connector = 584 amdgpu_connector->con_priv; 585 586 dp_clock = dig_connector->dp_clock; 587 dp_lane_count = dig_connector->dp_lane_count; 588 hpd_id = amdgpu_connector->hpd.hpd; 589 } 590 591 /* no dig encoder assigned */ 592 if (dig->dig_encoder == -1) 593 return; 594 595 memset(&args, 0, sizeof(args)); 596 597 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 598 return; 599 600 switch (frev) { 601 case 1: 602 switch (crev) { 603 case 1: 604 args.v1.ucAction = action; 605 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 606 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE) 607 args.v3.ucPanelMode = panel_mode; 608 else 609 args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 610 611 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode)) 612 args.v1.ucLaneNum = dp_lane_count; 613 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 614 args.v1.ucLaneNum = 8; 615 else 616 args.v1.ucLaneNum = 4; 617 618 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000)) 619 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ; 620 switch (amdgpu_encoder->encoder_id) { 621 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 622 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1; 623 break; 624 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 625 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 626 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2; 627 break; 628 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 629 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3; 630 break; 631 } 632 if (dig->linkb) 633 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB; 634 else 635 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA; 636 break; 637 case 2: 638 case 3: 639 
args.v3.ucAction = action; 640 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 641 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE) 642 args.v3.ucPanelMode = panel_mode; 643 else 644 args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 645 646 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode)) 647 args.v3.ucLaneNum = dp_lane_count; 648 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 649 args.v3.ucLaneNum = 8; 650 else 651 args.v3.ucLaneNum = 4; 652 653 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000)) 654 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ; 655 args.v3.acConfig.ucDigSel = dig->dig_encoder; 656 args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 657 break; 658 case 4: 659 args.v4.ucAction = action; 660 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 661 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE) 662 args.v4.ucPanelMode = panel_mode; 663 else 664 args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 665 666 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) 667 args.v4.ucLaneNum = dp_lane_count; 668 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 669 args.v4.ucLaneNum = 8; 670 else 671 args.v4.ucLaneNum = 4; 672 673 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) { 674 if (dp_clock == 540000) 675 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ; 676 else if (dp_clock == 324000) 677 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ; 678 else if (dp_clock == 270000) 679 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ; 680 else 681 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ; 682 } 683 args.v4.acConfig.ucDigSel = dig->dig_encoder; 684 args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 685 if (hpd_id == AMDGPU_HPD_NONE) 686 args.v4.ucHPD_ID = 0; 687 else 688 args.v4.ucHPD_ID = hpd_id + 1; 689 
break; 690 case 5: 691 switch (action) { 692 case ATOM_ENCODER_CMD_SETUP_PANEL_MODE: 693 args.v5.asDPPanelModeParam.ucAction = action; 694 args.v5.asDPPanelModeParam.ucPanelMode = panel_mode; 695 args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder; 696 break; 697 case ATOM_ENCODER_CMD_STREAM_SETUP: 698 args.v5.asStreamParam.ucAction = action; 699 args.v5.asStreamParam.ucDigId = dig->dig_encoder; 700 args.v5.asStreamParam.ucDigMode = 701 amdgpu_atombios_encoder_get_encoder_mode(encoder); 702 if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode)) 703 args.v5.asStreamParam.ucLaneNum = dp_lane_count; 704 else if (amdgpu_dig_monitor_is_duallink(encoder, 705 amdgpu_encoder->pixel_clock)) 706 args.v5.asStreamParam.ucLaneNum = 8; 707 else 708 args.v5.asStreamParam.ucLaneNum = 4; 709 args.v5.asStreamParam.ulPixelClock = 710 cpu_to_le32(amdgpu_encoder->pixel_clock / 10); 711 args.v5.asStreamParam.ucBitPerColor = 712 amdgpu_atombios_encoder_get_bpc(encoder); 713 args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000; 714 break; 715 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START: 716 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1: 717 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2: 718 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3: 719 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4: 720 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE: 721 case ATOM_ENCODER_CMD_DP_VIDEO_OFF: 722 case ATOM_ENCODER_CMD_DP_VIDEO_ON: 723 args.v5.asCmdParam.ucAction = action; 724 args.v5.asCmdParam.ucDigId = dig->dig_encoder; 725 break; 726 default: 727 DRM_ERROR("Unsupported action 0x%x\n", action); 728 break; 729 } 730 break; 731 default: 732 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 733 break; 734 } 735 break; 736 default: 737 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 738 break; 739 } 740 741 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 742 743 } 744 745 union dig_transmitter_control { 746 
DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1; 747 DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2; 748 DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3; 749 DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4; 750 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5; 751 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6; 752 }; 753 754 void 755 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action, 756 uint8_t lane_num, uint8_t lane_set) 757 { 758 struct drm_device *dev = encoder->dev; 759 struct amdgpu_device *adev = drm_to_adev(dev); 760 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 761 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 762 struct drm_connector *connector; 763 union dig_transmitter_control args; 764 int index = 0; 765 uint8_t frev, crev; 766 bool is_dp = false; 767 int pll_id = 0; 768 int dp_clock = 0; 769 int dp_lane_count = 0; 770 int connector_object_id = 0; 771 int igp_lane_info = 0; 772 int dig_encoder = dig->dig_encoder; 773 int hpd_id = AMDGPU_HPD_NONE; 774 775 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 776 connector = amdgpu_get_connector_for_encoder_init(encoder); 777 /* just needed to avoid bailing in the encoder check. 
the encoder 778 * isn't used for init 779 */ 780 dig_encoder = 0; 781 } else 782 connector = amdgpu_get_connector_for_encoder(encoder); 783 784 if (connector) { 785 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 786 struct amdgpu_connector_atom_dig *dig_connector = 787 amdgpu_connector->con_priv; 788 789 hpd_id = amdgpu_connector->hpd.hpd; 790 dp_clock = dig_connector->dp_clock; 791 dp_lane_count = dig_connector->dp_lane_count; 792 connector_object_id = 793 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; 794 } 795 796 if (encoder->crtc) { 797 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 798 pll_id = amdgpu_crtc->pll_id; 799 } 800 801 /* no dig encoder assigned */ 802 if (dig_encoder == -1) 803 return; 804 805 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))) 806 is_dp = true; 807 808 memset(&args, 0, sizeof(args)); 809 810 switch (amdgpu_encoder->encoder_id) { 811 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 812 index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl); 813 break; 814 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 815 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 816 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 817 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 818 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 819 break; 820 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 821 index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl); 822 break; 823 } 824 825 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 826 return; 827 828 switch (frev) { 829 case 1: 830 switch (crev) { 831 case 1: 832 args.v1.ucAction = action; 833 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 834 args.v1.usInitInfo = cpu_to_le16(connector_object_id); 835 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 836 args.v1.asMode.ucLaneSel = lane_num; 837 args.v1.asMode.ucLaneSet = lane_set; 838 } else { 839 if (is_dp) 840 
args.v1.usPixelClock = cpu_to_le16(dp_clock / 10); 841 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 842 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 843 else 844 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 845 } 846 847 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL; 848 849 if (dig_encoder) 850 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER; 851 else 852 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER; 853 854 if ((adev->flags & AMD_IS_APU) && 855 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) { 856 if (is_dp || 857 !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) { 858 if (igp_lane_info & 0x1) 859 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3; 860 else if (igp_lane_info & 0x2) 861 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7; 862 else if (igp_lane_info & 0x4) 863 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11; 864 else if (igp_lane_info & 0x8) 865 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15; 866 } else { 867 if (igp_lane_info & 0x3) 868 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7; 869 else if (igp_lane_info & 0xc) 870 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15; 871 } 872 } 873 874 if (dig->linkb) 875 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB; 876 else 877 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA; 878 879 if (is_dp) 880 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 881 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 882 if (dig->coherent_mode) 883 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 884 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 885 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK; 886 } 887 break; 888 case 2: 889 args.v2.ucAction = action; 890 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 891 args.v2.usInitInfo = cpu_to_le16(connector_object_id); 892 } else if 
(action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 893 args.v2.asMode.ucLaneSel = lane_num; 894 args.v2.asMode.ucLaneSet = lane_set; 895 } else { 896 if (is_dp) 897 args.v2.usPixelClock = cpu_to_le16(dp_clock / 10); 898 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 899 args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 900 else 901 args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 902 } 903 904 args.v2.acConfig.ucEncoderSel = dig_encoder; 905 if (dig->linkb) 906 args.v2.acConfig.ucLinkSel = 1; 907 908 switch (amdgpu_encoder->encoder_id) { 909 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 910 args.v2.acConfig.ucTransmitterSel = 0; 911 break; 912 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 913 args.v2.acConfig.ucTransmitterSel = 1; 914 break; 915 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 916 args.v2.acConfig.ucTransmitterSel = 2; 917 break; 918 } 919 920 if (is_dp) { 921 args.v2.acConfig.fCoherentMode = 1; 922 args.v2.acConfig.fDPConnector = 1; 923 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 924 if (dig->coherent_mode) 925 args.v2.acConfig.fCoherentMode = 1; 926 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 927 args.v2.acConfig.fDualLinkConnector = 1; 928 } 929 break; 930 case 3: 931 args.v3.ucAction = action; 932 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 933 args.v3.usInitInfo = cpu_to_le16(connector_object_id); 934 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 935 args.v3.asMode.ucLaneSel = lane_num; 936 args.v3.asMode.ucLaneSet = lane_set; 937 } else { 938 if (is_dp) 939 args.v3.usPixelClock = cpu_to_le16(dp_clock / 10); 940 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 941 args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 942 else 943 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 944 } 945 946 if (is_dp) 947 args.v3.ucLaneNum = dp_lane_count; 948 
else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 949 args.v3.ucLaneNum = 8; 950 else 951 args.v3.ucLaneNum = 4; 952 953 if (dig->linkb) 954 args.v3.acConfig.ucLinkSel = 1; 955 if (dig_encoder & 1) 956 args.v3.acConfig.ucEncoderSel = 1; 957 958 /* Select the PLL for the PHY 959 * DP PHY should be clocked from external src if there is 960 * one. 961 */ 962 /* On DCE4, if there is an external clock, it generates the DP ref clock */ 963 if (is_dp && adev->clock.dp_extclk) 964 args.v3.acConfig.ucRefClkSource = 2; /* external src */ 965 else 966 args.v3.acConfig.ucRefClkSource = pll_id; 967 968 switch (amdgpu_encoder->encoder_id) { 969 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 970 args.v3.acConfig.ucTransmitterSel = 0; 971 break; 972 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 973 args.v3.acConfig.ucTransmitterSel = 1; 974 break; 975 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 976 args.v3.acConfig.ucTransmitterSel = 2; 977 break; 978 } 979 980 if (is_dp) 981 args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */ 982 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 983 if (dig->coherent_mode) 984 args.v3.acConfig.fCoherentMode = 1; 985 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 986 args.v3.acConfig.fDualLinkConnector = 1; 987 } 988 break; 989 case 4: 990 args.v4.ucAction = action; 991 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 992 args.v4.usInitInfo = cpu_to_le16(connector_object_id); 993 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 994 args.v4.asMode.ucLaneSel = lane_num; 995 args.v4.asMode.ucLaneSet = lane_set; 996 } else { 997 if (is_dp) 998 args.v4.usPixelClock = cpu_to_le16(dp_clock / 10); 999 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1000 args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 1001 else 1002 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1003 } 1004 1005 if (is_dp) 1006 
args.v4.ucLaneNum = dp_lane_count; 1007 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1008 args.v4.ucLaneNum = 8; 1009 else 1010 args.v4.ucLaneNum = 4; 1011 1012 if (dig->linkb) 1013 args.v4.acConfig.ucLinkSel = 1; 1014 if (dig_encoder & 1) 1015 args.v4.acConfig.ucEncoderSel = 1; 1016 1017 /* Select the PLL for the PHY 1018 * DP PHY should be clocked from external src if there is 1019 * one. 1020 */ 1021 /* On DCE5 DCPLL usually generates the DP ref clock */ 1022 if (is_dp) { 1023 if (adev->clock.dp_extclk) 1024 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK; 1025 else 1026 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL; 1027 } else 1028 args.v4.acConfig.ucRefClkSource = pll_id; 1029 1030 switch (amdgpu_encoder->encoder_id) { 1031 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1032 args.v4.acConfig.ucTransmitterSel = 0; 1033 break; 1034 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1035 args.v4.acConfig.ucTransmitterSel = 1; 1036 break; 1037 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1038 args.v4.acConfig.ucTransmitterSel = 2; 1039 break; 1040 } 1041 1042 if (is_dp) 1043 args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */ 1044 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1045 if (dig->coherent_mode) 1046 args.v4.acConfig.fCoherentMode = 1; 1047 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1048 args.v4.acConfig.fDualLinkConnector = 1; 1049 } 1050 break; 1051 case 5: 1052 args.v5.ucAction = action; 1053 if (is_dp) 1054 args.v5.usSymClock = cpu_to_le16(dp_clock / 10); 1055 else 1056 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1057 1058 switch (amdgpu_encoder->encoder_id) { 1059 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1060 if (dig->linkb) 1061 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1062 else 1063 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1064 break; 1065 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1066 if (dig->linkb) 1067 args.v5.ucPhyId = 
ATOM_PHY_ID_UNIPHYD; 1068 else 1069 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1070 break; 1071 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1072 if (dig->linkb) 1073 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1074 else 1075 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1076 break; 1077 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1078 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1079 break; 1080 } 1081 if (is_dp) 1082 args.v5.ucLaneNum = dp_lane_count; 1083 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1084 args.v5.ucLaneNum = 8; 1085 else 1086 args.v5.ucLaneNum = 4; 1087 args.v5.ucConnObjId = connector_object_id; 1088 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1089 1090 if (is_dp && adev->clock.dp_extclk) 1091 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK; 1092 else 1093 args.v5.asConfig.ucPhyClkSrcId = pll_id; 1094 1095 if (is_dp) 1096 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */ 1097 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1098 if (dig->coherent_mode) 1099 args.v5.asConfig.ucCoherentMode = 1; 1100 } 1101 if (hpd_id == AMDGPU_HPD_NONE) 1102 args.v5.asConfig.ucHPDSel = 0; 1103 else 1104 args.v5.asConfig.ucHPDSel = hpd_id + 1; 1105 args.v5.ucDigEncoderSel = 1 << dig_encoder; 1106 args.v5.ucDPLaneSet = lane_set; 1107 break; 1108 case 6: 1109 args.v6.ucAction = action; 1110 if (is_dp) 1111 args.v6.ulSymClock = cpu_to_le32(dp_clock / 10); 1112 else 1113 args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10); 1114 1115 switch (amdgpu_encoder->encoder_id) { 1116 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1117 if (dig->linkb) 1118 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1119 else 1120 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1121 break; 1122 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1123 if (dig->linkb) 1124 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD; 1125 else 1126 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1127 break; 1128 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1129 if (dig->linkb) 
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
				else
					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
				break;
			}
			/* DP uses the negotiated lane count; dual-link DVI needs all 8 lanes */
			if (is_dp)
				args.v6.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v6.ucLaneNum = 8;
			else
				args.v6.ucLaneNum = 4;
			args.v6.ucConnObjId = connector_object_id;
			/* v6 overlays the DP lane set and the dig mode; only one is valid per action */
			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
				args.v6.ucDPLaneSet = lane_set;
			else
				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			/* ucHPDSel is 1-based in the table; 0 means no HPD pin */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v6.ucHPDSel = 0;
			else
				args.v6.ucHPDSel = hpd_id + 1;
			args.v6.ucDigEncoderSel = 1 << dig_encoder;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * amdgpu_atombios_encoder_set_edp_panel_power - power an eDP panel on/off
 * @connector: the eDP connector
 * @action: ATOM_TRANSMITTER_ACTION_POWER_ON or _POWER_OFF
 *
 * Executes the UNIPHYTransmitterControl command table with only ucAction
 * set.  On POWER_ON, polls the connector's HPD pin for up to 300 ms so the
 * panel has time to come up.
 *
 * Returns true on success (or when nothing needed doing); false only when
 * POWER_ON was requested and HPD never asserted within the timeout.
 */
bool
amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
					    int action)
{
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_device *dev = amdgpu_connector->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	union dig_transmitter_control args;
	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
	uint8_t frev, crev;

	/* only meaningful for eDP panels */
	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
		goto done;

	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
		goto done;

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		goto done;

	memset(&args, 0, sizeof(args));

	args.v1.ucAction = action;

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

	/* wait for the panel to power up */
	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
		int i;

		/* poll HPD for up to 300 ms, 1 ms per iteration */
		for (i = 0; i < 300; i++) {
			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
				return true;
			mdelay(1);
		}
		return false;
	}
done:
	return true;
}

/* parameter blocks for the ExternalEncoderControl table; member selected by crev */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};

/*
 * amdgpu_atombios_encoder_setup_external_encoder - program an external
 * encoder (e.g. a DP bridge) hanging off @encoder via the
 * ExternalEncoderControl command table.
 * @encoder: internal encoder the external encoder is attached to
 * @ext_encoder: the external encoder object
 * @action: EXTERNAL_ENCODER_ACTION_V3_* command for the table
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* during init the encoder may not have a routed connector yet */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if
 (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				/* v1/v2 only distinguishes the 2.70 GHz link rate */
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* on INIT the table wants the connector id instead of a clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			/* select which external encoder instance by its enum id */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * amdgpu_atombios_encoder_setup_dig - enable or disable a DIG encoder path
 * @encoder: the DIG encoder
 * @action: ATOM_ENABLE or ATOM_DISABLE
 *
 * Drives the full bring-up/tear-down sequence for a digital output:
 * encoder setup + panel mode, optional external encoder, eDP panel power,
 * transmitter enable, DP link training and backlight on enable -- and the
 * reverse order on disable.  The ordering of these table calls matters;
 * do not reorder.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		/* no connector (e.g. bare DP bridge) -> external DP panel mode */
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels must be powered before the transmitter is enabled */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		/* blank the LCD backlight before the transmitter goes down */
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);

		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}

/*
 * amdgpu_atombios_encoder_dpms - DPMS entry point for AtomBIOS encoders
 * @encoder: the encoder to change power state on
 * @mode: DRM_MODE_DPMS_* state; anything but ON maps to disable
 *
 * Dispatches to the DIG, DVO or DAC setup helper based on encoder id.
 */
void
amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
		  amdgpu_encoder->active_device);
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
			break;
		}
		break;
	default:
		return;
	}
}

/* parameter blocks for the SelectCRTC_Source table; member selected by crev */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};

void
amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) 1461 { 1462 struct drm_device *dev = encoder->dev; 1463 struct amdgpu_device *adev = drm_to_adev(dev); 1464 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1465 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1466 union crtc_source_param args; 1467 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1468 uint8_t frev, crev; 1469 struct amdgpu_encoder_atom_dig *dig; 1470 1471 memset(&args, 0, sizeof(args)); 1472 1473 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1474 return; 1475 1476 switch (frev) { 1477 case 1: 1478 switch (crev) { 1479 case 1: 1480 default: 1481 args.v1.ucCRTC = amdgpu_crtc->crtc_id; 1482 switch (amdgpu_encoder->encoder_id) { 1483 case ENCODER_OBJECT_ID_INTERNAL_TMDS1: 1484 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1: 1485 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX; 1486 break; 1487 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1488 case ENCODER_OBJECT_ID_INTERNAL_LVTM1: 1489 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) 1490 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX; 1491 else 1492 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX; 1493 break; 1494 case ENCODER_OBJECT_ID_INTERNAL_DVO1: 1495 case ENCODER_OBJECT_ID_INTERNAL_DDI: 1496 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1497 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX; 1498 break; 1499 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1500 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1501 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1502 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1503 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1504 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1505 else 1506 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX; 1507 break; 1508 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1509 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1510 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1511 
args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1512 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1513 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1514 else 1515 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX; 1516 break; 1517 } 1518 break; 1519 case 2: 1520 args.v2.ucCRTC = amdgpu_crtc->crtc_id; 1521 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1522 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1523 1524 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1525 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1526 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1527 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1528 else 1529 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1530 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1531 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1532 } else { 1533 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1534 } 1535 switch (amdgpu_encoder->encoder_id) { 1536 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1537 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1538 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1539 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1540 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1541 dig = amdgpu_encoder->enc_priv; 1542 switch (dig->dig_encoder) { 1543 case 0: 1544 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1545 break; 1546 case 1: 1547 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1548 break; 1549 case 2: 1550 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1551 break; 1552 case 3: 1553 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1554 break; 1555 case 4: 1556 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1557 break; 1558 case 5: 1559 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1560 break; 1561 case 6: 1562 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1563 break; 1564 } 1565 break; 1566 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1567 
args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1568 break; 1569 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1570 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1571 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1572 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1573 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1574 else 1575 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1576 break; 1577 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1578 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1579 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1580 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1581 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1582 else 1583 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1584 break; 1585 } 1586 break; 1587 case 3: 1588 args.v3.ucCRTC = amdgpu_crtc->crtc_id; 1589 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1590 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1591 1592 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1593 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1594 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1595 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1596 else 1597 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1598 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1599 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1600 } else { 1601 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1602 } 1603 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); 1604 switch (amdgpu_encoder->encoder_id) { 1605 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1606 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1607 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1608 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1609 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1610 dig = amdgpu_encoder->enc_priv; 1611 switch (dig->dig_encoder) { 
1612 case 0: 1613 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1614 break; 1615 case 1: 1616 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1617 break; 1618 case 2: 1619 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1620 break; 1621 case 3: 1622 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1623 break; 1624 case 4: 1625 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1626 break; 1627 case 5: 1628 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1629 break; 1630 case 6: 1631 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1632 break; 1633 } 1634 break; 1635 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1636 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1637 break; 1638 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1639 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1640 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1641 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1642 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1643 else 1644 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1645 break; 1646 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1647 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1648 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1649 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1650 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1651 else 1652 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1653 break; 1654 } 1655 break; 1656 } 1657 break; 1658 default: 1659 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1660 return; 1661 } 1662 1663 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1664 } 1665 1666 /* This only needs to be called once at startup */ 1667 void 1668 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) 1669 { 1670 struct drm_device *dev = adev_to_drm(adev); 1671 struct drm_encoder *encoder; 1672 1673 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1674 struct amdgpu_encoder *amdgpu_encoder = 
to_amdgpu_encoder(encoder); 1675 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1676 1677 switch (amdgpu_encoder->encoder_id) { 1678 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1679 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1680 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1681 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1682 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT, 1683 0, 0); 1684 break; 1685 } 1686 1687 if (ext_encoder) 1688 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1689 EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT); 1690 } 1691 } 1692 1693 static bool 1694 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder, 1695 struct drm_connector *connector) 1696 { 1697 struct drm_device *dev = encoder->dev; 1698 struct amdgpu_device *adev = drm_to_adev(dev); 1699 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1700 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1701 1702 if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT | 1703 ATOM_DEVICE_CV_SUPPORT | 1704 ATOM_DEVICE_CRT_SUPPORT)) { 1705 DAC_LOAD_DETECTION_PS_ALLOCATION args; 1706 int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection); 1707 uint8_t frev, crev; 1708 1709 memset(&args, 0, sizeof(args)); 1710 1711 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1712 return false; 1713 1714 args.sDacload.ucMisc = 0; 1715 1716 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) || 1717 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1)) 1718 args.sDacload.ucDacType = ATOM_DAC_A; 1719 else 1720 args.sDacload.ucDacType = ATOM_DAC_B; 1721 1722 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) 1723 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT); 1724 else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) 1725 args.sDacload.usDeviceID = 
cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT); 1726 else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1727 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT); 1728 if (crev >= 3) 1729 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1730 } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1731 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT); 1732 if (crev >= 3) 1733 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1734 } 1735 1736 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1737 1738 return true; 1739 } else 1740 return false; 1741 } 1742 1743 enum drm_connector_status 1744 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder, 1745 struct drm_connector *connector) 1746 { 1747 struct drm_device *dev = encoder->dev; 1748 struct amdgpu_device *adev = drm_to_adev(dev); 1749 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1750 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1751 uint32_t bios_0_scratch; 1752 1753 if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) { 1754 DRM_DEBUG_KMS("detect returned false \n"); 1755 return connector_status_unknown; 1756 } 1757 1758 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1759 1760 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1761 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1762 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1763 return connector_status_connected; 1764 } 1765 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1766 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1767 return connector_status_connected; 1768 } 1769 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1770 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1771 return connector_status_connected; 1772 } 1773 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1774 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1775 
return connector_status_connected; /* CTV */ 1776 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1777 return connector_status_connected; /* STV */ 1778 } 1779 return connector_status_disconnected; 1780 } 1781 1782 enum drm_connector_status 1783 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder, 1784 struct drm_connector *connector) 1785 { 1786 struct drm_device *dev = encoder->dev; 1787 struct amdgpu_device *adev = drm_to_adev(dev); 1788 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1789 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1790 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1791 u32 bios_0_scratch; 1792 1793 if (!ext_encoder) 1794 return connector_status_unknown; 1795 1796 if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0) 1797 return connector_status_unknown; 1798 1799 /* load detect on the dp bridge */ 1800 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1801 EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION); 1802 1803 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1804 1805 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1806 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1807 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1808 return connector_status_connected; 1809 } 1810 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1811 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1812 return connector_status_connected; 1813 } 1814 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1815 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1816 return connector_status_connected; 1817 } 1818 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1819 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1820 return connector_status_connected; /* CTV */ 1821 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1822 return 
connector_status_connected; /* STV */ 1823 } 1824 return connector_status_disconnected; 1825 } 1826 1827 void 1828 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder) 1829 { 1830 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1831 1832 if (ext_encoder) 1833 /* ddc_setup on the dp bridge */ 1834 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1835 EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP); 1836 1837 } 1838 1839 void 1840 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector, 1841 struct drm_encoder *encoder, 1842 bool connected) 1843 { 1844 struct drm_device *dev = connector->dev; 1845 struct amdgpu_device *adev = drm_to_adev(dev); 1846 struct amdgpu_connector *amdgpu_connector = 1847 to_amdgpu_connector(connector); 1848 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1849 uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch; 1850 1851 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1852 bios_3_scratch = RREG32(mmBIOS_SCRATCH_3); 1853 bios_6_scratch = RREG32(mmBIOS_SCRATCH_6); 1854 1855 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) && 1856 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) { 1857 if (connected) { 1858 DRM_DEBUG_KMS("LCD1 connected\n"); 1859 bios_0_scratch |= ATOM_S0_LCD1; 1860 bios_3_scratch |= ATOM_S3_LCD1_ACTIVE; 1861 bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1; 1862 } else { 1863 DRM_DEBUG_KMS("LCD1 disconnected\n"); 1864 bios_0_scratch &= ~ATOM_S0_LCD1; 1865 bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE; 1866 bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1; 1867 } 1868 } 1869 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) && 1870 (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) { 1871 if (connected) { 1872 DRM_DEBUG_KMS("CRT1 connected\n"); 1873 bios_0_scratch |= ATOM_S0_CRT1_COLOR; 1874 bios_3_scratch |= ATOM_S3_CRT1_ACTIVE; 1875 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1; 1876 } else { 1877 DRM_DEBUG_KMS("CRT1 disconnected\n"); 
			/* still inside amdgpu_atombios_encoder_set_bios_scratch_regs():
			 * CRT1 disconnected - clear its scratch-reg bits */
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	/* write the updated state back for the VBIOS to see */
	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}

/* Overlay of the LVDS_Info VBIOS data-table revisions; which member is
 * valid depends on the table's frev/crev. */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct
_ATOM_LVDS_INFO_V12 info_12; 1990 }; 1991 1992 struct amdgpu_encoder_atom_dig * 1993 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder) 1994 { 1995 struct drm_device *dev = encoder->base.dev; 1996 struct amdgpu_device *adev = drm_to_adev(dev); 1997 struct amdgpu_mode_info *mode_info = &adev->mode_info; 1998 int index = GetIndexIntoMasterTable(DATA, LVDS_Info); 1999 uint16_t data_offset, misc; 2000 union lvds_info *lvds_info; 2001 uint8_t frev, crev; 2002 struct amdgpu_encoder_atom_dig *lvds = NULL; 2003 int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 2004 2005 if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL, 2006 &frev, &crev, &data_offset)) { 2007 lvds_info = 2008 (union lvds_info *)(mode_info->atom_context->bios + data_offset); 2009 lvds = 2010 kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 2011 2012 if (!lvds) 2013 return NULL; 2014 2015 lvds->native_mode.clock = 2016 le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10; 2017 lvds->native_mode.hdisplay = 2018 le16_to_cpu(lvds_info->info.sLCDTiming.usHActive); 2019 lvds->native_mode.vdisplay = 2020 le16_to_cpu(lvds_info->info.sLCDTiming.usVActive); 2021 lvds->native_mode.htotal = lvds->native_mode.hdisplay + 2022 le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time); 2023 lvds->native_mode.hsync_start = lvds->native_mode.hdisplay + 2024 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset); 2025 lvds->native_mode.hsync_end = lvds->native_mode.hsync_start + 2026 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth); 2027 lvds->native_mode.vtotal = lvds->native_mode.vdisplay + 2028 le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time); 2029 lvds->native_mode.vsync_start = lvds->native_mode.vdisplay + 2030 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset); 2031 lvds->native_mode.vsync_end = lvds->native_mode.vsync_start + 2032 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth); 2033 lvds->panel_pwr_delay = 2034 
le16_to_cpu(lvds_info->info.usOffDelayInMs); 2035 lvds->lcd_misc = lvds_info->info.ucLVDS_Misc; 2036 2037 misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess); 2038 if (misc & ATOM_VSYNC_POLARITY) 2039 lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC; 2040 if (misc & ATOM_HSYNC_POLARITY) 2041 lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC; 2042 if (misc & ATOM_COMPOSITESYNC) 2043 lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC; 2044 if (misc & ATOM_INTERLACE) 2045 lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE; 2046 if (misc & ATOM_DOUBLE_CLOCK_MODE) 2047 lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN; 2048 2049 lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize); 2050 lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize); 2051 2052 /* set crtc values */ 2053 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V); 2054 2055 lvds->lcd_ss_id = lvds_info->info.ucSS_Id; 2056 2057 encoder->native_mode = lvds->native_mode; 2058 2059 if (encoder_enum == 2) 2060 lvds->linkb = true; 2061 else 2062 lvds->linkb = false; 2063 2064 /* parse the lcd record table */ 2065 if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) { 2066 ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record; 2067 ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record; 2068 bool bad_record = false; 2069 u8 *record; 2070 2071 if ((frev == 1) && (crev < 2)) 2072 /* absolute */ 2073 record = (u8 *)(mode_info->atom_context->bios + 2074 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 2075 else 2076 /* relative */ 2077 record = (u8 *)(mode_info->atom_context->bios + 2078 data_offset + 2079 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 2080 while (*record != ATOM_RECORD_END_TYPE) { 2081 switch (*record) { 2082 case LCD_MODE_PATCH_RECORD_MODE_TYPE: 2083 record += sizeof(ATOM_PATCH_RECORD_MODE); 2084 break; 2085 case LCD_RTS_RECORD_TYPE: 2086 record += sizeof(ATOM_LCD_RTS_RECORD); 2087 break; 2088 case 
LCD_CAP_RECORD_TYPE: 2089 record += sizeof(ATOM_LCD_MODE_CONTROL_CAP); 2090 break; 2091 case LCD_FAKE_EDID_PATCH_RECORD_TYPE: 2092 fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record; 2093 if (fake_edid_record->ucFakeEDIDLength) { 2094 struct edid *edid; 2095 int edid_size = 2096 max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength); 2097 edid = kmalloc(edid_size, GFP_KERNEL); 2098 if (edid) { 2099 memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0], 2100 fake_edid_record->ucFakeEDIDLength); 2101 2102 if (drm_edid_is_valid(edid)) { 2103 adev->mode_info.bios_hardcoded_edid = edid; 2104 adev->mode_info.bios_hardcoded_edid_size = edid_size; 2105 } else 2106 kfree(edid); 2107 } 2108 } 2109 record += fake_edid_record->ucFakeEDIDLength ? 2110 fake_edid_record->ucFakeEDIDLength + 2 : 2111 sizeof(ATOM_FAKE_EDID_PATCH_RECORD); 2112 break; 2113 case LCD_PANEL_RESOLUTION_RECORD_TYPE: 2114 panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record; 2115 lvds->native_mode.width_mm = panel_res_record->usHSize; 2116 lvds->native_mode.height_mm = panel_res_record->usVSize; 2117 record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD); 2118 break; 2119 default: 2120 DRM_ERROR("Bad LCD record %d\n", *record); 2121 bad_record = true; 2122 break; 2123 } 2124 if (bad_record) 2125 break; 2126 } 2127 } 2128 } 2129 return lvds; 2130 } 2131 2132 struct amdgpu_encoder_atom_dig * 2133 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder) 2134 { 2135 int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 2136 struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 2137 2138 if (!dig) 2139 return NULL; 2140 2141 /* coherent mode by default */ 2142 dig->coherent_mode = true; 2143 dig->dig_encoder = -1; 2144 2145 if (encoder_enum == 2) 2146 dig->linkb = true; 2147 else 2148 dig->linkb = false; 2149 2150 return dig; 2151 } 2152 2153