1 /* 2 * Copyright 2007-11 Advanced Micro Devices, Inc. 3 * Copyright 2008 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice shall be included in 13 * all copies or substantial portions of the Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 21 * OTHER DEALINGS IN THE SOFTWARE. 
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */

#include <linux/pci.h>

#include <drm/drm_crtc_helper.h>
#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "amdgpu_connectors.h"
#include "amdgpu_display.h"
#include "atom.h"
#include "atombios_encoders.h"
#include "atombios_dp.h"
#include <linux/backlight.h>
#include "bif/bif_4_1_d.h"

/*
 * Read the current panel backlight level out of the BIOS scratch
 * register BIOS_SCRATCH_2, where it is mirrored for the VBIOS.
 */
u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
{
	u8 backlight_level;
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	/* the level lives in the ATOM_S2_CURRENT_BL_LEVEL field */
	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);

	return backlight_level;
}

/*
 * Store @backlight_level into the BIOS scratch register so the driver
 * and VBIOS agree on the current panel backlight level.
 */
void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
						   u8 backlight_level)
{
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	/* read-modify-write: only the backlight level field is touched */
	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
			   ATOM_S2_CURRENT_BL_LEVEL_MASK);

	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
}

/*
 * Return the current backlight level for @amdgpu_encoder, or 0 when the
 * firmware does not put the GPU in charge of backlight control.
 */
u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return 0;

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

/*
 * Program a new backlight level: mirror it in the scratch register and,
 * for LCD panels on DIG encoders, issue the matching transmitter
 * actions (BLOFF for level 0, BRIGHTNESS_CONTROL + BLON otherwise).
 */
void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder_atom_dig *dig;

	/* nothing to do unless the GPU owns backlight control */
	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv) {
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		/* keep the scratch-register mirror in sync */
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			if (dig->backlight_level == 0)
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}

#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)

/* Clamp a backlight-class brightness value to the hardware range. */
static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
{
	u8 level;

	/* Convert brightness to hardware level */
	if (bd->props.brightness < 0)
		level = 0;
	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
		level = AMDGPU_MAX_BL_LEVEL;
	else
		level = bd->props.brightness;

	return level;
}

/* backlight_ops.update_status: push the requested brightness to the panel */
static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;

	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
						    amdgpu_atombios_encoder_backlight_level(bd));

	return 0;
}

/* backlight_ops.get_brightness: report the level the hardware is using */
static int
amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
	struct drm_device *dev =
		amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

/* backlight class hooks: brightness reads/writes go through the ATOM helpers */
static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status = amdgpu_atombios_encoder_update_backlight_status,
};

/*
 * Register a backlight class device for an LCD-capable encoder.
 * Bails out silently when the encoder has no private data, when the
 * firmware keeps backlight control away from the GPU, or on dual-GPU
 * Apple laptops where the gmux driver owns the backlight.
 */
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
					    struct drm_connector *drm_connector)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct backlight_device *bd;
	struct backlight_properties props;
	struct amdgpu_backlight_privdata *pdata;
	struct amdgpu_encoder_atom_dig *dig;
	char bl_name[16];

	/* Mac laptops with multiple GPUs use the gmux driver for backlight
	 * so don't register a backlight device
	 */
	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
	    (adev->pdev->device == 0x6741))
		return;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
	if (!pdata) {
		DRM_ERROR("Memory allocation failed\n");
		goto error;
	}

	memset(&props, 0, sizeof(props));
	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
	props.type = BACKLIGHT_RAW;
	snprintf(bl_name, sizeof(bl_name),
		 "amdgpu_bl%d", dev->primary->index);
	/* NOTE(review): pdata->encoder is only assigned after registration;
	 * an ops callback running during register would see it uninitialized -
	 * confirm that cannot happen before relying on it.
	 */
	bd = backlight_device_register(bl_name, drm_connector->kdev,
				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
	if (IS_ERR(bd)) {
		DRM_ERROR("Backlight registration failed\n");
		goto error;
	}

	pdata->encoder = amdgpu_encoder;

	dig = amdgpu_encoder->enc_priv;
	dig->bl_dev = bd;

	/* seed brightness from what the hardware currently reports */
	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
	bd->props.power = FB_BLANK_UNBLANK;
	backlight_update_status(bd);

	DRM_INFO("amdgpu atom DIG backlight initialized\n");

	return;

error:
	kfree(pdata);
	return;
}

/*
 * Tear down the backlight device registered by
 * amdgpu_atombios_encoder_init_backlight() and free its private data.
 */
void
amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct backlight_device *bd = NULL;
	struct amdgpu_encoder_atom_dig *dig;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	dig = amdgpu_encoder->enc_priv;
	bd = dig->bl_dev;
	dig->bl_dev = NULL;

	if (bd) {
		/* NOTE(review): init allocates struct amdgpu_backlight_privdata;
		 * the "legacy" type name here looks stale - harmless since only
		 * the pointer is freed, but worth confirming/unifying.
		 */
		struct amdgpu_legacy_backlight_privdata *pdata;

		pdata = bl_get_data(bd);
		backlight_device_unregister(bd);
		kfree(pdata);

		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
	}
}

#else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */

/* NOTE(review): this stub takes one argument while the real version above
 * takes two - presumably never compiled because DRM selects the backlight
 * class; confirm against the header prototype.
 */
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
{
}

void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}

#endif

/* true for encoder object ids that drive digital (DVO/UNIPHY) outputs */
bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		return true;
	default:
		return false;
	}
}

/*
 * drm mode_fixup hook: update active encoder/connector routing, adjust
 * timings for hardware constraints, apply panel/native-mode scaling and
 * set up the DP link parameters for DP-capable outputs.
 */
bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	/* set the active encoder to connector routing */
	amdgpu_encoder_set_active_device(encoder);
	drm_mode_set_crtcinfo(adjusted_mode, 0);

	/* hw bug */
	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;

	/* vertical FP must be at least 1 */
	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
		adjusted_mode->crtc_vsync_start++;

	/* get the native mode for scaling */
	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
	else if (amdgpu_encoder->rmx_type != RMX_OFF)
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);

	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);

		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
	}

	return true;
}

/*
 * Run the DAC1/DAC2 EncoderControl command table for an analog DAC
 * encoder (PS/2 DAC standard, pixel clock in 10 kHz units).
 */
static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
	int index = 0;

	memset(&args, 0, sizeof(args));

	/* pick the command table matching the DAC instance */
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
		break;
	}

	args.ucAction = action;
	args.ucDacStandard = ATOM_DAC1_PS2;
	/* AtomBIOS expects the pixel clock in 10 kHz units */
	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

/*
 * Map the crtc's bpc to the AtomBIOS PANEL_*BIT_PER_COLOR encoding;
 * defaults to 8 bpc when no crtc is attached.
 */
static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
{
	int bpc = 8;

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);

		bpc = amdgpu_crtc->bpc;
	}

	switch (bpc) {
	case 0:
		return PANEL_BPC_UNDEFINE;
	case 6:
		return PANEL_6BIT_PER_COLOR;
	case 8:
	default:
		return PANEL_8BIT_PER_COLOR;
	case 10:
		return PANEL_10BIT_PER_COLOR;
	case 12:
		return PANEL_12BIT_PER_COLOR;
	case 16:
		return PANEL_16BIT_PER_COLOR;
	}
}

/* parameter layouts for the different DVOEncoderControl table revisions */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};

/*
 * Run the DVOEncoderControl command table, filling the argument layout
 * that matches the table revision reported by the VBIOS.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1
			   depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * Work out which ATOM_ENCODER_MODE_* the encoder should run in, based
 * on the connector type, sink capabilities (HDMI monitor detection, DP
 * sink type) and the amdgpu_audio module parameter.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			/* DP connector with a non-DP sink type */
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
	}
}

/*
 * DIG Encoder/Transmitter Setup
 *
 * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 *   Supports up to 6 digital outputs
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded
 *   DIG1 drives UNIPHY0 link A, A+B
 *   DIG2 drives UNIPHY0 link B
 *   DIG3 drives UNIPHY1 link A, A+B
 *   DIG4 drives UNIPHY1 link B
 *   DIG5 drives UNIPHY2 link A, A+B
 *   DIG6 drives UNIPHY2 link B
 *
 * Routing
 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 * Examples:
 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
 */

/* parameter layouts for the different DIGxEncoderControl table revisions */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};

/*
 * Program a DIG encoder block via the DIGxEncoderControl command table:
 * lane count, encoder mode, DP link rate and panel mode, with the
 * argument layout chosen by the table revision from the VBIOS.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int
	    dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	/* pull DP parameters off the connector when one is attached */
	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* NOTE(review): v3 field written in the v1 branch - presumably
			 * relying on the union layout; confirm before changing.
			 */
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			/* NOTE(review): this switch assigns (not ORs) ucConfig, which
			 * discards the DPLINKRATE flag set just above - confirm intended.
			 */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			/* NOTE(review): ucConfig set through the v1 view - presumably
			 * the field overlaps in the union; confirm before changing.
			 */
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			/* pick the DP link rate flag matching the sink's dp_clock */
			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* the table uses hpd_id + 1; 0 means no hpd pin */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		case 5:
			switch (action) {
			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
				args.v5.asDPPanelModeParam.ucAction = action;
				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
				break;
			case ATOM_ENCODER_CMD_STREAM_SETUP:
				args.v5.asStreamParam.ucAction = action;
				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
				args.v5.asStreamParam.ucDigMode =
					amdgpu_atombios_encoder_get_encoder_mode(encoder);
				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
				else if (amdgpu_dig_monitor_is_duallink(encoder,
									amdgpu_encoder->pixel_clock))
					args.v5.asStreamParam.ucLaneNum = 8;
				else
					args.v5.asStreamParam.ucLaneNum = 4;
				args.v5.asStreamParam.ulPixelClock =
					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
				args.v5.asStreamParam.ucBitPerColor =
					amdgpu_atombios_encoder_get_bpc(encoder);
				/* link rate expressed in multiples of 270 MHz */
				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
				break;
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
				args.v5.asCmdParam.ucAction = action;
				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
				break;
			default:
				DRM_ERROR("Unsupported action 0x%x\n", action);
				break;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

/* parameter layouts for the different transmitter control table revisions */
union dig_transmitter_control {
DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1; 744 DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2; 745 DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3; 746 DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4; 747 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5; 748 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6; 749 }; 750 751 void 752 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action, 753 uint8_t lane_num, uint8_t lane_set) 754 { 755 struct drm_device *dev = encoder->dev; 756 struct amdgpu_device *adev = drm_to_adev(dev); 757 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 758 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 759 struct drm_connector *connector; 760 union dig_transmitter_control args; 761 int index = 0; 762 uint8_t frev, crev; 763 bool is_dp = false; 764 int pll_id = 0; 765 int dp_clock = 0; 766 int dp_lane_count = 0; 767 int connector_object_id = 0; 768 int igp_lane_info = 0; 769 int dig_encoder = dig->dig_encoder; 770 int hpd_id = AMDGPU_HPD_NONE; 771 772 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 773 connector = amdgpu_get_connector_for_encoder_init(encoder); 774 /* just needed to avoid bailing in the encoder check. 
the encoder 775 * isn't used for init 776 */ 777 dig_encoder = 0; 778 } else 779 connector = amdgpu_get_connector_for_encoder(encoder); 780 781 if (connector) { 782 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 783 struct amdgpu_connector_atom_dig *dig_connector = 784 amdgpu_connector->con_priv; 785 786 hpd_id = amdgpu_connector->hpd.hpd; 787 dp_clock = dig_connector->dp_clock; 788 dp_lane_count = dig_connector->dp_lane_count; 789 connector_object_id = 790 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; 791 } 792 793 if (encoder->crtc) { 794 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 795 pll_id = amdgpu_crtc->pll_id; 796 } 797 798 /* no dig encoder assigned */ 799 if (dig_encoder == -1) 800 return; 801 802 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))) 803 is_dp = true; 804 805 memset(&args, 0, sizeof(args)); 806 807 switch (amdgpu_encoder->encoder_id) { 808 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 809 index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl); 810 break; 811 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 812 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 813 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 814 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 815 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 816 break; 817 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 818 index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl); 819 break; 820 } 821 822 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 823 return; 824 825 switch (frev) { 826 case 1: 827 switch (crev) { 828 case 1: 829 args.v1.ucAction = action; 830 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 831 args.v1.usInitInfo = cpu_to_le16(connector_object_id); 832 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 833 args.v1.asMode.ucLaneSel = lane_num; 834 args.v1.asMode.ucLaneSet = lane_set; 835 } else { 836 if (is_dp) 837 
args.v1.usPixelClock = cpu_to_le16(dp_clock / 10); 838 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 839 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 840 else 841 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 842 } 843 844 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL; 845 846 if (dig_encoder) 847 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER; 848 else 849 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER; 850 851 if ((adev->flags & AMD_IS_APU) && 852 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) { 853 if (is_dp || 854 !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) { 855 if (igp_lane_info & 0x1) 856 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3; 857 else if (igp_lane_info & 0x2) 858 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7; 859 else if (igp_lane_info & 0x4) 860 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11; 861 else if (igp_lane_info & 0x8) 862 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15; 863 } else { 864 if (igp_lane_info & 0x3) 865 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7; 866 else if (igp_lane_info & 0xc) 867 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15; 868 } 869 } 870 871 if (dig->linkb) 872 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB; 873 else 874 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA; 875 876 if (is_dp) 877 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 878 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 879 if (dig->coherent_mode) 880 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 881 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 882 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK; 883 } 884 break; 885 case 2: 886 args.v2.ucAction = action; 887 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 888 args.v2.usInitInfo = cpu_to_le16(connector_object_id); 889 } else if 
(action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
			/* Per-lane voltage swing / pre-emphasis for DP link training. */
			args.v2.asMode.ucLaneSel = lane_num;
			args.v2.asMode.ucLaneSet = lane_set;
		} else {
			/* Clock fields are in 10 kHz units; dual link drives each link at half the pixel clock. */
			if (is_dp)
				args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
			else
				args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
		}

		args.v2.acConfig.ucEncoderSel = dig_encoder;
		if (dig->linkb)
			args.v2.acConfig.ucLinkSel = 1;

		/* Map the UNIPHY instance to the table's transmitter select. */
		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
			args.v2.acConfig.ucTransmitterSel = 0;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			args.v2.acConfig.ucTransmitterSel = 1;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			args.v2.acConfig.ucTransmitterSel = 2;
			break;
		}

		if (is_dp) {
			args.v2.acConfig.fCoherentMode = 1;
			args.v2.acConfig.fDPConnector = 1;
		} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
			if (dig->coherent_mode)
				args.v2.acConfig.fCoherentMode = 1;
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v2.acConfig.fDualLinkConnector = 1;
		}
		break;
	case 3:
		args.v3.ucAction = action;
		if (action == ATOM_TRANSMITTER_ACTION_INIT) {
			args.v3.usInitInfo = cpu_to_le16(connector_object_id);
		} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
			args.v3.asMode.ucLaneSel = lane_num;
			args.v3.asMode.ucLaneSet = lane_set;
		} else {
			if (is_dp)
				args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
			else
				args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
		}

		if (is_dp)
			args.v3.ucLaneNum = dp_lane_count;
		else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
			args.v3.ucLaneNum = 8;
		else
			args.v3.ucLaneNum = 4;

		if (dig->linkb)
			args.v3.acConfig.ucLinkSel = 1;
		if (dig_encoder & 1)
			args.v3.acConfig.ucEncoderSel = 1;

		/* Select the PLL for the PHY
		 * DP PHY should be clocked from external src if there is
		 * one.
		 */
		/* On DCE4, if there is an external clock, it generates the DP ref clock */
		if (is_dp && adev->clock.dp_extclk)
			args.v3.acConfig.ucRefClkSource = 2; /* external src */
		else
			args.v3.acConfig.ucRefClkSource = pll_id;

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
			args.v3.acConfig.ucTransmitterSel = 0;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			args.v3.acConfig.ucTransmitterSel = 1;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			args.v3.acConfig.ucTransmitterSel = 2;
			break;
		}

		if (is_dp)
			args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
		else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
			if (dig->coherent_mode)
				args.v3.acConfig.fCoherentMode = 1;
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.acConfig.fDualLinkConnector = 1;
		}
		break;
	case 4:
		args.v4.ucAction = action;
		if (action == ATOM_TRANSMITTER_ACTION_INIT) {
			args.v4.usInitInfo = cpu_to_le16(connector_object_id);
		} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
			args.v4.asMode.ucLaneSel = lane_num;
			args.v4.asMode.ucLaneSet = lane_set;
		} else {
			if (is_dp)
				args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
			else
				args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
		}

		if (is_dp)
			args.v4.ucLaneNum = dp_lane_count;
		else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
			args.v4.ucLaneNum = 8;
		else
			args.v4.ucLaneNum = 4;

		if (dig->linkb)
			args.v4.acConfig.ucLinkSel = 1;
		if (dig_encoder & 1)
			args.v4.acConfig.ucEncoderSel = 1;

		/* Select the PLL for the PHY
		 * DP PHY should be clocked from external src if there is
		 * one.
		 */
		/* On DCE5 DCPLL usually generates the DP ref clock */
		if (is_dp) {
			if (adev->clock.dp_extclk)
				args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
			else
				args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
		} else
			args.v4.acConfig.ucRefClkSource = pll_id;

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
			args.v4.acConfig.ucTransmitterSel = 0;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			args.v4.acConfig.ucTransmitterSel = 1;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			args.v4.acConfig.ucTransmitterSel = 2;
			break;
		}

		if (is_dp)
			args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
		else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
			if (dig->coherent_mode)
				args.v4.acConfig.fCoherentMode = 1;
			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.acConfig.fDualLinkConnector = 1;
		}
		break;
	case 5:
		/* v5 identifies the transmitter by PHY id rather than by select index. */
		args.v5.ucAction = action;
		if (is_dp)
			args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
		else
			args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
			if (dig->linkb)
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
			else
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			if (dig->linkb)
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
			else
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			if (dig->linkb)
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
			else
				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
			break;
		}
		if (is_dp)
			args.v5.ucLaneNum = dp_lane_count;
		else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
			args.v5.ucLaneNum = 8;
		else
			args.v5.ucLaneNum = 4;
		args.v5.ucConnObjId = connector_object_id;
		args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

		if (is_dp && adev->clock.dp_extclk)
			args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
		else
			args.v5.asConfig.ucPhyClkSrcId = pll_id;

		if (is_dp)
			args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
		else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
			if (dig->coherent_mode)
				args.v5.asConfig.ucCoherentMode = 1;
		}
		/* HPD select is 1-based in the table; 0 means no HPD pin. */
		if (hpd_id == AMDGPU_HPD_NONE)
			args.v5.asConfig.ucHPDSel = 0;
		else
			args.v5.asConfig.ucHPDSel = hpd_id + 1;
		args.v5.ucDigEncoderSel = 1 << dig_encoder;
		args.v5.ucDPLaneSet = lane_set;
		break;
	case 6:
		args.v6.ucAction = action;
		if (is_dp)
			args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
		else
			args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
			if (dig->linkb)
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
			else
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			if (dig->linkb)
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
			else
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			if (dig->linkb)
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
			else
				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
			break;
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
			break;
		}
		if (is_dp)
			args.v6.ucLaneNum = dp_lane_count;
		else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
			args.v6.ucLaneNum = 8;
		else
			args.v6.ucLaneNum = 4;
		args.v6.ucConnObjId = connector_object_id;
		/* v6 overlays DPLaneSet and DigMode: only one is meaningful per action. */
		if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
			args.v6.ucDPLaneSet = lane_set;
		else
			args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

		if (hpd_id == AMDGPU_HPD_NONE)
			args.v6.ucHPDSel = 0;
		else
			args.v6.ucHPDSel = hpd_id + 1;
		args.v6.ucDigEncoderSel = 1 << dig_encoder;
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}
	break;
default:
	DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
	break;
}

amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * Power the eDP panel on or off via the UNIPHYTransmitterControl table.
 * Only acts on eDP connectors and only for the POWER_ON/POWER_OFF actions;
 * any other input is a successful no-op.  Returns false only when a
 * POWER_ON did not produce an HPD assertion within ~300 ms.
 */
bool
amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
					    int action)
{
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_device *dev = amdgpu_connector->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	union dig_transmitter_control args;
	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
	uint8_t frev, crev;

	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
		goto done;

	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
		goto done;

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		goto done;

	memset(&args, 0, sizeof(args));

	args.v1.ucAction = action;

amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

	/* wait for the panel to power up */
	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
		int i;

		/* Poll HPD for up to ~300 ms; panel is up once HPD asserts. */
		for (i = 0; i < 300; i++) {
			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
				return true;
			mdelay(1);
		}
		return false;
	}
done:
	return true;
}

/* Parameter overlay for the ExternalEncoderControl command table (frev 2). */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};

/*
 * Drive an external encoder (e.g. a DP bridge) attached to @encoder by
 * executing the ExternalEncoderControl AtomBIOS table with the given
 * @action.  DP clock/lane parameters are pulled from the connector's
 * dig info when a connector is present.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* At init time the encoder may not yet be bound to a connector. */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			/* Select which external encoder instance by its graph enum id. */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * Enable (action == ATOM_ENABLE) or disable the whole DIG path for
 * @encoder: digital encoder block, transmitter, optional external
 * encoder, eDP panel power, DP link training and backlight, in the
 * hardware-required order.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if
(connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				/* eDP panel must be powered before link training. */
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* Disable path: video off, external encoder off, backlight off,
		 * sink to D3, transmitter off, then eDP panel power off. */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);

		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}

/*
 * DRM DPMS hook: map the DPMS state onto an enable/disable of the
 * appropriate encoder type (DIG, DVO or DAC).  Unknown encoder ids
 * are ignored.
 */
void
amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
		      amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
		      amdgpu_encoder->active_device);
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
			break;
		}
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		switch (mode) {
		case DRM_MODE_DPMS_ON:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
			break;
		case DRM_MODE_DPMS_STANDBY:
		case DRM_MODE_DPMS_SUSPEND:
		case DRM_MODE_DPMS_OFF:
			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
			break;
		}
		break;
	default:
		return;
	}
}

/* Parameter overlay for the SelectCRTC_Source command table. */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};

/* Route this encoder to the CRTC it is attached to via SelectCRTC_Source. */
void
amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) 1458 { 1459 struct drm_device *dev = encoder->dev; 1460 struct amdgpu_device *adev = drm_to_adev(dev); 1461 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1462 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1463 union crtc_source_param args; 1464 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1465 uint8_t frev, crev; 1466 struct amdgpu_encoder_atom_dig *dig; 1467 1468 memset(&args, 0, sizeof(args)); 1469 1470 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1471 return; 1472 1473 switch (frev) { 1474 case 1: 1475 switch (crev) { 1476 case 1: 1477 default: 1478 args.v1.ucCRTC = amdgpu_crtc->crtc_id; 1479 switch (amdgpu_encoder->encoder_id) { 1480 case ENCODER_OBJECT_ID_INTERNAL_TMDS1: 1481 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1: 1482 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX; 1483 break; 1484 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1485 case ENCODER_OBJECT_ID_INTERNAL_LVTM1: 1486 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) 1487 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX; 1488 else 1489 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX; 1490 break; 1491 case ENCODER_OBJECT_ID_INTERNAL_DVO1: 1492 case ENCODER_OBJECT_ID_INTERNAL_DDI: 1493 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1494 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX; 1495 break; 1496 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1497 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1498 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1499 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1500 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1501 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1502 else 1503 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX; 1504 break; 1505 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1506 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1507 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1508 
args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1509 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1510 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1511 else 1512 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX; 1513 break; 1514 } 1515 break; 1516 case 2: 1517 args.v2.ucCRTC = amdgpu_crtc->crtc_id; 1518 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1519 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1520 1521 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1522 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1523 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1524 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1525 else 1526 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1527 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1528 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1529 } else { 1530 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1531 } 1532 switch (amdgpu_encoder->encoder_id) { 1533 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1534 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1535 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1536 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1537 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1538 dig = amdgpu_encoder->enc_priv; 1539 switch (dig->dig_encoder) { 1540 case 0: 1541 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1542 break; 1543 case 1: 1544 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1545 break; 1546 case 2: 1547 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1548 break; 1549 case 3: 1550 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1551 break; 1552 case 4: 1553 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1554 break; 1555 case 5: 1556 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1557 break; 1558 case 6: 1559 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1560 break; 1561 } 1562 break; 1563 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1564 
args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1565 break; 1566 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1567 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1568 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1569 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1570 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1571 else 1572 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1573 break; 1574 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1575 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1576 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1577 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1578 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1579 else 1580 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1581 break; 1582 } 1583 break; 1584 case 3: 1585 args.v3.ucCRTC = amdgpu_crtc->crtc_id; 1586 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1587 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1588 1589 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1590 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1591 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1592 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1593 else 1594 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1595 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1596 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1597 } else { 1598 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1599 } 1600 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); 1601 switch (amdgpu_encoder->encoder_id) { 1602 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1603 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1604 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1605 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1606 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1607 dig = amdgpu_encoder->enc_priv; 1608 switch (dig->dig_encoder) { 
1609 case 0: 1610 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1611 break; 1612 case 1: 1613 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1614 break; 1615 case 2: 1616 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1617 break; 1618 case 3: 1619 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1620 break; 1621 case 4: 1622 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1623 break; 1624 case 5: 1625 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1626 break; 1627 case 6: 1628 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1629 break; 1630 } 1631 break; 1632 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1633 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1634 break; 1635 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1636 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1637 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1638 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1639 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1640 else 1641 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1642 break; 1643 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1644 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1645 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1646 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1647 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1648 else 1649 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1650 break; 1651 } 1652 break; 1653 } 1654 break; 1655 default: 1656 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1657 return; 1658 } 1659 1660 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1661 } 1662 1663 /* This only needs to be called once at startup */ 1664 void 1665 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) 1666 { 1667 struct drm_device *dev = adev_to_drm(adev); 1668 struct drm_encoder *encoder; 1669 1670 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1671 struct amdgpu_encoder *amdgpu_encoder = 
to_amdgpu_encoder(encoder);
		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
								      0, 0);
			break;
		}

		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
	}
}

/*
 * Run the DAC_LoadDetection AtomBIOS table for @encoder/@connector so the
 * BIOS scratch registers reflect the analog load state.  Returns false if
 * the encoder has no TV/CV/CRT device or the table header cannot be parsed.
 */
static bool
amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
					struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
				       ATOM_DEVICE_CV_SUPPORT |
				       ATOM_DEVICE_CRT_SUPPORT)) {
		DAC_LOAD_DETECTION_PS_ALLOCATION args;
		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
		uint8_t frev, crev;

		memset(&args, 0, sizeof(args));

		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
			return false;

		args.sDacload.ucMisc = 0;

		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
			args.sDacload.ucDacType = ATOM_DAC_A;
		else
			args.sDacload.ucDacType = ATOM_DAC_B;

		/* Pick the first supported analog device; TV/CV need YPrPb misc on crev >= 3. */
		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		}

		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

		return true;
	} else
		return false;
}

/*
 * Analog (DAC) load detection: trigger BIOS load detect and then decode
 * the result from BIOS scratch register 0.
 */
enum drm_connector_status
amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
				   struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	uint32_t bios_0_scratch;

	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
		DRM_DEBUG_KMS("detect returned false \n");
		return connector_status_unknown;
	}

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

/*
 * Load detection through an external DP bridge: ask the bridge to do
 * DAC load detect, then decode BIOS scratch register 0.  Only valid
 * for connectors with CRT support and an external encoder present.
 */
enum drm_connector_status
amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
				   struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	u32 bios_0_scratch;

	if (!ext_encoder)
		return connector_status_unknown;

	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
		return connector_status_unknown;

	/* load detect on the dp bridge */
	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
						       EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return
connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

/* Configure the external encoder (DP bridge) for DDC pass-through, if present. */
void
amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
{
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);

	if (ext_encoder)
		/* ddc_setup on the dp bridge */
		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
							       EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);

}

/*
 * Mirror the connection state of @connector/@encoder into the BIOS
 * scratch registers (S0 connected, S3 active, S6 acc-request) so the
 * VBIOS stays in sync with the driver.  Only device bits supported by
 * both the encoder and the connector are updated.
 */
void
amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
					      struct drm_encoder *encoder,
					      bool connected)
{
	struct drm_device *dev = connector->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_connector *amdgpu_connector =
		to_amdgpu_connector(connector);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);

	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("LCD1 connected\n");
			bios_0_scratch |= ATOM_S0_LCD1;
			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
		} else {
			DRM_DEBUG_KMS("LCD1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_LCD1;
			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT1 connected\n");
			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
		} else {
			DRM_DEBUG_KMS("CRT1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}

union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct
_ATOM_LVDS_INFO_V12 info_12; 1987 }; 1988 1989 struct amdgpu_encoder_atom_dig * 1990 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder) 1991 { 1992 struct drm_device *dev = encoder->base.dev; 1993 struct amdgpu_device *adev = drm_to_adev(dev); 1994 struct amdgpu_mode_info *mode_info = &adev->mode_info; 1995 int index = GetIndexIntoMasterTable(DATA, LVDS_Info); 1996 uint16_t data_offset, misc; 1997 union lvds_info *lvds_info; 1998 uint8_t frev, crev; 1999 struct amdgpu_encoder_atom_dig *lvds = NULL; 2000 int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 2001 2002 if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL, 2003 &frev, &crev, &data_offset)) { 2004 lvds_info = 2005 (union lvds_info *)(mode_info->atom_context->bios + data_offset); 2006 lvds = 2007 kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 2008 2009 if (!lvds) 2010 return NULL; 2011 2012 lvds->native_mode.clock = 2013 le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10; 2014 lvds->native_mode.hdisplay = 2015 le16_to_cpu(lvds_info->info.sLCDTiming.usHActive); 2016 lvds->native_mode.vdisplay = 2017 le16_to_cpu(lvds_info->info.sLCDTiming.usVActive); 2018 lvds->native_mode.htotal = lvds->native_mode.hdisplay + 2019 le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time); 2020 lvds->native_mode.hsync_start = lvds->native_mode.hdisplay + 2021 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset); 2022 lvds->native_mode.hsync_end = lvds->native_mode.hsync_start + 2023 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth); 2024 lvds->native_mode.vtotal = lvds->native_mode.vdisplay + 2025 le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time); 2026 lvds->native_mode.vsync_start = lvds->native_mode.vdisplay + 2027 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset); 2028 lvds->native_mode.vsync_end = lvds->native_mode.vsync_start + 2029 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth); 2030 lvds->panel_pwr_delay = 2031 
le16_to_cpu(lvds_info->info.usOffDelayInMs); 2032 lvds->lcd_misc = lvds_info->info.ucLVDS_Misc; 2033 2034 misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess); 2035 if (misc & ATOM_VSYNC_POLARITY) 2036 lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC; 2037 if (misc & ATOM_HSYNC_POLARITY) 2038 lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC; 2039 if (misc & ATOM_COMPOSITESYNC) 2040 lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC; 2041 if (misc & ATOM_INTERLACE) 2042 lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE; 2043 if (misc & ATOM_DOUBLE_CLOCK_MODE) 2044 lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN; 2045 2046 lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize); 2047 lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize); 2048 2049 /* set crtc values */ 2050 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V); 2051 2052 lvds->lcd_ss_id = lvds_info->info.ucSS_Id; 2053 2054 encoder->native_mode = lvds->native_mode; 2055 2056 if (encoder_enum == 2) 2057 lvds->linkb = true; 2058 else 2059 lvds->linkb = false; 2060 2061 /* parse the lcd record table */ 2062 if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) { 2063 ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record; 2064 ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record; 2065 bool bad_record = false; 2066 u8 *record; 2067 2068 if ((frev == 1) && (crev < 2)) 2069 /* absolute */ 2070 record = (u8 *)(mode_info->atom_context->bios + 2071 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 2072 else 2073 /* relative */ 2074 record = (u8 *)(mode_info->atom_context->bios + 2075 data_offset + 2076 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 2077 while (*record != ATOM_RECORD_END_TYPE) { 2078 switch (*record) { 2079 case LCD_MODE_PATCH_RECORD_MODE_TYPE: 2080 record += sizeof(ATOM_PATCH_RECORD_MODE); 2081 break; 2082 case LCD_RTS_RECORD_TYPE: 2083 record += sizeof(ATOM_LCD_RTS_RECORD); 2084 break; 2085 case 
LCD_CAP_RECORD_TYPE: 2086 record += sizeof(ATOM_LCD_MODE_CONTROL_CAP); 2087 break; 2088 case LCD_FAKE_EDID_PATCH_RECORD_TYPE: 2089 fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record; 2090 if (fake_edid_record->ucFakeEDIDLength) { 2091 struct edid *edid; 2092 int edid_size = 2093 max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength); 2094 edid = kmalloc(edid_size, GFP_KERNEL); 2095 if (edid) { 2096 memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0], 2097 fake_edid_record->ucFakeEDIDLength); 2098 2099 if (drm_edid_is_valid(edid)) { 2100 adev->mode_info.bios_hardcoded_edid = edid; 2101 adev->mode_info.bios_hardcoded_edid_size = edid_size; 2102 } else 2103 kfree(edid); 2104 } 2105 } 2106 record += fake_edid_record->ucFakeEDIDLength ? 2107 fake_edid_record->ucFakeEDIDLength + 2 : 2108 sizeof(ATOM_FAKE_EDID_PATCH_RECORD); 2109 break; 2110 case LCD_PANEL_RESOLUTION_RECORD_TYPE: 2111 panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record; 2112 lvds->native_mode.width_mm = panel_res_record->usHSize; 2113 lvds->native_mode.height_mm = panel_res_record->usVSize; 2114 record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD); 2115 break; 2116 default: 2117 DRM_ERROR("Bad LCD record %d\n", *record); 2118 bad_record = true; 2119 break; 2120 } 2121 if (bad_record) 2122 break; 2123 } 2124 } 2125 } 2126 return lvds; 2127 } 2128 2129 struct amdgpu_encoder_atom_dig * 2130 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder) 2131 { 2132 int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 2133 struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 2134 2135 if (!dig) 2136 return NULL; 2137 2138 /* coherent mode by default */ 2139 dig->coherent_mode = true; 2140 dig->dig_encoder = -1; 2141 2142 if (encoder_enum == 2) 2143 dig->linkb = true; 2144 else 2145 dig->linkb = false; 2146 2147 return dig; 2148 } 2149 2150