1 /* 2 * Copyright 2007-11 Advanced Micro Devices, Inc. 3 * Copyright 2008 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice shall be included in 13 * all copies or substantial portions of the Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 21 * OTHER DEALINGS IN THE SOFTWARE. 
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "amdgpu_connectors.h"
#include "atom.h"
#include "atombios_encoders.h"
#include "atombios_dp.h"
#include <linux/backlight.h>
#include "bif/bif_4_1_d.h"

/* Read the current backlight level field out of the BIOS scratch register. */
static u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
{
	u8 backlight_level;
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);

	return backlight_level;
}

/* Store @backlight_level in the BIOS scratch register, preserving the
 * register's other bits (read-modify-write).
 */
static void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
						   u8 backlight_level)
{
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
			   ATOM_S2_CURRENT_BL_LEVEL_MASK);

	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
}

/* Return the current backlight level from the scratch register, or 0 when
 * the firmware says the GPU does not control the backlight.
 */
u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return 0;

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

/* Program a new backlight level, then switch the panel backlight on or off
 * through the DIG transmitter as needed.  Only acts on LCD-capable encoders
 * that have enc_priv and when the GPU owns the backlight.
 */
void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder_atom_dig *dig;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv)
{
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
			/* level 0 turns the panel backlight fully off; any
			 * other level programs brightness and ensures it is on
			 */
			if (dig->backlight_level == 0)
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}

#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)

/* Clamp the backlight_device brightness to the valid hardware range
 * [0, AMDGPU_MAX_BL_LEVEL].
 */
static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
{
	u8 level;

	/* Convert brightness to hardware level */
	if (bd->props.brightness < 0)
		level = 0;
	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
		level = AMDGPU_MAX_BL_LEVEL;
	else
		level = bd->props.brightness;

	return level;
}

/* backlight_ops.update_status callback: push the requested brightness to
 * the hardware.
 */
static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;

	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
						    amdgpu_atombios_encoder_backlight_level(bd));

	return 0;
}

/* backlight_ops.get_brightness callback: read the current level back from
 * the BIOS scratch register.
 */
static int
amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;

	return
amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 154 } 155 156 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = { 157 .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness, 158 .update_status = amdgpu_atombios_encoder_update_backlight_status, 159 }; 160 161 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder, 162 struct drm_connector *drm_connector) 163 { 164 struct drm_device *dev = amdgpu_encoder->base.dev; 165 struct amdgpu_device *adev = dev->dev_private; 166 struct backlight_device *bd; 167 struct backlight_properties props; 168 struct amdgpu_backlight_privdata *pdata; 169 struct amdgpu_encoder_atom_dig *dig; 170 u8 backlight_level; 171 char bl_name[16]; 172 173 /* Mac laptops with multiple GPUs use the gmux driver for backlight 174 * so don't register a backlight device 175 */ 176 if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) && 177 (adev->pdev->device == 0x6741)) 178 return; 179 180 if (!amdgpu_encoder->enc_priv) 181 return; 182 183 if (!adev->is_atom_bios) 184 return; 185 186 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 187 return; 188 189 pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL); 190 if (!pdata) { 191 DRM_ERROR("Memory allocation failed\n"); 192 goto error; 193 } 194 195 memset(&props, 0, sizeof(props)); 196 props.max_brightness = AMDGPU_MAX_BL_LEVEL; 197 props.type = BACKLIGHT_RAW; 198 snprintf(bl_name, sizeof(bl_name), 199 "amdgpu_bl%d", dev->primary->index); 200 bd = backlight_device_register(bl_name, drm_connector->kdev, 201 pdata, &amdgpu_atombios_encoder_backlight_ops, &props); 202 if (IS_ERR(bd)) { 203 DRM_ERROR("Backlight registration failed\n"); 204 goto error; 205 } 206 207 pdata->encoder = amdgpu_encoder; 208 209 backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev); 210 211 dig = amdgpu_encoder->enc_priv; 212 dig->bl_dev = bd; 213 214 bd->props.brightness = 
amdgpu_atombios_encoder_get_backlight_brightness(bd); 215 bd->props.power = FB_BLANK_UNBLANK; 216 backlight_update_status(bd); 217 218 DRM_INFO("amdgpu atom DIG backlight initialized\n"); 219 220 return; 221 222 error: 223 kfree(pdata); 224 return; 225 } 226 227 void 228 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder) 229 { 230 struct drm_device *dev = amdgpu_encoder->base.dev; 231 struct amdgpu_device *adev = dev->dev_private; 232 struct backlight_device *bd = NULL; 233 struct amdgpu_encoder_atom_dig *dig; 234 235 if (!amdgpu_encoder->enc_priv) 236 return; 237 238 if (!adev->is_atom_bios) 239 return; 240 241 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU)) 242 return; 243 244 dig = amdgpu_encoder->enc_priv; 245 bd = dig->bl_dev; 246 dig->bl_dev = NULL; 247 248 if (bd) { 249 struct amdgpu_legacy_backlight_privdata *pdata; 250 251 pdata = bl_get_data(bd); 252 backlight_device_unregister(bd); 253 kfree(pdata); 254 255 DRM_INFO("amdgpu atom LVDS backlight unloaded\n"); 256 } 257 } 258 259 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */ 260 261 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder) 262 { 263 } 264 265 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder) 266 { 267 } 268 269 #endif 270 271 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder) 272 { 273 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 274 switch (amdgpu_encoder->encoder_id) { 275 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 276 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 277 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 278 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 279 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 280 return true; 281 default: 282 return false; 283 } 284 } 285 286 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder, 287 const struct drm_display_mode *mode, 288 struct drm_display_mode *adjusted_mode) 289 { 290 struct amdgpu_encoder 
*amdgpu_encoder = to_amdgpu_encoder(encoder); 291 292 /* set the active encoder to connector routing */ 293 amdgpu_encoder_set_active_device(encoder); 294 drm_mode_set_crtcinfo(adjusted_mode, 0); 295 296 /* hw bug */ 297 if ((mode->flags & DRM_MODE_FLAG_INTERLACE) 298 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2))) 299 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2; 300 301 /* get the native mode for scaling */ 302 if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT)) 303 amdgpu_panel_mode_fixup(encoder, adjusted_mode); 304 else if (amdgpu_encoder->rmx_type != RMX_OFF) 305 amdgpu_panel_mode_fixup(encoder, adjusted_mode); 306 307 if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) || 308 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) { 309 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 310 amdgpu_atombios_dp_set_link_config(connector, adjusted_mode); 311 } 312 313 return true; 314 } 315 316 static void 317 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action) 318 { 319 struct drm_device *dev = encoder->dev; 320 struct amdgpu_device *adev = dev->dev_private; 321 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 322 DAC_ENCODER_CONTROL_PS_ALLOCATION args; 323 int index = 0; 324 325 memset(&args, 0, sizeof(args)); 326 327 switch (amdgpu_encoder->encoder_id) { 328 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 329 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 330 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl); 331 break; 332 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 333 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 334 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl); 335 break; 336 } 337 338 args.ucAction = action; 339 args.ucDacStandard = ATOM_DAC1_PS2; 340 args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 341 342 
amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

/* Translate the attached crtc's bpc into the ATOM PANEL_*BIT_PER_COLOR
 * encoding (defaults to 8 bpc when no crtc is attached).
 */
static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
{
	int bpc = 8;

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		bpc = amdgpu_crtc->bpc;
	}

	switch (bpc) {
	case 0:
		return PANEL_BPC_UNDEFINE;
	case 6:
		return PANEL_6BIT_PER_COLOR;
	case 8:
	default:
		return PANEL_8BIT_PER_COLOR;
	case 10:
		return PANEL_10BIT_PER_COLOR;
	case 12:
		return PANEL_12BIT_PER_COLOR;
	case 16:
		return PANEL_16BIT_PER_COLOR;
	}
}

union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};

/* Program a DVO encoder via the DVOEncoderControl table; the argument
 * layout depends on the table revision reported by the VBIOS.
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/* Pick the ATOM encoder mode (DP/HDMI/DVI/CRT/LVDS/TV) for the connector
 * currently routed to @encoder, honoring the amdgpu_audio module option.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}

/*
 * DIG Encoder/Transmitter Setup
 *
 * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 * Supports up to 6 digital outputs
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded
 * DIG1 drives UNIPHY0 link A, A+B
 * DIG2 drives UNIPHY0 link B
 * DIG3 drives UNIPHY1 link A, A+B
 * DIG4 drives UNIPHY1 link B
 * DIG5 drives UNIPHY2 link A, A+B
 * DIG6 drives UNIPHY2 link B
 *
 * Routing
 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 * Examples:
 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
 */

union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
};

/* Program a DIG encoder block via the DIGxEncoderControl table; argument
 * layout depends on the table revision reported by the VBIOS.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND,
DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				/* NOTE(review): the v3 view is used inside the v1
				 * branch — presumably ucPanelMode shares its byte
				 * offset across revisions; confirm against the
				 * v1 struct layout.
				 */
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				/* NOTE(review): ucConfig set through the v1 view —
				 * presumably same offset in all revisions; confirm.
				 */
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* HPD pin is 1-based in the table; 0 means "none" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
};

/* Program a DIG transmitter (PHY) via the UNIPHY/LVTMA/DVO control tables.
 * @action selects init/enable/disable/etc.; @lane_num/@lane_set carry the
 * lane selection for DP voltage-swing/pre-emphasis setup.
 */
void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check.
 the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			/* NOTE(review): igp_lane_info is always 0 in this
			 * function, so none of the lane-group branches below
			 * can fire — looks inherited from code that read the
			 * IGP info table; confirm intent.
			 */
			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (dig->linkb)
				args.v4.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v4.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE5 DCPLL usually generates the DP ref clock */
			if (is_dp) {
				if (adev->clock.dp_extclk)
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
				else
					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
			} else
				args.v4.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v4.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v4.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v4.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v4.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 5:
			args.v5.ucAction = action;
			if (is_dp)
				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
			else
				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

			/* v5 addresses the PHY directly: map UNIPHY block +
			 * link to the ATOM PHY id
			 */
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
				else
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				if (dig->linkb)
					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
				else
args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1031 break; 1032 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1033 if (dig->linkb) 1034 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1035 else 1036 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1037 break; 1038 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1039 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1040 break; 1041 } 1042 if (is_dp) 1043 args.v5.ucLaneNum = dp_lane_count; 1044 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1045 args.v5.ucLaneNum = 8; 1046 else 1047 args.v5.ucLaneNum = 4; 1048 args.v5.ucConnObjId = connector_object_id; 1049 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1050 1051 if (is_dp && adev->clock.dp_extclk) 1052 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK; 1053 else 1054 args.v5.asConfig.ucPhyClkSrcId = pll_id; 1055 1056 if (is_dp) 1057 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */ 1058 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1059 if (dig->coherent_mode) 1060 args.v5.asConfig.ucCoherentMode = 1; 1061 } 1062 if (hpd_id == AMDGPU_HPD_NONE) 1063 args.v5.asConfig.ucHPDSel = 0; 1064 else 1065 args.v5.asConfig.ucHPDSel = hpd_id + 1; 1066 args.v5.ucDigEncoderSel = 1 << dig_encoder; 1067 args.v5.ucDPLaneSet = lane_set; 1068 break; 1069 default: 1070 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 1071 break; 1072 } 1073 break; 1074 default: 1075 DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 1076 break; 1077 } 1078 1079 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1080 } 1081 1082 bool 1083 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector, 1084 int action) 1085 { 1086 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1087 struct drm_device *dev = amdgpu_connector->base.dev; 1088 struct amdgpu_device *adev = dev->dev_private; 1089 union dig_transmitter_control args; 1090 int index = 
GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 1091 uint8_t frev, crev; 1092 1093 if (connector->connector_type != DRM_MODE_CONNECTOR_eDP) 1094 goto done; 1095 1096 if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) && 1097 (action != ATOM_TRANSMITTER_ACTION_POWER_OFF)) 1098 goto done; 1099 1100 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1101 goto done; 1102 1103 memset(&args, 0, sizeof(args)); 1104 1105 args.v1.ucAction = action; 1106 1107 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1108 1109 /* wait for the panel to power up */ 1110 if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) { 1111 int i; 1112 1113 for (i = 0; i < 300; i++) { 1114 if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd)) 1115 return true; 1116 mdelay(1); 1117 } 1118 return false; 1119 } 1120 done: 1121 return true; 1122 } 1123 1124 union external_encoder_control { 1125 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1; 1126 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3; 1127 }; 1128 1129 static void 1130 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder, 1131 struct drm_encoder *ext_encoder, 1132 int action) 1133 { 1134 struct drm_device *dev = encoder->dev; 1135 struct amdgpu_device *adev = dev->dev_private; 1136 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1137 struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder); 1138 union external_encoder_control args; 1139 struct drm_connector *connector; 1140 int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl); 1141 u8 frev, crev; 1142 int dp_clock = 0; 1143 int dp_lane_count = 0; 1144 int connector_object_id = 0; 1145 u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 1146 1147 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT) 1148 connector = amdgpu_get_connector_for_encoder_init(encoder); 1149 else 1150 connector = 
amdgpu_get_connector_for_encoder(encoder); 1151 1152 if (connector) { 1153 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1154 struct amdgpu_connector_atom_dig *dig_connector = 1155 amdgpu_connector->con_priv; 1156 1157 dp_clock = dig_connector->dp_clock; 1158 dp_lane_count = dig_connector->dp_lane_count; 1159 connector_object_id = 1160 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; 1161 } 1162 1163 memset(&args, 0, sizeof(args)); 1164 1165 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1166 return; 1167 1168 switch (frev) { 1169 case 1: 1170 /* no params on frev 1 */ 1171 break; 1172 case 2: 1173 switch (crev) { 1174 case 1: 1175 case 2: 1176 args.v1.sDigEncoder.ucAction = action; 1177 args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1178 args.v1.sDigEncoder.ucEncoderMode = 1179 amdgpu_atombios_encoder_get_encoder_mode(encoder); 1180 1181 if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) { 1182 if (dp_clock == 270000) 1183 args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ; 1184 args.v1.sDigEncoder.ucLaneNum = dp_lane_count; 1185 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1186 args.v1.sDigEncoder.ucLaneNum = 8; 1187 else 1188 args.v1.sDigEncoder.ucLaneNum = 4; 1189 break; 1190 case 3: 1191 args.v3.sExtEncoder.ucAction = action; 1192 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT) 1193 args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id); 1194 else 1195 args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1196 args.v3.sExtEncoder.ucEncoderMode = 1197 amdgpu_atombios_encoder_get_encoder_mode(encoder); 1198 1199 if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) { 1200 if (dp_clock == 270000) 1201 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ; 1202 else if (dp_clock == 540000) 1203 
args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ; 1204 args.v3.sExtEncoder.ucLaneNum = dp_lane_count; 1205 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1206 args.v3.sExtEncoder.ucLaneNum = 8; 1207 else 1208 args.v3.sExtEncoder.ucLaneNum = 4; 1209 switch (ext_enum) { 1210 case GRAPH_OBJECT_ENUM_ID1: 1211 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1; 1212 break; 1213 case GRAPH_OBJECT_ENUM_ID2: 1214 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2; 1215 break; 1216 case GRAPH_OBJECT_ENUM_ID3: 1217 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3; 1218 break; 1219 } 1220 args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 1221 break; 1222 default: 1223 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1224 return; 1225 } 1226 break; 1227 default: 1228 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1229 return; 1230 } 1231 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1232 } 1233 1234 static void 1235 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action) 1236 { 1237 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1238 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1239 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1240 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1241 struct amdgpu_connector *amdgpu_connector = NULL; 1242 struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL; 1243 1244 if (connector) { 1245 amdgpu_connector = to_amdgpu_connector(connector); 1246 amdgpu_dig_connector = amdgpu_connector->con_priv; 1247 } 1248 1249 if (action == ATOM_ENABLE) { 1250 if (!connector) 1251 dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE; 1252 else 1253 dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector); 1254 1255 /* setup and 
enable the encoder */ 1256 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0); 1257 amdgpu_atombios_encoder_setup_dig_encoder(encoder, 1258 ATOM_ENCODER_CMD_SETUP_PANEL_MODE, 1259 dig->panel_mode); 1260 if (ext_encoder) 1261 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1262 EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP); 1263 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1264 connector) { 1265 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) { 1266 amdgpu_atombios_encoder_set_edp_panel_power(connector, 1267 ATOM_TRANSMITTER_ACTION_POWER_ON); 1268 amdgpu_dig_connector->edp_on = true; 1269 } 1270 } 1271 /* enable the transmitter */ 1272 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1273 ATOM_TRANSMITTER_ACTION_ENABLE, 1274 0, 0); 1275 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1276 connector) { 1277 /* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */ 1278 amdgpu_atombios_dp_link_train(encoder, connector); 1279 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0); 1280 } 1281 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 1282 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1283 ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0); 1284 if (ext_encoder) 1285 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE); 1286 } else { 1287 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1288 connector) 1289 amdgpu_atombios_encoder_setup_dig_encoder(encoder, 1290 ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0); 1291 if (ext_encoder) 1292 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE); 1293 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 1294 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1295 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0); 1296 1297 if 
(ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1298 connector) 1299 amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3); 1300 /* disable the transmitter */ 1301 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1302 ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0); 1303 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1304 connector) { 1305 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) { 1306 amdgpu_atombios_encoder_set_edp_panel_power(connector, 1307 ATOM_TRANSMITTER_ACTION_POWER_OFF); 1308 amdgpu_dig_connector->edp_on = false; 1309 } 1310 } 1311 } 1312 } 1313 1314 void 1315 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode) 1316 { 1317 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1318 1319 DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n", 1320 amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices, 1321 amdgpu_encoder->active_device); 1322 switch (amdgpu_encoder->encoder_id) { 1323 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1324 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1325 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1326 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1327 switch (mode) { 1328 case DRM_MODE_DPMS_ON: 1329 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE); 1330 break; 1331 case DRM_MODE_DPMS_STANDBY: 1332 case DRM_MODE_DPMS_SUSPEND: 1333 case DRM_MODE_DPMS_OFF: 1334 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE); 1335 break; 1336 } 1337 break; 1338 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1339 switch (mode) { 1340 case DRM_MODE_DPMS_ON: 1341 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE); 1342 break; 1343 case DRM_MODE_DPMS_STANDBY: 1344 case DRM_MODE_DPMS_SUSPEND: 1345 case DRM_MODE_DPMS_OFF: 1346 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE); 1347 break; 1348 } 1349 break; 1350 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1351 switch (mode) { 1352 case DRM_MODE_DPMS_ON: 
1353 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE); 1354 break; 1355 case DRM_MODE_DPMS_STANDBY: 1356 case DRM_MODE_DPMS_SUSPEND: 1357 case DRM_MODE_DPMS_OFF: 1358 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE); 1359 break; 1360 } 1361 break; 1362 default: 1363 return; 1364 } 1365 } 1366 1367 union crtc_source_param { 1368 SELECT_CRTC_SOURCE_PS_ALLOCATION v1; 1369 SELECT_CRTC_SOURCE_PARAMETERS_V2 v2; 1370 SELECT_CRTC_SOURCE_PARAMETERS_V3 v3; 1371 }; 1372 1373 void 1374 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) 1375 { 1376 struct drm_device *dev = encoder->dev; 1377 struct amdgpu_device *adev = dev->dev_private; 1378 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1379 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1380 union crtc_source_param args; 1381 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1382 uint8_t frev, crev; 1383 struct amdgpu_encoder_atom_dig *dig; 1384 1385 memset(&args, 0, sizeof(args)); 1386 1387 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1388 return; 1389 1390 switch (frev) { 1391 case 1: 1392 switch (crev) { 1393 case 1: 1394 default: 1395 args.v1.ucCRTC = amdgpu_crtc->crtc_id; 1396 switch (amdgpu_encoder->encoder_id) { 1397 case ENCODER_OBJECT_ID_INTERNAL_TMDS1: 1398 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1: 1399 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX; 1400 break; 1401 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1402 case ENCODER_OBJECT_ID_INTERNAL_LVTM1: 1403 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) 1404 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX; 1405 else 1406 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX; 1407 break; 1408 case ENCODER_OBJECT_ID_INTERNAL_DVO1: 1409 case ENCODER_OBJECT_ID_INTERNAL_DDI: 1410 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1411 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX; 1412 break; 1413 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1414 case 
ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1415 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1416 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1417 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1418 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1419 else 1420 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX; 1421 break; 1422 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1423 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1424 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1425 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1426 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1427 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1428 else 1429 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX; 1430 break; 1431 } 1432 break; 1433 case 2: 1434 args.v2.ucCRTC = amdgpu_crtc->crtc_id; 1435 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1436 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1437 1438 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1439 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1440 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1441 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1442 else 1443 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1444 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1445 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1446 } else { 1447 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1448 } 1449 switch (amdgpu_encoder->encoder_id) { 1450 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1451 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1452 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1453 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1454 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1455 dig = amdgpu_encoder->enc_priv; 1456 switch (dig->dig_encoder) { 1457 case 0: 1458 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1459 break; 1460 case 1: 1461 args.v2.ucEncoderID = 
ASIC_INT_DIG2_ENCODER_ID; 1462 break; 1463 case 2: 1464 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1465 break; 1466 case 3: 1467 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1468 break; 1469 case 4: 1470 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1471 break; 1472 case 5: 1473 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1474 break; 1475 case 6: 1476 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1477 break; 1478 } 1479 break; 1480 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1481 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1482 break; 1483 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1484 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1485 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1486 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1487 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1488 else 1489 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1490 break; 1491 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1492 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1493 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1494 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1495 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1496 else 1497 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1498 break; 1499 } 1500 break; 1501 case 3: 1502 args.v3.ucCRTC = amdgpu_crtc->crtc_id; 1503 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1504 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1505 1506 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1507 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1508 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1509 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1510 else 1511 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1512 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1513 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1514 } else 
{ 1515 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1516 } 1517 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); 1518 switch (amdgpu_encoder->encoder_id) { 1519 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1520 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1521 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1522 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1523 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1524 dig = amdgpu_encoder->enc_priv; 1525 switch (dig->dig_encoder) { 1526 case 0: 1527 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1528 break; 1529 case 1: 1530 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1531 break; 1532 case 2: 1533 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1534 break; 1535 case 3: 1536 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1537 break; 1538 case 4: 1539 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1540 break; 1541 case 5: 1542 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1543 break; 1544 case 6: 1545 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1546 break; 1547 } 1548 break; 1549 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1550 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1551 break; 1552 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1553 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1554 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1555 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1556 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1557 else 1558 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1559 break; 1560 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1561 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1562 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1563 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1564 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1565 else 1566 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1567 break; 1568 } 1569 break; 1570 } 1571 break; 1572 default: 1573 DRM_ERROR("Unknown table 
version: %d, %d\n", frev, crev); 1574 return; 1575 } 1576 1577 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1578 } 1579 1580 /* This only needs to be called once at startup */ 1581 void 1582 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) 1583 { 1584 struct drm_device *dev = adev->ddev; 1585 struct drm_encoder *encoder; 1586 1587 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1588 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1589 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1590 1591 switch (amdgpu_encoder->encoder_id) { 1592 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1593 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1594 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1595 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1596 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT, 1597 0, 0); 1598 break; 1599 } 1600 1601 if (ext_encoder) 1602 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1603 EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT); 1604 } 1605 } 1606 1607 static bool 1608 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder, 1609 struct drm_connector *connector) 1610 { 1611 struct drm_device *dev = encoder->dev; 1612 struct amdgpu_device *adev = dev->dev_private; 1613 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1614 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1615 1616 if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT | 1617 ATOM_DEVICE_CV_SUPPORT | 1618 ATOM_DEVICE_CRT_SUPPORT)) { 1619 DAC_LOAD_DETECTION_PS_ALLOCATION args; 1620 int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection); 1621 uint8_t frev, crev; 1622 1623 memset(&args, 0, sizeof(args)); 1624 1625 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1626 return false; 1627 1628 args.sDacload.ucMisc = 0; 1629 1630 if 
((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) || 1631 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1)) 1632 args.sDacload.ucDacType = ATOM_DAC_A; 1633 else 1634 args.sDacload.ucDacType = ATOM_DAC_B; 1635 1636 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) 1637 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT); 1638 else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) 1639 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT); 1640 else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1641 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT); 1642 if (crev >= 3) 1643 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1644 } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1645 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT); 1646 if (crev >= 3) 1647 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1648 } 1649 1650 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1651 1652 return true; 1653 } else 1654 return false; 1655 } 1656 1657 enum drm_connector_status 1658 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder, 1659 struct drm_connector *connector) 1660 { 1661 struct drm_device *dev = encoder->dev; 1662 struct amdgpu_device *adev = dev->dev_private; 1663 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1664 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1665 uint32_t bios_0_scratch; 1666 1667 if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) { 1668 DRM_DEBUG_KMS("detect returned false \n"); 1669 return connector_status_unknown; 1670 } 1671 1672 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1673 1674 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1675 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1676 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1677 return connector_status_connected; 1678 
} 1679 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1680 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1681 return connector_status_connected; 1682 } 1683 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1684 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1685 return connector_status_connected; 1686 } 1687 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1688 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1689 return connector_status_connected; /* CTV */ 1690 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1691 return connector_status_connected; /* STV */ 1692 } 1693 return connector_status_disconnected; 1694 } 1695 1696 enum drm_connector_status 1697 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder, 1698 struct drm_connector *connector) 1699 { 1700 struct drm_device *dev = encoder->dev; 1701 struct amdgpu_device *adev = dev->dev_private; 1702 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1703 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1704 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1705 u32 bios_0_scratch; 1706 1707 if (!ext_encoder) 1708 return connector_status_unknown; 1709 1710 if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0) 1711 return connector_status_unknown; 1712 1713 /* load detect on the dp bridge */ 1714 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1715 EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION); 1716 1717 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1718 1719 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1720 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1721 if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1722 return connector_status_connected; 1723 } 1724 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1725 if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1726 return 
connector_status_connected; 1727 } 1728 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1729 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1730 return connector_status_connected; 1731 } 1732 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1733 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1734 return connector_status_connected; /* CTV */ 1735 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1736 return connector_status_connected; /* STV */ 1737 } 1738 return connector_status_disconnected; 1739 } 1740 1741 void 1742 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder) 1743 { 1744 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1745 1746 if (ext_encoder) 1747 /* ddc_setup on the dp bridge */ 1748 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1749 EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP); 1750 1751 } 1752 1753 void 1754 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector, 1755 struct drm_encoder *encoder, 1756 bool connected) 1757 { 1758 struct drm_device *dev = connector->dev; 1759 struct amdgpu_device *adev = dev->dev_private; 1760 struct amdgpu_connector *amdgpu_connector = 1761 to_amdgpu_connector(connector); 1762 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1763 uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch; 1764 1765 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1766 bios_3_scratch = RREG32(mmBIOS_SCRATCH_3); 1767 bios_6_scratch = RREG32(mmBIOS_SCRATCH_6); 1768 1769 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) && 1770 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) { 1771 if (connected) { 1772 DRM_DEBUG_KMS("LCD1 connected\n"); 1773 bios_0_scratch |= ATOM_S0_LCD1; 1774 bios_3_scratch |= ATOM_S3_LCD1_ACTIVE; 1775 bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1; 1776 } else { 1777 DRM_DEBUG_KMS("LCD1 disconnected\n"); 1778 bios_0_scratch &= 
~ATOM_S0_LCD1; 1779 bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE; 1780 bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1; 1781 } 1782 } 1783 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) && 1784 (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) { 1785 if (connected) { 1786 DRM_DEBUG_KMS("CRT1 connected\n"); 1787 bios_0_scratch |= ATOM_S0_CRT1_COLOR; 1788 bios_3_scratch |= ATOM_S3_CRT1_ACTIVE; 1789 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1; 1790 } else { 1791 DRM_DEBUG_KMS("CRT1 disconnected\n"); 1792 bios_0_scratch &= ~ATOM_S0_CRT1_MASK; 1793 bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE; 1794 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1; 1795 } 1796 } 1797 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) && 1798 (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) { 1799 if (connected) { 1800 DRM_DEBUG_KMS("CRT2 connected\n"); 1801 bios_0_scratch |= ATOM_S0_CRT2_COLOR; 1802 bios_3_scratch |= ATOM_S3_CRT2_ACTIVE; 1803 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2; 1804 } else { 1805 DRM_DEBUG_KMS("CRT2 disconnected\n"); 1806 bios_0_scratch &= ~ATOM_S0_CRT2_MASK; 1807 bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE; 1808 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2; 1809 } 1810 } 1811 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) && 1812 (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) { 1813 if (connected) { 1814 DRM_DEBUG_KMS("DFP1 connected\n"); 1815 bios_0_scratch |= ATOM_S0_DFP1; 1816 bios_3_scratch |= ATOM_S3_DFP1_ACTIVE; 1817 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1; 1818 } else { 1819 DRM_DEBUG_KMS("DFP1 disconnected\n"); 1820 bios_0_scratch &= ~ATOM_S0_DFP1; 1821 bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE; 1822 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1; 1823 } 1824 } 1825 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) && 1826 (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) { 1827 if (connected) { 1828 DRM_DEBUG_KMS("DFP2 connected\n"); 1829 bios_0_scratch |= ATOM_S0_DFP2; 1830 bios_3_scratch |= ATOM_S3_DFP2_ACTIVE; 1831 bios_6_scratch |= 
ATOM_S6_ACC_REQ_DFP2; 1832 } else { 1833 DRM_DEBUG_KMS("DFP2 disconnected\n"); 1834 bios_0_scratch &= ~ATOM_S0_DFP2; 1835 bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE; 1836 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2; 1837 } 1838 } 1839 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) && 1840 (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) { 1841 if (connected) { 1842 DRM_DEBUG_KMS("DFP3 connected\n"); 1843 bios_0_scratch |= ATOM_S0_DFP3; 1844 bios_3_scratch |= ATOM_S3_DFP3_ACTIVE; 1845 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3; 1846 } else { 1847 DRM_DEBUG_KMS("DFP3 disconnected\n"); 1848 bios_0_scratch &= ~ATOM_S0_DFP3; 1849 bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE; 1850 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3; 1851 } 1852 } 1853 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) && 1854 (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) { 1855 if (connected) { 1856 DRM_DEBUG_KMS("DFP4 connected\n"); 1857 bios_0_scratch |= ATOM_S0_DFP4; 1858 bios_3_scratch |= ATOM_S3_DFP4_ACTIVE; 1859 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4; 1860 } else { 1861 DRM_DEBUG_KMS("DFP4 disconnected\n"); 1862 bios_0_scratch &= ~ATOM_S0_DFP4; 1863 bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE; 1864 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4; 1865 } 1866 } 1867 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) && 1868 (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) { 1869 if (connected) { 1870 DRM_DEBUG_KMS("DFP5 connected\n"); 1871 bios_0_scratch |= ATOM_S0_DFP5; 1872 bios_3_scratch |= ATOM_S3_DFP5_ACTIVE; 1873 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5; 1874 } else { 1875 DRM_DEBUG_KMS("DFP5 disconnected\n"); 1876 bios_0_scratch &= ~ATOM_S0_DFP5; 1877 bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE; 1878 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5; 1879 } 1880 } 1881 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) && 1882 (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) { 1883 if (connected) { 1884 DRM_DEBUG_KMS("DFP6 connected\n"); 1885 bios_0_scratch |= ATOM_S0_DFP6; 
1886 bios_3_scratch |= ATOM_S3_DFP6_ACTIVE; 1887 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6; 1888 } else { 1889 DRM_DEBUG_KMS("DFP6 disconnected\n"); 1890 bios_0_scratch &= ~ATOM_S0_DFP6; 1891 bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE; 1892 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6; 1893 } 1894 } 1895 1896 WREG32(mmBIOS_SCRATCH_0, bios_0_scratch); 1897 WREG32(mmBIOS_SCRATCH_3, bios_3_scratch); 1898 WREG32(mmBIOS_SCRATCH_6, bios_6_scratch); 1899 } 1900 1901 union lvds_info { 1902 struct _ATOM_LVDS_INFO info; 1903 struct _ATOM_LVDS_INFO_V12 info_12; 1904 }; 1905 1906 struct amdgpu_encoder_atom_dig * 1907 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder) 1908 { 1909 struct drm_device *dev = encoder->base.dev; 1910 struct amdgpu_device *adev = dev->dev_private; 1911 struct amdgpu_mode_info *mode_info = &adev->mode_info; 1912 int index = GetIndexIntoMasterTable(DATA, LVDS_Info); 1913 uint16_t data_offset, misc; 1914 union lvds_info *lvds_info; 1915 uint8_t frev, crev; 1916 struct amdgpu_encoder_atom_dig *lvds = NULL; 1917 int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 1918 1919 if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL, 1920 &frev, &crev, &data_offset)) { 1921 lvds_info = 1922 (union lvds_info *)(mode_info->atom_context->bios + data_offset); 1923 lvds = 1924 kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 1925 1926 if (!lvds) 1927 return NULL; 1928 1929 lvds->native_mode.clock = 1930 le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10; 1931 lvds->native_mode.hdisplay = 1932 le16_to_cpu(lvds_info->info.sLCDTiming.usHActive); 1933 lvds->native_mode.vdisplay = 1934 le16_to_cpu(lvds_info->info.sLCDTiming.usVActive); 1935 lvds->native_mode.htotal = lvds->native_mode.hdisplay + 1936 le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time); 1937 lvds->native_mode.hsync_start = lvds->native_mode.hdisplay + 1938 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset); 1939 
lvds->native_mode.hsync_end = lvds->native_mode.hsync_start + 1940 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth); 1941 lvds->native_mode.vtotal = lvds->native_mode.vdisplay + 1942 le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time); 1943 lvds->native_mode.vsync_start = lvds->native_mode.vdisplay + 1944 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset); 1945 lvds->native_mode.vsync_end = lvds->native_mode.vsync_start + 1946 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth); 1947 lvds->panel_pwr_delay = 1948 le16_to_cpu(lvds_info->info.usOffDelayInMs); 1949 lvds->lcd_misc = lvds_info->info.ucLVDS_Misc; 1950 1951 misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess); 1952 if (misc & ATOM_VSYNC_POLARITY) 1953 lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC; 1954 if (misc & ATOM_HSYNC_POLARITY) 1955 lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC; 1956 if (misc & ATOM_COMPOSITESYNC) 1957 lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC; 1958 if (misc & ATOM_INTERLACE) 1959 lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE; 1960 if (misc & ATOM_DOUBLE_CLOCK_MODE) 1961 lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN; 1962 1963 lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize); 1964 lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize); 1965 1966 /* set crtc values */ 1967 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V); 1968 1969 lvds->lcd_ss_id = lvds_info->info.ucSS_Id; 1970 1971 encoder->native_mode = lvds->native_mode; 1972 1973 if (encoder_enum == 2) 1974 lvds->linkb = true; 1975 else 1976 lvds->linkb = false; 1977 1978 /* parse the lcd record table */ 1979 if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) { 1980 ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record; 1981 ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record; 1982 bool bad_record = false; 1983 u8 *record; 1984 1985 if ((frev == 1) && (crev < 2)) 1986 /* absolute */ 1987 record = (u8 
*)(mode_info->atom_context->bios + 1988 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 1989 else 1990 /* relative */ 1991 record = (u8 *)(mode_info->atom_context->bios + 1992 data_offset + 1993 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 1994 while (*record != ATOM_RECORD_END_TYPE) { 1995 switch (*record) { 1996 case LCD_MODE_PATCH_RECORD_MODE_TYPE: 1997 record += sizeof(ATOM_PATCH_RECORD_MODE); 1998 break; 1999 case LCD_RTS_RECORD_TYPE: 2000 record += sizeof(ATOM_LCD_RTS_RECORD); 2001 break; 2002 case LCD_CAP_RECORD_TYPE: 2003 record += sizeof(ATOM_LCD_MODE_CONTROL_CAP); 2004 break; 2005 case LCD_FAKE_EDID_PATCH_RECORD_TYPE: 2006 fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record; 2007 if (fake_edid_record->ucFakeEDIDLength) { 2008 struct edid *edid; 2009 int edid_size = 2010 max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength); 2011 edid = kmalloc(edid_size, GFP_KERNEL); 2012 if (edid) { 2013 memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0], 2014 fake_edid_record->ucFakeEDIDLength); 2015 2016 if (drm_edid_is_valid(edid)) { 2017 adev->mode_info.bios_hardcoded_edid = edid; 2018 adev->mode_info.bios_hardcoded_edid_size = edid_size; 2019 } else 2020 kfree(edid); 2021 } 2022 } 2023 record += fake_edid_record->ucFakeEDIDLength ? 
2024 fake_edid_record->ucFakeEDIDLength + 2 : 2025 sizeof(ATOM_FAKE_EDID_PATCH_RECORD); 2026 break; 2027 case LCD_PANEL_RESOLUTION_RECORD_TYPE: 2028 panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record; 2029 lvds->native_mode.width_mm = panel_res_record->usHSize; 2030 lvds->native_mode.height_mm = panel_res_record->usVSize; 2031 record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD); 2032 break; 2033 default: 2034 DRM_ERROR("Bad LCD record %d\n", *record); 2035 bad_record = true; 2036 break; 2037 } 2038 if (bad_record) 2039 break; 2040 } 2041 } 2042 } 2043 return lvds; 2044 } 2045 2046 struct amdgpu_encoder_atom_dig * 2047 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder) 2048 { 2049 int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 2050 struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 2051 2052 if (!dig) 2053 return NULL; 2054 2055 /* coherent mode by default */ 2056 dig->coherent_mode = true; 2057 dig->dig_encoder = -1; 2058 2059 if (encoder_enum == 2) 2060 dig->linkb = true; 2061 else 2062 dig->linkb = false; 2063 2064 return dig; 2065 } 2066 2067