1 /* 2 * Copyright 2007-11 Advanced Micro Devices, Inc. 3 * Copyright 2008 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice shall be included in 13 * all copies or substantial portions of the Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 21 * OTHER DEALINGS IN THE SOFTWARE. 
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */

#include <linux/pci.h>

#include <drm/drm_crtc_helper.h>
#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "amdgpu_connectors.h"
#include "amdgpu_display.h"
#include "atom.h"
#include "atombios_encoders.h"
#include "atombios_dp.h"
#include <linux/backlight.h>
#include "bif/bif_4_1_d.h"

/*
 * Read the current LCD backlight level out of the BIOS scratch register.
 * The VBIOS mirrors the level in BIOS_SCRATCH_2 so it survives across
 * driver reloads / suspend.
 */
u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
{
	u8 backlight_level;
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);

	return backlight_level;
}

/*
 * Store @backlight_level in the BIOS_SCRATCH_2 register (read-modify-write
 * of the BL level bitfield only; other scratch bits are preserved).
 */
void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
						   u8 backlight_level)
{
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
			   ATOM_S2_CURRENT_BL_LEVEL_MASK);

	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
}

/*
 * Return the current backlight level for @amdgpu_encoder, or 0 when the
 * firmware says the GPU does not control the backlight.
 */
u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return 0;

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

/*
 * Program the panel backlight to @level:
 * - caches the level in the encoder's dig priv and in BIOS_SCRATCH_2,
 * - for DIG (UNIPHY/LVTMA) encoders, issues transmitter BLOFF when the
 *   level is 0, otherwise BRIGHTNESS_CONTROL followed by BLON.
 * No-op unless the GPU controls the backlight and the encoder drives an
 * LCD with a valid enc_priv.
 */
void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder_atom_dig *dig;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv) {
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			if (dig->backlight_level == 0)
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
									      ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}

/* Clamp the backlight_device brightness into [0, AMDGPU_MAX_BL_LEVEL]. */
static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
{
	u8 level;

	/* Convert brightness to hardware level */
	if (bd->props.brightness < 0)
		level = 0;
	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
		level = AMDGPU_MAX_BL_LEVEL;
	else
		level = bd->props.brightness;

	return level;
}

/* backlight_ops.update_status: push the sysfs brightness to the hw. */
static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;

	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
						    amdgpu_atombios_encoder_backlight_level(bd));

	return 0;
}

/* backlight_ops.get_brightness: read the level back from BIOS_SCRATCH_2. */
static int
amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
};

/*
 * Register a backlight device ("amdgpu_bl<N>") for an ATOM-driven LCD.
 * Bails out early when the panel has no enc_priv, when firmware says the
 * GPU does not own the backlight, or on Apple dual-GPU laptops where the
 * gmux driver owns it instead.  The privdata is freed on any failure path.
 */
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
					    struct drm_connector *drm_connector)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct backlight_device *bd;
	struct backlight_properties props;
	struct amdgpu_backlight_privdata *pdata;
	struct amdgpu_encoder_atom_dig *dig;
	char bl_name[16];

	/* Mac laptops with multiple GPUs use the gmux driver for backlight
	 * so don't register a backlight device
	 */
	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
	    (adev->pdev->device == 0x6741))
		return;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
	if (!pdata) {
		DRM_ERROR("Memory allocation failed\n");
		goto error;
	}

	memset(&props, 0, sizeof(props));
	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
	props.type = BACKLIGHT_RAW;
	snprintf(bl_name, sizeof(bl_name),
		 "amdgpu_bl%d", dev->primary->index);
	bd = backlight_device_register(bl_name, drm_connector->kdev,
				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
	if (IS_ERR(bd)) {
		DRM_ERROR("Backlight registration failed\n");
		goto error;
	}

	pdata->encoder = amdgpu_encoder;

	dig = amdgpu_encoder->enc_priv;
	dig->bl_dev = bd;

	/* seed the sysfs brightness from the value the VBIOS left in scratch */
	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
	bd->props.power = FB_BLANK_UNBLANK;
	backlight_update_status(bd);

	DRM_INFO("amdgpu atom DIG backlight initialized\n");

	return;

error:
	kfree(pdata);
	return;
}

/*
 * Unregister the backlight device registered by
 * amdgpu_atombios_encoder_init_backlight() and free its privdata.
 */
void
amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct backlight_device *bd = NULL;
	struct amdgpu_encoder_atom_dig *dig;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	dig = amdgpu_encoder->enc_priv;
	bd = dig->bl_dev;
	dig->bl_dev = NULL;

	if (bd) {
		/* NOTE(review): type name differs from the init path
		 * (amdgpu_backlight_privdata).  Harmless here since the
		 * pointer is only passed to kfree(), but the two should
		 * probably agree — confirm against the header. */
		struct amdgpu_legacy_backlight_privdata *pdata;

		pdata = bl_get_data(bd);
		backlight_device_unregister(bd);
		kfree(pdata);

		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
	}
}

/* True for encoder object IDs that are digital (DVO/UNIPHY) blocks. */
bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		return true;
	default:
		return false;
	}
}

/*
 * drm_encoder_helper_funcs.mode_fixup: set up active-device routing,
 * work around hw timing quirks (interlace vsync start, minimum vertical
 * front porch), apply panel/RMX scaling, and compute the DP link config
 * for DP-capable paths.  Always returns true (mode accepted).
 */
bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	/* set the active encoder to connector routing */
	amdgpu_encoder_set_active_device(encoder);
	drm_mode_set_crtcinfo(adjusted_mode, 0);

	/* hw bug */
	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;

	/* vertical FP must be at least 1 */
	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
		adjusted_mode->crtc_vsync_start++;

	/* get the native mode for scaling */
	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
	else if (amdgpu_encoder->rmx_type != RMX_OFF)
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);

	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
	}

	return true;
}

/*
 * Execute the DAC1/DAC2 EncoderControl AtomBIOS command table for the
 * analog DAC feeding this encoder (standard fixed to PS2).
 */
static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
	int index = 0;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
		break;
	}

	args.ucAction = action;
	args.ucDacStandard = ATOM_DAC1_PS2;
	/* AtomBIOS tables take the pixel clock in 10 kHz units */
	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

/*
 * Map the CRTC's bpc (defaulting to 8 when no CRTC is attached) to the
 * ATOM PANEL_*BIT_PER_COLOR encoding used by the command tables.
 */
static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
{
	int bpc = 8;

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		bpc = amdgpu_crtc->bpc;
	}

	switch (bpc) {
	case 0:
		return PANEL_BPC_UNDEFINE;
	case 6:
		return PANEL_6BIT_PER_COLOR;
	case 8:
	default:
		return PANEL_8BIT_PER_COLOR;
	case 10:
		return PANEL_10BIT_PER_COLOR;
	case 12:
		return PANEL_12BIT_PER_COLOR;
	case 16:
		return PANEL_16BIT_PER_COLOR;
	}
}

/* One arg buffer per DVOEncoderControl table revision. */
union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};

/*
 * Execute the DVOEncoderControl AtomBIOS table, filling the argument
 * struct that matches the table revision reported by the VBIOS
 * (crev 1: R4xx/R5xx ext TMDS, 2: RS6xx/RS7xx, 3: R6xx, 4: DCE8).
 */
static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * Determine the ATOM_ENCODER_MODE_* for this encoder based on the
 * attached (or, before an active device exists, any tied) connector:
 * DP bridges and DP/eDP sinks -> DP, digital DVI/HDMI paths pick
 * HDMI vs DVI from the audio settings, analog paths -> CRT/TV.
 */
int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (connector->display_info.is_hdmi &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (connector->display_info.is_hdmi &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			/* DP connector carrying a non-DP (adapter) sink */
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (connector->display_info.is_hdmi &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
	}
}

/*
 * DIG Encoder/Transmitter Setup
 *
 * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 * Supports up to 6 digital outputs
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded
 * DIG1 drives UNIPHY0 link A, A+B
 * DIG2 drives UNIPHY0 link B
 * DIG3 drives UNIPHY1 link A, A+B
 * DIG4 drives UNIPHY1 link B
 * DIG5 drives UNIPHY2 link A, A+B
 * DIG6 drives UNIPHY2 link B
 *
 * Routing
 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 * Examples:
 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
 */

/* One arg buffer per DIGxEncoderControl table revision; the revisions
 * overlay each other, which some crev cases below rely on. */
union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};

/*
 * Execute the DIGxEncoderControl AtomBIOS table for @encoder with the
 * given @action (and @panel_mode for SETUP_PANEL_MODE), selecting the
 * argument layout by the table revision.  Lane count, DP link rate,
 * bpc and HPD id are derived from the attached connector; returns
 * silently when no dig encoder has been assigned yet.
 */
void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				/* panel mode field lives at the v3 offset; the
				 * union overlay makes this write land correctly */
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			/* NOTE(review): the transmitter-select assignment below
			 * uses '=' and so clobbers this DPLINKRATE bit; this
			 * matches the long-standing radeon-era code — confirm
			 * against upstream before "fixing". */
			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			/* v1/v3 ucConfig share an offset via the union */
			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			/* table encodes HPD as 1-based; 0 means "none" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		case 5:
			/* v5 splits the table into per-action parameter structs */
			switch (action) {
			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
				args.v5.asDPPanelModeParam.ucAction = action;
				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
				break;
			case ATOM_ENCODER_CMD_STREAM_SETUP:
				args.v5.asStreamParam.ucAction = action;
				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
				args.v5.asStreamParam.ucDigMode =
					amdgpu_atombios_encoder_get_encoder_mode(encoder);
				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
				else if (amdgpu_dig_monitor_is_duallink(encoder,
									amdgpu_encoder->pixel_clock))
					args.v5.asStreamParam.ucLaneNum = 8;
				else
					args.v5.asStreamParam.ucLaneNum = 4;
				args.v5.asStreamParam.ulPixelClock =
					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
				args.v5.asStreamParam.ucBitPerColor =
					amdgpu_atombios_encoder_get_bpc(encoder);
				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
				break;
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
				args.v5.asCmdParam.ucAction = action;
				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
				break;
			default:
				DRM_ERROR("Unsupported action 0x%x\n", action);
				break;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

/* One arg buffer per transmitter-control table revision. */
union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3; 732 DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4; 733 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5; 734 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6; 735 }; 736 737 void 738 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action, 739 uint8_t lane_num, uint8_t lane_set) 740 { 741 struct drm_device *dev = encoder->dev; 742 struct amdgpu_device *adev = drm_to_adev(dev); 743 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 744 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 745 struct drm_connector *connector; 746 union dig_transmitter_control args; 747 int index = 0; 748 uint8_t frev, crev; 749 bool is_dp = false; 750 int pll_id = 0; 751 int dp_clock = 0; 752 int dp_lane_count = 0; 753 int connector_object_id = 0; 754 int dig_encoder = dig->dig_encoder; 755 int hpd_id = AMDGPU_HPD_NONE; 756 757 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 758 connector = amdgpu_get_connector_for_encoder_init(encoder); 759 /* just needed to avoid bailing in the encoder check. 
the encoder 760 * isn't used for init 761 */ 762 dig_encoder = 0; 763 } else 764 connector = amdgpu_get_connector_for_encoder(encoder); 765 766 if (connector) { 767 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 768 struct amdgpu_connector_atom_dig *dig_connector = 769 amdgpu_connector->con_priv; 770 771 hpd_id = amdgpu_connector->hpd.hpd; 772 dp_clock = dig_connector->dp_clock; 773 dp_lane_count = dig_connector->dp_lane_count; 774 connector_object_id = 775 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; 776 } 777 778 if (encoder->crtc) { 779 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 780 pll_id = amdgpu_crtc->pll_id; 781 } 782 783 /* no dig encoder assigned */ 784 if (dig_encoder == -1) 785 return; 786 787 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder))) 788 is_dp = true; 789 790 memset(&args, 0, sizeof(args)); 791 792 switch (amdgpu_encoder->encoder_id) { 793 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 794 index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl); 795 break; 796 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 797 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 798 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 799 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 800 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 801 break; 802 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 803 index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl); 804 break; 805 } 806 807 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 808 return; 809 810 switch (frev) { 811 case 1: 812 switch (crev) { 813 case 1: 814 args.v1.ucAction = action; 815 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 816 args.v1.usInitInfo = cpu_to_le16(connector_object_id); 817 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 818 args.v1.asMode.ucLaneSel = lane_num; 819 args.v1.asMode.ucLaneSet = lane_set; 820 } else { 821 if (is_dp) 822 
args.v1.usPixelClock = cpu_to_le16(dp_clock / 10); 823 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 824 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 825 else 826 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 827 } 828 829 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL; 830 831 if (dig_encoder) 832 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER; 833 else 834 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER; 835 836 if (dig->linkb) 837 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB; 838 else 839 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA; 840 841 if (is_dp) 842 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 843 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 844 if (dig->coherent_mode) 845 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT; 846 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 847 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK; 848 } 849 break; 850 case 2: 851 args.v2.ucAction = action; 852 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 853 args.v2.usInitInfo = cpu_to_le16(connector_object_id); 854 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 855 args.v2.asMode.ucLaneSel = lane_num; 856 args.v2.asMode.ucLaneSet = lane_set; 857 } else { 858 if (is_dp) 859 args.v2.usPixelClock = cpu_to_le16(dp_clock / 10); 860 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 861 args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 862 else 863 args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 864 } 865 866 args.v2.acConfig.ucEncoderSel = dig_encoder; 867 if (dig->linkb) 868 args.v2.acConfig.ucLinkSel = 1; 869 870 switch (amdgpu_encoder->encoder_id) { 871 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 872 args.v2.acConfig.ucTransmitterSel = 0; 873 break; 874 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 875 
args.v2.acConfig.ucTransmitterSel = 1; 876 break; 877 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 878 args.v2.acConfig.ucTransmitterSel = 2; 879 break; 880 } 881 882 if (is_dp) { 883 args.v2.acConfig.fCoherentMode = 1; 884 args.v2.acConfig.fDPConnector = 1; 885 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 886 if (dig->coherent_mode) 887 args.v2.acConfig.fCoherentMode = 1; 888 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 889 args.v2.acConfig.fDualLinkConnector = 1; 890 } 891 break; 892 case 3: 893 args.v3.ucAction = action; 894 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 895 args.v3.usInitInfo = cpu_to_le16(connector_object_id); 896 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 897 args.v3.asMode.ucLaneSel = lane_num; 898 args.v3.asMode.ucLaneSet = lane_set; 899 } else { 900 if (is_dp) 901 args.v3.usPixelClock = cpu_to_le16(dp_clock / 10); 902 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 903 args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 904 else 905 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 906 } 907 908 if (is_dp) 909 args.v3.ucLaneNum = dp_lane_count; 910 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 911 args.v3.ucLaneNum = 8; 912 else 913 args.v3.ucLaneNum = 4; 914 915 if (dig->linkb) 916 args.v3.acConfig.ucLinkSel = 1; 917 if (dig_encoder & 1) 918 args.v3.acConfig.ucEncoderSel = 1; 919 920 /* Select the PLL for the PHY 921 * DP PHY should be clocked from external src if there is 922 * one. 
923 */ 924 /* On DCE4, if there is an external clock, it generates the DP ref clock */ 925 if (is_dp && adev->clock.dp_extclk) 926 args.v3.acConfig.ucRefClkSource = 2; /* external src */ 927 else 928 args.v3.acConfig.ucRefClkSource = pll_id; 929 930 switch (amdgpu_encoder->encoder_id) { 931 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 932 args.v3.acConfig.ucTransmitterSel = 0; 933 break; 934 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 935 args.v3.acConfig.ucTransmitterSel = 1; 936 break; 937 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 938 args.v3.acConfig.ucTransmitterSel = 2; 939 break; 940 } 941 942 if (is_dp) 943 args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */ 944 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 945 if (dig->coherent_mode) 946 args.v3.acConfig.fCoherentMode = 1; 947 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 948 args.v3.acConfig.fDualLinkConnector = 1; 949 } 950 break; 951 case 4: 952 args.v4.ucAction = action; 953 if (action == ATOM_TRANSMITTER_ACTION_INIT) { 954 args.v4.usInitInfo = cpu_to_le16(connector_object_id); 955 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) { 956 args.v4.asMode.ucLaneSel = lane_num; 957 args.v4.asMode.ucLaneSet = lane_set; 958 } else { 959 if (is_dp) 960 args.v4.usPixelClock = cpu_to_le16(dp_clock / 10); 961 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 962 args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10); 963 else 964 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 965 } 966 967 if (is_dp) 968 args.v4.ucLaneNum = dp_lane_count; 969 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 970 args.v4.ucLaneNum = 8; 971 else 972 args.v4.ucLaneNum = 4; 973 974 if (dig->linkb) 975 args.v4.acConfig.ucLinkSel = 1; 976 if (dig_encoder & 1) 977 args.v4.acConfig.ucEncoderSel = 1; 978 979 /* Select the PLL for the PHY 980 * DP PHY should be clocked from external 
src if there is 981 * one. 982 */ 983 /* On DCE5 DCPLL usually generates the DP ref clock */ 984 if (is_dp) { 985 if (adev->clock.dp_extclk) 986 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK; 987 else 988 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL; 989 } else 990 args.v4.acConfig.ucRefClkSource = pll_id; 991 992 switch (amdgpu_encoder->encoder_id) { 993 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 994 args.v4.acConfig.ucTransmitterSel = 0; 995 break; 996 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 997 args.v4.acConfig.ucTransmitterSel = 1; 998 break; 999 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1000 args.v4.acConfig.ucTransmitterSel = 2; 1001 break; 1002 } 1003 1004 if (is_dp) 1005 args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */ 1006 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1007 if (dig->coherent_mode) 1008 args.v4.acConfig.fCoherentMode = 1; 1009 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1010 args.v4.acConfig.fDualLinkConnector = 1; 1011 } 1012 break; 1013 case 5: 1014 args.v5.ucAction = action; 1015 if (is_dp) 1016 args.v5.usSymClock = cpu_to_le16(dp_clock / 10); 1017 else 1018 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1019 1020 switch (amdgpu_encoder->encoder_id) { 1021 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1022 if (dig->linkb) 1023 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1024 else 1025 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1026 break; 1027 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1028 if (dig->linkb) 1029 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD; 1030 else 1031 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1032 break; 1033 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1034 if (dig->linkb) 1035 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1036 else 1037 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1038 break; 1039 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1040 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1041 break; 1042 } 1043 if (is_dp) 1044 args.v5.ucLaneNum = dp_lane_count; 
1045 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1046 args.v5.ucLaneNum = 8; 1047 else 1048 args.v5.ucLaneNum = 4; 1049 args.v5.ucConnObjId = connector_object_id; 1050 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1051 1052 if (is_dp && adev->clock.dp_extclk) 1053 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK; 1054 else 1055 args.v5.asConfig.ucPhyClkSrcId = pll_id; 1056 1057 if (is_dp) 1058 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */ 1059 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1060 if (dig->coherent_mode) 1061 args.v5.asConfig.ucCoherentMode = 1; 1062 } 1063 if (hpd_id == AMDGPU_HPD_NONE) 1064 args.v5.asConfig.ucHPDSel = 0; 1065 else 1066 args.v5.asConfig.ucHPDSel = hpd_id + 1; 1067 args.v5.ucDigEncoderSel = 1 << dig_encoder; 1068 args.v5.ucDPLaneSet = lane_set; 1069 break; 1070 case 6: 1071 args.v6.ucAction = action; 1072 if (is_dp) 1073 args.v6.ulSymClock = cpu_to_le32(dp_clock / 10); 1074 else 1075 args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10); 1076 1077 switch (amdgpu_encoder->encoder_id) { 1078 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1079 if (dig->linkb) 1080 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1081 else 1082 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1083 break; 1084 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1085 if (dig->linkb) 1086 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD; 1087 else 1088 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1089 break; 1090 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1091 if (dig->linkb) 1092 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1093 else 1094 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1095 break; 1096 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1097 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1098 break; 1099 } 1100 if (is_dp) 1101 args.v6.ucLaneNum = dp_lane_count; 1102 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1103 args.v6.ucLaneNum = 8; 1104 else 1105 args.v6.ucLaneNum = 4; 1106 
			args.v6.ucConnObjId = connector_object_id;
			/* VSEMPH carries a lane set; all other actions carry the encoder mode */
			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
				args.v6.ucDPLaneSet = lane_set;
			else
				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			/* HPD select is 1-based in the table; 0 means "no HPD pin" */
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v6.ucHPDSel = 0;
			else
				args.v6.ucHPDSel = hpd_id + 1;
			args.v6.ucDigEncoderSel = 1 << dig_encoder;
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * amdgpu_atombios_encoder_set_edp_panel_power - switch eDP panel power via AtomBIOS
 * @connector: the eDP connector to power on/off
 * @action: ATOM_TRANSMITTER_ACTION_POWER_ON or ATOM_TRANSMITTER_ACTION_POWER_OFF
 *
 * Executes the UNIPHYTransmitterControl command table with just the power
 * action filled in.  After POWER_ON, polls the connector's HPD pin for up to
 * 300 ms (300 x 1 ms) to wait for the panel to come up.
 *
 * Returns true on success (or when nothing needed doing: non-eDP connector,
 * unsupported action, or unparsable command table); false only if the panel
 * failed to assert HPD within the timeout after POWER_ON.
 */
bool
amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
					    int action)
{
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_device *dev = amdgpu_connector->base.dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	union dig_transmitter_control args;
	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
	uint8_t frev, crev;

	/* panel power control only applies to eDP */
	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
		goto done;

	/* only the two power actions are valid here */
	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
		goto done;

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		goto done;

	memset(&args, 0, sizeof(args));

	args.v1.ucAction = action;

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

	/* wait for the panel to power up */
	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
		int i;

		for (i = 0; i < 300; i++) {
			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
				return true;
			mdelay(1);
		}
		return false;
	}
done:
	return true;
}

union
external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};

/*
 * amdgpu_atombios_encoder_setup_external_encoder - program an external encoder
 * (e.g. a DP bridge) via the ExternalEncoderControl AtomBIOS command table
 * @encoder: the internal encoder the external one is attached to
 * @ext_encoder: the external encoder object
 * @action: EXTERNAL_ENCODER_ACTION_V3_* (or legacy) action code
 *
 * Fills in the table parameters (pixel/DP clock, lane count, encoder mode,
 * link rate, encoder instance) according to the table revision reported by
 * the VBIOS and executes it.
 */
static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
					       struct drm_encoder *ext_encoder,
					       int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
	union external_encoder_control args;
	struct drm_connector *connector;
	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
	u8 frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	/* at INIT time the encoder may not be routed to a connector yet */
	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		/* no params on frev 1 */
		break;
	case 2:
		switch (crev) {
		case 1:
		case 2:
			args.v1.sDigEncoder.ucAction = action;
			/* usPixelClock is in 10 kHz units */
			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v1.sDigEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.sDigEncoder.ucLaneNum = 8;
			else
				args.v1.sDigEncoder.ucLaneNum = 4;
			break;
		case 3:
			args.v3.sExtEncoder.ucAction = action;
			/* INIT takes the connector id; other actions take the pixel clock */
			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
			else
				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.v3.sExtEncoder.ucEncoderMode =
				amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
				if (dp_clock == 270000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
				else if (dp_clock == 540000)
					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.sExtEncoder.ucLaneNum = 8;
			else
				args.v3.sExtEncoder.ucLaneNum = 4;
			/* select which external encoder instance this is */
			switch (ext_enum) {
			case GRAPH_OBJECT_ENUM_ID1:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
				break;
			case GRAPH_OBJECT_ENUM_ID2:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
				break;
			case GRAPH_OBJECT_ENUM_ID3:
				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
				break;
			}
			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
			return;
		}
		break;
	default:
		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
		return;
	}
	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

/*
 * amdgpu_atombios_encoder_setup_dig - full enable/disable sequence for a DIG encoder
 * @encoder: the DIG encoder
 * @action: ATOM_ENABLE or ATOM_DISABLE
 *
 * On enable: set the panel mode, set up the DIG encoder, set up any external
 * encoder, power up an eDP panel, enable the transmitter, link-train DP and
 * start the video stream, then restore the LCD backlight level.
 * On disable: the mirror sequence — stop DP video, disable the external
 * encoder, blank the LCD backlight, put the DP sink in D3, disable the
 * transmitter, and power down an eDP panel.
 * The ordering of these steps is significant; do not reorder.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels must be powered before the transmitter comes up */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);

		/* put the DP sink into low-power before killing the transmitter */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}

/*
 * amdgpu_atombios_encoder_dpms - DPMS entry point for AtomBIOS encoders
 * @encoder: the encoder to change power state on
 * @mode: DRM_MODE_DPMS_ON/STANDBY/SUSPEND/OFF
 *
 * Dispatches to the DIG, DVO or DAC setup helper; anything other than
 * DPMS_ON is treated as a disable.
 */
void
amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
		      amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
		      amdgpu_encoder->active_device);
switch (amdgpu_encoder->encoder_id) { 1371 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1372 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1373 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1374 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1375 switch (mode) { 1376 case DRM_MODE_DPMS_ON: 1377 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE); 1378 break; 1379 case DRM_MODE_DPMS_STANDBY: 1380 case DRM_MODE_DPMS_SUSPEND: 1381 case DRM_MODE_DPMS_OFF: 1382 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE); 1383 break; 1384 } 1385 break; 1386 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1387 switch (mode) { 1388 case DRM_MODE_DPMS_ON: 1389 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE); 1390 break; 1391 case DRM_MODE_DPMS_STANDBY: 1392 case DRM_MODE_DPMS_SUSPEND: 1393 case DRM_MODE_DPMS_OFF: 1394 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE); 1395 break; 1396 } 1397 break; 1398 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1399 switch (mode) { 1400 case DRM_MODE_DPMS_ON: 1401 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE); 1402 break; 1403 case DRM_MODE_DPMS_STANDBY: 1404 case DRM_MODE_DPMS_SUSPEND: 1405 case DRM_MODE_DPMS_OFF: 1406 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE); 1407 break; 1408 } 1409 break; 1410 default: 1411 return; 1412 } 1413 } 1414 1415 union crtc_source_param { 1416 SELECT_CRTC_SOURCE_PS_ALLOCATION v1; 1417 SELECT_CRTC_SOURCE_PARAMETERS_V2 v2; 1418 SELECT_CRTC_SOURCE_PARAMETERS_V3 v3; 1419 }; 1420 1421 void 1422 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) 1423 { 1424 struct drm_device *dev = encoder->dev; 1425 struct amdgpu_device *adev = drm_to_adev(dev); 1426 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1427 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1428 union crtc_source_param args; 1429 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1430 uint8_t frev, crev; 1431 struct amdgpu_encoder_atom_dig *dig; 1432 1433 memset(&args, 
0, sizeof(args)); 1434 1435 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1436 return; 1437 1438 switch (frev) { 1439 case 1: 1440 switch (crev) { 1441 case 1: 1442 default: 1443 args.v1.ucCRTC = amdgpu_crtc->crtc_id; 1444 switch (amdgpu_encoder->encoder_id) { 1445 case ENCODER_OBJECT_ID_INTERNAL_TMDS1: 1446 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1: 1447 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX; 1448 break; 1449 case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1450 case ENCODER_OBJECT_ID_INTERNAL_LVTM1: 1451 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) 1452 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX; 1453 else 1454 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX; 1455 break; 1456 case ENCODER_OBJECT_ID_INTERNAL_DVO1: 1457 case ENCODER_OBJECT_ID_INTERNAL_DDI: 1458 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1459 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX; 1460 break; 1461 case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1462 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1463 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1464 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1465 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1466 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1467 else 1468 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX; 1469 break; 1470 case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1471 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1472 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1473 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1474 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1475 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1476 else 1477 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX; 1478 break; 1479 } 1480 break; 1481 case 2: 1482 args.v2.ucCRTC = amdgpu_crtc->crtc_id; 1483 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1484 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1485 1486 if (connector->connector_type == 
DRM_MODE_CONNECTOR_LVDS) 1487 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1488 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1489 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1490 else 1491 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1492 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1493 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1494 } else { 1495 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1496 } 1497 switch (amdgpu_encoder->encoder_id) { 1498 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1499 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1500 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1501 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1502 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1503 dig = amdgpu_encoder->enc_priv; 1504 switch (dig->dig_encoder) { 1505 case 0: 1506 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1507 break; 1508 case 1: 1509 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1510 break; 1511 case 2: 1512 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1513 break; 1514 case 3: 1515 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1516 break; 1517 case 4: 1518 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1519 break; 1520 case 5: 1521 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1522 break; 1523 case 6: 1524 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1525 break; 1526 } 1527 break; 1528 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1529 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1530 break; 1531 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1532 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1533 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1534 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1535 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1536 else 1537 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1538 break; 1539 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1540 if (amdgpu_encoder->active_device & 
(ATOM_DEVICE_TV_SUPPORT)) 1541 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1542 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1543 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1544 else 1545 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1546 break; 1547 } 1548 break; 1549 case 3: 1550 args.v3.ucCRTC = amdgpu_crtc->crtc_id; 1551 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1552 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1553 1554 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1555 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1556 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1557 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1558 else 1559 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1560 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1561 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1562 } else { 1563 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1564 } 1565 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); 1566 switch (amdgpu_encoder->encoder_id) { 1567 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1568 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1569 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1570 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1571 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1572 dig = amdgpu_encoder->enc_priv; 1573 switch (dig->dig_encoder) { 1574 case 0: 1575 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1576 break; 1577 case 1: 1578 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1579 break; 1580 case 2: 1581 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1582 break; 1583 case 3: 1584 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1585 break; 1586 case 4: 1587 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1588 break; 1589 case 5: 1590 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1591 break; 1592 case 6: 1593 args.v3.ucEncoderID = 
ASIC_INT_DIG7_ENCODER_ID; 1594 break; 1595 } 1596 break; 1597 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1598 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1599 break; 1600 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1601 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1602 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1603 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1604 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1605 else 1606 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1607 break; 1608 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1609 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1610 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1611 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1612 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1613 else 1614 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1615 break; 1616 } 1617 break; 1618 } 1619 break; 1620 default: 1621 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1622 return; 1623 } 1624 1625 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1626 } 1627 1628 /* This only needs to be called once at startup */ 1629 void 1630 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) 1631 { 1632 struct drm_device *dev = adev_to_drm(adev); 1633 struct drm_encoder *encoder; 1634 1635 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1636 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1637 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1638 1639 switch (amdgpu_encoder->encoder_id) { 1640 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1641 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1642 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1643 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1644 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT, 1645 0, 0); 1646 break; 1647 } 1648 1649 if (ext_encoder) 1650 
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
	}
}

/*
 * amdgpu_atombios_encoder_dac_load_detect - run DAC load detection
 * @encoder: the DAC encoder
 * @connector: the connector to probe
 *
 * Executes the DAC_LoadDetection AtomBIOS command table for TV/CV/CRT
 * devices.  The result is not returned here; the VBIOS reports it through
 * the BIOS scratch registers, which the caller reads afterwards.
 *
 * Returns true if the table was executed, false if the encoder does not
 * drive a detectable device or the table header could not be parsed.
 */
static bool
amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
					struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
				       ATOM_DEVICE_CV_SUPPORT |
				       ATOM_DEVICE_CRT_SUPPORT)) {
		DAC_LOAD_DETECTION_PS_ALLOCATION args;
		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
		uint8_t frev, crev;

		memset(&args, 0, sizeof(args));

		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
			return false;

		args.sDacload.ucMisc = 0;

		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
			args.sDacload.ucDacType = ATOM_DAC_A;
		else
			args.sDacload.ucDacType = ATOM_DAC_B;

		/* pick the highest-priority device the connector supports */
		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
			/* the YPrPb misc flag only exists from crev 3 on */
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		}

		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

		return true;
	} else
		return false;
}

/*
 * amdgpu_atombios_encoder_dac_detect - detect a display on an analog DAC
 * @encoder: the DAC encoder
 * @connector: the connector to probe
 *
 * Runs DAC load detection, then decodes the connection status from
 * BIOS_SCRATCH_0 per the device type(s) the connector supports.
 */
enum drm_connector_status
amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
				   struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	uint32_t bios_0_scratch;

	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
		DRM_DEBUG_KMS("detect returned false \n");
		return connector_status_unknown;
	}

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

/*
 * amdgpu_atombios_encoder_dig_detect - analog detect through an external DP bridge
 * @encoder: the internal DIG encoder
 * @connector: the connector to probe (must have CRT support)
 *
 * Asks the external encoder (DP bridge) to perform DAC load detection, then
 * decodes BIOS_SCRATCH_0 the same way as the internal DAC path.  Returns
 * connector_status_unknown when there is no external encoder or the
 * connector has no CRT device.
 */
enum drm_connector_status
amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
				   struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = drm_to_adev(dev);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	u32 bios_0_scratch;

	if (!ext_encoder)
		return connector_status_unknown;

	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
		return connector_status_unknown;

	/* load detect on the dp bridge */
	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
						       EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

/*
 * amdgpu_atombios_encoder_setup_ext_encoder_ddc - route DDC through an
 * external encoder (DP bridge), if one is attached; no-op otherwise.
 */
void
amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
{
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);

	if (ext_encoder)
		/* ddc_setup on the dp bridge */
		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
							       EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);

}

void 1802 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector, 1803 struct drm_encoder *encoder, 1804 bool connected) 1805 { 1806 struct drm_device *dev = connector->dev; 1807 struct amdgpu_device *adev = drm_to_adev(dev); 1808 struct amdgpu_connector *amdgpu_connector = 1809 to_amdgpu_connector(connector); 1810 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1811 uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch; 1812 1813 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1814 bios_3_scratch = RREG32(mmBIOS_SCRATCH_3); 1815 bios_6_scratch = RREG32(mmBIOS_SCRATCH_6); 1816 1817 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) && 1818 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) { 1819 if (connected) { 1820 DRM_DEBUG_KMS("LCD1 connected\n"); 1821 bios_0_scratch |= ATOM_S0_LCD1; 1822 bios_3_scratch |= ATOM_S3_LCD1_ACTIVE; 1823 bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1; 1824 } else { 1825 DRM_DEBUG_KMS("LCD1 disconnected\n"); 1826 bios_0_scratch &= ~ATOM_S0_LCD1; 1827 bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE; 1828 bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1; 1829 } 1830 } 1831 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) && 1832 (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) { 1833 if (connected) { 1834 DRM_DEBUG_KMS("CRT1 connected\n"); 1835 bios_0_scratch |= ATOM_S0_CRT1_COLOR; 1836 bios_3_scratch |= ATOM_S3_CRT1_ACTIVE; 1837 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1; 1838 } else { 1839 DRM_DEBUG_KMS("CRT1 disconnected\n"); 1840 bios_0_scratch &= ~ATOM_S0_CRT1_MASK; 1841 bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE; 1842 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1; 1843 } 1844 } 1845 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) && 1846 (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) { 1847 if (connected) { 1848 DRM_DEBUG_KMS("CRT2 connected\n"); 1849 bios_0_scratch |= ATOM_S0_CRT2_COLOR; 1850 bios_3_scratch |= ATOM_S3_CRT2_ACTIVE; 1851 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2; 1852 } 
else { 1853 DRM_DEBUG_KMS("CRT2 disconnected\n"); 1854 bios_0_scratch &= ~ATOM_S0_CRT2_MASK; 1855 bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE; 1856 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2; 1857 } 1858 } 1859 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) && 1860 (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) { 1861 if (connected) { 1862 DRM_DEBUG_KMS("DFP1 connected\n"); 1863 bios_0_scratch |= ATOM_S0_DFP1; 1864 bios_3_scratch |= ATOM_S3_DFP1_ACTIVE; 1865 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1; 1866 } else { 1867 DRM_DEBUG_KMS("DFP1 disconnected\n"); 1868 bios_0_scratch &= ~ATOM_S0_DFP1; 1869 bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE; 1870 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1; 1871 } 1872 } 1873 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) && 1874 (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) { 1875 if (connected) { 1876 DRM_DEBUG_KMS("DFP2 connected\n"); 1877 bios_0_scratch |= ATOM_S0_DFP2; 1878 bios_3_scratch |= ATOM_S3_DFP2_ACTIVE; 1879 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2; 1880 } else { 1881 DRM_DEBUG_KMS("DFP2 disconnected\n"); 1882 bios_0_scratch &= ~ATOM_S0_DFP2; 1883 bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE; 1884 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2; 1885 } 1886 } 1887 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) && 1888 (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) { 1889 if (connected) { 1890 DRM_DEBUG_KMS("DFP3 connected\n"); 1891 bios_0_scratch |= ATOM_S0_DFP3; 1892 bios_3_scratch |= ATOM_S3_DFP3_ACTIVE; 1893 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3; 1894 } else { 1895 DRM_DEBUG_KMS("DFP3 disconnected\n"); 1896 bios_0_scratch &= ~ATOM_S0_DFP3; 1897 bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE; 1898 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3; 1899 } 1900 } 1901 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) && 1902 (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) { 1903 if (connected) { 1904 DRM_DEBUG_KMS("DFP4 connected\n"); 1905 bios_0_scratch |= ATOM_S0_DFP4; 1906 bios_3_scratch |= 
ATOM_S3_DFP4_ACTIVE; 1907 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4; 1908 } else { 1909 DRM_DEBUG_KMS("DFP4 disconnected\n"); 1910 bios_0_scratch &= ~ATOM_S0_DFP4; 1911 bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE; 1912 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4; 1913 } 1914 } 1915 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) && 1916 (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) { 1917 if (connected) { 1918 DRM_DEBUG_KMS("DFP5 connected\n"); 1919 bios_0_scratch |= ATOM_S0_DFP5; 1920 bios_3_scratch |= ATOM_S3_DFP5_ACTIVE; 1921 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5; 1922 } else { 1923 DRM_DEBUG_KMS("DFP5 disconnected\n"); 1924 bios_0_scratch &= ~ATOM_S0_DFP5; 1925 bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE; 1926 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5; 1927 } 1928 } 1929 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) && 1930 (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) { 1931 if (connected) { 1932 DRM_DEBUG_KMS("DFP6 connected\n"); 1933 bios_0_scratch |= ATOM_S0_DFP6; 1934 bios_3_scratch |= ATOM_S3_DFP6_ACTIVE; 1935 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6; 1936 } else { 1937 DRM_DEBUG_KMS("DFP6 disconnected\n"); 1938 bios_0_scratch &= ~ATOM_S0_DFP6; 1939 bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE; 1940 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6; 1941 } 1942 } 1943 1944 WREG32(mmBIOS_SCRATCH_0, bios_0_scratch); 1945 WREG32(mmBIOS_SCRATCH_3, bios_3_scratch); 1946 WREG32(mmBIOS_SCRATCH_6, bios_6_scratch); 1947 } 1948 1949 union lvds_info { 1950 struct _ATOM_LVDS_INFO info; 1951 struct _ATOM_LVDS_INFO_V12 info_12; 1952 }; 1953 1954 struct amdgpu_encoder_atom_dig * 1955 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder) 1956 { 1957 struct drm_device *dev = encoder->base.dev; 1958 struct amdgpu_device *adev = drm_to_adev(dev); 1959 struct amdgpu_mode_info *mode_info = &adev->mode_info; 1960 int index = GetIndexIntoMasterTable(DATA, LVDS_Info); 1961 uint16_t data_offset, misc; 1962 union lvds_info *lvds_info; 1963 uint8_t frev, crev; 
1964 struct amdgpu_encoder_atom_dig *lvds = NULL; 1965 int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 1966 1967 if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL, 1968 &frev, &crev, &data_offset)) { 1969 lvds_info = 1970 (union lvds_info *)(mode_info->atom_context->bios + data_offset); 1971 lvds = 1972 kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 1973 1974 if (!lvds) 1975 return NULL; 1976 1977 lvds->native_mode.clock = 1978 le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10; 1979 lvds->native_mode.hdisplay = 1980 le16_to_cpu(lvds_info->info.sLCDTiming.usHActive); 1981 lvds->native_mode.vdisplay = 1982 le16_to_cpu(lvds_info->info.sLCDTiming.usVActive); 1983 lvds->native_mode.htotal = lvds->native_mode.hdisplay + 1984 le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time); 1985 lvds->native_mode.hsync_start = lvds->native_mode.hdisplay + 1986 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset); 1987 lvds->native_mode.hsync_end = lvds->native_mode.hsync_start + 1988 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth); 1989 lvds->native_mode.vtotal = lvds->native_mode.vdisplay + 1990 le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time); 1991 lvds->native_mode.vsync_start = lvds->native_mode.vdisplay + 1992 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset); 1993 lvds->native_mode.vsync_end = lvds->native_mode.vsync_start + 1994 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth); 1995 lvds->panel_pwr_delay = 1996 le16_to_cpu(lvds_info->info.usOffDelayInMs); 1997 lvds->lcd_misc = lvds_info->info.ucLVDS_Misc; 1998 1999 misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess); 2000 if (misc & ATOM_VSYNC_POLARITY) 2001 lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC; 2002 if (misc & ATOM_HSYNC_POLARITY) 2003 lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC; 2004 if (misc & ATOM_COMPOSITESYNC) 2005 lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC; 2006 if (misc & ATOM_INTERLACE) 
2007 lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE; 2008 if (misc & ATOM_DOUBLE_CLOCK_MODE) 2009 lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN; 2010 2011 lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize); 2012 lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize); 2013 2014 /* set crtc values */ 2015 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V); 2016 2017 lvds->lcd_ss_id = lvds_info->info.ucSS_Id; 2018 2019 encoder->native_mode = lvds->native_mode; 2020 2021 if (encoder_enum == 2) 2022 lvds->linkb = true; 2023 else 2024 lvds->linkb = false; 2025 2026 /* parse the lcd record table */ 2027 if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) { 2028 ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record; 2029 ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record; 2030 bool bad_record = false; 2031 u8 *record; 2032 2033 if ((frev == 1) && (crev < 2)) 2034 /* absolute */ 2035 record = (u8 *)(mode_info->atom_context->bios + 2036 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 2037 else 2038 /* relative */ 2039 record = (u8 *)(mode_info->atom_context->bios + 2040 data_offset + 2041 le16_to_cpu(lvds_info->info.usModePatchTableOffset)); 2042 while (*record != ATOM_RECORD_END_TYPE) { 2043 switch (*record) { 2044 case LCD_MODE_PATCH_RECORD_MODE_TYPE: 2045 record += sizeof(ATOM_PATCH_RECORD_MODE); 2046 break; 2047 case LCD_RTS_RECORD_TYPE: 2048 record += sizeof(ATOM_LCD_RTS_RECORD); 2049 break; 2050 case LCD_CAP_RECORD_TYPE: 2051 record += sizeof(ATOM_LCD_MODE_CONTROL_CAP); 2052 break; 2053 case LCD_FAKE_EDID_PATCH_RECORD_TYPE: 2054 fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record; 2055 if (fake_edid_record->ucFakeEDIDLength) { 2056 struct edid *edid; 2057 int edid_size = 2058 max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength); 2059 edid = kmalloc(edid_size, GFP_KERNEL); 2060 if (edid) { 2061 memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0], 2062 
fake_edid_record->ucFakeEDIDLength); 2063 2064 if (drm_edid_is_valid(edid)) { 2065 adev->mode_info.bios_hardcoded_edid = edid; 2066 adev->mode_info.bios_hardcoded_edid_size = edid_size; 2067 } else 2068 kfree(edid); 2069 } 2070 } 2071 record += fake_edid_record->ucFakeEDIDLength ? 2072 fake_edid_record->ucFakeEDIDLength + 2 : 2073 sizeof(ATOM_FAKE_EDID_PATCH_RECORD); 2074 break; 2075 case LCD_PANEL_RESOLUTION_RECORD_TYPE: 2076 panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record; 2077 lvds->native_mode.width_mm = panel_res_record->usHSize; 2078 lvds->native_mode.height_mm = panel_res_record->usVSize; 2079 record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD); 2080 break; 2081 default: 2082 DRM_ERROR("Bad LCD record %d\n", *record); 2083 bad_record = true; 2084 break; 2085 } 2086 if (bad_record) 2087 break; 2088 } 2089 } 2090 } 2091 return lvds; 2092 } 2093 2094 struct amdgpu_encoder_atom_dig * 2095 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder) 2096 { 2097 int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 2098 struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL); 2099 2100 if (!dig) 2101 return NULL; 2102 2103 /* coherent mode by default */ 2104 dig->coherent_mode = true; 2105 dig->dig_encoder = -1; 2106 2107 if (encoder_enum == 2) 2108 dig->linkb = true; 2109 else 2110 dig->linkb = false; 2111 2112 return dig; 2113 } 2114 2115