/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "atom.h"
#include "atom-bits.h"
#include "drm_dp_helper.h"

/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9
#define DP_LINK_STATUS_SIZE 6
#define DP_DPCD_SIZE 8

static char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};

/***** radeon AUX functions *****/
union aux_channel_transaction {
	PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
	PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};

static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
				 u8 *send, int send_bytes,
				 u8 *recv, int recv_size,
				 u8 delay, u8 *ack)
{
	struct drm_device *dev = chan->dev;
	struct radeon_device *rdev = dev->dev_private;
	union aux_channel_transaction args;
	int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
	unsigned char *base;
	int recv_bytes;

	memset(&args, 0, sizeof(args));

	base = (unsigned char *)(rdev->mode_info.atom_context->scratch + 1);

	memcpy(base, send, send_bytes);

	args.v1.lpAuxRequest = 0 + 4;
	args.v1.lpDataOut = 16 + 4;
	args.v1.ucDataOutLen = 0;
	args.v1.ucChannelID = chan->rec.i2c_id;
	args.v1.ucDelay = delay / 10;
	if (ASIC_IS_DCE4(rdev))
		args.v2.ucHPD_ID = chan->rec.hpd;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

	*ack = args.v1.ucReplyStatus;

	/* timeout */
	if (args.v1.ucReplyStatus == 1) {
		DRM_DEBUG_KMS("dp_aux_ch timeout\n");
		return -ETIMEDOUT;
	}

	/* flags not zero */
	if (args.v1.ucReplyStatus == 2) {
		DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
		return -EBUSY;
	}

	/* error */
	if (args.v1.ucReplyStatus == 3) {
		DRM_DEBUG_KMS("dp_aux_ch error\n");
		return -EIO;
	}

	recv_bytes = args.v1.ucDataOutLen;
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	if (recv && recv_size)
		memcpy(recv, base + 16, recv_bytes);

	return recv_bytes;
}
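/* native AUX read/write helpers: build the 4-byte AUX request header
 * (DPCD address, command, length) followed by up to 16 data bytes, and
 * retry up to four times, waiting out DEFER replies from the sink.
 */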
static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
				      u16 address, u8 *send, u8 send_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	int ret;
	u8 msg[20];
	int msg_bytes = send_bytes + 4;
	u8 ack;
	unsigned retry;

	if (send_bytes > 16)
		return -1;

	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_WRITE << 4;
	msg[3] = (msg_bytes << 4) | (send_bytes - 1);
	memcpy(&msg[4], send, send_bytes);

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, NULL, 0, delay, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return send_bytes;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);
		else
			return -EIO;
	}

	return -EIO;
}

static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
				     u16 address, u8 *recv, int recv_bytes, u8 delay)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[4];
	int msg_bytes = 4;
	u8 ack;
	int ret;
	unsigned retry;

	msg[0] = address;
	msg[1] = address >> 8;
	msg[2] = AUX_NATIVE_READ << 4;
	msg[3] = (msg_bytes << 4) | (recv_bytes - 1);

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
					    msg, msg_bytes, recv, recv_bytes, delay, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			return ret;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(400);
		else if (ret == 0)
			return -EPROTO;
		else
			return -EIO;
	}

	return -EIO;
}

static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
				  u16 reg, u8 val)
{
	radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
}

static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
			       u16 reg)
{
	u8 val = 0;

	radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);

	return val;
}

int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
			 u8 write_byte, u8 *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
	u16 address = algo_data->address;
	u8 msg[5];
	u8 reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes = 1;
	int ret;
	u8 ack;

	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[2] = AUX_I2C_READ << 4;
	else
		msg[2] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[2] |= AUX_I2C_MOT << 4;

	msg[0] = address;
	msg[1] = address >> 8;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg_bytes = 5;
		msg[3] = msg_bytes << 4;
		msg[4] = write_byte;
		break;
	case MODE_I2C_READ:
		msg_bytes = 4;
		msg[3] = msg_bytes << 4;
		break;
	default:
		msg_bytes = 4;
		msg[3] = 3 << 4;
		break;
	}

	for (retry = 0; retry < 4; retry++) {
		ret = radeon_process_aux_ch(auxch,
					    msg, msg_bytes, reply, reply_bytes, 0, &ack);
		if (ret == -EBUSY)
			continue;
		else if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (ack & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_ch native defer\n");
			udelay(400);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}

		switch (ack & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ)
				*read_byte = reply[0];
			return ret;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(400);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
			return -EREMOTEIO;
		}
	}

	DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
	return -EREMOTEIO;
}

/***** general DP utility functions *****/

static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
			     int lane)
{
	int i = DP_LANE0_1_STATUS + (lane >> 1);
	int s = (lane & 1) * 4;
	u8 l = dp_link_status(link_status, i);
	return (l >> s) & 0xf;
}

static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
				 int lane_count)
{
	int lane;
	u8 lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
			     int lane_count)
{
	u8 lane_align;
	u8 lane_status;
	int lane;

	lane_align = dp_link_status(link_status,
				    DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < lane_count; lane++) {
		lane_status = dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}

static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
					int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	u8 l = dp_link_status(link_status, i);

	return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
					     int lane)
{
	int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	u8 l = dp_link_status(link_status, i);

	return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}

#define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
#define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5

static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
				int lane_count,
				u8 train_set[4])
{
	u8 v = 0;
	u8 p = 0;
	int lane;

	for (lane = 0; lane < lane_count; lane++) {
		u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
		u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);

		DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
			      lane,
			      voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
			      pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= DP_VOLTAGE_MAX)
		v |= DP_TRAIN_MAX_SWING_REACHED;

	if (p >= DP_PRE_EMPHASIS_MAX)
		p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
		      voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
		      pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

	for (lane = 0; lane < 4; lane++)
		train_set[lane] = v | p;
}

/* convert bits per color to bits per pixel */
/* get bpc from the EDID */
static int convert_bpc_to_bpp(int bpc)
{
#if 0
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
#endif
	return 24;
}

/* get the max pix clock supported by the link rate and lane num */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}

static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
{
	switch (dpcd[DP_MAX_LINK_RATE]) {
	case DP_LINK_BW_1_62:
	default:
		return 162000;
	case DP_LINK_BW_2_7:
		return 270000;
	case DP_LINK_BW_5_4:
		return 540000;
	}
}

static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
{
	return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
}

static u8 dp_get_dp_link_rate_coded(int link_rate)
{
	switch (link_rate) {
	case 162000:
	default:
		return DP_LINK_BW_1_62;
	case 270000:
		return DP_LINK_BW_2_7;
	case 540000:
		return DP_LINK_BW_5_4;
	}
}

/***** radeon specific DP functions *****/

/* First get the min lane count needed for the pixel clock at the low link
 * rate (prefer the low rate), then check the max lane count supported by
 * the DP panel; if the max lane count is lower, use it instead.
 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
					u8 dpcd[DP_DPCD_SIZE],
					int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int max_link_rate = dp_get_max_link_rate(dpcd);
	int max_lane_num = dp_get_max_lane_number(dpcd);
	int lane_num;
	int max_dp_pix_clock;

	for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
		max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
		if (pix_clock <= max_dp_pix_clock)
			break;
	}

	return lane_num;
}

static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
				       u8 dpcd[DP_DPCD_SIZE],
				       int pix_clock)
{
	int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
	int lane_num, max_pix_clock;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		return 270000;

	lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
	max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 162000;
	max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
	if (pix_clock <= max_pix_clock)
		return 270000;
	if (radeon_connector_is_dp12_capable(connector)) {
		max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
		if (pix_clock <= max_pix_clock)
			return 540000;
	}

	return dp_get_max_link_rate(dpcd);
}

static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
				    int action, int dp_clock,
				    u8 ucconfig, u8 lane_num)
{
	DP_ENCODER_SERVICE_PARAMETERS args;
	int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

	memset(&args, 0, sizeof(args));
	args.ucLinkClock = dp_clock / 10;
	args.ucConfig = ucconfig;
	args.ucAction = action;
	args.ucLaneNum = lane_num;
	args.ucStatus = 0;

	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
	return args.ucStatus;
}

u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;

	return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
					 dig_connector->dp_i2c_bus->rec.i2c_id, 0);
}

bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
{
	struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
	u8 msg[25];
	int ret, i;

	ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
	if (ret > 0) {
		memcpy(dig_connector->dpcd, msg, 8);
		DRM_DEBUG_KMS("DPCD: ");
		for (i = 0; i < 8; i++)
			DRM_DEBUG_KMS("%02x ", msg[i]);
		DRM_DEBUG_KMS("\n");
		return true;
	}
	dig_connector->dpcd[0] = 0;
	return false;
}

int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;

	if (!ASIC_IS_DCE4(rdev))
		return panel_mode;

	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
	else if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
		 ENCODER_OBJECT_ID_TRAVIS) {
		u8 id[6];
		int i;
		for (i = 0; i < 6; i++)
			id[i] = radeon_read_dpcd_reg(radeon_connector, 0x503 + i);
		if (id[0] == 0x73 &&
		    id[1] == 0x69 &&
		    id[2] == 0x76 &&
		    id[3] == 0x61 &&
		    id[4] == 0x72 &&
		    id[5] == 0x54)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
		else
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		u8 tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
		if (tmp & 1)
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	}

	return panel_mode;
}

void radeon_dp_set_link_config(struct drm_connector *connector,
			       struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;

	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
	    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
		dig_connector->dp_clock =
			radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
		dig_connector->dp_lane_count =
			radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
	}
}

int radeon_dp_mode_valid_helper(struct drm_connector *connector,
				struct drm_display_mode *mode)
{
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	struct radeon_connector_atom_dig *dig_connector;
	int dp_clock;

	if (!radeon_connector->con_priv)
		return MODE_CLOCK_HIGH;
	dig_connector = radeon_connector->con_priv;

	dp_clock =
		radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);

	if ((dp_clock == 540000) &&
	    (!radeon_connector_is_dp12_capable(connector)))
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
				      u8 link_status[DP_LINK_STATUS_SIZE])
{
	int ret;
	ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
					link_status, DP_LINK_STATUS_SIZE, 100);
	if (ret <= 0) {
		DRM_ERROR("displayport link status failed\n");
		return false;
	}

	DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
		      link_status[0], link_status[1], link_status[2],
		      link_status[3], link_status[4], link_status[5]);
	return true;
}

bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;

	if (!radeon_dp_get_link_status(radeon_connector, link_status))
		return false;
	if (dp_channel_eq_ok(link_status, dig->dp_lane_count))
		return false;
	return true;
}

struct radeon_dp_link_train_info {
	struct radeon_device *rdev;
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	int enc_id;
	int dp_clock;
	int dp_lane_count;
	int rd_interval;
	bool tp3_supported;
	u8 dpcd[8];
	u8 train_set[4];
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 tries;
	bool use_dpencoder;
};
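/* push the current voltage swing / pre-emphasis settings to both ends of
 * the link: the transmitter on the source and the DPCD training-lane
 * registers on the sink.
 */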
static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
	/* set the initial vs/emph on the source */
	atombios_dig_transmitter_setup(dp_info->encoder,
				       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
				       0, dp_info->train_set[0]); /* sets all lanes at once */

	/* set the vs/emph on the sink */
	radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
				   dp_info->train_set, dp_info->dp_lane_count, 0);
}

static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
	int rtp = 0;

	/* set training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
			break;
		case DP_TRAINING_PATTERN_3:
			rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
			break;
		}
		atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
	} else {
		switch (tp) {
		case DP_TRAINING_PATTERN_1:
			rtp = 0;
			break;
		case DP_TRAINING_PATTERN_2:
			rtp = 1;
			break;
		}
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
					  dp_info->dp_clock, dp_info->enc_id, rtp);
	}

	/* enable training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
}

static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(dp_info->encoder);
	struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
	u8 tmp;

	/* power up the sink */
	if (dp_info->dpcd[0] >= 0x11)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_SET_POWER, DP_SET_POWER_D0);

	/* possibly enable downspread on the sink */
	if (dp_info->dpcd[3] & 0x1)
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
	else
		radeon_write_dpcd_reg(dp_info->radeon_connector,
				      DP_DOWNSPREAD_CTRL, 0);

	if ((dp_info->connector->connector_type == DRM_MODE_CONNECTOR_eDP) &&
	    (dig->panel_mode == DP_PANEL_MODE_INTERNAL_DP2_MODE)) {
		radeon_write_dpcd_reg(dp_info->radeon_connector, DP_EDP_CONFIGURATION_SET, 1);
	}

	/* set the lane count on the sink */
	tmp = dp_info->dp_lane_count;
	if (dp_info->dpcd[DP_DPCD_REV] >= 0x11 &&
	    dp_info->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)
		tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);

	/* set the link rate on the sink */
	tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
	radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);

	/* start training on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	return 0;
}
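/* take the link out of training: clear the training pattern on the sink,
 * then tell the source that training is complete.
 */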
static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
	udelay(400);

	/* disable the training pattern on the sink */
	radeon_write_dpcd_reg(dp_info->radeon_connector,
			      DP_TRAINING_PATTERN_SET,
			      DP_TRAINING_PATTERN_DISABLE);

	/* disable the training pattern on the source */
	if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
		atombios_dig_encoder_setup(dp_info->encoder,
					   ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
	else
		radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
					  dp_info->dp_clock, dp_info->enc_id, 0);

	return 0;
}

static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
	bool clock_recovery;
	u8 voltage;
	int i;

	radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
	memset(dp_info->train_set, 0, 4);
	radeon_dp_update_vs_emph(dp_info);

	udelay(400);

	/* clock recovery loop */
	clock_recovery = false;
	dp_info->tries = 0;
	voltage = 0xff;
	while (1) {
		if (dp_info->rd_interval == 0)
			udelay(100);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
			break;

		if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			clock_recovery = true;
			break;
		}

		for (i = 0; i < dp_info->dp_lane_count; i++) {
			if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		}
		if (i == dp_info->dp_lane_count) {
			DRM_ERROR("clock recovery reached max voltage\n");
			break;
		}

		if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++dp_info->tries;
			if (dp_info->tries == 5) {
				DRM_ERROR("clock recovery tried 5 times\n");
				break;
			}
		} else
			dp_info->tries = 0;

		voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
	}
	if (!clock_recovery) {
		DRM_ERROR("clock recovery failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
			      DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
	bool channel_eq;

	if (dp_info->tp3_supported)
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
	else
		radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

	/* channel equalization loop */
	dp_info->tries = 0;
	channel_eq = false;
	while (1) {
		if (dp_info->rd_interval == 0)
			udelay(400);
		else
			mdelay(dp_info->rd_interval * 4);

		if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
			break;

		if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times */
		if (dp_info->tries > 5) {
			DRM_ERROR("channel eq failed: 5 tries\n");
			break;
		}

		/* Compute new train_set as requested by sink */
		dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

		radeon_dp_update_vs_emph(dp_info);
		dp_info->tries++;
	}

	if (!channel_eq) {
		DRM_ERROR("channel eq failed\n");
		return -1;
	} else {
		DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
			      dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
			      (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
			      >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
		return 0;
	}
}

void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	int index;
	u8 tmp, frev, crev;

	if (!radeon_encoder->enc_priv)
		return;
	dig = radeon_encoder->enc_priv;

	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
		return;
	dig_connector = radeon_connector->con_priv;

	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
		return;

	/* DPEncoderService newer than 1.1 can't properly program the
	 * training pattern. When facing such a version, use the
	 * DIGxEncoderControl (x == 1 | 2) table instead.
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		if (crev > 1) {
			dp_info.use_dpencoder = false;
		}
	}

	dp_info.enc_id = 0;
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
	if (dig->linkb)
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
	else
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

	dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
		dp_info.tp3_supported = true;
	else
		dp_info.tp3_supported = false;

	memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
	dp_info.rdev = rdev;
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.radeon_connector = radeon_connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;

	if (radeon_dp_link_train_init(&dp_info))
		goto done;
	if (radeon_dp_link_train_cr(&dp_info))
		goto done;
	if (radeon_dp_link_train_ce(&dp_info))
		goto done;
done:
	if (radeon_dp_link_train_finish(&dp_info))
		return;
}