1 /* 2 * Copyright © 2012 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 22 * 23 * Authors: 24 * Keith Packard <keithp@keithp.com> 25 * 26 */ 27 28 #include <linux/i2c.h> 29 #include <linux/module.h> 30 #include <linux/slab.h> 31 32 #include <drm/display/drm_dp_helper.h> 33 #include <drm/drm_crtc.h> 34 #include <drm/drm_crtc_helper.h> 35 #include <drm/drm_simple_kms_helper.h> 36 37 #include "gma_display.h" 38 #include "psb_drv.h" 39 #include "psb_intel_drv.h" 40 #include "psb_intel_reg.h" 41 42 /** 43 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp 44 * aux algorithm 45 * @running: set by the algo indicating whether an i2c is ongoing or whether 46 * the i2c bus is quiescent 47 * @address: i2c target address for the currently ongoing transfer 48 * @aux_ch: driver callback to transfer a single byte of the i2c payload 49 */ 50 struct i2c_algo_dp_aux_data { 51 bool running; 52 u16 address; 53 int (*aux_ch) (struct i2c_adapter *adapter, 54 int mode, uint8_t write_byte, 55 uint8_t *read_byte); 56 }; 57 58 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */ 59 static int 60 i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode, 61 uint8_t write_byte, uint8_t *read_byte) 62 { 63 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 64 int ret; 65 66 ret = (*algo_data->aux_ch)(adapter, mode, 67 write_byte, read_byte); 68 return ret; 69 } 70 71 /* 72 * I2C over AUX CH 73 */ 74 75 /* 76 * Send the address. If the I2C link is running, this 'restarts' 77 * the connection with the new address, this is used for doing 78 * a write followed by a read (as needed for DDC) 79 */ 80 static int 81 i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading) 82 { 83 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 84 int mode = MODE_I2C_START; 85 86 if (reading) 87 mode |= MODE_I2C_READ; 88 else 89 mode |= MODE_I2C_WRITE; 90 algo_data->address = address; 91 algo_data->running = true; 92 return i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL); 93 } 94 95 /* 96 * Stop the I2C transaction. 
This closes out the link, sending 97 * a bare address packet with the MOT bit turned off 98 */ 99 static void 100 i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading) 101 { 102 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 103 int mode = MODE_I2C_STOP; 104 105 if (reading) 106 mode |= MODE_I2C_READ; 107 else 108 mode |= MODE_I2C_WRITE; 109 if (algo_data->running) { 110 (void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL); 111 algo_data->running = false; 112 } 113 } 114 115 /* 116 * Write a single byte to the current I2C address; the 117 * I2C link must be running or this returns -EIO 118 */ 119 static int 120 i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte) 121 { 122 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 123 124 if (!algo_data->running) 125 return -EIO; 126 127 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL); 128 } 129 130 /* 131 * Read a single byte from the current I2C address; the 132 * I2C link must be running or this returns -EIO 133 */ 134 static int 135 i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret) 136 { 137 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 138 139 if (!algo_data->running) 140 return -EIO; 141 142 return i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret); 143 } 144 145 static int 146 i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter, 147 struct i2c_msg *msgs, 148 int num) 149 { 150 int ret = 0; 151 bool reading = false; 152 int m; 153 int b; 154 155 for (m = 0; m < num; m++) { 156 u16 len = msgs[m].len; 157 u8 *buf = msgs[m].buf; 158 reading = (msgs[m].flags & I2C_M_RD) != 0; 159 ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading); 160 if (ret < 0) 161 break; 162 if (reading) { 163 for (b = 0; b < len; b++) { 164 ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]); 165 if (ret < 0) 166 break; 167 } 168 } else { 169 for (b = 0; b < len; b++) { 170 ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]); 171 if (ret < 0) 172 break; 173 } 174 } 175 if (ret < 0) 176 break; 177 } 178 if (ret >= 0) 179 ret = num; 180 i2c_algo_dp_aux_stop(adapter, reading); 181 DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret); 182 return ret; 183 } 184 185 static u32 186 i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter) 187 { 188 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | 189 I2C_FUNC_SMBUS_READ_BLOCK_DATA | 190 I2C_FUNC_SMBUS_BLOCK_PROC_CALL | 191 I2C_FUNC_10BIT_ADDR; 192 } 193 194 static const struct i2c_algorithm i2c_dp_aux_algo = { 195 .master_xfer = i2c_algo_dp_aux_xfer, 196 .functionality = i2c_algo_dp_aux_functionality, 197 }; 198 199 static void 200 i2c_dp_aux_reset_bus(struct i2c_adapter *adapter) 201 { 202 (void) i2c_algo_dp_aux_address(adapter, 0, false); 203 (void) i2c_algo_dp_aux_stop(adapter, false); 204 } 205 206 static int 207 i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter) 208 { 209 adapter->algo = &i2c_dp_aux_algo; 210 adapter->retries = 3; 211 i2c_dp_aux_reset_bus(adapter); 212 return 0; 213 } 214 215 /* 216 * FIXME: This is the old dp aux helper; gma500 is the last driver that needs to 217 * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
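 * (A sketch of what that port could look like, not something this driver does today: wrap cdv_intel_dp_aux_ch() in a struct drm_dp_aux ->transfer() callback and register it with drm_dp_aux_register(), which then provides the DDC i2c adapter.)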
218 */ 219 static int 220 i2c_dp_aux_add_bus(struct i2c_adapter *adapter) 221 { 222 int error; 223 224 error = i2c_dp_aux_prepare_bus(adapter); 225 if (error) 226 return error; 227 error = i2c_add_adapter(adapter); 228 return error; 229 } 230 231 #define _wait_for(COND, MS, W) ({ \ 232 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \ 233 int ret__ = 0; \ 234 while (! (COND)) { \ 235 if (time_after(jiffies, timeout__)) { \ 236 ret__ = -ETIMEDOUT; \ 237 break; \ 238 } \ 239 if (W && !in_dbg_master()) msleep(W); \ 240 } \ 241 ret__; \ 242 }) 243 244 #define wait_for(COND, MS) _wait_for(COND, MS, 1) 245 246 #define DP_LINK_CHECK_TIMEOUT (10 * 1000) 247 248 #define DP_LINK_CONFIGURATION_SIZE 9 249 250 #define CDV_FAST_LINK_TRAIN 1 251 252 struct cdv_intel_dp { 253 uint32_t output_reg; 254 uint32_t DP; 255 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE]; 256 bool has_audio; 257 int force_audio; 258 uint32_t color_range; 259 uint8_t link_bw; 260 uint8_t lane_count; 261 uint8_t dpcd[4]; 262 struct gma_encoder *encoder; 263 struct i2c_adapter adapter; 264 struct i2c_algo_dp_aux_data algo; 265 uint8_t train_set[4]; 266 uint8_t link_status[DP_LINK_STATUS_SIZE]; 267 int panel_power_up_delay; 268 int panel_power_down_delay; 269 int panel_power_cycle_delay; 270 int backlight_on_delay; 271 int backlight_off_delay; 272 struct drm_display_mode *panel_fixed_mode; /* for eDP */ 273 bool panel_on; 274 }; 275 276 struct ddi_regoff { 277 uint32_t PreEmph1; 278 uint32_t PreEmph2; 279 uint32_t VSwing1; 280 uint32_t VSwing2; 281 uint32_t VSwing3; 282 uint32_t VSwing4; 283 uint32_t VSwing5; 284 }; 285 286 static struct ddi_regoff ddi_DP_train_table[] = { 287 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154, 288 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150, 289 .VSwing5 = 0x8158,}, 290 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254, 291 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250, 292 .VSwing5 = 0x8258,}, 293 }; 294 295 static uint32_t dp_vswing_premph_table[] = { 296 0x55338954, 0x4000, 297 0x554d8954, 0x2000, 298 0x55668954, 0, 299 0x559ac0d4, 0x6000, 300 }; 301 /** 302 * is_edp - is the given port attached to an eDP panel (either CPU or PCH) 303 * @encoder: GMA encoder struct 304 * 305 * If a CPU or PCH DP output is attached to an eDP panel, this function 306 * will return true, and false otherwise. 
307 */ 308 static bool is_edp(struct gma_encoder *encoder) 309 { 310 return encoder->type == INTEL_OUTPUT_EDP; 311 } 312 313 314 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder); 315 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder); 316 static void cdv_intel_dp_link_down(struct gma_encoder *encoder); 317 318 static int 319 cdv_intel_dp_max_lane_count(struct gma_encoder *encoder) 320 { 321 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 322 int max_lane_count = 4; 323 324 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) { 325 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f; 326 switch (max_lane_count) { 327 case 1: case 2: case 4: 328 break; 329 default: 330 max_lane_count = 4; 331 } 332 } 333 return max_lane_count; 334 } 335 336 static int 337 cdv_intel_dp_max_link_bw(struct gma_encoder *encoder) 338 { 339 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 340 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE]; 341 342 switch (max_link_bw) { 343 case DP_LINK_BW_1_62: 344 case DP_LINK_BW_2_7: 345 break; 346 default: 347 max_link_bw = DP_LINK_BW_1_62; 348 break; 349 } 350 return max_link_bw; 351 } 352 353 static int 354 cdv_intel_dp_link_clock(uint8_t link_bw) 355 { 356 if (link_bw == DP_LINK_BW_2_7) 357 return 270000; 358 else 359 return 162000; 360 } 361 362 static int 363 cdv_intel_dp_link_required(int pixel_clock, int bpp) 364 { 365 return (pixel_clock * bpp + 7) / 8; 366 } 367 368 static int 369 cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes) 370 { 371 return (max_link_clock * max_lanes * 19) / 20; 372 } 373 374 static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder) 375 { 376 struct drm_device *dev = intel_encoder->base.dev; 377 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 378 u32 pp; 379 380 if (intel_dp->panel_on) { 381 DRM_DEBUG_KMS("Skip VDD on because of panel on\n"); 382 return; 383 } 384 DRM_DEBUG_KMS("\n"); 385 386 pp = REG_READ(PP_CONTROL); 387 388 pp |= EDP_FORCE_VDD; 389 REG_WRITE(PP_CONTROL, pp); 390 REG_READ(PP_CONTROL); 391 msleep(intel_dp->panel_power_up_delay); 392 } 393 394 static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder) 395 { 396 struct drm_device *dev = intel_encoder->base.dev; 397 u32 pp; 398 399 DRM_DEBUG_KMS("\n"); 400 pp = REG_READ(PP_CONTROL); 401 402 pp &= ~EDP_FORCE_VDD; 403 REG_WRITE(PP_CONTROL, pp); 404 REG_READ(PP_CONTROL); 405 406 } 407 408 /* Returns true if the panel was already on when called */ 409 static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder) 410 { 411 struct drm_device *dev = intel_encoder->base.dev; 412 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 413 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE; 414 415 if (intel_dp->panel_on) 416 return true; 417 418 DRM_DEBUG_KMS("\n"); 419 pp = REG_READ(PP_CONTROL); 420 pp &= ~PANEL_UNLOCK_MASK; 421 422 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON); 423 REG_WRITE(PP_CONTROL, pp); 424 REG_READ(PP_CONTROL); 425 426 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) { 427 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS)); 428 intel_dp->panel_on = false; 429 } else 430 intel_dp->panel_on = true; 431 msleep(intel_dp->panel_power_up_delay); 432 433 return false; 434 } 435 436 static void cdv_intel_edp_panel_off (struct gma_encoder *intel_encoder) 437 { 438 struct drm_device *dev = intel_encoder->base.dev; 439 u32 pp, idle_off_mask = PP_ON ; 440 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 441 
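	/* Power the panel down: clear POWER_TARGET_ON, EDP_FORCE_VDD and EDP_BLC_ENABLE, then wait for PP_STATUS to report the panel off before honouring the power cycle delay. */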
442 DRM_DEBUG_KMS("\n"); 443 444 pp = REG_READ(PP_CONTROL); 445 446 if ((pp & POWER_TARGET_ON) == 0) 447 return; 448 449 intel_dp->panel_on = false; 450 pp &= ~PANEL_UNLOCK_MASK; 451 /* ILK workaround: disable reset around power sequence */ 452 453 pp &= ~POWER_TARGET_ON; 454 pp &= ~EDP_FORCE_VDD; 455 pp &= ~EDP_BLC_ENABLE; 456 REG_WRITE(PP_CONTROL, pp); 457 REG_READ(PP_CONTROL); 458 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS)); 459 460 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) { 461 DRM_DEBUG_KMS("Error in turning off Panel\n"); 462 } 463 464 msleep(intel_dp->panel_power_cycle_delay); 465 DRM_DEBUG_KMS("Over\n"); 466 } 467 468 static void cdv_intel_edp_backlight_on (struct gma_encoder *intel_encoder) 469 { 470 struct drm_device *dev = intel_encoder->base.dev; 471 u32 pp; 472 473 DRM_DEBUG_KMS("\n"); 474 /* 475 * If we enable the backlight right away following a panel power 476 * on, we may see slight flicker as the panel syncs with the eDP 477 * link. So delay a bit to make sure the image is solid before 478 * allowing it to appear. 479 */ 480 msleep(300); 481 pp = REG_READ(PP_CONTROL); 482 483 pp |= EDP_BLC_ENABLE; 484 REG_WRITE(PP_CONTROL, pp); 485 gma_backlight_enable(dev); 486 } 487 488 static void cdv_intel_edp_backlight_off (struct gma_encoder *intel_encoder) 489 { 490 struct drm_device *dev = intel_encoder->base.dev; 491 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 492 u32 pp; 493 494 DRM_DEBUG_KMS("\n"); 495 gma_backlight_disable(dev); 496 msleep(10); 497 pp = REG_READ(PP_CONTROL); 498 499 pp &= ~EDP_BLC_ENABLE; 500 REG_WRITE(PP_CONTROL, pp); 501 msleep(intel_dp->backlight_off_delay); 502 } 503 504 static enum drm_mode_status 505 cdv_intel_dp_mode_valid(struct drm_connector *connector, 506 struct drm_display_mode *mode) 507 { 508 struct gma_encoder *encoder = gma_attached_encoder(connector); 509 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 510 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder)); 511 int max_lanes = cdv_intel_dp_max_lane_count(encoder); 512 struct drm_psb_private *dev_priv = to_drm_psb_private(connector->dev); 513 514 if (is_edp(encoder) && intel_dp->panel_fixed_mode) { 515 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay) 516 return MODE_PANEL; 517 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay) 518 return MODE_PANEL; 519 } 520 521 /* only refuse the mode on non eDP since we have seen some weird eDP panels 522 which are outside spec tolerances but somehow work by magic */ 523 if (!is_edp(encoder) && 524 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp) 525 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))) 526 return MODE_CLOCK_HIGH; 527 528 if (is_edp(encoder)) { 529 if (cdv_intel_dp_link_required(mode->clock, 24) 530 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)) 531 return MODE_CLOCK_HIGH; 532 533 } 534 if (mode->clock < 10000) 535 return MODE_CLOCK_LOW; 536 537 return MODE_OK; 538 } 539 540 static uint32_t 541 pack_aux(uint8_t *src, int src_bytes) 542 { 543 int i; 544 uint32_t v = 0; 545 546 if (src_bytes > 4) 547 src_bytes = 4; 548 for (i = 0; i < src_bytes; i++) 549 v |= ((uint32_t) src[i]) << ((3-i) * 8); 550 return v; 551 } 552 553 static void 554 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes) 555 { 556 int i; 557 if (dst_bytes > 4) 558 dst_bytes = 4; 559 for (i = 0; i < dst_bytes; i++) 560 dst[i] = src >> ((3-i) * 8); 561 } 562 563 static int 564 cdv_intel_dp_aux_ch(struct gma_encoder *encoder, 565 uint8_t *send, int 
send_bytes, 566 uint8_t *recv, int recv_size) 567 { 568 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 569 uint32_t output_reg = intel_dp->output_reg; 570 struct drm_device *dev = encoder->base.dev; 571 uint32_t ch_ctl = output_reg + 0x10; 572 uint32_t ch_data = ch_ctl + 4; 573 int i; 574 int recv_bytes; 575 uint32_t status; 576 uint32_t aux_clock_divider; 577 int try, precharge; 578 579 /* The clock divider is based off the hrawclk, 580 * and the AUX channel wants to run at 2MHz, so take the 581 * hrawclk value in MHz and divide it by 2. 582 * The CDV platform uses a 200MHz hrawclk, giving 583 * a divider of 200 / 2 = 100. 584 */ 585 aux_clock_divider = 200 / 2; 586 587 precharge = 4; 588 if (is_edp(encoder)) 589 precharge = 10; 590 591 if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) { 592 DRM_ERROR("dp_aux_ch not started status 0x%08x\n", 593 REG_READ(ch_ctl)); 594 return -EBUSY; 595 } 596 597 /* Must try at least 3 times according to DP spec (we allow up to 5) */ 598 for (try = 0; try < 5; try++) { 599 /* Load the send data into the aux channel data registers */ 600 for (i = 0; i < send_bytes; i += 4) 601 REG_WRITE(ch_data + i, 602 pack_aux(send + i, send_bytes - i)); 603 604 /* Send the command and wait for it to complete */ 605 REG_WRITE(ch_ctl, 606 DP_AUX_CH_CTL_SEND_BUSY | 607 DP_AUX_CH_CTL_TIME_OUT_400us | 608 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) | 609 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) | 610 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) | 611 DP_AUX_CH_CTL_DONE | 612 DP_AUX_CH_CTL_TIME_OUT_ERROR | 613 DP_AUX_CH_CTL_RECEIVE_ERROR); 614 for (;;) { 615 status = REG_READ(ch_ctl); 616 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0) 617 break; 618 udelay(100); 619 } 620 621 /* Clear done status and any errors */ 622 REG_WRITE(ch_ctl, 623 status | 624 DP_AUX_CH_CTL_DONE | 625 DP_AUX_CH_CTL_TIME_OUT_ERROR | 626 DP_AUX_CH_CTL_RECEIVE_ERROR); 627 if (status & DP_AUX_CH_CTL_DONE) 628 break; 629 } 630 631 if ((status & DP_AUX_CH_CTL_DONE) == 0) { 632 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status); 633 return -EBUSY; 634 } 635 636 /* Check for timeout or receive error.
637 * Timeouts occur when the sink is not connected 638 */ 639 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) { 640 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status); 641 return -EIO; 642 } 643 644 /* Timeouts occur when the device isn't connected, so they're 645 * "normal" -- don't fill the kernel log with these */ 646 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) { 647 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status); 648 return -ETIMEDOUT; 649 } 650 651 /* Unload any bytes sent back from the other side */ 652 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >> 653 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT); 654 if (recv_bytes > recv_size) 655 recv_bytes = recv_size; 656 657 for (i = 0; i < recv_bytes; i += 4) 658 unpack_aux(REG_READ(ch_data + i), 659 recv + i, recv_bytes - i); 660 661 return recv_bytes; 662 } 663 664 /* Write data to the aux channel in native mode */ 665 static int 666 cdv_intel_dp_aux_native_write(struct gma_encoder *encoder, 667 uint16_t address, uint8_t *send, int send_bytes) 668 { 669 int ret; 670 uint8_t msg[20]; 671 int msg_bytes; 672 uint8_t ack; 673 674 if (send_bytes > 16) 675 return -1; 676 msg[0] = DP_AUX_NATIVE_WRITE << 4; 677 msg[1] = address >> 8; 678 msg[2] = address & 0xff; 679 msg[3] = send_bytes - 1; 680 memcpy(&msg[4], send, send_bytes); 681 msg_bytes = send_bytes + 4; 682 for (;;) { 683 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1); 684 if (ret < 0) 685 return ret; 686 ack >>= 4; 687 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) 688 break; 689 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER) 690 udelay(100); 691 else 692 return -EIO; 693 } 694 return send_bytes; 695 } 696 697 /* Write a single byte to the aux channel in native mode */ 698 static int 699 cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder, 700 uint16_t address, uint8_t byte) 701 { 702 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1); 703 } 704 705 /* read bytes from a native aux channel */ 706 static int 707 cdv_intel_dp_aux_native_read(struct gma_encoder *encoder, 708 uint16_t address, uint8_t *recv, int recv_bytes) 709 { 710 uint8_t msg[4]; 711 int msg_bytes; 712 uint8_t reply[20]; 713 int reply_bytes; 714 uint8_t ack; 715 int ret; 716 717 msg[0] = DP_AUX_NATIVE_READ << 4; 718 msg[1] = address >> 8; 719 msg[2] = address & 0xff; 720 msg[3] = recv_bytes - 1; 721 722 msg_bytes = 4; 723 reply_bytes = recv_bytes + 1; 724 725 for (;;) { 726 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, 727 reply, reply_bytes); 728 if (ret == 0) 729 return -EPROTO; 730 if (ret < 0) 731 return ret; 732 ack = reply[0] >> 4; 733 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) { 734 memcpy(recv, reply + 1, ret - 1); 735 return ret - 1; 736 } 737 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER) 738 udelay(100); 739 else 740 return -EIO; 741 } 742 } 743 744 static int 745 cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode, 746 uint8_t write_byte, uint8_t *read_byte) 747 { 748 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 749 struct cdv_intel_dp *intel_dp = container_of(adapter, 750 struct cdv_intel_dp, 751 adapter); 752 struct gma_encoder *encoder = intel_dp->encoder; 753 uint16_t address = algo_data->address; 754 uint8_t msg[5]; 755 uint8_t reply[2]; 756 unsigned retry; 757 int msg_bytes; 758 int reply_bytes; 759 int ret; 760 761 /* Set up the command byte */ 762 if (mode & MODE_I2C_READ) 763 msg[0] = DP_AUX_I2C_READ << 4; 764 else 765 msg[0] = 
DP_AUX_I2C_WRITE << 4; 766 767 if (!(mode & MODE_I2C_STOP)) 768 msg[0] |= DP_AUX_I2C_MOT << 4; 769 770 msg[1] = address >> 8; 771 msg[2] = address; 772 773 switch (mode) { 774 case MODE_I2C_WRITE: 775 msg[3] = 0; 776 msg[4] = write_byte; 777 msg_bytes = 5; 778 reply_bytes = 1; 779 break; 780 case MODE_I2C_READ: 781 msg[3] = 0; 782 msg_bytes = 4; 783 reply_bytes = 2; 784 break; 785 default: 786 msg_bytes = 3; 787 reply_bytes = 1; 788 break; 789 } 790 791 for (retry = 0; retry < 5; retry++) { 792 ret = cdv_intel_dp_aux_ch(encoder, 793 msg, msg_bytes, 794 reply, reply_bytes); 795 if (ret < 0) { 796 DRM_DEBUG_KMS("aux_ch failed %d\n", ret); 797 return ret; 798 } 799 800 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) { 801 case DP_AUX_NATIVE_REPLY_ACK: 802 /* I2C-over-AUX Reply field is only valid 803 * when paired with AUX ACK. 804 */ 805 break; 806 case DP_AUX_NATIVE_REPLY_NACK: 807 DRM_DEBUG_KMS("aux_ch native nack\n"); 808 return -EREMOTEIO; 809 case DP_AUX_NATIVE_REPLY_DEFER: 810 udelay(100); 811 continue; 812 default: 813 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", 814 reply[0]); 815 return -EREMOTEIO; 816 } 817 818 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) { 819 case DP_AUX_I2C_REPLY_ACK: 820 if (mode == MODE_I2C_READ) { 821 *read_byte = reply[1]; 822 } 823 return reply_bytes - 1; 824 case DP_AUX_I2C_REPLY_NACK: 825 DRM_DEBUG_KMS("aux_i2c nack\n"); 826 return -EREMOTEIO; 827 case DP_AUX_I2C_REPLY_DEFER: 828 DRM_DEBUG_KMS("aux_i2c defer\n"); 829 udelay(100); 830 break; 831 default: 832 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]); 833 return -EREMOTEIO; 834 } 835 } 836 837 DRM_ERROR("too many retries, giving up\n"); 838 return -EREMOTEIO; 839 } 840 841 static int 842 cdv_intel_dp_i2c_init(struct gma_connector *connector, 843 struct gma_encoder *encoder, const char *name) 844 { 845 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 846 int ret; 847 848 DRM_DEBUG_KMS("i2c_init %s\n", name); 849 850 intel_dp->algo.running = false; 851 intel_dp->algo.address = 0; 852 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch; 853 854 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter)); 855 intel_dp->adapter.owner = THIS_MODULE; 856 intel_dp->adapter.class = I2C_CLASS_DDC; 857 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1); 858 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0'; 859 intel_dp->adapter.algo_data = &intel_dp->algo; 860 intel_dp->adapter.dev.parent = connector->base.kdev; 861 862 if (is_edp(encoder)) 863 cdv_intel_edp_panel_vdd_on(encoder); 864 ret = i2c_dp_aux_add_bus(&intel_dp->adapter); 865 if (is_edp(encoder)) 866 cdv_intel_edp_panel_vdd_off(encoder); 867 868 return ret; 869 } 870 871 static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode, 872 struct drm_display_mode *adjusted_mode) 873 { 874 adjusted_mode->hdisplay = fixed_mode->hdisplay; 875 adjusted_mode->hsync_start = fixed_mode->hsync_start; 876 adjusted_mode->hsync_end = fixed_mode->hsync_end; 877 adjusted_mode->htotal = fixed_mode->htotal; 878 879 adjusted_mode->vdisplay = fixed_mode->vdisplay; 880 adjusted_mode->vsync_start = fixed_mode->vsync_start; 881 adjusted_mode->vsync_end = fixed_mode->vsync_end; 882 adjusted_mode->vtotal = fixed_mode->vtotal; 883 884 adjusted_mode->clock = fixed_mode->clock; 885 886 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V); 887 } 888 889 static bool 890 cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode, 891 struct drm_display_mode *adjusted_mode) 892 
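/* The search below walks lane counts 1/2/4 in increasing order and, for each, the supported link rates from the highest downward, taking the first configuration whose data rate covers the mode; eDP falls back to the maximum configuration if nothing fits. */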
{ 893 struct drm_psb_private *dev_priv = to_drm_psb_private(encoder->dev); 894 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 895 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 896 int lane_count, clock; 897 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder); 898 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0; 899 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; 900 int refclock = mode->clock; 901 int bpp = 24; 902 903 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) { 904 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode); 905 refclock = intel_dp->panel_fixed_mode->clock; 906 bpp = dev_priv->edp.bpp; 907 } 908 909 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) { 910 for (clock = max_clock; clock >= 0; clock--) { 911 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count); 912 913 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) { 914 intel_dp->link_bw = bws[clock]; 915 intel_dp->lane_count = lane_count; 916 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw); 917 DRM_DEBUG_KMS("Display port link bw %02x lane " 918 "count %d clock %d\n", 919 intel_dp->link_bw, intel_dp->lane_count, 920 adjusted_mode->clock); 921 return true; 922 } 923 } 924 } 925 if (is_edp(intel_encoder)) { 926 /* okay we failed just pick the highest */ 927 intel_dp->lane_count = max_lane_count; 928 intel_dp->link_bw = bws[max_clock]; 929 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw); 930 DRM_DEBUG_KMS("Force picking display port link bw %02x lane " 931 "count %d clock %d\n", 932 intel_dp->link_bw, intel_dp->lane_count, 933 adjusted_mode->clock); 934 935 return true; 936 } 937 return false; 938 } 939 940 struct cdv_intel_dp_m_n { 941 uint32_t tu; 942 uint32_t gmch_m; 943 uint32_t gmch_n; 944 uint32_t link_m; 945 uint32_t link_n; 946 }; 947 948 static void 949 cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den) 950 { 951 /* 952 while (*num > 0xffffff || *den > 0xffffff) { 953 *num >>= 1; 954 *den >>= 1; 955 }*/ 956 uint64_t value, m; 957 m = *num; 958 value = m * (0x800000); 959 m = do_div(value, *den); 960 *num = value; 961 *den = 0x800000; 962 } 963 964 static void 965 cdv_intel_dp_compute_m_n(int bpp, 966 int nlanes, 967 int pixel_clock, 968 int link_clock, 969 struct cdv_intel_dp_m_n *m_n) 970 { 971 m_n->tu = 64; 972 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3; 973 m_n->gmch_n = link_clock * nlanes; 974 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n); 975 m_n->link_m = pixel_clock; 976 m_n->link_n = link_clock; 977 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n); 978 } 979 980 void 981 cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, 982 struct drm_display_mode *adjusted_mode) 983 { 984 struct drm_device *dev = crtc->dev; 985 struct drm_psb_private *dev_priv = to_drm_psb_private(dev); 986 struct drm_mode_config *mode_config = &dev->mode_config; 987 struct drm_encoder *encoder; 988 struct gma_crtc *gma_crtc = to_gma_crtc(crtc); 989 int lane_count = 4, bpp = 24; 990 struct cdv_intel_dp_m_n m_n; 991 int pipe = gma_crtc->pipe; 992 993 /* 994 * Find the lane count in the intel_encoder private 995 */ 996 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 997 struct gma_encoder *intel_encoder; 998 struct cdv_intel_dp *intel_dp; 999 1000 if (encoder->crtc != crtc) 1001 continue; 1002 1003 intel_encoder = to_gma_encoder(encoder); 1004 intel_dp = intel_encoder->dev_priv; 1005 
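		/* DP uses the lane count negotiated at mode fixup; eDP additionally takes the panel bpp from the VBT. */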
if (intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) { 1006 lane_count = intel_dp->lane_count; 1007 break; 1008 } else if (is_edp(intel_encoder)) { 1009 lane_count = intel_dp->lane_count; 1010 bpp = dev_priv->edp.bpp; 1011 break; 1012 } 1013 } 1014 1015 /* 1016 * Compute the GMCH and Link ratios. The '3' here is 1017 * the number of bytes_per_pixel post-LUT, which we always 1018 * set up for 8-bits of R/G/B, or 3 bytes total. 1019 */ 1020 cdv_intel_dp_compute_m_n(bpp, lane_count, 1021 mode->clock, adjusted_mode->clock, &m_n); 1022 1023 { 1024 REG_WRITE(PIPE_GMCH_DATA_M(pipe), 1025 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 1026 m_n.gmch_m); 1027 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); 1028 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); 1029 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); 1030 } 1031 } 1032 1033 static void 1034 cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 1035 struct drm_display_mode *adjusted_mode) 1036 { 1037 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1038 struct drm_crtc *crtc = encoder->crtc; 1039 struct gma_crtc *gma_crtc = to_gma_crtc(crtc); 1040 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 1041 struct drm_device *dev = encoder->dev; 1042 1043 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 1044 intel_dp->DP |= intel_dp->color_range; 1045 1046 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 1047 intel_dp->DP |= DP_SYNC_HS_HIGH; 1048 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 1049 intel_dp->DP |= DP_SYNC_VS_HIGH; 1050 1051 intel_dp->DP |= DP_LINK_TRAIN_OFF; 1052 1053 switch (intel_dp->lane_count) { 1054 case 1: 1055 intel_dp->DP |= DP_PORT_WIDTH_1; 1056 break; 1057 case 2: 1058 intel_dp->DP |= DP_PORT_WIDTH_2; 1059 break; 1060 case 4: 1061 intel_dp->DP |= DP_PORT_WIDTH_4; 1062 break; 1063 } 1064 if (intel_dp->has_audio) 1065 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; 1066 1067 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 1068 intel_dp->link_configuration[0] = intel_dp->link_bw; 1069 intel_dp->link_configuration[1] = intel_dp->lane_count; 1070 1071 /* 1072 * Check for DPCD version > 1.1 and enhanced framing support 1073 */ 1074 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 1075 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { 1076 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 1077 intel_dp->DP |= DP_ENHANCED_FRAMING; 1078 } 1079 1080 /* CPT DP's pipe select is decided in TRANS_DP_CTL */ 1081 if (gma_crtc->pipe == 1) 1082 intel_dp->DP |= DP_PIPEB_SELECT; 1083 1084 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN)); 1085 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP); 1086 if (is_edp(intel_encoder)) { 1087 uint32_t pfit_control; 1088 cdv_intel_edp_panel_on(intel_encoder); 1089 1090 if (mode->hdisplay != adjusted_mode->hdisplay || 1091 mode->vdisplay != adjusted_mode->vdisplay) 1092 pfit_control = PFIT_ENABLE; 1093 else 1094 pfit_control = 0; 1095 1096 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT; 1097 1098 REG_WRITE(PFIT_CONTROL, pfit_control); 1099 } 1100 } 1101 1102 1103 /* If the sink supports it, try to set the power state appropriately */ 1104 static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode) 1105 { 1106 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1107 int ret, i; 1108 1109 /* Should have a valid DPCD by this point */ 1110 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11) 1111 return; 1112 1113 if (mode != DRM_MODE_DPMS_ON) { 1114 ret = cdv_intel_dp_aux_native_write_1(encoder, 
DP_SET_POWER, 1115 DP_SET_POWER_D3); 1116 if (ret != 1) 1117 DRM_DEBUG_DRIVER("failed to write sink power state\n"); 1118 } else { 1119 /* 1120 * When turning on, we need to retry for 1ms to give the sink 1121 * time to wake up. 1122 */ 1123 for (i = 0; i < 3; i++) { 1124 ret = cdv_intel_dp_aux_native_write_1(encoder, 1125 DP_SET_POWER, 1126 DP_SET_POWER_D0); 1127 if (ret == 1) 1128 break; 1129 udelay(1000); 1130 } 1131 } 1132 } 1133 1134 static void cdv_intel_dp_prepare(struct drm_encoder *encoder) 1135 { 1136 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1137 int edp = is_edp(intel_encoder); 1138 1139 if (edp) { 1140 cdv_intel_edp_backlight_off(intel_encoder); 1141 cdv_intel_edp_panel_off(intel_encoder); 1142 cdv_intel_edp_panel_vdd_on(intel_encoder); 1143 } 1144 /* Wake up the sink first */ 1145 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON); 1146 cdv_intel_dp_link_down(intel_encoder); 1147 if (edp) 1148 cdv_intel_edp_panel_vdd_off(intel_encoder); 1149 } 1150 1151 static void cdv_intel_dp_commit(struct drm_encoder *encoder) 1152 { 1153 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1154 int edp = is_edp(intel_encoder); 1155 1156 if (edp) 1157 cdv_intel_edp_panel_on(intel_encoder); 1158 cdv_intel_dp_start_link_train(intel_encoder); 1159 cdv_intel_dp_complete_link_train(intel_encoder); 1160 if (edp) 1161 cdv_intel_edp_backlight_on(intel_encoder); 1162 } 1163 1164 static void 1165 cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode) 1166 { 1167 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1168 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 1169 struct drm_device *dev = encoder->dev; 1170 uint32_t dp_reg = REG_READ(intel_dp->output_reg); 1171 int edp = is_edp(intel_encoder); 1172 1173 if (mode != DRM_MODE_DPMS_ON) { 1174 if (edp) { 1175 cdv_intel_edp_backlight_off(intel_encoder); 1176 cdv_intel_edp_panel_vdd_on(intel_encoder); 1177 } 1178 cdv_intel_dp_sink_dpms(intel_encoder, mode); 1179 cdv_intel_dp_link_down(intel_encoder); 1180 if (edp) { 1181 cdv_intel_edp_panel_vdd_off(intel_encoder); 1182 cdv_intel_edp_panel_off(intel_encoder); 1183 } 1184 } else { 1185 if (edp) 1186 cdv_intel_edp_panel_on(intel_encoder); 1187 cdv_intel_dp_sink_dpms(intel_encoder, mode); 1188 if (!(dp_reg & DP_PORT_EN)) { 1189 cdv_intel_dp_start_link_train(intel_encoder); 1190 cdv_intel_dp_complete_link_train(intel_encoder); 1191 } 1192 if (edp) 1193 cdv_intel_edp_backlight_on(intel_encoder); 1194 } 1195 } 1196 1197 /* 1198 * Native read with retry for link status and receiver capability reads for 1199 * cases where the sink may still be asleep. 1200 */ 1201 static bool 1202 cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address, 1203 uint8_t *recv, int recv_bytes) 1204 { 1205 int ret, i; 1206 1207 /* 1208 * Sinks are *supposed* to come up within 1ms from an off state, 1209 * but we're also supposed to retry 3 times per the spec. 
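 * (Hence the loop below: three read attempts with a 1ms delay between them.)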
1210 */ 1211 for (i = 0; i < 3; i++) { 1212 ret = cdv_intel_dp_aux_native_read(encoder, address, recv, 1213 recv_bytes); 1214 if (ret == recv_bytes) 1215 return true; 1216 udelay(1000); 1217 } 1218 1219 return false; 1220 } 1221 1222 /* 1223 * Fetch AUX CH registers 0x202 - 0x207 which contain 1224 * link status information 1225 */ 1226 static bool 1227 cdv_intel_dp_get_link_status(struct gma_encoder *encoder) 1228 { 1229 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1230 return cdv_intel_dp_aux_native_read_retry(encoder, 1231 DP_LANE0_1_STATUS, 1232 intel_dp->link_status, 1233 DP_LINK_STATUS_SIZE); 1234 } 1235 1236 static uint8_t 1237 cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1238 int r) 1239 { 1240 return link_status[r - DP_LANE0_1_STATUS]; 1241 } 1242 1243 static uint8_t 1244 cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE], 1245 int lane) 1246 { 1247 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1); 1248 int s = ((lane & 1) ? 1249 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 1250 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT); 1251 uint8_t l = cdv_intel_dp_link_status(link_status, i); 1252 1253 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT; 1254 } 1255 1256 static uint8_t 1257 cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE], 1258 int lane) 1259 { 1260 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1); 1261 int s = ((lane & 1) ? 1262 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT : 1263 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT); 1264 uint8_t l = cdv_intel_dp_link_status(link_status, i); 1265 1266 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1267 } 1268 1269 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3 1270 1271 static void 1272 cdv_intel_get_adjust_train(struct gma_encoder *encoder) 1273 { 1274 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1275 uint8_t v = 0; 1276 uint8_t p = 0; 1277 int lane; 1278 1279 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1280 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane); 1281 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane); 1282 1283 if (this_v > v) 1284 v = this_v; 1285 if (this_p > p) 1286 p = this_p; 1287 } 1288 1289 if (v >= CDV_DP_VOLTAGE_MAX) 1290 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED; 1291 1292 if (p == DP_TRAIN_PRE_EMPHASIS_MASK) 1293 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1294 1295 for (lane = 0; lane < 4; lane++) 1296 intel_dp->train_set[lane] = v | p; 1297 } 1298 1299 1300 static uint8_t 1301 cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1302 int lane) 1303 { 1304 int i = DP_LANE0_1_STATUS + (lane >> 1); 1305 int s = (lane & 1) * 4; 1306 uint8_t l = cdv_intel_dp_link_status(link_status, i); 1307 1308 return (l >> s) & 0xf; 1309 } 1310 1311 /* Check for clock recovery is done on all channels */ 1312 static bool 1313 cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count) 1314 { 1315 int lane; 1316 uint8_t lane_status; 1317 1318 for (lane = 0; lane < lane_count; lane++) { 1319 lane_status = cdv_intel_get_lane_status(link_status, lane); 1320 if ((lane_status & DP_LANE_CR_DONE) == 0) 1321 return false; 1322 } 1323 return true; 1324 } 1325 1326 /* Check to see if channel eq is done on all channels */ 1327 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\ 1328 DP_LANE_CHANNEL_EQ_DONE|\ 1329 DP_LANE_SYMBOL_LOCKED) 1330 static bool 1331 cdv_intel_channel_eq_ok(struct gma_encoder *encoder) 1332 { 1333 struct cdv_intel_dp 
*intel_dp = encoder->dev_priv; 1334 uint8_t lane_align; 1335 uint8_t lane_status; 1336 int lane; 1337 1338 lane_align = cdv_intel_dp_link_status(intel_dp->link_status, 1339 DP_LANE_ALIGN_STATUS_UPDATED); 1340 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0) 1341 return false; 1342 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1343 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane); 1344 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS) 1345 return false; 1346 } 1347 return true; 1348 } 1349 1350 static bool 1351 cdv_intel_dp_set_link_train(struct gma_encoder *encoder, 1352 uint32_t dp_reg_value, 1353 uint8_t dp_train_pat) 1354 { 1355 struct drm_device *dev = encoder->base.dev; 1356 int ret; 1357 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1358 1359 REG_WRITE(intel_dp->output_reg, dp_reg_value); 1360 REG_READ(intel_dp->output_reg); 1361 1362 ret = cdv_intel_dp_aux_native_write_1(encoder, 1363 DP_TRAINING_PATTERN_SET, 1364 dp_train_pat); 1365 1366 if (ret != 1) { 1367 DRM_DEBUG_KMS("Failure in setting link pattern %x\n", 1368 dp_train_pat); 1369 return false; 1370 } 1371 1372 return true; 1373 } 1374 1375 1376 static bool 1377 cdv_intel_dplink_set_level(struct gma_encoder *encoder, 1378 uint8_t dp_train_pat) 1379 { 1380 int ret; 1381 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1382 1383 ret = cdv_intel_dp_aux_native_write(encoder, 1384 DP_TRAINING_LANE0_SET, 1385 intel_dp->train_set, 1386 intel_dp->lane_count); 1387 1388 if (ret != intel_dp->lane_count) { 1389 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n", 1390 intel_dp->train_set[0], intel_dp->lane_count); 1391 return false; 1392 } 1393 return true; 1394 } 1395 1396 static void 1397 cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level) 1398 { 1399 struct drm_device *dev = encoder->base.dev; 1400 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1401 struct ddi_regoff *ddi_reg; 1402 int vswing, premph, index; 1403 1404 if (intel_dp->output_reg == DP_B) 1405 ddi_reg = &ddi_DP_train_table[0]; 1406 else 1407 ddi_reg = &ddi_DP_train_table[1]; 1408 1409 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK); 1410 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >> 1411 DP_TRAIN_PRE_EMPHASIS_SHIFT; 1412 1413 if (vswing + premph > 3) 1414 return; 1415 #ifdef CDV_FAST_LINK_TRAIN 1416 return; 1417 #endif 1418 DRM_DEBUG_KMS("Test2\n"); 1419 //return ; 1420 cdv_sb_reset(dev); 1421 /* ;Swing voltage programming 1422 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */ 1423 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A); 1424 1425 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */ 1426 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055); 1427 1428 /* ;gfx_dpio_set_reg(0x8148, 0x55338954) 1429 * The VSwing_PreEmph table is also considered based on the vswing/premp 1430 */ 1431 index = (vswing + premph) * 2; 1432 if (premph == 1 && vswing == 1) { 1433 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954); 1434 } else 1435 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]); 1436 1437 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */ 1438 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3) 1439 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040); 1440 else 1441 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040); 1442 1443 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */ 1444 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */ 1445 1446 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */ 1447 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055); 1448 1449 /* ;Pre emphasis programming 1450 * 
;gfx_dpio_set_reg(0xc02c, 0x1f030040) 1451 */ 1452 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040); 1453 1454 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */ 1455 index = 2 * premph + 1; 1456 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]); 1457 return; 1458 } 1459 1460 1461 /* Enable corresponding port and start training pattern 1 */ 1462 static void 1463 cdv_intel_dp_start_link_train(struct gma_encoder *encoder) 1464 { 1465 struct drm_device *dev = encoder->base.dev; 1466 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1467 int i; 1468 uint8_t voltage; 1469 bool clock_recovery = false; 1470 int tries; 1471 u32 reg; 1472 uint32_t DP = intel_dp->DP; 1473 1474 DP |= DP_PORT_EN; 1475 DP &= ~DP_LINK_TRAIN_MASK; 1476 1477 reg = DP; 1478 reg |= DP_LINK_TRAIN_PAT_1; 1479 /* Enable output, wait for it to become active */ 1480 REG_WRITE(intel_dp->output_reg, reg); 1481 REG_READ(intel_dp->output_reg); 1482 gma_wait_for_vblank(dev); 1483 1484 DRM_DEBUG_KMS("Link config\n"); 1485 /* Write the link configuration data */ 1486 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET, 1487 intel_dp->link_configuration, 1488 2); 1489 1490 memset(intel_dp->train_set, 0, 4); 1491 voltage = 0; 1492 tries = 0; 1493 clock_recovery = false; 1494 1495 DRM_DEBUG_KMS("Start train\n"); 1496 reg = DP | DP_LINK_TRAIN_PAT_1; 1497 1498 for (;;) { 1499 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1500 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n", 1501 intel_dp->train_set[0], 1502 intel_dp->link_configuration[0], 1503 intel_dp->link_configuration[1]); 1504 1505 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) { 1506 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n"); 1507 } 1508 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]); 1509 /* Set training pattern 1 */ 1510 1511 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1); 1512 1513 udelay(200); 1514 if (!cdv_intel_dp_get_link_status(encoder)) 1515 break; 1516 1517 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n", 1518 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2], 1519 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]); 1520 1521 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) { 1522 DRM_DEBUG_KMS("PT1 train is done\n"); 1523 clock_recovery = true; 1524 break; 1525 } 1526 1527 /* Check to see if we've tried the max voltage */ 1528 for (i = 0; i < intel_dp->lane_count; i++) 1529 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 1530 break; 1531 if (i == intel_dp->lane_count) 1532 break; 1533 1534 /* Check to see if we've tried the same voltage 5 times */ 1535 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) { 1536 ++tries; 1537 if (tries == 5) 1538 break; 1539 } else 1540 tries = 0; 1541 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 1542 1543 /* Compute new intel_dp->train_set as requested by target */ 1544 cdv_intel_get_adjust_train(encoder); 1545 1546 } 1547 1548 if (!clock_recovery) { 1549 DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]); 1550 } 1551 1552 intel_dp->DP = DP; 1553 } 1554 1555 static void 1556 cdv_intel_dp_complete_link_train(struct gma_encoder *encoder) 1557 { 1558 struct drm_device *dev = encoder->base.dev; 1559 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1560 int tries, cr_tries; 1561 u32 reg; 1562 uint32_t DP = intel_dp->DP; 1563 1564 /* 
channel equalization */ 1565 tries = 0; 1566 cr_tries = 0; 1567 1568 DRM_DEBUG_KMS("\n"); 1569 reg = DP | DP_LINK_TRAIN_PAT_2; 1570 1571 for (;;) { 1572 1573 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n", 1574 intel_dp->train_set[0], 1575 intel_dp->link_configuration[0], 1576 intel_dp->link_configuration[1]); 1577 /* channel eq pattern */ 1578 1579 if (!cdv_intel_dp_set_link_train(encoder, reg, 1580 DP_TRAINING_PATTERN_2)) { 1581 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n"); 1582 } 1583 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1584 1585 if (cr_tries > 5) { 1586 DRM_ERROR("failed to train DP, aborting\n"); 1587 cdv_intel_dp_link_down(encoder); 1588 break; 1589 } 1590 1591 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]); 1592 1593 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2); 1594 1595 udelay(1000); 1596 if (!cdv_intel_dp_get_link_status(encoder)) 1597 break; 1598 1599 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n", 1600 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2], 1601 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]); 1602 1603 /* Make sure clock is still ok */ 1604 if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) { 1605 cdv_intel_dp_start_link_train(encoder); 1606 cr_tries++; 1607 continue; 1608 } 1609 1610 if (cdv_intel_channel_eq_ok(encoder)) { 1611 DRM_DEBUG_KMS("PT2 train is done\n"); 1612 break; 1613 } 1614 1615 /* Try 5 times, then try clock recovery if that fails */ 1616 if (tries > 5) { 1617 cdv_intel_dp_link_down(encoder); 1618 cdv_intel_dp_start_link_train(encoder); 1619 tries = 0; 1620 cr_tries++; 1621 continue; 1622 } 1623 1624 /* Compute new intel_dp->train_set as requested by target */ 1625 cdv_intel_get_adjust_train(encoder); 1626 ++tries; 1627 1628 } 1629 1630 reg = DP | DP_LINK_TRAIN_OFF; 1631 1632 REG_WRITE(intel_dp->output_reg, reg); 1633 REG_READ(intel_dp->output_reg); 1634 cdv_intel_dp_aux_native_write_1(encoder, 1635 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE); 1636 } 1637 1638 static void 1639 cdv_intel_dp_link_down(struct gma_encoder *encoder) 1640 { 1641 struct drm_device *dev = encoder->base.dev; 1642 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1643 uint32_t DP = intel_dp->DP; 1644 1645 if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0) 1646 return; 1647 1648 DRM_DEBUG_KMS("\n"); 1649 1650 1651 { 1652 DP &= ~DP_LINK_TRAIN_MASK; 1653 REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE); 1654 } 1655 REG_READ(intel_dp->output_reg); 1656 1657 msleep(17); 1658 1659 REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN); 1660 REG_READ(intel_dp->output_reg); 1661 } 1662 1663 static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder) 1664 { 1665 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1666 enum drm_connector_status status; 1667 1668 status = connector_status_disconnected; 1669 if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd, 1670 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd)) 1671 { 1672 if (intel_dp->dpcd[DP_DPCD_REV] != 0) 1673 status = connector_status_connected; 1674 } 1675 if (status == connector_status_connected) 1676 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n", 1677 intel_dp->dpcd[0], intel_dp->dpcd[1], 1678 intel_dp->dpcd[2], intel_dp->dpcd[3]); 1679 return status; 1680 } 1681 1682 /* 1683 * Detects the DP/eDP connection by reading the sink's DPCD over the AUX channel.
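 * For eDP, panel VDD is forced on around the DPCD read so that a powered-off panel can still respond.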
1684 * 1685 * \return true if DP port is connected. 1686 * \return false if DP port is disconnected. 1687 */ 1688 static enum drm_connector_status 1689 cdv_intel_dp_detect(struct drm_connector *connector, bool force) 1690 { 1691 struct gma_encoder *encoder = gma_attached_encoder(connector); 1692 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1693 enum drm_connector_status status; 1694 struct edid *edid = NULL; 1695 int edp = is_edp(encoder); 1696 1697 intel_dp->has_audio = false; 1698 1699 if (edp) 1700 cdv_intel_edp_panel_vdd_on(encoder); 1701 status = cdv_dp_detect(encoder); 1702 if (status != connector_status_connected) { 1703 if (edp) 1704 cdv_intel_edp_panel_vdd_off(encoder); 1705 return status; 1706 } 1707 1708 if (intel_dp->force_audio) { 1709 intel_dp->has_audio = intel_dp->force_audio > 0; 1710 } else { 1711 edid = drm_get_edid(connector, &intel_dp->adapter); 1712 if (edid) { 1713 intel_dp->has_audio = drm_detect_monitor_audio(edid); 1714 kfree(edid); 1715 } 1716 } 1717 if (edp) 1718 cdv_intel_edp_panel_vdd_off(encoder); 1719 1720 return connector_status_connected; 1721 } 1722 1723 static int cdv_intel_dp_get_modes(struct drm_connector *connector) 1724 { 1725 struct gma_encoder *intel_encoder = gma_attached_encoder(connector); 1726 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 1727 struct edid *edid = NULL; 1728 int ret = 0; 1729 int edp = is_edp(intel_encoder); 1730 1731 1732 edid = drm_get_edid(connector, &intel_dp->adapter); 1733 if (edid) { 1734 drm_connector_update_edid_property(connector, edid); 1735 ret = drm_add_edid_modes(connector, edid); 1736 kfree(edid); 1737 } 1738 1739 if (is_edp(intel_encoder)) { 1740 struct drm_device *dev = connector->dev; 1741 struct drm_psb_private *dev_priv = to_drm_psb_private(dev); 1742 1743 cdv_intel_edp_panel_vdd_off(intel_encoder); 1744 if (ret) { 1745 if (edp && !intel_dp->panel_fixed_mode) { 1746 struct drm_display_mode *newmode; 1747 list_for_each_entry(newmode, &connector->probed_modes, 1748 head) { 1749 if (newmode->type & DRM_MODE_TYPE_PREFERRED) { 1750 intel_dp->panel_fixed_mode = 1751 drm_mode_duplicate(dev, newmode); 1752 break; 1753 } 1754 } 1755 } 1756 1757 return ret; 1758 } 1759 if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) { 1760 intel_dp->panel_fixed_mode = 1761 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode); 1762 if (intel_dp->panel_fixed_mode) { 1763 intel_dp->panel_fixed_mode->type |= 1764 DRM_MODE_TYPE_PREFERRED; 1765 } 1766 } 1767 if (intel_dp->panel_fixed_mode != NULL) { 1768 struct drm_display_mode *mode; 1769 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode); 1770 drm_mode_probed_add(connector, mode); 1771 return 1; 1772 } 1773 } 1774 1775 return ret; 1776 } 1777 1778 static bool 1779 cdv_intel_dp_detect_audio(struct drm_connector *connector) 1780 { 1781 struct gma_encoder *encoder = gma_attached_encoder(connector); 1782 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1783 struct edid *edid; 1784 bool has_audio = false; 1785 int edp = is_edp(encoder); 1786 1787 if (edp) 1788 cdv_intel_edp_panel_vdd_on(encoder); 1789 1790 edid = drm_get_edid(connector, &intel_dp->adapter); 1791 if (edid) { 1792 has_audio = drm_detect_monitor_audio(edid); 1793 kfree(edid); 1794 } 1795 if (edp) 1796 cdv_intel_edp_panel_vdd_off(encoder); 1797 1798 return has_audio; 1799 } 1800 1801 static int 1802 cdv_intel_dp_set_property(struct drm_connector *connector, 1803 struct drm_property *property, 1804 uint64_t val) 1805 { 1806 struct drm_psb_private *dev_priv = 
to_drm_psb_private(connector->dev); 1807 struct gma_encoder *encoder = gma_attached_encoder(connector); 1808 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1809 int ret; 1810 1811 ret = drm_object_property_set_value(&connector->base, property, val); 1812 if (ret) 1813 return ret; 1814 1815 if (property == dev_priv->force_audio_property) { 1816 int i = val; 1817 bool has_audio; 1818 1819 if (i == intel_dp->force_audio) 1820 return 0; 1821 1822 intel_dp->force_audio = i; 1823 1824 if (i == 0) 1825 has_audio = cdv_intel_dp_detect_audio(connector); 1826 else 1827 has_audio = i > 0; 1828 1829 if (has_audio == intel_dp->has_audio) 1830 return 0; 1831 1832 intel_dp->has_audio = has_audio; 1833 goto done; 1834 } 1835 1836 if (property == dev_priv->broadcast_rgb_property) { 1837 if (val == !!intel_dp->color_range) 1838 return 0; 1839 1840 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0; 1841 goto done; 1842 } 1843 1844 return -EINVAL; 1845 1846 done: 1847 if (encoder->base.crtc) { 1848 struct drm_crtc *crtc = encoder->base.crtc; 1849 drm_crtc_helper_set_mode(crtc, &crtc->mode, 1850 crtc->x, crtc->y, 1851 crtc->primary->fb); 1852 } 1853 1854 return 0; 1855 } 1856 1857 static void 1858 cdv_intel_dp_destroy(struct drm_connector *connector) 1859 { 1860 struct gma_connector *gma_connector = to_gma_connector(connector); 1861 struct gma_encoder *gma_encoder = gma_attached_encoder(connector); 1862 struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv; 1863 1864 if (is_edp(gma_encoder)) { 1865 /* cdv_intel_panel_destroy_backlight(connector->dev); */ 1866 kfree(intel_dp->panel_fixed_mode); 1867 intel_dp->panel_fixed_mode = NULL; 1868 } 1869 i2c_del_adapter(&intel_dp->adapter); 1870 drm_connector_cleanup(connector); 1871 kfree(gma_connector); 1872 } 1873 1874 static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = { 1875 .dpms = cdv_intel_dp_dpms, 1876 .mode_fixup = cdv_intel_dp_mode_fixup, 1877 .prepare = cdv_intel_dp_prepare, 1878 .mode_set = cdv_intel_dp_mode_set, 1879 .commit = cdv_intel_dp_commit, 1880 }; 1881 1882 static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = { 1883 .dpms = drm_helper_connector_dpms, 1884 .detect = cdv_intel_dp_detect, 1885 .fill_modes = drm_helper_probe_single_connector_modes, 1886 .set_property = cdv_intel_dp_set_property, 1887 .destroy = cdv_intel_dp_destroy, 1888 }; 1889 1890 static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = { 1891 .get_modes = cdv_intel_dp_get_modes, 1892 .mode_valid = cdv_intel_dp_mode_valid, 1893 .best_encoder = gma_best_encoder, 1894 }; 1895 1896 static void cdv_intel_dp_add_properties(struct drm_connector *connector) 1897 { 1898 cdv_intel_attach_force_audio_property(connector); 1899 cdv_intel_attach_broadcast_rgb_property(connector); 1900 } 1901 1902 /* check the VBT to see whether the eDP is on the DP-C port */ 1903 static bool cdv_intel_dpc_is_edp(struct drm_device *dev) 1904 { 1905 struct drm_psb_private *dev_priv = to_drm_psb_private(dev); 1906 struct child_device_config *p_child; 1907 int i; 1908 1909 if (!dev_priv->child_dev_num) 1910 return false; 1911 1912 for (i = 0; i < dev_priv->child_dev_num; i++) { 1913 p_child = dev_priv->child_dev + i; 1914 1915 if (p_child->dvo_port == PORT_IDPC && 1916 p_child->device_type == DEVICE_TYPE_eDP) 1917 return true; 1918 } 1919 return false; 1920 } 1921 1922 /* Cedarview display clock gating 1923 1924 We need to disable this to get correct behaviour while enabling 1925 DP/eDP.
TODO - investigate if we can turn it back to normality 1926 after enabling */ 1927 static void cdv_disable_intel_clock_gating(struct drm_device *dev) 1928 { 1929 u32 reg_value; 1930 reg_value = REG_READ(DSPCLK_GATE_D); 1931 1932 reg_value |= (DPUNIT_PIPEB_GATE_DISABLE | 1933 DPUNIT_PIPEA_GATE_DISABLE | 1934 DPCUNIT_CLOCK_GATE_DISABLE | 1935 DPLSUNIT_CLOCK_GATE_DISABLE | 1936 DPOUNIT_CLOCK_GATE_DISABLE | 1937 DPIOUNIT_CLOCK_GATE_DISABLE); 1938 1939 REG_WRITE(DSPCLK_GATE_D, reg_value); 1940 1941 udelay(500); 1942 } 1943 1944 void 1945 cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg) 1946 { 1947 struct gma_encoder *gma_encoder; 1948 struct gma_connector *gma_connector; 1949 struct drm_connector *connector; 1950 struct drm_encoder *encoder; 1951 struct cdv_intel_dp *intel_dp; 1952 const char *name = NULL; 1953 int type = DRM_MODE_CONNECTOR_DisplayPort; 1954 1955 gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL); 1956 if (!gma_encoder) 1957 return; 1958 gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL); 1959 if (!gma_connector) 1960 goto err_connector; 1961 intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL); 1962 if (!intel_dp) 1963 goto err_priv; 1964 1965 if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev)) 1966 type = DRM_MODE_CONNECTOR_eDP; 1967 1968 connector = &gma_connector->base; 1969 encoder = &gma_encoder->base; 1970 1971 drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type); 1972 drm_simple_encoder_init(dev, encoder, DRM_MODE_ENCODER_TMDS); 1973 1974 gma_connector_attach_encoder(gma_connector, gma_encoder); 1975 1976 if (type == DRM_MODE_CONNECTOR_DisplayPort) 1977 gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT; 1978 else 1979 gma_encoder->type = INTEL_OUTPUT_EDP; 1980 1981 1982 gma_encoder->dev_priv=intel_dp; 1983 intel_dp->encoder = gma_encoder; 1984 intel_dp->output_reg = output_reg; 1985 1986 drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs); 1987 drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs); 1988 1989 connector->polled = DRM_CONNECTOR_POLL_HPD; 1990 connector->interlace_allowed = false; 1991 connector->doublescan_allowed = false; 1992 1993 /* Set up the DDC bus. 
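 * The adapter name and DDI select value below depend on which DP port register (DP_B or DP_C) this encoder drives.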
*/ 1994 switch (output_reg) { 1995 case DP_B: 1996 name = "DPDDC-B"; 1997 gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT); 1998 break; 1999 case DP_C: 2000 name = "DPDDC-C"; 2001 gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT); 2002 break; 2003 } 2004 2005 cdv_disable_intel_clock_gating(dev); 2006 2007 cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name); 2008 /* FIXME: check for failure */ 2009 cdv_intel_dp_add_properties(connector); 2010 2011 if (is_edp(gma_encoder)) { 2012 int ret; 2013 struct edp_power_seq cur; 2014 u32 pp_on, pp_off, pp_div; 2015 u32 pwm_ctrl; 2016 2017 pp_on = REG_READ(PP_CONTROL); 2018 pp_on &= ~PANEL_UNLOCK_MASK; 2019 pp_on |= PANEL_UNLOCK_REGS; 2020 2021 REG_WRITE(PP_CONTROL, pp_on); 2022 2023 pwm_ctrl = REG_READ(BLC_PWM_CTL2); 2024 pwm_ctrl |= PWM_PIPE_B; 2025 REG_WRITE(BLC_PWM_CTL2, pwm_ctrl); 2026 2027 pp_on = REG_READ(PP_ON_DELAYS); 2028 pp_off = REG_READ(PP_OFF_DELAYS); 2029 pp_div = REG_READ(PP_DIVISOR); 2030 2031 /* Pull timing values out of registers */ 2032 cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >> 2033 PANEL_POWER_UP_DELAY_SHIFT; 2034 2035 cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >> 2036 PANEL_LIGHT_ON_DELAY_SHIFT; 2037 2038 cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >> 2039 PANEL_LIGHT_OFF_DELAY_SHIFT; 2040 2041 cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >> 2042 PANEL_POWER_DOWN_DELAY_SHIFT; 2043 2044 cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >> 2045 PANEL_POWER_CYCLE_DELAY_SHIFT); 2046 2047 DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n", 2048 cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12); 2049 2050 2051 intel_dp->panel_power_up_delay = cur.t1_t3 / 10; 2052 intel_dp->backlight_on_delay = cur.t8 / 10; 2053 intel_dp->backlight_off_delay = cur.t9 / 10; 2054 intel_dp->panel_power_down_delay = cur.t10 / 10; 2055 intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100; 2056 2057 DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n", 2058 intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay, 2059 intel_dp->panel_power_cycle_delay); 2060 2061 DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n", 2062 intel_dp->backlight_on_delay, intel_dp->backlight_off_delay); 2063 2064 2065 cdv_intel_edp_panel_vdd_on(gma_encoder); 2066 ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV, 2067 intel_dp->dpcd, 2068 sizeof(intel_dp->dpcd)); 2069 cdv_intel_edp_panel_vdd_off(gma_encoder); 2070 if (ret <= 0) { 2071 /* if this fails, presume the device is a ghost */ 2072 DRM_INFO("failed to retrieve link info, disabling eDP\n"); 2073 drm_encoder_cleanup(encoder); 2074 cdv_intel_dp_destroy(connector); 2075 goto err_connector; 2076 } else { 2077 DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n", 2078 intel_dp->dpcd[0], intel_dp->dpcd[1], 2079 intel_dp->dpcd[2], intel_dp->dpcd[3]); 2080 2081 } 2082 /* The CDV reference driver moves panel backlight setup into the displays that 2083 have a backlight: this is a good idea and one we should probably adopt; however, 2084 we need to migrate all the drivers before we can do that */ 2085 /*cdv_intel_panel_setup_backlight(dev); */ 2086 } 2087 return; 2088 2089 err_priv: 2090 kfree(gma_connector); 2091 err_connector: 2092 kfree(gma_encoder); 2093 } 2094