1 /* 2 * Copyright © 2012 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 22 * 23 * Authors: 24 * Keith Packard <keithp@keithp.com> 25 * 26 */ 27 28 #include <linux/i2c.h> 29 #include <linux/module.h> 30 #include <linux/slab.h> 31 32 #include <drm/drm_crtc.h> 33 #include <drm/drm_crtc_helper.h> 34 #include <drm/drm_dp_helper.h> 35 36 #include "gma_display.h" 37 #include "psb_drv.h" 38 #include "psb_intel_drv.h" 39 #include "psb_intel_reg.h" 40 41 /** 42 * struct i2c_algo_dp_aux_data - driver interface structure for i2c over dp 43 * aux algorithm 44 * @running: set by the algo indicating whether an i2c is ongoing or whether 45 * the i2c bus is quiescent 46 * @address: i2c target address for the currently ongoing transfer 47 * @aux_ch: driver callback to transfer a single byte of the i2c payload 48 */ 49 struct i2c_algo_dp_aux_data { 50 bool running; 51 u16 address; 52 int (*aux_ch) (struct i2c_adapter *adapter, 53 int mode, uint8_t write_byte, 54 uint8_t *read_byte); 55 }; 56 57 /* Run a single AUX_CH I2C transaction, writing/reading data as necessary */ 58 static int 59 i2c_algo_dp_aux_transaction(struct i2c_adapter *adapter, int mode, 60 uint8_t write_byte, uint8_t *read_byte) 61 { 62 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 63 int ret; 64 65 ret = (*algo_data->aux_ch)(adapter, mode, 66 write_byte, read_byte); 67 return ret; 68 } 69 70 /* 71 * I2C over AUX CH 72 */ 73 74 /* 75 * Send the address. If the I2C link is running, this 'restarts' 76 * the connection with the new address, this is used for doing 77 * a write followed by a read (as needed for DDC) 78 */ 79 static int 80 i2c_algo_dp_aux_address(struct i2c_adapter *adapter, u16 address, bool reading) 81 { 82 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 83 int mode = MODE_I2C_START; 84 int ret; 85 86 if (reading) 87 mode |= MODE_I2C_READ; 88 else 89 mode |= MODE_I2C_WRITE; 90 algo_data->address = address; 91 algo_data->running = true; 92 ret = i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL); 93 return ret; 94 } 95 96 /* 97 * Stop the I2C transaction. 
 * This closes out the link, sending
 * a bare address packet with the MOT bit turned off
 */
static void
i2c_algo_dp_aux_stop(struct i2c_adapter *adapter, bool reading)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	int mode = MODE_I2C_STOP;

	if (reading)
		mode |= MODE_I2C_READ;
	else
		mode |= MODE_I2C_WRITE;
	if (algo_data->running) {
		(void) i2c_algo_dp_aux_transaction(adapter, mode, 0, NULL);
		algo_data->running = false;
	}
}

/*
 * Write a single byte to the current I2C address; the
 * I2C link must be running or this returns -EIO
 */
static int
i2c_algo_dp_aux_put_byte(struct i2c_adapter *adapter, u8 byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	int ret;

	if (!algo_data->running)
		return -EIO;

	ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_WRITE, byte, NULL);
	return ret;
}

/*
 * Read a single byte from the current I2C address; the
 * I2C link must be running or this returns -EIO
 */
static int
i2c_algo_dp_aux_get_byte(struct i2c_adapter *adapter, u8 *byte_ret)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	int ret;

	if (!algo_data->running)
		return -EIO;

	ret = i2c_algo_dp_aux_transaction(adapter, MODE_I2C_READ, 0, byte_ret);
	return ret;
}

static int
i2c_algo_dp_aux_xfer(struct i2c_adapter *adapter,
		     struct i2c_msg *msgs,
		     int num)
{
	int ret = 0;
	bool reading = false;
	int m;
	int b;

	for (m = 0; m < num; m++) {
		u16 len = msgs[m].len;
		u8 *buf = msgs[m].buf;
		reading = (msgs[m].flags & I2C_M_RD) != 0;
		ret = i2c_algo_dp_aux_address(adapter, msgs[m].addr, reading);
		if (ret < 0)
			break;
		if (reading) {
			for (b = 0; b < len; b++) {
				ret = i2c_algo_dp_aux_get_byte(adapter, &buf[b]);
				if (ret < 0)
					break;
			}
		} else {
			for (b = 0; b < len; b++) {
				ret = i2c_algo_dp_aux_put_byte(adapter, buf[b]);
				if (ret < 0)
					break;
			}
		}
		if (ret < 0)
			break;
	}
	if (ret >= 0)
		ret = num;
	i2c_algo_dp_aux_stop(adapter, reading);
	DRM_DEBUG_KMS("dp_aux_xfer return %d\n", ret);
	return ret;
}

static u32
i2c_algo_dp_aux_functionality(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
	       I2C_FUNC_SMBUS_READ_BLOCK_DATA |
	       I2C_FUNC_SMBUS_BLOCK_PROC_CALL |
	       I2C_FUNC_10BIT_ADDR;
}

static const struct i2c_algorithm i2c_dp_aux_algo = {
	.master_xfer = i2c_algo_dp_aux_xfer,
	.functionality = i2c_algo_dp_aux_functionality,
};

static void
i2c_dp_aux_reset_bus(struct i2c_adapter *adapter)
{
	(void) i2c_algo_dp_aux_address(adapter, 0, false);
	(void) i2c_algo_dp_aux_stop(adapter, false);
}

static int
i2c_dp_aux_prepare_bus(struct i2c_adapter *adapter)
{
	adapter->algo = &i2c_dp_aux_algo;
	adapter->retries = 3;
	i2c_dp_aux_reset_bus(adapter);
	return 0;
}

/*
 * FIXME: This is the old dp aux helper, gma500 is the last driver that needs to
 * be ported over to the new helper code in drm_dp_helper.c like i915 or radeon.
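 * (Porting sketch, not part of the original code: the replacement would embed
 * a struct drm_dp_aux in cdv_intel_dp, implement its .transfer() callback on
 * top of cdv_intel_dp_aux_ch(), and call drm_dp_aux_register(), which also
 * provides the I2C-over-AUX adapter that this hand-rolled algorithm supplies
 * today.)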
223 */ 224 static int 225 i2c_dp_aux_add_bus(struct i2c_adapter *adapter) 226 { 227 int error; 228 229 error = i2c_dp_aux_prepare_bus(adapter); 230 if (error) 231 return error; 232 error = i2c_add_adapter(adapter); 233 return error; 234 } 235 236 #define _wait_for(COND, MS, W) ({ \ 237 unsigned long timeout__ = jiffies + msecs_to_jiffies(MS); \ 238 int ret__ = 0; \ 239 while (! (COND)) { \ 240 if (time_after(jiffies, timeout__)) { \ 241 ret__ = -ETIMEDOUT; \ 242 break; \ 243 } \ 244 if (W && !in_dbg_master()) msleep(W); \ 245 } \ 246 ret__; \ 247 }) 248 249 #define wait_for(COND, MS) _wait_for(COND, MS, 1) 250 251 #define DP_LINK_CHECK_TIMEOUT (10 * 1000) 252 253 #define DP_LINK_CONFIGURATION_SIZE 9 254 255 #define CDV_FAST_LINK_TRAIN 1 256 257 struct cdv_intel_dp { 258 uint32_t output_reg; 259 uint32_t DP; 260 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE]; 261 bool has_audio; 262 int force_audio; 263 uint32_t color_range; 264 uint8_t link_bw; 265 uint8_t lane_count; 266 uint8_t dpcd[4]; 267 struct gma_encoder *encoder; 268 struct i2c_adapter adapter; 269 struct i2c_algo_dp_aux_data algo; 270 uint8_t train_set[4]; 271 uint8_t link_status[DP_LINK_STATUS_SIZE]; 272 int panel_power_up_delay; 273 int panel_power_down_delay; 274 int panel_power_cycle_delay; 275 int backlight_on_delay; 276 int backlight_off_delay; 277 struct drm_display_mode *panel_fixed_mode; /* for eDP */ 278 bool panel_on; 279 }; 280 281 struct ddi_regoff { 282 uint32_t PreEmph1; 283 uint32_t PreEmph2; 284 uint32_t VSwing1; 285 uint32_t VSwing2; 286 uint32_t VSwing3; 287 uint32_t VSwing4; 288 uint32_t VSwing5; 289 }; 290 291 static struct ddi_regoff ddi_DP_train_table[] = { 292 {.PreEmph1 = 0x812c, .PreEmph2 = 0x8124, .VSwing1 = 0x8154, 293 .VSwing2 = 0x8148, .VSwing3 = 0x814C, .VSwing4 = 0x8150, 294 .VSwing5 = 0x8158,}, 295 {.PreEmph1 = 0x822c, .PreEmph2 = 0x8224, .VSwing1 = 0x8254, 296 .VSwing2 = 0x8248, .VSwing3 = 0x824C, .VSwing4 = 0x8250, 297 .VSwing5 = 0x8258,}, 298 }; 299 300 static uint32_t dp_vswing_premph_table[] = { 301 0x55338954, 0x4000, 302 0x554d8954, 0x2000, 303 0x55668954, 0, 304 0x559ac0d4, 0x6000, 305 }; 306 /** 307 * is_edp - is the given port attached to an eDP panel (either CPU or PCH) 308 * @intel_dp: DP struct 309 * 310 * If a CPU or PCH DP output is attached to an eDP panel, this function 311 * will return true, and false otherwise. 
312 */ 313 static bool is_edp(struct gma_encoder *encoder) 314 { 315 return encoder->type == INTEL_OUTPUT_EDP; 316 } 317 318 319 static void cdv_intel_dp_start_link_train(struct gma_encoder *encoder); 320 static void cdv_intel_dp_complete_link_train(struct gma_encoder *encoder); 321 static void cdv_intel_dp_link_down(struct gma_encoder *encoder); 322 323 static int 324 cdv_intel_dp_max_lane_count(struct gma_encoder *encoder) 325 { 326 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 327 int max_lane_count = 4; 328 329 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) { 330 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f; 331 switch (max_lane_count) { 332 case 1: case 2: case 4: 333 break; 334 default: 335 max_lane_count = 4; 336 } 337 } 338 return max_lane_count; 339 } 340 341 static int 342 cdv_intel_dp_max_link_bw(struct gma_encoder *encoder) 343 { 344 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 345 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE]; 346 347 switch (max_link_bw) { 348 case DP_LINK_BW_1_62: 349 case DP_LINK_BW_2_7: 350 break; 351 default: 352 max_link_bw = DP_LINK_BW_1_62; 353 break; 354 } 355 return max_link_bw; 356 } 357 358 static int 359 cdv_intel_dp_link_clock(uint8_t link_bw) 360 { 361 if (link_bw == DP_LINK_BW_2_7) 362 return 270000; 363 else 364 return 162000; 365 } 366 367 static int 368 cdv_intel_dp_link_required(int pixel_clock, int bpp) 369 { 370 return (pixel_clock * bpp + 7) / 8; 371 } 372 373 static int 374 cdv_intel_dp_max_data_rate(int max_link_clock, int max_lanes) 375 { 376 return (max_link_clock * max_lanes * 19) / 20; 377 } 378 379 static void cdv_intel_edp_panel_vdd_on(struct gma_encoder *intel_encoder) 380 { 381 struct drm_device *dev = intel_encoder->base.dev; 382 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 383 u32 pp; 384 385 if (intel_dp->panel_on) { 386 DRM_DEBUG_KMS("Skip VDD on because of panel on\n"); 387 return; 388 } 389 DRM_DEBUG_KMS("\n"); 390 391 pp = REG_READ(PP_CONTROL); 392 393 pp |= EDP_FORCE_VDD; 394 REG_WRITE(PP_CONTROL, pp); 395 REG_READ(PP_CONTROL); 396 msleep(intel_dp->panel_power_up_delay); 397 } 398 399 static void cdv_intel_edp_panel_vdd_off(struct gma_encoder *intel_encoder) 400 { 401 struct drm_device *dev = intel_encoder->base.dev; 402 u32 pp; 403 404 DRM_DEBUG_KMS("\n"); 405 pp = REG_READ(PP_CONTROL); 406 407 pp &= ~EDP_FORCE_VDD; 408 REG_WRITE(PP_CONTROL, pp); 409 REG_READ(PP_CONTROL); 410 411 } 412 413 /* Returns true if the panel was already on when called */ 414 static bool cdv_intel_edp_panel_on(struct gma_encoder *intel_encoder) 415 { 416 struct drm_device *dev = intel_encoder->base.dev; 417 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 418 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_NONE; 419 420 if (intel_dp->panel_on) 421 return true; 422 423 DRM_DEBUG_KMS("\n"); 424 pp = REG_READ(PP_CONTROL); 425 pp &= ~PANEL_UNLOCK_MASK; 426 427 pp |= (PANEL_UNLOCK_REGS | POWER_TARGET_ON); 428 REG_WRITE(PP_CONTROL, pp); 429 REG_READ(PP_CONTROL); 430 431 if (wait_for(((REG_READ(PP_STATUS) & idle_on_mask) == idle_on_mask), 1000)) { 432 DRM_DEBUG_KMS("Error in Powering up eDP panel, status %x\n", REG_READ(PP_STATUS)); 433 intel_dp->panel_on = false; 434 } else 435 intel_dp->panel_on = true; 436 msleep(intel_dp->panel_power_up_delay); 437 438 return false; 439 } 440 441 static void cdv_intel_edp_panel_off (struct gma_encoder *intel_encoder) 442 { 443 struct drm_device *dev = intel_encoder->base.dev; 444 u32 pp, idle_off_mask = PP_ON ; 445 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 446 
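	/*
	 * Power-off sequence (descriptive note, not in the original source):
	 * bail out if the panel is already off, then clear POWER_TARGET_ON,
	 * EDP_FORCE_VDD and EDP_BLC_ENABLE in a single PP_CONTROL write, wait
	 * for PP_STATUS to drop the PP_ON bit, and finally honour
	 * panel_power_cycle_delay before the panel may be powered up again.
	 */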
447 DRM_DEBUG_KMS("\n"); 448 449 pp = REG_READ(PP_CONTROL); 450 451 if ((pp & POWER_TARGET_ON) == 0) 452 return; 453 454 intel_dp->panel_on = false; 455 pp &= ~PANEL_UNLOCK_MASK; 456 /* ILK workaround: disable reset around power sequence */ 457 458 pp &= ~POWER_TARGET_ON; 459 pp &= ~EDP_FORCE_VDD; 460 pp &= ~EDP_BLC_ENABLE; 461 REG_WRITE(PP_CONTROL, pp); 462 REG_READ(PP_CONTROL); 463 DRM_DEBUG_KMS("PP_STATUS %x\n", REG_READ(PP_STATUS)); 464 465 if (wait_for((REG_READ(PP_STATUS) & idle_off_mask) == 0, 1000)) { 466 DRM_DEBUG_KMS("Error in turning off Panel\n"); 467 } 468 469 msleep(intel_dp->panel_power_cycle_delay); 470 DRM_DEBUG_KMS("Over\n"); 471 } 472 473 static void cdv_intel_edp_backlight_on (struct gma_encoder *intel_encoder) 474 { 475 struct drm_device *dev = intel_encoder->base.dev; 476 u32 pp; 477 478 DRM_DEBUG_KMS("\n"); 479 /* 480 * If we enable the backlight right away following a panel power 481 * on, we may see slight flicker as the panel syncs with the eDP 482 * link. So delay a bit to make sure the image is solid before 483 * allowing it to appear. 484 */ 485 msleep(300); 486 pp = REG_READ(PP_CONTROL); 487 488 pp |= EDP_BLC_ENABLE; 489 REG_WRITE(PP_CONTROL, pp); 490 gma_backlight_enable(dev); 491 } 492 493 static void cdv_intel_edp_backlight_off (struct gma_encoder *intel_encoder) 494 { 495 struct drm_device *dev = intel_encoder->base.dev; 496 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 497 u32 pp; 498 499 DRM_DEBUG_KMS("\n"); 500 gma_backlight_disable(dev); 501 msleep(10); 502 pp = REG_READ(PP_CONTROL); 503 504 pp &= ~EDP_BLC_ENABLE; 505 REG_WRITE(PP_CONTROL, pp); 506 msleep(intel_dp->backlight_off_delay); 507 } 508 509 static enum drm_mode_status 510 cdv_intel_dp_mode_valid(struct drm_connector *connector, 511 struct drm_display_mode *mode) 512 { 513 struct gma_encoder *encoder = gma_attached_encoder(connector); 514 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 515 int max_link_clock = cdv_intel_dp_link_clock(cdv_intel_dp_max_link_bw(encoder)); 516 int max_lanes = cdv_intel_dp_max_lane_count(encoder); 517 struct drm_psb_private *dev_priv = connector->dev->dev_private; 518 519 if (is_edp(encoder) && intel_dp->panel_fixed_mode) { 520 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay) 521 return MODE_PANEL; 522 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay) 523 return MODE_PANEL; 524 } 525 526 /* only refuse the mode on non eDP since we have seen some weird eDP panels 527 which are outside spec tolerances but somehow work by magic */ 528 if (!is_edp(encoder) && 529 (cdv_intel_dp_link_required(mode->clock, dev_priv->edp.bpp) 530 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes))) 531 return MODE_CLOCK_HIGH; 532 533 if (is_edp(encoder)) { 534 if (cdv_intel_dp_link_required(mode->clock, 24) 535 > cdv_intel_dp_max_data_rate(max_link_clock, max_lanes)) 536 return MODE_CLOCK_HIGH; 537 538 } 539 if (mode->clock < 10000) 540 return MODE_CLOCK_LOW; 541 542 return MODE_OK; 543 } 544 545 static uint32_t 546 pack_aux(uint8_t *src, int src_bytes) 547 { 548 int i; 549 uint32_t v = 0; 550 551 if (src_bytes > 4) 552 src_bytes = 4; 553 for (i = 0; i < src_bytes; i++) 554 v |= ((uint32_t) src[i]) << ((3-i) * 8); 555 return v; 556 } 557 558 static void 559 unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes) 560 { 561 int i; 562 if (dst_bytes > 4) 563 dst_bytes = 4; 564 for (i = 0; i < dst_bytes; i++) 565 dst[i] = src >> ((3-i) * 8); 566 } 567 568 static int 569 cdv_intel_dp_aux_ch(struct gma_encoder *encoder, 570 uint8_t *send, int send_bytes, 571 
			uint8_t *recv, int recv_size)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = encoder->base.dev;
	uint32_t ch_ctl = output_reg + 0x10;
	uint32_t ch_data = ch_ctl + 4;
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	/* The clock divider is based off the hrawclk, and the AUX channel
	 * wants to run at 2MHz, so take the hrawclk value and divide by 2.
	 * On the CDV platform hrawclk is 200MHz.
	 */
	aux_clock_divider = 200 / 2;

	precharge = 4;
	if (is_edp(encoder))
		precharge = 10;

	if (REG_READ(ch_ctl) & DP_AUX_CH_CTL_SEND_BUSY) {
		DRM_ERROR("dp_aux_ch not started status 0x%08x\n",
			  REG_READ(ch_ctl));
		return -EBUSY;
	}

	/* The DP spec requires at least 3 retries; we allow up to 5 */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			REG_WRITE(ch_data + i,
				  pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		REG_WRITE(ch_ctl,
			  DP_AUX_CH_CTL_SEND_BUSY |
			  DP_AUX_CH_CTL_TIME_OUT_400us |
			  (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			  (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			  (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			  DP_AUX_CH_CTL_DONE |
			  DP_AUX_CH_CTL_TIME_OUT_ERROR |
			  DP_AUX_CH_CTL_RECEIVE_ERROR);
		for (;;) {
			status = REG_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		REG_WRITE(ch_ctl,
			  status |
			  DP_AUX_CH_CTL_DONE |
			  DP_AUX_CH_CTL_TIME_OUT_ERROR |
			  DP_AUX_CH_CTL_RECEIVE_ERROR);
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
642 * Timeouts occur when the sink is not connected 643 */ 644 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) { 645 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status); 646 return -EIO; 647 } 648 649 /* Timeouts occur when the device isn't connected, so they're 650 * "normal" -- don't fill the kernel log with these */ 651 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) { 652 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status); 653 return -ETIMEDOUT; 654 } 655 656 /* Unload any bytes sent back from the other side */ 657 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >> 658 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT); 659 if (recv_bytes > recv_size) 660 recv_bytes = recv_size; 661 662 for (i = 0; i < recv_bytes; i += 4) 663 unpack_aux(REG_READ(ch_data + i), 664 recv + i, recv_bytes - i); 665 666 return recv_bytes; 667 } 668 669 /* Write data to the aux channel in native mode */ 670 static int 671 cdv_intel_dp_aux_native_write(struct gma_encoder *encoder, 672 uint16_t address, uint8_t *send, int send_bytes) 673 { 674 int ret; 675 uint8_t msg[20]; 676 int msg_bytes; 677 uint8_t ack; 678 679 if (send_bytes > 16) 680 return -1; 681 msg[0] = DP_AUX_NATIVE_WRITE << 4; 682 msg[1] = address >> 8; 683 msg[2] = address & 0xff; 684 msg[3] = send_bytes - 1; 685 memcpy(&msg[4], send, send_bytes); 686 msg_bytes = send_bytes + 4; 687 for (;;) { 688 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, &ack, 1); 689 if (ret < 0) 690 return ret; 691 ack >>= 4; 692 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) 693 break; 694 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER) 695 udelay(100); 696 else 697 return -EIO; 698 } 699 return send_bytes; 700 } 701 702 /* Write a single byte to the aux channel in native mode */ 703 static int 704 cdv_intel_dp_aux_native_write_1(struct gma_encoder *encoder, 705 uint16_t address, uint8_t byte) 706 { 707 return cdv_intel_dp_aux_native_write(encoder, address, &byte, 1); 708 } 709 710 /* read bytes from a native aux channel */ 711 static int 712 cdv_intel_dp_aux_native_read(struct gma_encoder *encoder, 713 uint16_t address, uint8_t *recv, int recv_bytes) 714 { 715 uint8_t msg[4]; 716 int msg_bytes; 717 uint8_t reply[20]; 718 int reply_bytes; 719 uint8_t ack; 720 int ret; 721 722 msg[0] = DP_AUX_NATIVE_READ << 4; 723 msg[1] = address >> 8; 724 msg[2] = address & 0xff; 725 msg[3] = recv_bytes - 1; 726 727 msg_bytes = 4; 728 reply_bytes = recv_bytes + 1; 729 730 for (;;) { 731 ret = cdv_intel_dp_aux_ch(encoder, msg, msg_bytes, 732 reply, reply_bytes); 733 if (ret == 0) 734 return -EPROTO; 735 if (ret < 0) 736 return ret; 737 ack = reply[0] >> 4; 738 if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_ACK) { 739 memcpy(recv, reply + 1, ret - 1); 740 return ret - 1; 741 } 742 else if ((ack & DP_AUX_NATIVE_REPLY_MASK) == DP_AUX_NATIVE_REPLY_DEFER) 743 udelay(100); 744 else 745 return -EIO; 746 } 747 } 748 749 static int 750 cdv_intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode, 751 uint8_t write_byte, uint8_t *read_byte) 752 { 753 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 754 struct cdv_intel_dp *intel_dp = container_of(adapter, 755 struct cdv_intel_dp, 756 adapter); 757 struct gma_encoder *encoder = intel_dp->encoder; 758 uint16_t address = algo_data->address; 759 uint8_t msg[5]; 760 uint8_t reply[2]; 761 unsigned retry; 762 int msg_bytes; 763 int reply_bytes; 764 int ret; 765 766 /* Set up the command byte */ 767 if (mode & MODE_I2C_READ) 768 msg[0] = DP_AUX_I2C_READ << 4; 769 else 770 msg[0] = 
DP_AUX_I2C_WRITE << 4; 771 772 if (!(mode & MODE_I2C_STOP)) 773 msg[0] |= DP_AUX_I2C_MOT << 4; 774 775 msg[1] = address >> 8; 776 msg[2] = address; 777 778 switch (mode) { 779 case MODE_I2C_WRITE: 780 msg[3] = 0; 781 msg[4] = write_byte; 782 msg_bytes = 5; 783 reply_bytes = 1; 784 break; 785 case MODE_I2C_READ: 786 msg[3] = 0; 787 msg_bytes = 4; 788 reply_bytes = 2; 789 break; 790 default: 791 msg_bytes = 3; 792 reply_bytes = 1; 793 break; 794 } 795 796 for (retry = 0; retry < 5; retry++) { 797 ret = cdv_intel_dp_aux_ch(encoder, 798 msg, msg_bytes, 799 reply, reply_bytes); 800 if (ret < 0) { 801 DRM_DEBUG_KMS("aux_ch failed %d\n", ret); 802 return ret; 803 } 804 805 switch ((reply[0] >> 4) & DP_AUX_NATIVE_REPLY_MASK) { 806 case DP_AUX_NATIVE_REPLY_ACK: 807 /* I2C-over-AUX Reply field is only valid 808 * when paired with AUX ACK. 809 */ 810 break; 811 case DP_AUX_NATIVE_REPLY_NACK: 812 DRM_DEBUG_KMS("aux_ch native nack\n"); 813 return -EREMOTEIO; 814 case DP_AUX_NATIVE_REPLY_DEFER: 815 udelay(100); 816 continue; 817 default: 818 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", 819 reply[0]); 820 return -EREMOTEIO; 821 } 822 823 switch ((reply[0] >> 4) & DP_AUX_I2C_REPLY_MASK) { 824 case DP_AUX_I2C_REPLY_ACK: 825 if (mode == MODE_I2C_READ) { 826 *read_byte = reply[1]; 827 } 828 return reply_bytes - 1; 829 case DP_AUX_I2C_REPLY_NACK: 830 DRM_DEBUG_KMS("aux_i2c nack\n"); 831 return -EREMOTEIO; 832 case DP_AUX_I2C_REPLY_DEFER: 833 DRM_DEBUG_KMS("aux_i2c defer\n"); 834 udelay(100); 835 break; 836 default: 837 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]); 838 return -EREMOTEIO; 839 } 840 } 841 842 DRM_ERROR("too many retries, giving up\n"); 843 return -EREMOTEIO; 844 } 845 846 static int 847 cdv_intel_dp_i2c_init(struct gma_connector *connector, 848 struct gma_encoder *encoder, const char *name) 849 { 850 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 851 int ret; 852 853 DRM_DEBUG_KMS("i2c_init %s\n", name); 854 855 intel_dp->algo.running = false; 856 intel_dp->algo.address = 0; 857 intel_dp->algo.aux_ch = cdv_intel_dp_i2c_aux_ch; 858 859 memset(&intel_dp->adapter, '\0', sizeof (intel_dp->adapter)); 860 intel_dp->adapter.owner = THIS_MODULE; 861 intel_dp->adapter.class = I2C_CLASS_DDC; 862 strncpy (intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1); 863 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0'; 864 intel_dp->adapter.algo_data = &intel_dp->algo; 865 intel_dp->adapter.dev.parent = connector->base.kdev; 866 867 if (is_edp(encoder)) 868 cdv_intel_edp_panel_vdd_on(encoder); 869 ret = i2c_dp_aux_add_bus(&intel_dp->adapter); 870 if (is_edp(encoder)) 871 cdv_intel_edp_panel_vdd_off(encoder); 872 873 return ret; 874 } 875 876 static void cdv_intel_fixed_panel_mode(struct drm_display_mode *fixed_mode, 877 struct drm_display_mode *adjusted_mode) 878 { 879 adjusted_mode->hdisplay = fixed_mode->hdisplay; 880 adjusted_mode->hsync_start = fixed_mode->hsync_start; 881 adjusted_mode->hsync_end = fixed_mode->hsync_end; 882 adjusted_mode->htotal = fixed_mode->htotal; 883 884 adjusted_mode->vdisplay = fixed_mode->vdisplay; 885 adjusted_mode->vsync_start = fixed_mode->vsync_start; 886 adjusted_mode->vsync_end = fixed_mode->vsync_end; 887 adjusted_mode->vtotal = fixed_mode->vtotal; 888 889 adjusted_mode->clock = fixed_mode->clock; 890 891 drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V); 892 } 893 894 static bool 895 cdv_intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode, 896 struct drm_display_mode *adjusted_mode) 897 
{ 898 struct drm_psb_private *dev_priv = encoder->dev->dev_private; 899 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 900 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 901 int lane_count, clock; 902 int max_lane_count = cdv_intel_dp_max_lane_count(intel_encoder); 903 int max_clock = cdv_intel_dp_max_link_bw(intel_encoder) == DP_LINK_BW_2_7 ? 1 : 0; 904 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; 905 int refclock = mode->clock; 906 int bpp = 24; 907 908 if (is_edp(intel_encoder) && intel_dp->panel_fixed_mode) { 909 cdv_intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode); 910 refclock = intel_dp->panel_fixed_mode->clock; 911 bpp = dev_priv->edp.bpp; 912 } 913 914 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) { 915 for (clock = max_clock; clock >= 0; clock--) { 916 int link_avail = cdv_intel_dp_max_data_rate(cdv_intel_dp_link_clock(bws[clock]), lane_count); 917 918 if (cdv_intel_dp_link_required(refclock, bpp) <= link_avail) { 919 intel_dp->link_bw = bws[clock]; 920 intel_dp->lane_count = lane_count; 921 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw); 922 DRM_DEBUG_KMS("Display port link bw %02x lane " 923 "count %d clock %d\n", 924 intel_dp->link_bw, intel_dp->lane_count, 925 adjusted_mode->clock); 926 return true; 927 } 928 } 929 } 930 if (is_edp(intel_encoder)) { 931 /* okay we failed just pick the highest */ 932 intel_dp->lane_count = max_lane_count; 933 intel_dp->link_bw = bws[max_clock]; 934 adjusted_mode->clock = cdv_intel_dp_link_clock(intel_dp->link_bw); 935 DRM_DEBUG_KMS("Force picking display port link bw %02x lane " 936 "count %d clock %d\n", 937 intel_dp->link_bw, intel_dp->lane_count, 938 adjusted_mode->clock); 939 940 return true; 941 } 942 return false; 943 } 944 945 struct cdv_intel_dp_m_n { 946 uint32_t tu; 947 uint32_t gmch_m; 948 uint32_t gmch_n; 949 uint32_t link_m; 950 uint32_t link_n; 951 }; 952 953 static void 954 cdv_intel_reduce_ratio(uint32_t *num, uint32_t *den) 955 { 956 /* 957 while (*num > 0xffffff || *den > 0xffffff) { 958 *num >>= 1; 959 *den >>= 1; 960 }*/ 961 uint64_t value, m; 962 m = *num; 963 value = m * (0x800000); 964 m = do_div(value, *den); 965 *num = value; 966 *den = 0x800000; 967 } 968 969 static void 970 cdv_intel_dp_compute_m_n(int bpp, 971 int nlanes, 972 int pixel_clock, 973 int link_clock, 974 struct cdv_intel_dp_m_n *m_n) 975 { 976 m_n->tu = 64; 977 m_n->gmch_m = (pixel_clock * bpp + 7) >> 3; 978 m_n->gmch_n = link_clock * nlanes; 979 cdv_intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n); 980 m_n->link_m = pixel_clock; 981 m_n->link_n = link_clock; 982 cdv_intel_reduce_ratio(&m_n->link_m, &m_n->link_n); 983 } 984 985 void 986 cdv_intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, 987 struct drm_display_mode *adjusted_mode) 988 { 989 struct drm_device *dev = crtc->dev; 990 struct drm_psb_private *dev_priv = dev->dev_private; 991 struct drm_mode_config *mode_config = &dev->mode_config; 992 struct drm_encoder *encoder; 993 struct gma_crtc *gma_crtc = to_gma_crtc(crtc); 994 int lane_count = 4, bpp = 24; 995 struct cdv_intel_dp_m_n m_n; 996 int pipe = gma_crtc->pipe; 997 998 /* 999 * Find the lane count in the intel_encoder private 1000 */ 1001 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 1002 struct gma_encoder *intel_encoder; 1003 struct cdv_intel_dp *intel_dp; 1004 1005 if (encoder->crtc != crtc) 1006 continue; 1007 1008 intel_encoder = to_gma_encoder(encoder); 1009 intel_dp = intel_encoder->dev_priv; 1010 if 
(intel_encoder->type == INTEL_OUTPUT_DISPLAYPORT) { 1011 lane_count = intel_dp->lane_count; 1012 break; 1013 } else if (is_edp(intel_encoder)) { 1014 lane_count = intel_dp->lane_count; 1015 bpp = dev_priv->edp.bpp; 1016 break; 1017 } 1018 } 1019 1020 /* 1021 * Compute the GMCH and Link ratios. The '3' here is 1022 * the number of bytes_per_pixel post-LUT, which we always 1023 * set up for 8-bits of R/G/B, or 3 bytes total. 1024 */ 1025 cdv_intel_dp_compute_m_n(bpp, lane_count, 1026 mode->clock, adjusted_mode->clock, &m_n); 1027 1028 { 1029 REG_WRITE(PIPE_GMCH_DATA_M(pipe), 1030 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 1031 m_n.gmch_m); 1032 REG_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); 1033 REG_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); 1034 REG_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); 1035 } 1036 } 1037 1038 static void 1039 cdv_intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 1040 struct drm_display_mode *adjusted_mode) 1041 { 1042 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1043 struct drm_crtc *crtc = encoder->crtc; 1044 struct gma_crtc *gma_crtc = to_gma_crtc(crtc); 1045 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 1046 struct drm_device *dev = encoder->dev; 1047 1048 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 1049 intel_dp->DP |= intel_dp->color_range; 1050 1051 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 1052 intel_dp->DP |= DP_SYNC_HS_HIGH; 1053 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 1054 intel_dp->DP |= DP_SYNC_VS_HIGH; 1055 1056 intel_dp->DP |= DP_LINK_TRAIN_OFF; 1057 1058 switch (intel_dp->lane_count) { 1059 case 1: 1060 intel_dp->DP |= DP_PORT_WIDTH_1; 1061 break; 1062 case 2: 1063 intel_dp->DP |= DP_PORT_WIDTH_2; 1064 break; 1065 case 4: 1066 intel_dp->DP |= DP_PORT_WIDTH_4; 1067 break; 1068 } 1069 if (intel_dp->has_audio) 1070 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; 1071 1072 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 1073 intel_dp->link_configuration[0] = intel_dp->link_bw; 1074 intel_dp->link_configuration[1] = intel_dp->lane_count; 1075 1076 /* 1077 * Check for DPCD version > 1.1 and enhanced framing support 1078 */ 1079 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 1080 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { 1081 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 1082 intel_dp->DP |= DP_ENHANCED_FRAMING; 1083 } 1084 1085 /* CPT DP's pipe select is decided in TRANS_DP_CTL */ 1086 if (gma_crtc->pipe == 1) 1087 intel_dp->DP |= DP_PIPEB_SELECT; 1088 1089 REG_WRITE(intel_dp->output_reg, (intel_dp->DP | DP_PORT_EN)); 1090 DRM_DEBUG_KMS("DP expected reg is %x\n", intel_dp->DP); 1091 if (is_edp(intel_encoder)) { 1092 uint32_t pfit_control; 1093 cdv_intel_edp_panel_on(intel_encoder); 1094 1095 if (mode->hdisplay != adjusted_mode->hdisplay || 1096 mode->vdisplay != adjusted_mode->vdisplay) 1097 pfit_control = PFIT_ENABLE; 1098 else 1099 pfit_control = 0; 1100 1101 pfit_control |= gma_crtc->pipe << PFIT_PIPE_SHIFT; 1102 1103 REG_WRITE(PFIT_CONTROL, pfit_control); 1104 } 1105 } 1106 1107 1108 /* If the sink supports it, try to set the power state appropriately */ 1109 static void cdv_intel_dp_sink_dpms(struct gma_encoder *encoder, int mode) 1110 { 1111 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1112 int ret, i; 1113 1114 /* Should have a valid DPCD by this point */ 1115 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11) 1116 return; 1117 1118 if (mode != DRM_MODE_DPMS_ON) { 1119 ret = cdv_intel_dp_aux_native_write_1(encoder, 
DP_SET_POWER, 1120 DP_SET_POWER_D3); 1121 if (ret != 1) 1122 DRM_DEBUG_DRIVER("failed to write sink power state\n"); 1123 } else { 1124 /* 1125 * When turning on, we need to retry for 1ms to give the sink 1126 * time to wake up. 1127 */ 1128 for (i = 0; i < 3; i++) { 1129 ret = cdv_intel_dp_aux_native_write_1(encoder, 1130 DP_SET_POWER, 1131 DP_SET_POWER_D0); 1132 if (ret == 1) 1133 break; 1134 udelay(1000); 1135 } 1136 } 1137 } 1138 1139 static void cdv_intel_dp_prepare(struct drm_encoder *encoder) 1140 { 1141 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1142 int edp = is_edp(intel_encoder); 1143 1144 if (edp) { 1145 cdv_intel_edp_backlight_off(intel_encoder); 1146 cdv_intel_edp_panel_off(intel_encoder); 1147 cdv_intel_edp_panel_vdd_on(intel_encoder); 1148 } 1149 /* Wake up the sink first */ 1150 cdv_intel_dp_sink_dpms(intel_encoder, DRM_MODE_DPMS_ON); 1151 cdv_intel_dp_link_down(intel_encoder); 1152 if (edp) 1153 cdv_intel_edp_panel_vdd_off(intel_encoder); 1154 } 1155 1156 static void cdv_intel_dp_commit(struct drm_encoder *encoder) 1157 { 1158 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1159 int edp = is_edp(intel_encoder); 1160 1161 if (edp) 1162 cdv_intel_edp_panel_on(intel_encoder); 1163 cdv_intel_dp_start_link_train(intel_encoder); 1164 cdv_intel_dp_complete_link_train(intel_encoder); 1165 if (edp) 1166 cdv_intel_edp_backlight_on(intel_encoder); 1167 } 1168 1169 static void 1170 cdv_intel_dp_dpms(struct drm_encoder *encoder, int mode) 1171 { 1172 struct gma_encoder *intel_encoder = to_gma_encoder(encoder); 1173 struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv; 1174 struct drm_device *dev = encoder->dev; 1175 uint32_t dp_reg = REG_READ(intel_dp->output_reg); 1176 int edp = is_edp(intel_encoder); 1177 1178 if (mode != DRM_MODE_DPMS_ON) { 1179 if (edp) { 1180 cdv_intel_edp_backlight_off(intel_encoder); 1181 cdv_intel_edp_panel_vdd_on(intel_encoder); 1182 } 1183 cdv_intel_dp_sink_dpms(intel_encoder, mode); 1184 cdv_intel_dp_link_down(intel_encoder); 1185 if (edp) { 1186 cdv_intel_edp_panel_vdd_off(intel_encoder); 1187 cdv_intel_edp_panel_off(intel_encoder); 1188 } 1189 } else { 1190 if (edp) 1191 cdv_intel_edp_panel_on(intel_encoder); 1192 cdv_intel_dp_sink_dpms(intel_encoder, mode); 1193 if (!(dp_reg & DP_PORT_EN)) { 1194 cdv_intel_dp_start_link_train(intel_encoder); 1195 cdv_intel_dp_complete_link_train(intel_encoder); 1196 } 1197 if (edp) 1198 cdv_intel_edp_backlight_on(intel_encoder); 1199 } 1200 } 1201 1202 /* 1203 * Native read with retry for link status and receiver capability reads for 1204 * cases where the sink may still be asleep. 1205 */ 1206 static bool 1207 cdv_intel_dp_aux_native_read_retry(struct gma_encoder *encoder, uint16_t address, 1208 uint8_t *recv, int recv_bytes) 1209 { 1210 int ret, i; 1211 1212 /* 1213 * Sinks are *supposed* to come up within 1ms from an off state, 1214 * but we're also supposed to retry 3 times per the spec. 
1215 */ 1216 for (i = 0; i < 3; i++) { 1217 ret = cdv_intel_dp_aux_native_read(encoder, address, recv, 1218 recv_bytes); 1219 if (ret == recv_bytes) 1220 return true; 1221 udelay(1000); 1222 } 1223 1224 return false; 1225 } 1226 1227 /* 1228 * Fetch AUX CH registers 0x202 - 0x207 which contain 1229 * link status information 1230 */ 1231 static bool 1232 cdv_intel_dp_get_link_status(struct gma_encoder *encoder) 1233 { 1234 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1235 return cdv_intel_dp_aux_native_read_retry(encoder, 1236 DP_LANE0_1_STATUS, 1237 intel_dp->link_status, 1238 DP_LINK_STATUS_SIZE); 1239 } 1240 1241 static uint8_t 1242 cdv_intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1243 int r) 1244 { 1245 return link_status[r - DP_LANE0_1_STATUS]; 1246 } 1247 1248 static uint8_t 1249 cdv_intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE], 1250 int lane) 1251 { 1252 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1); 1253 int s = ((lane & 1) ? 1254 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 1255 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT); 1256 uint8_t l = cdv_intel_dp_link_status(link_status, i); 1257 1258 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT; 1259 } 1260 1261 static uint8_t 1262 cdv_intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE], 1263 int lane) 1264 { 1265 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1); 1266 int s = ((lane & 1) ? 1267 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT : 1268 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT); 1269 uint8_t l = cdv_intel_dp_link_status(link_status, i); 1270 1271 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1272 } 1273 1274 1275 #if 0 1276 static char *voltage_names[] = { 1277 "0.4V", "0.6V", "0.8V", "1.2V" 1278 }; 1279 static char *pre_emph_names[] = { 1280 "0dB", "3.5dB", "6dB", "9.5dB" 1281 }; 1282 static char *link_train_names[] = { 1283 "pattern 1", "pattern 2", "idle", "off" 1284 }; 1285 #endif 1286 1287 #define CDV_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_LEVEL_3 1288 /* 1289 static uint8_t 1290 cdv_intel_dp_pre_emphasis_max(uint8_t voltage_swing) 1291 { 1292 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { 1293 case DP_TRAIN_VOLTAGE_SWING_400: 1294 return DP_TRAIN_PRE_EMPHASIS_6; 1295 case DP_TRAIN_VOLTAGE_SWING_600: 1296 return DP_TRAIN_PRE_EMPHASIS_6; 1297 case DP_TRAIN_VOLTAGE_SWING_800: 1298 return DP_TRAIN_PRE_EMPHASIS_3_5; 1299 case DP_TRAIN_VOLTAGE_SWING_1200: 1300 default: 1301 return DP_TRAIN_PRE_EMPHASIS_0; 1302 } 1303 } 1304 */ 1305 static void 1306 cdv_intel_get_adjust_train(struct gma_encoder *encoder) 1307 { 1308 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1309 uint8_t v = 0; 1310 uint8_t p = 0; 1311 int lane; 1312 1313 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1314 uint8_t this_v = cdv_intel_get_adjust_request_voltage(intel_dp->link_status, lane); 1315 uint8_t this_p = cdv_intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane); 1316 1317 if (this_v > v) 1318 v = this_v; 1319 if (this_p > p) 1320 p = this_p; 1321 } 1322 1323 if (v >= CDV_DP_VOLTAGE_MAX) 1324 v = CDV_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED; 1325 1326 if (p == DP_TRAIN_PRE_EMPHASIS_MASK) 1327 p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1328 1329 for (lane = 0; lane < 4; lane++) 1330 intel_dp->train_set[lane] = v | p; 1331 } 1332 1333 1334 static uint8_t 1335 cdv_intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1336 int lane) 1337 { 1338 int i = DP_LANE0_1_STATUS + (lane >> 1); 1339 int s = (lane & 1) * 4; 1340 uint8_t l = 
cdv_intel_dp_link_status(link_status, i); 1341 1342 return (l >> s) & 0xf; 1343 } 1344 1345 /* Check for clock recovery is done on all channels */ 1346 static bool 1347 cdv_intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count) 1348 { 1349 int lane; 1350 uint8_t lane_status; 1351 1352 for (lane = 0; lane < lane_count; lane++) { 1353 lane_status = cdv_intel_get_lane_status(link_status, lane); 1354 if ((lane_status & DP_LANE_CR_DONE) == 0) 1355 return false; 1356 } 1357 return true; 1358 } 1359 1360 /* Check to see if channel eq is done on all channels */ 1361 #define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\ 1362 DP_LANE_CHANNEL_EQ_DONE|\ 1363 DP_LANE_SYMBOL_LOCKED) 1364 static bool 1365 cdv_intel_channel_eq_ok(struct gma_encoder *encoder) 1366 { 1367 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1368 uint8_t lane_align; 1369 uint8_t lane_status; 1370 int lane; 1371 1372 lane_align = cdv_intel_dp_link_status(intel_dp->link_status, 1373 DP_LANE_ALIGN_STATUS_UPDATED); 1374 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0) 1375 return false; 1376 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1377 lane_status = cdv_intel_get_lane_status(intel_dp->link_status, lane); 1378 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS) 1379 return false; 1380 } 1381 return true; 1382 } 1383 1384 static bool 1385 cdv_intel_dp_set_link_train(struct gma_encoder *encoder, 1386 uint32_t dp_reg_value, 1387 uint8_t dp_train_pat) 1388 { 1389 1390 struct drm_device *dev = encoder->base.dev; 1391 int ret; 1392 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1393 1394 REG_WRITE(intel_dp->output_reg, dp_reg_value); 1395 REG_READ(intel_dp->output_reg); 1396 1397 ret = cdv_intel_dp_aux_native_write_1(encoder, 1398 DP_TRAINING_PATTERN_SET, 1399 dp_train_pat); 1400 1401 if (ret != 1) { 1402 DRM_DEBUG_KMS("Failure in setting link pattern %x\n", 1403 dp_train_pat); 1404 return false; 1405 } 1406 1407 return true; 1408 } 1409 1410 1411 static bool 1412 cdv_intel_dplink_set_level(struct gma_encoder *encoder, 1413 uint8_t dp_train_pat) 1414 { 1415 1416 int ret; 1417 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1418 1419 ret = cdv_intel_dp_aux_native_write(encoder, 1420 DP_TRAINING_LANE0_SET, 1421 intel_dp->train_set, 1422 intel_dp->lane_count); 1423 1424 if (ret != intel_dp->lane_count) { 1425 DRM_DEBUG_KMS("Failure in setting level %d, lane_cnt= %d\n", 1426 intel_dp->train_set[0], intel_dp->lane_count); 1427 return false; 1428 } 1429 return true; 1430 } 1431 1432 static void 1433 cdv_intel_dp_set_vswing_premph(struct gma_encoder *encoder, uint8_t signal_level) 1434 { 1435 struct drm_device *dev = encoder->base.dev; 1436 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1437 struct ddi_regoff *ddi_reg; 1438 int vswing, premph, index; 1439 1440 if (intel_dp->output_reg == DP_B) 1441 ddi_reg = &ddi_DP_train_table[0]; 1442 else 1443 ddi_reg = &ddi_DP_train_table[1]; 1444 1445 vswing = (signal_level & DP_TRAIN_VOLTAGE_SWING_MASK); 1446 premph = ((signal_level & DP_TRAIN_PRE_EMPHASIS_MASK)) >> 1447 DP_TRAIN_PRE_EMPHASIS_SHIFT; 1448 1449 if (vswing + premph > 3) 1450 return; 1451 #ifdef CDV_FAST_LINK_TRAIN 1452 return; 1453 #endif 1454 DRM_DEBUG_KMS("Test2\n"); 1455 //return ; 1456 cdv_sb_reset(dev); 1457 /* ;Swing voltage programming 1458 ;gfx_dpio_set_reg(0xc058, 0x0505313A) */ 1459 cdv_sb_write(dev, ddi_reg->VSwing5, 0x0505313A); 1460 1461 /* ;gfx_dpio_set_reg(0x8154, 0x43406055) */ 1462 cdv_sb_write(dev, ddi_reg->VSwing1, 0x43406055); 1463 1464 /* ;gfx_dpio_set_reg(0x8148, 0x55338954) 
1465 * The VSwing_PreEmph table is also considered based on the vswing/premp 1466 */ 1467 index = (vswing + premph) * 2; 1468 if (premph == 1 && vswing == 1) { 1469 cdv_sb_write(dev, ddi_reg->VSwing2, 0x055738954); 1470 } else 1471 cdv_sb_write(dev, ddi_reg->VSwing2, dp_vswing_premph_table[index]); 1472 1473 /* ;gfx_dpio_set_reg(0x814c, 0x40802040) */ 1474 if ((vswing + premph) == DP_TRAIN_VOLTAGE_SWING_LEVEL_3) 1475 cdv_sb_write(dev, ddi_reg->VSwing3, 0x70802040); 1476 else 1477 cdv_sb_write(dev, ddi_reg->VSwing3, 0x40802040); 1478 1479 /* ;gfx_dpio_set_reg(0x8150, 0x2b405555) */ 1480 /* cdv_sb_write(dev, ddi_reg->VSwing4, 0x2b405555); */ 1481 1482 /* ;gfx_dpio_set_reg(0x8154, 0xc3406055) */ 1483 cdv_sb_write(dev, ddi_reg->VSwing1, 0xc3406055); 1484 1485 /* ;Pre emphasis programming 1486 * ;gfx_dpio_set_reg(0xc02c, 0x1f030040) 1487 */ 1488 cdv_sb_write(dev, ddi_reg->PreEmph1, 0x1f030040); 1489 1490 /* ;gfx_dpio_set_reg(0x8124, 0x00004000) */ 1491 index = 2 * premph + 1; 1492 cdv_sb_write(dev, ddi_reg->PreEmph2, dp_vswing_premph_table[index]); 1493 return; 1494 } 1495 1496 1497 /* Enable corresponding port and start training pattern 1 */ 1498 static void 1499 cdv_intel_dp_start_link_train(struct gma_encoder *encoder) 1500 { 1501 struct drm_device *dev = encoder->base.dev; 1502 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1503 int i; 1504 uint8_t voltage; 1505 bool clock_recovery = false; 1506 int tries; 1507 u32 reg; 1508 uint32_t DP = intel_dp->DP; 1509 1510 DP |= DP_PORT_EN; 1511 DP &= ~DP_LINK_TRAIN_MASK; 1512 1513 reg = DP; 1514 reg |= DP_LINK_TRAIN_PAT_1; 1515 /* Enable output, wait for it to become active */ 1516 REG_WRITE(intel_dp->output_reg, reg); 1517 REG_READ(intel_dp->output_reg); 1518 gma_wait_for_vblank(dev); 1519 1520 DRM_DEBUG_KMS("Link config\n"); 1521 /* Write the link configuration data */ 1522 cdv_intel_dp_aux_native_write(encoder, DP_LINK_BW_SET, 1523 intel_dp->link_configuration, 1524 2); 1525 1526 memset(intel_dp->train_set, 0, 4); 1527 voltage = 0; 1528 tries = 0; 1529 clock_recovery = false; 1530 1531 DRM_DEBUG_KMS("Start train\n"); 1532 reg = DP | DP_LINK_TRAIN_PAT_1; 1533 1534 1535 for (;;) { 1536 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1537 DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n", 1538 intel_dp->train_set[0], 1539 intel_dp->link_configuration[0], 1540 intel_dp->link_configuration[1]); 1541 1542 if (!cdv_intel_dp_set_link_train(encoder, reg, DP_TRAINING_PATTERN_1)) { 1543 DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 1\n"); 1544 } 1545 cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]); 1546 /* Set training pattern 1 */ 1547 1548 cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_1); 1549 1550 udelay(200); 1551 if (!cdv_intel_dp_get_link_status(encoder)) 1552 break; 1553 1554 DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n", 1555 intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2], 1556 intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]); 1557 1558 if (cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) { 1559 DRM_DEBUG_KMS("PT1 train is done\n"); 1560 clock_recovery = true; 1561 break; 1562 } 1563 1564 /* Check to see if we've tried the max voltage */ 1565 for (i = 0; i < intel_dp->lane_count; i++) 1566 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 1567 break; 1568 if (i == intel_dp->lane_count) 1569 break; 1570 1571 /* Check to see if we've tried the same voltage 5 times */ 1572 
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++tries;
			if (tries == 5)
				break;
		} else
			tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		cdv_intel_get_adjust_train(encoder);

	}

	if (!clock_recovery) {
		DRM_DEBUG_KMS("failure in DP pattern 1 training, train set %x\n", intel_dp->train_set[0]);
	}

	intel_dp->DP = DP;
}

static void
cdv_intel_dp_complete_link_train(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;

	DRM_DEBUG_KMS("\n");
	reg = DP | DP_LINK_TRAIN_PAT_2;

	for (;;) {

		DRM_DEBUG_KMS("DP Link Train Set %x, Link_config %x, %x\n",
			      intel_dp->train_set[0],
			      intel_dp->link_configuration[0],
			      intel_dp->link_configuration[1]);
		/* channel eq pattern */

		if (!cdv_intel_dp_set_link_train(encoder, reg,
						 DP_TRAINING_PATTERN_2)) {
			DRM_DEBUG_KMS("Failure in aux-transfer setting pattern 2\n");
		}
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			cdv_intel_dp_link_down(encoder);
			break;
		}

		cdv_intel_dp_set_vswing_premph(encoder, intel_dp->train_set[0]);

		cdv_intel_dplink_set_level(encoder, DP_TRAINING_PATTERN_2);

		udelay(1000);
		if (!cdv_intel_dp_get_link_status(encoder))
			break;

		DRM_DEBUG_KMS("DP Link status %x, %x, %x, %x, %x, %x\n",
			      intel_dp->link_status[0], intel_dp->link_status[1], intel_dp->link_status[2],
			      intel_dp->link_status[3], intel_dp->link_status[4], intel_dp->link_status[5]);

		/* Make sure clock is still ok */
		if (!cdv_intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
			cdv_intel_dp_start_link_train(encoder);
			cr_tries++;
			continue;
		}

		if (cdv_intel_channel_eq_ok(encoder)) {
			DRM_DEBUG_KMS("PT2 train is done\n");
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			cdv_intel_dp_link_down(encoder);
			cdv_intel_dp_start_link_train(encoder);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		cdv_intel_get_adjust_train(encoder);
		++tries;

	}

	reg = DP | DP_LINK_TRAIN_OFF;

	REG_WRITE(intel_dp->output_reg, reg);
	REG_READ(intel_dp->output_reg);
	cdv_intel_dp_aux_native_write_1(encoder,
		DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}

static void
cdv_intel_dp_link_down(struct gma_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	uint32_t DP = intel_dp->DP;

	if ((REG_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	DP &= ~DP_LINK_TRAIN_MASK;
	REG_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	REG_READ(intel_dp->output_reg);

	msleep(17);

	REG_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	REG_READ(intel_dp->output_reg);
}

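/*
 * Summary of the link-training flow above (descriptive note, not part of the
 * original source): cdv_intel_dp_start_link_train() drives training pattern 1
 * until every active lane reports DP_LANE_CR_DONE, bumping voltage swing and
 * pre-emphasis as the sink requests; cdv_intel_dp_complete_link_train() then
 * drives pattern 2 until DP_INTERLANE_ALIGN_DONE and the CHANNEL_EQ_BITS are
 * set on all lanes, restarting clock recovery when either check regresses and
 * aborting once cr_tries exceeds 5. cdv_intel_dp_link_down() idles the
 * training pattern and then clears DP_PORT_EN.
 */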
static enum drm_connector_status cdv_dp_detect(struct gma_encoder *encoder)
{
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	enum drm_connector_status status;

	status = connector_status_disconnected;
	if (cdv_intel_dp_aux_native_read(encoder, 0x000, intel_dp->dpcd,
					 sizeof (intel_dp->dpcd)) == sizeof (intel_dp->dpcd))
	{
		if (intel_dp->dpcd[DP_DPCD_REV] != 0)
			status = connector_status_connected;
	}
	if (status == connector_status_connected)
		DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
			      intel_dp->dpcd[0], intel_dp->dpcd[1],
			      intel_dp->dpcd[2], intel_dp->dpcd[3]);
	return status;
}

/*
 * Probe the DP port by reading the DPCD over the AUX channel (with VDD forced
 * on first for eDP), then check the EDID for an audio-capable sink.
 *
 * Returns connector_status_connected if a sink answers with a valid DPCD,
 * connector_status_disconnected otherwise.
 */
static enum drm_connector_status
cdv_intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct gma_encoder *encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = encoder->dev_priv;
	enum drm_connector_status status;
	struct edid *edid = NULL;
	int edp = is_edp(encoder);

	intel_dp->has_audio = false;

	if (edp)
		cdv_intel_edp_panel_vdd_on(encoder);
	status = cdv_dp_detect(encoder);
	if (status != connector_status_connected) {
		if (edp)
			cdv_intel_edp_panel_vdd_off(encoder);
		return status;
	}

	if (intel_dp->force_audio) {
		intel_dp->has_audio = intel_dp->force_audio > 0;
	} else {
		edid = drm_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			kfree(edid);
		}
	}
	if (edp)
		cdv_intel_edp_panel_vdd_off(encoder);

	return connector_status_connected;
}

static int cdv_intel_dp_get_modes(struct drm_connector *connector)
{
	struct gma_encoder *intel_encoder = gma_attached_encoder(connector);
	struct cdv_intel_dp *intel_dp = intel_encoder->dev_priv;
	struct edid *edid = NULL;
	int ret = 0;
	int edp = is_edp(intel_encoder);

	edid = drm_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		drm_connector_update_edid_property(connector, edid);
		ret = drm_add_edid_modes(connector, edid);
		kfree(edid);
	}

	if (is_edp(intel_encoder)) {
		struct drm_device *dev = connector->dev;
		struct drm_psb_private *dev_priv = dev->dev_private;

		cdv_intel_edp_panel_vdd_off(intel_encoder);
		if (ret) {
			if (edp && !intel_dp->panel_fixed_mode) {
				struct drm_display_mode *newmode;
				list_for_each_entry(newmode, &connector->probed_modes,
						    head) {
					if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
						intel_dp->panel_fixed_mode =
							drm_mode_duplicate(dev, newmode);
						break;
					}
				}
			}

			return ret;
		}
		if (!intel_dp->panel_fixed_mode && dev_priv->lfp_lvds_vbt_mode) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode != NULL) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}

	return ret;
}

static bool
cdv_intel_dp_detect_audio(struct drm_connector *connector) 1817 { 1818 struct gma_encoder *encoder = gma_attached_encoder(connector); 1819 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1820 struct edid *edid; 1821 bool has_audio = false; 1822 int edp = is_edp(encoder); 1823 1824 if (edp) 1825 cdv_intel_edp_panel_vdd_on(encoder); 1826 1827 edid = drm_get_edid(connector, &intel_dp->adapter); 1828 if (edid) { 1829 has_audio = drm_detect_monitor_audio(edid); 1830 kfree(edid); 1831 } 1832 if (edp) 1833 cdv_intel_edp_panel_vdd_off(encoder); 1834 1835 return has_audio; 1836 } 1837 1838 static int 1839 cdv_intel_dp_set_property(struct drm_connector *connector, 1840 struct drm_property *property, 1841 uint64_t val) 1842 { 1843 struct drm_psb_private *dev_priv = connector->dev->dev_private; 1844 struct gma_encoder *encoder = gma_attached_encoder(connector); 1845 struct cdv_intel_dp *intel_dp = encoder->dev_priv; 1846 int ret; 1847 1848 ret = drm_object_property_set_value(&connector->base, property, val); 1849 if (ret) 1850 return ret; 1851 1852 if (property == dev_priv->force_audio_property) { 1853 int i = val; 1854 bool has_audio; 1855 1856 if (i == intel_dp->force_audio) 1857 return 0; 1858 1859 intel_dp->force_audio = i; 1860 1861 if (i == 0) 1862 has_audio = cdv_intel_dp_detect_audio(connector); 1863 else 1864 has_audio = i > 0; 1865 1866 if (has_audio == intel_dp->has_audio) 1867 return 0; 1868 1869 intel_dp->has_audio = has_audio; 1870 goto done; 1871 } 1872 1873 if (property == dev_priv->broadcast_rgb_property) { 1874 if (val == !!intel_dp->color_range) 1875 return 0; 1876 1877 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0; 1878 goto done; 1879 } 1880 1881 return -EINVAL; 1882 1883 done: 1884 if (encoder->base.crtc) { 1885 struct drm_crtc *crtc = encoder->base.crtc; 1886 drm_crtc_helper_set_mode(crtc, &crtc->mode, 1887 crtc->x, crtc->y, 1888 crtc->primary->fb); 1889 } 1890 1891 return 0; 1892 } 1893 1894 static void 1895 cdv_intel_dp_destroy(struct drm_connector *connector) 1896 { 1897 struct gma_encoder *gma_encoder = gma_attached_encoder(connector); 1898 struct cdv_intel_dp *intel_dp = gma_encoder->dev_priv; 1899 1900 if (is_edp(gma_encoder)) { 1901 /* cdv_intel_panel_destroy_backlight(connector->dev); */ 1902 kfree(intel_dp->panel_fixed_mode); 1903 intel_dp->panel_fixed_mode = NULL; 1904 } 1905 i2c_del_adapter(&intel_dp->adapter); 1906 drm_connector_unregister(connector); 1907 drm_connector_cleanup(connector); 1908 kfree(connector); 1909 } 1910 1911 static void cdv_intel_dp_encoder_destroy(struct drm_encoder *encoder) 1912 { 1913 drm_encoder_cleanup(encoder); 1914 } 1915 1916 static const struct drm_encoder_helper_funcs cdv_intel_dp_helper_funcs = { 1917 .dpms = cdv_intel_dp_dpms, 1918 .mode_fixup = cdv_intel_dp_mode_fixup, 1919 .prepare = cdv_intel_dp_prepare, 1920 .mode_set = cdv_intel_dp_mode_set, 1921 .commit = cdv_intel_dp_commit, 1922 }; 1923 1924 static const struct drm_connector_funcs cdv_intel_dp_connector_funcs = { 1925 .dpms = drm_helper_connector_dpms, 1926 .detect = cdv_intel_dp_detect, 1927 .fill_modes = drm_helper_probe_single_connector_modes, 1928 .set_property = cdv_intel_dp_set_property, 1929 .destroy = cdv_intel_dp_destroy, 1930 }; 1931 1932 static const struct drm_connector_helper_funcs cdv_intel_dp_connector_helper_funcs = { 1933 .get_modes = cdv_intel_dp_get_modes, 1934 .mode_valid = cdv_intel_dp_mode_valid, 1935 .best_encoder = gma_best_encoder, 1936 }; 1937 1938 static const struct drm_encoder_funcs cdv_intel_dp_enc_funcs = { 1939 .destroy = 
	cdv_intel_dp_encoder_destroy,
};

static void cdv_intel_dp_add_properties(struct drm_connector *connector)
{
	cdv_intel_attach_force_audio_property(connector);
	cdv_intel_attach_broadcast_rgb_property(connector);
}

/* check the VBT to see whether the eDP is on DP-C port */
static bool cdv_intel_dpc_is_edp(struct drm_device *dev)
{
	struct drm_psb_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPC &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

/*
 * Cedarview display clock gating
 *
 * We need this disabled to get correct behaviour while enabling
 * DP/eDP. TODO - investigate if we can turn it back to normality
 * after enabling.
 */
static void cdv_disable_intel_clock_gating(struct drm_device *dev)
{
	u32 reg_value;
	reg_value = REG_READ(DSPCLK_GATE_D);

	reg_value |= (DPUNIT_PIPEB_GATE_DISABLE |
		      DPUNIT_PIPEA_GATE_DISABLE |
		      DPCUNIT_CLOCK_GATE_DISABLE |
		      DPLSUNIT_CLOCK_GATE_DISABLE |
		      DPOUNIT_CLOCK_GATE_DISABLE |
		      DPIOUNIT_CLOCK_GATE_DISABLE);

	REG_WRITE(DSPCLK_GATE_D, reg_value);

	udelay(500);
}

void
cdv_intel_dp_init(struct drm_device *dev, struct psb_intel_mode_device *mode_dev, int output_reg)
{
	struct gma_encoder *gma_encoder;
	struct gma_connector *gma_connector;
	struct drm_connector *connector;
	struct drm_encoder *encoder;
	struct cdv_intel_dp *intel_dp;
	const char *name = NULL;
	int type = DRM_MODE_CONNECTOR_DisplayPort;

	gma_encoder = kzalloc(sizeof(struct gma_encoder), GFP_KERNEL);
	if (!gma_encoder)
		return;
	gma_connector = kzalloc(sizeof(struct gma_connector), GFP_KERNEL);
	if (!gma_connector)
		goto err_connector;
	intel_dp = kzalloc(sizeof(struct cdv_intel_dp), GFP_KERNEL);
	if (!intel_dp)
		goto err_priv;

	if ((output_reg == DP_C) && cdv_intel_dpc_is_edp(dev))
		type = DRM_MODE_CONNECTOR_eDP;

	connector = &gma_connector->base;
	encoder = &gma_encoder->base;

	drm_connector_init(dev, connector, &cdv_intel_dp_connector_funcs, type);
	drm_encoder_init(dev, encoder, &cdv_intel_dp_enc_funcs,
			 DRM_MODE_ENCODER_TMDS, NULL);

	gma_connector_attach_encoder(gma_connector, gma_encoder);

	if (type == DRM_MODE_CONNECTOR_DisplayPort)
		gma_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	else
		gma_encoder->type = INTEL_OUTPUT_EDP;

	gma_encoder->dev_priv = intel_dp;
	intel_dp->encoder = gma_encoder;
	intel_dp->output_reg = output_reg;

	drm_encoder_helper_add(encoder, &cdv_intel_dp_helper_funcs);
	drm_connector_helper_add(connector, &cdv_intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;

	drm_connector_register(connector);

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_B:
		name = "DPDDC-B";
		gma_encoder->ddi_select = (DP_MASK | DDI0_SELECT);
		break;
	case DP_C:
		name = "DPDDC-C";
		gma_encoder->ddi_select = (DP_MASK | DDI1_SELECT);
		break;
	}

	cdv_disable_intel_clock_gating(dev);

	cdv_intel_dp_i2c_init(gma_connector, gma_encoder, name);
	/* FIXME: fail check */
	cdv_intel_dp_add_properties(connector);

	if (is_edp(gma_encoder)) {
		int ret;
		struct edp_power_seq cur;
		u32 pp_on, pp_off, pp_div;
		u32 pwm_ctrl;

		pp_on = REG_READ(PP_CONTROL);
		pp_on &= ~PANEL_UNLOCK_MASK;
		pp_on |= PANEL_UNLOCK_REGS;

		REG_WRITE(PP_CONTROL, pp_on);

		pwm_ctrl = REG_READ(BLC_PWM_CTL2);
		pwm_ctrl |= PWM_PIPE_B;
		REG_WRITE(BLC_PWM_CTL2, pwm_ctrl);

		pp_on = REG_READ(PP_ON_DELAYS);
		pp_off = REG_READ(PP_OFF_DELAYS);
		pp_div = REG_READ(PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT);

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		intel_dp->panel_power_up_delay = cur.t1_t3 / 10;
		intel_dp->backlight_on_delay = cur.t8 / 10;
		intel_dp->backlight_off_delay = cur.t9 / 10;
		intel_dp->panel_power_down_delay = cur.t10 / 10;
		intel_dp->panel_power_cycle_delay = (cur.t11_t12 - 1) * 100;

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		cdv_intel_edp_panel_vdd_on(gma_encoder);
		ret = cdv_intel_dp_aux_native_read(gma_encoder, DP_DPCD_REV,
						   intel_dp->dpcd,
						   sizeof(intel_dp->dpcd));
		cdv_intel_edp_panel_vdd_off(gma_encoder);
		if (ret == 0) {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			cdv_intel_dp_encoder_destroy(encoder);
			cdv_intel_dp_destroy(connector);
			goto err_priv;
		} else {
			DRM_DEBUG_KMS("DPCD: Rev=%x LN_Rate=%x LN_CNT=%x LN_DOWNSP=%x\n",
				      intel_dp->dpcd[0], intel_dp->dpcd[1],
				      intel_dp->dpcd[2], intel_dp->dpcd[3]);
		}
		/*
		 * The CDV reference driver moves panel backlight setup into the
		 * displays that have a backlight: this is a good idea and one we
		 * should probably adopt, however we need to migrate all the
		 * drivers before we can do that.
		 */
		/* cdv_intel_panel_setup_backlight(dev); */
	}
	return;

err_priv:
	kfree(gma_connector);
err_connector:
	kfree(gma_encoder);
}
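/*
 * Worked example for the eDP power-sequencing conversion above (illustrative
 * register values, not taken from real hardware): a t1_t3 field of 2000 and a
 * t8 field of 500 (both in 100us units) give panel_power_up_delay = 200 ms and
 * backlight_on_delay = 50 ms, while a t11_t12 field of 6 (100ms units, biased
 * by one) gives a panel_power_cycle_delay of 500 ms.
 */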