/*
 * Copyright © 2008 Intel Corporation
 *             2014 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_edid.h>
#include <drm/drm_probe_helper.h>

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_atomic.h"
#include "intel_audio.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_ddi.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_hdcp.h"
#include "intel_dp_mst.h"
#include "intel_dpio_phy.h"
#include "intel_hdcp.h"
#include "intel_hotplug.h"
#include "skl_scaler.h"

static int intel_dp_mst_check_constraints(struct drm_i915_private *i915, int bpp,
					  const struct drm_display_mode *adjusted_mode,
					  struct intel_crtc_state *crtc_state,
					  bool dsc)
{
	if (intel_dp_is_uhbr(crtc_state) && DISPLAY_VER(i915) <= 13 && dsc) {
		int output_bpp = bpp;
		/* DisplayPort 2 128b/132b, bits per lane is always 32 */
		int symbol_clock = crtc_state->port_clock / 32;

		if (output_bpp * adjusted_mode->crtc_clock >=
		    symbol_clock * 72) {
			drm_dbg_kms(&i915->drm, "UHBR check failed(required bw %d available %d)\n",
				    output_bpp * adjusted_mode->crtc_clock, symbol_clock * 72);
			return -EINVAL;
		}
	}

	return 0;
}

static int intel_dp_mst_find_vcpi_slots_for_bpp(struct intel_encoder *encoder,
						struct intel_crtc_state *crtc_state,
						int max_bpp,
						int min_bpp,
						struct link_config_limits *limits,
						struct drm_connector_state *conn_state,
						int step,
						bool dsc)
{
	struct drm_atomic_state *state = crtc_state->uapi.state;
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = &intel_mst->primary->dp;
	struct drm_dp_mst_topology_state *mst_state;
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int bpp, slots = -EINVAL;
	int ret = 0;

	mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst_mgr);
	if (IS_ERR(mst_state))
		return PTR_ERR(mst_state);

	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

	// TODO: Handle pbn_div changes by adding a new MST helper
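	/*
	 * pbn_div is the PBN a single MST time slot can carry for the
	 * selected link rate and lane count; derive it here if the
	 * topology state doesn't have it set up yet.
	 */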
	if (!mst_state->pbn_div) {
		mst_state->pbn_div = drm_dp_get_vc_payload_bw(&intel_dp->mst_mgr,
							      crtc_state->port_clock,
							      crtc_state->lane_count);
	}

	for (bpp = max_bpp; bpp >= min_bpp; bpp -= step) {
		drm_dbg_kms(&i915->drm, "Trying bpp %d\n", bpp);

		ret = intel_dp_mst_check_constraints(i915, bpp, adjusted_mode, crtc_state, dsc);
		if (ret)
			continue;

		crtc_state->pbn = drm_dp_calc_pbn_mode(adjusted_mode->crtc_clock,
						       bpp << 4);

		slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst_mgr,
						      connector->port,
						      crtc_state->pbn);
		if (slots == -EDEADLK)
			return slots;

		if (slots >= 0) {
			ret = drm_dp_mst_atomic_check(state);
			/*
			 * If we got slots >= 0 and we can fit those based on check
			 * then we can exit the loop. Otherwise keep trying.
			 */
			if (!ret)
				break;
		}
	}

	/* We failed to find a proper bpp/timeslots, return error */
	if (ret)
		slots = ret;

	if (slots < 0) {
		drm_dbg_kms(&i915->drm, "failed finding vcpi slots:%d\n",
			    slots);
	} else {
		if (!dsc)
			crtc_state->pipe_bpp = bpp;
		else
			crtc_state->dsc.compressed_bpp = bpp;
		drm_dbg_kms(&i915->drm, "Got %d slots for pipe bpp %d dsc %d\n", slots, bpp, dsc);
	}

	return slots;
}

static int intel_dp_mst_compute_link_config(struct intel_encoder *encoder,
					    struct intel_crtc_state *crtc_state,
					    struct drm_connector_state *conn_state,
					    struct link_config_limits *limits)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int slots = -EINVAL;

	slots = intel_dp_mst_find_vcpi_slots_for_bpp(encoder, crtc_state, limits->max_bpp,
						     limits->min_bpp, limits,
						     conn_state, 2 * 3, false);

	if (slots < 0)
		return slots;

	intel_link_compute_m_n(crtc_state->pipe_bpp,
			       crtc_state->lane_count,
			       adjusted_mode->crtc_clock,
			       crtc_state->port_clock,
			       &crtc_state->dp_m_n,
			       crtc_state->fec_enable);
	crtc_state->dp_m_n.tu = slots;

	return 0;
}

static int intel_dp_dsc_mst_compute_link_config(struct intel_encoder *encoder,
						struct intel_crtc_state *crtc_state,
						struct drm_connector_state *conn_state,
						struct link_config_limits *limits)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = &intel_mst->primary->dp;
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int slots = -EINVAL;
	int i, num_bpc;
	u8 dsc_bpc[3] = {0};
	int min_bpp, max_bpp, sink_min_bpp, sink_max_bpp;
	u8 dsc_max_bpc;
	bool need_timeslot_recalc = false;
	u32 last_compressed_bpp;

	/* Max DSC Input BPC for ICL is 10 and for TGL+ is 12 */
	if (DISPLAY_VER(i915) >= 12)
		dsc_max_bpc = min_t(u8, 12, conn_state->max_requested_bpc);
	else
		dsc_max_bpc = min_t(u8, 10, conn_state->max_requested_bpc);

	max_bpp = min_t(u8, dsc_max_bpc * 3, limits->max_bpp);
	min_bpp = limits->min_bpp;

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(intel_dp->dsc_dpcd,
						       dsc_bpc);

	drm_dbg_kms(&i915->drm, "DSC Source supported min bpp %d max bpp %d\n",
		    min_bpp, max_bpp);

	sink_max_bpp = dsc_bpc[0] * 3;
	sink_min_bpp = sink_max_bpp;

	for (i = 1; i < num_bpc; i++) {
		if (sink_min_bpp > dsc_bpc[i] * 3)
			sink_min_bpp = dsc_bpc[i] * 3;
		if (sink_max_bpp < dsc_bpc[i] * 3)
			sink_max_bpp = dsc_bpc[i] * 3;
	}

	drm_dbg_kms(&i915->drm, "DSC Sink supported min bpp %d max bpp %d\n",
		    sink_min_bpp, sink_max_bpp);

	if (min_bpp < sink_min_bpp)
		min_bpp = sink_min_bpp;

	if (max_bpp > sink_max_bpp)
		max_bpp = sink_max_bpp;

	slots = intel_dp_mst_find_vcpi_slots_for_bpp(encoder, crtc_state, max_bpp,
						     min_bpp, limits,
						     conn_state, 2 * 3, true);

	if (slots < 0)
		return slots;

	last_compressed_bpp = crtc_state->dsc.compressed_bpp;

	crtc_state->dsc.compressed_bpp = intel_dp_dsc_nearest_valid_bpp(i915,
									last_compressed_bpp,
									crtc_state->pipe_bpp);

	if (crtc_state->dsc.compressed_bpp != last_compressed_bpp)
		need_timeslot_recalc = true;

	/*
	 * Apparently some MST hubs dislike if vcpi slots are not matching precisely
	 * the actual compressed bpp we use.
	 */
	if (need_timeslot_recalc) {
		slots = intel_dp_mst_find_vcpi_slots_for_bpp(encoder, crtc_state,
							     crtc_state->dsc.compressed_bpp,
							     crtc_state->dsc.compressed_bpp,
							     limits, conn_state, 2 * 3, true);
		if (slots < 0)
			return slots;
	}

	intel_link_compute_m_n(crtc_state->dsc.compressed_bpp,
			       crtc_state->lane_count,
			       adjusted_mode->crtc_clock,
			       crtc_state->port_clock,
			       &crtc_state->dp_m_n,
			       crtc_state->fec_enable);
	crtc_state->dp_m_n.tu = slots;

	return 0;
}

static int intel_dp_mst_update_slots(struct intel_encoder *encoder,
				     struct intel_crtc_state *crtc_state,
				     struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = &intel_mst->primary->dp;
	struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
	struct drm_dp_mst_topology_state *topology_state;
	u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ?
		DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B;

	topology_state = drm_atomic_get_mst_topology_state(conn_state->state, mgr);
	if (IS_ERR(topology_state)) {
		drm_dbg_kms(&i915->drm, "slot update failed\n");
		return PTR_ERR(topology_state);
	}

	drm_dp_mst_update_slots(topology_state, link_coding_cap);

	return 0;
}

static bool intel_dp_mst_has_audio(const struct drm_connector_state *conn_state)
{
	const struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);

	if (intel_conn_state->force_audio == HDMI_AUDIO_AUTO)
		return connector->base.display_info.has_audio;
	else
		return intel_conn_state->force_audio == HDMI_AUDIO_ON;
}

static int intel_dp_mst_compute_config(struct intel_encoder *encoder,
				       struct intel_crtc_state *pipe_config,
				       struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = &intel_mst->primary->dp;
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct link_config_limits limits;
	int ret;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	pipe_config->sink_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->has_pch_encoder = false;

	pipe_config->has_audio =
		intel_dp_mst_has_audio(conn_state) &&
		intel_audio_compute_config(encoder, pipe_config, conn_state);

	/*
	 * for MST we always configure max link bw - the spec doesn't
	 * seem to suggest we should do otherwise.
	 */
	limits.min_rate =
	limits.max_rate = intel_dp_max_link_rate(intel_dp);

	limits.min_lane_count =
	limits.max_lane_count = intel_dp_max_lane_count(intel_dp);

	limits.min_bpp = intel_dp_min_bpp(pipe_config->output_format);
	/*
	 * FIXME: If all the streams can't fit into the link with
	 * their current pipe_bpp we should reduce pipe_bpp across
	 * the board until things start to fit. Until then we
	 * limit to <= 8bpc since that's what was hardcoded for all
	 * MST streams previously. This hack should be removed once
	 * we have the proper retry logic in place.
	 */
	limits.max_bpp = min(pipe_config->pipe_bpp, 24);

	intel_dp_adjust_compliance_config(intel_dp, pipe_config, &limits);

	ret = intel_dp_mst_compute_link_config(encoder, pipe_config,
					       conn_state, &limits);

	if (ret == -EDEADLK)
		return ret;

	/* enable compression if the mode doesn't fit available BW */
	drm_dbg_kms(&dev_priv->drm, "Force DSC en = %d\n", intel_dp->force_dsc_en);
	if (ret || intel_dp->force_dsc_en) {
		/*
		 * Try to get at least some timeslots and then see, if
		 * we can fit there with DSC.
		 */
		drm_dbg_kms(&dev_priv->drm, "Trying to find VCPI slots in DSC mode\n");

		ret = intel_dp_dsc_mst_compute_link_config(encoder, pipe_config,
							   conn_state, &limits);
		if (ret < 0)
			return ret;

		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits,
						  pipe_config->dp_m_n.tu, false);
	}

	if (ret)
		return ret;

	ret = intel_dp_mst_update_slots(encoder, pipe_config, conn_state);
	if (ret)
		return ret;

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv))
		pipe_config->lane_lat_optim_mask =
			bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	intel_ddi_compute_min_voltage_level(dev_priv, pipe_config);

	return 0;
}

/*
 * Iterate over all connectors and return a mask of
 * all CPU transcoders streaming over the same DP link.
 */
static unsigned int
intel_dp_mst_transcoder_mask(struct intel_atomic_state *state,
			     struct intel_dp *mst_port)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;
	u8 transcoders = 0;
	int i;

	if (DISPLAY_VER(dev_priv) < 12)
		return 0;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		const struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (connector->mst_port != mst_port || !conn_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_state->base.crtc);
		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);

		if (!crtc_state->hw.active)
			continue;

		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

static int intel_dp_mst_compute_config_late(struct intel_encoder *encoder,
					    struct intel_crtc_state *crtc_state,
					    struct drm_connector_state *conn_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = &intel_mst->primary->dp;

	/* lowest numbered transcoder will be designated master */
	crtc_state->mst_master_transcoder =
		ffs(intel_dp_mst_transcoder_mask(state, intel_dp)) - 1;

	return 0;
}

/*
 * If one of the connectors in an MST stream needs a modeset, mark all CRTCs
 * that share the same MST stream as mode changed;
 * intel_modeset_pipe_config() + intel_crtc_check_fastset() will take care to do
 * a fastset when possible.
 */
static int
intel_dp_mst_atomic_master_trans_check(struct intel_connector *connector,
				       struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct drm_connector_list_iter connector_list_iter;
	struct intel_connector *connector_iter;
	int ret = 0;

	if (DISPLAY_VER(dev_priv) < 12)
		return 0;

	if (!intel_connector_needs_modeset(state, &connector->base))
		return 0;

	drm_connector_list_iter_begin(&dev_priv->drm, &connector_list_iter);
	for_each_intel_connector_iter(connector_iter, &connector_list_iter) {
		struct intel_digital_connector_state *conn_iter_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (connector_iter->mst_port != connector->mst_port ||
		    connector_iter == connector)
			continue;

		conn_iter_state = intel_atomic_get_digital_connector_state(state,
									   connector_iter);
		if (IS_ERR(conn_iter_state)) {
			ret = PTR_ERR(conn_iter_state);
			break;
		}

		if (!conn_iter_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_iter_state->base.crtc);
		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state)) {
			ret = PTR_ERR(crtc_state);
			break;
		}

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
		crtc_state->uapi.mode_changed = true;
	}
	drm_connector_list_iter_end(&connector_list_iter);

	return ret;
}

static int
intel_dp_mst_atomic_check(struct drm_connector *connector,
			  struct drm_atomic_state *_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct intel_connector *intel_connector =
		to_intel_connector(connector);
	int ret;

	ret = intel_digital_connector_atomic_check(connector, &state->base);
	if (ret)
		return ret;

	ret = intel_dp_mst_atomic_master_trans_check(intel_connector, state);
	if (ret)
		return ret;

	return drm_dp_atomic_release_time_slots(&state->base,
						&intel_connector->mst_port->mst_mgr,
						intel_connector->port);
}

static void clear_act_sent(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);

	intel_de_write(i915, dp_tp_status_reg(encoder, crtc_state),
		       DP_TP_STATUS_ACT_SENT);
}

static void wait_for_act_sent(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = &intel_mst->primary->dp;

	if (intel_de_wait_for_set(i915, dp_tp_status_reg(encoder, crtc_state),
				  DP_TP_STATUS_ACT_SENT, 1))
		drm_err(&i915->drm, "Timed out waiting for ACT sent\n");

	drm_dp_check_act_status(&intel_dp->mst_mgr);
}

static void intel_mst_disable_dp(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *old_crtc_state,
				 const struct drm_connector_state *old_conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);
	struct drm_dp_mst_topology_state *old_mst_state =
		drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst_mgr);
	struct drm_dp_mst_topology_state *new_mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
	const struct drm_dp_mst_atomic_payload *old_payload =
		drm_atomic_get_mst_payload_state(old_mst_state, connector->port);
	struct drm_dp_mst_atomic_payload *new_payload =
		drm_atomic_get_mst_payload_state(new_mst_state, connector->port);
	struct drm_i915_private *i915 = to_i915(connector->base.dev);

	drm_dbg_kms(&i915->drm, "active links %d\n",
		    intel_dp->active_mst_links);

	intel_hdcp_disable(intel_mst->connector);

	drm_dp_remove_payload(&intel_dp->mst_mgr, new_mst_state,
			      old_payload, new_payload);

	intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);
}

static void intel_mst_post_disable_dp(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *old_crtc_state,
				      const struct drm_connector_state *old_conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);
	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
	bool last_mst_stream;

	intel_dp->active_mst_links--;
	last_mst_stream = intel_dp->active_mst_links == 0;
	drm_WARN_ON(&dev_priv->drm,
		    DISPLAY_VER(dev_priv) >= 12 && last_mst_stream &&
		    !intel_dp_mst_is_master_trans(old_crtc_state));

	intel_crtc_vblank_off(old_crtc_state);

	intel_disable_transcoder(old_crtc_state);

	clear_act_sent(encoder, old_crtc_state);

	intel_de_rmw(dev_priv, TRANS_DDI_FUNC_CTL(old_crtc_state->cpu_transcoder),
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0);

	wait_for_act_sent(encoder, old_crtc_state);

	intel_ddi_disable_transcoder_func(old_crtc_state);

	if (DISPLAY_VER(dev_priv) >= 9)
		skl_scaler_disable(old_crtc_state);
	else
		ilk_pfit_disable(old_crtc_state);

	/*
	 * Power down mst path before disabling the port, otherwise we end
	 * up getting interrupts from the sink upon detecting link loss.
	 */
	drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port,
				     false);

	/*
	 * BSpec 4287: disable DIP after the transcoder is disabled and before
	 * the transcoder clock select is set to none.
	 */
	if (last_mst_stream)
		intel_dp_set_infoframes(&dig_port->base, false,
					old_crtc_state, NULL);
	/*
	 * From TGL spec: "If multi-stream slave transcoder: Configure
	 * Transcoder Clock Select to direct no clock to the transcoder"
	 *
	 * From older GENs spec: "Configure Transcoder Clock Select to direct
	 * no clock to the transcoder"
	 */
	if (DISPLAY_VER(dev_priv) < 12 || !last_mst_stream)
		intel_ddi_disable_transcoder_clock(old_crtc_state);

	intel_mst->connector = NULL;
	if (last_mst_stream)
		dig_port->base.post_disable(state, &dig_port->base,
					    old_crtc_state, NULL);

	drm_dbg_kms(&dev_priv->drm, "active links %d\n",
		    intel_dp->active_mst_links);
}

static void intel_mst_post_pll_disable_dp(struct intel_atomic_state *state,
					  struct intel_encoder *encoder,
					  const struct intel_crtc_state *old_crtc_state,
					  const struct drm_connector_state *old_conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;
	struct intel_dp *intel_dp = &dig_port->dp;

	if (intel_dp->active_mst_links == 0 &&
	    dig_port->base.post_pll_disable)
		dig_port->base.post_pll_disable(state, encoder, old_crtc_state, old_conn_state);
}

static void intel_mst_pre_pll_enable_dp(struct intel_atomic_state *state,
					struct intel_encoder *encoder,
					const struct intel_crtc_state *pipe_config,
					const struct drm_connector_state *conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;
	struct intel_dp *intel_dp = &dig_port->dp;

	if (intel_dp->active_mst_links == 0)
		dig_port->base.pre_pll_enable(state, &dig_port->base,
					      pipe_config, NULL);
	else
		/*
		 * The port PLL state needs to get updated for secondary
		 * streams as for the primary stream.
		 */
		intel_ddi_update_active_dpll(state, &dig_port->base,
					     to_intel_crtc(pipe_config->uapi.crtc));
}

static void intel_mst_pre_enable_dp(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *pipe_config,
				    const struct drm_connector_state *conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
	int ret;
	bool first_mst_stream;

	/* MST encoders are bound to a crtc, not to a connector,
	 * force the mapping here for get_hw_state.
	 */
	connector->encoder = encoder;
	intel_mst->connector = connector;
	first_mst_stream = intel_dp->active_mst_links == 0;
	drm_WARN_ON(&dev_priv->drm,
		    DISPLAY_VER(dev_priv) >= 12 && first_mst_stream &&
		    !intel_dp_mst_is_master_trans(pipe_config));

	drm_dbg_kms(&dev_priv->drm, "active links %d\n",
		    intel_dp->active_mst_links);

	if (first_mst_stream)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);

	drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port, true);

	if (first_mst_stream)
		dig_port->base.pre_enable(state, &dig_port->base,
					  pipe_config, NULL);

	intel_dp->active_mst_links++;

	ret = drm_dp_add_payload_part1(&intel_dp->mst_mgr, mst_state,
				       drm_atomic_get_mst_payload_state(mst_state, connector->port));
	if (ret < 0)
		drm_err(&dev_priv->drm, "Failed to create MST payload for %s: %d\n",
			connector->base.name, ret);

	/*
	 * Before Gen 12 this is not done as part of
	 * dig_port->base.pre_enable() and should be done here. For
	 * Gen 12+ the step in which this should be done is different for the
	 * first MST stream, so it's done on the DDI for the first stream and
	 * here for the following ones.
	 */
	if (DISPLAY_VER(dev_priv) < 12 || !first_mst_stream)
		intel_ddi_enable_transcoder_clock(encoder, pipe_config);

	intel_ddi_set_dp_msa(pipe_config, conn_state);
}

static void intel_mst_enable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *pipe_config,
				const struct drm_connector_state *conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
	enum transcoder trans = pipe_config->cpu_transcoder;

	drm_WARN_ON(&dev_priv->drm, pipe_config->has_pch_encoder);

	clear_act_sent(encoder, pipe_config);

	if (intel_dp_is_uhbr(pipe_config)) {
		const struct drm_display_mode *adjusted_mode =
			&pipe_config->hw.adjusted_mode;
		u64 crtc_clock_hz = KHz(adjusted_mode->crtc_clock);

		intel_de_write(dev_priv, TRANS_DP2_VFREQHIGH(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz >> 24));
		intel_de_write(dev_priv, TRANS_DP2_VFREQLOW(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff));
	}

	intel_ddi_enable_transcoder_func(encoder, pipe_config);

	intel_de_rmw(dev_priv, TRANS_DDI_FUNC_CTL(trans), 0,
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC);

	drm_dbg_kms(&dev_priv->drm, "active links %d\n",
		    intel_dp->active_mst_links);

	wait_for_act_sent(encoder, pipe_config);

	drm_dp_add_payload_part2(&intel_dp->mst_mgr, &state->base,
				 drm_atomic_get_mst_payload_state(mst_state, connector->port));

	if (DISPLAY_VER(dev_priv) >= 14 && pipe_config->fec_enable)
		intel_de_rmw(dev_priv, MTL_CHICKEN_TRANS(trans), 0,
			     FECSTALL_DIS_DPTSTREAM_DPTTG);
	else if (DISPLAY_VER(dev_priv) >= 12 && pipe_config->fec_enable)
		intel_de_rmw(dev_priv, CHICKEN_TRANS(trans), 0,
			     FECSTALL_DIS_DPTSTREAM_DPTTG);

	intel_enable_transcoder(pipe_config);

	intel_crtc_vblank_on(pipe_config);

	intel_audio_codec_enable(encoder, pipe_config, conn_state);

	/* Enable hdcp if it's desired */
	if (conn_state->content_protection ==
	    DRM_MODE_CONTENT_PROTECTION_DESIRED)
		intel_hdcp_enable(state, encoder, pipe_config, conn_state);
}

static bool intel_dp_mst_enc_get_hw_state(struct intel_encoder *encoder,
					  enum pipe *pipe)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	*pipe = intel_mst->pipe;
	if (intel_mst->connector)
		return true;
	return false;
}

static void intel_dp_mst_enc_get_config(struct intel_encoder *encoder,
					struct intel_crtc_state *pipe_config)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	dig_port->base.get_config(&dig_port->base, pipe_config);
}

static bool intel_dp_mst_initial_fastset_check(struct intel_encoder *encoder,
					       struct intel_crtc_state *crtc_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	return intel_dp_initial_fastset_check(&dig_port->base, crtc_state);
}

static int intel_dp_mst_get_ddc_modes(struct drm_connector *connector)
{
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct intel_dp *intel_dp = intel_connector->mst_port;
	const struct drm_edid *drm_edid;
	int ret;

	if (drm_connector_is_unregistered(connector))
		return intel_connector_update_modes(connector, NULL);

	drm_edid = drm_dp_mst_edid_read(connector, &intel_dp->mst_mgr, intel_connector->port);

	ret = intel_connector_update_modes(connector, drm_edid);

	drm_edid_free(drm_edid);

	return ret;
}

static int
intel_dp_mst_connector_late_register(struct drm_connector *connector)
{
	struct intel_connector *intel_connector = to_intel_connector(connector);
	int ret;

	ret = drm_dp_mst_connector_late_register(connector,
						 intel_connector->port);
	if (ret < 0)
		return ret;

	ret = intel_connector_register(connector);
	if (ret < 0)
		drm_dp_mst_connector_early_unregister(connector,
						      intel_connector->port);

	return ret;
}

static void
intel_dp_mst_connector_early_unregister(struct drm_connector *connector)
{
	struct intel_connector *intel_connector = to_intel_connector(connector);

	intel_connector_unregister(connector);
	drm_dp_mst_connector_early_unregister(connector,
					      intel_connector->port);
}

static const struct drm_connector_funcs intel_dp_mst_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = intel_dp_mst_connector_late_register,
	.early_unregister = intel_dp_mst_connector_early_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};

static int intel_dp_mst_get_modes(struct drm_connector *connector)
{
	return intel_dp_mst_get_ddc_modes(connector);
}

static int
intel_dp_mst_mode_valid_ctx(struct drm_connector *connector,
			    struct drm_display_mode *mode,
			    struct drm_modeset_acquire_ctx *ctx,
			    enum drm_mode_status *status)
{
	struct drm_i915_private *dev_priv = to_i915(connector->dev);
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct intel_dp *intel_dp = intel_connector->mst_port;
	struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
	struct drm_dp_mst_port *port = intel_connector->port;
	const int min_bpp = 18;
	int max_dotclk = to_i915(connector->dev)->max_dotclk_freq;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int ret;
	bool dsc = false, bigjoiner = false;
	u16 dsc_max_output_bpp = 0;
	u8 dsc_slice_count = 0;
	int target_clock = mode->clock;

	if (drm_connector_is_unregistered(connector)) {
		*status = MODE_ERROR;
		return 0;
	}

	*status = intel_cpu_transcoder_mode_valid(dev_priv, mode);
	if (*status != MODE_OK)
		return 0;

	if (mode->flags & DRM_MODE_FLAG_DBLSCAN) {
		*status = MODE_NO_DBLESCAN;
		return 0;
	}

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
	mode_rate = intel_dp_link_required(mode->clock, min_bpp);

	ret = drm_modeset_lock(&mgr->base.lock, ctx);
	if (ret)
		return ret;

	if (mode_rate > max_rate || mode->clock > max_dotclk ||
	    drm_dp_calc_pbn_mode(mode->clock, min_bpp << 4) > port->full_pbn) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (mode->clock < 10000) {
		*status = MODE_CLOCK_LOW;
		return 0;
	}

	if (mode->flags & DRM_MODE_FLAG_DBLCLK) {
		*status = MODE_H_ILLEGAL;
		return 0;
	}

	if (intel_dp_need_bigjoiner(intel_dp, mode->hdisplay, target_clock)) {
		bigjoiner = true;
		max_dotclk *= 2;

		/* TODO: add support for bigjoiner */
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (DISPLAY_VER(dev_priv) >= 10 &&
	    drm_dp_sink_supports_dsc(intel_dp->dsc_dpcd)) {
		/*
		 * TBD pass the connector BPC,
		 * for now U8_MAX so that max BPC on that platform would be picked
		 */
		int pipe_bpp = intel_dp_dsc_compute_bpp(intel_dp, U8_MAX);

		if (drm_dp_sink_supports_fec(intel_dp->fec_capable)) {
			dsc_max_output_bpp =
				intel_dp_dsc_get_output_bpp(dev_priv,
							    max_link_clock,
							    max_lanes,
							    target_clock,
							    mode->hdisplay,
							    bigjoiner,
							    pipe_bpp, 64) >> 4;
			dsc_slice_count =
				intel_dp_dsc_get_slice_count(intel_dp,
							     target_clock,
							     mode->hdisplay,
							     bigjoiner);
		}

		dsc = dsc_max_output_bpp && dsc_slice_count;
	}

	/*
	 * Big joiner configuration needs DSC for TGL which is not true for
	 * XE_LPD where uncompressed joiner is supported.
	 */
	if (DISPLAY_VER(dev_priv) < 13 && bigjoiner && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (mode_rate > max_rate && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	*status = intel_mode_valid_max_plane_size(dev_priv, mode, false);
	return 0;
}

static struct drm_encoder *intel_mst_atomic_best_encoder(struct drm_connector *connector,
							 struct drm_atomic_state *state)
{
	struct drm_connector_state *connector_state = drm_atomic_get_new_connector_state(state,
											 connector);
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct intel_dp *intel_dp = intel_connector->mst_port;
	struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc);

	return &intel_dp->mst_encoders[crtc->pipe]->base.base;
}

static int
intel_dp_mst_detect(struct drm_connector *connector,
		    struct drm_modeset_acquire_ctx *ctx, bool force)
{
	struct drm_i915_private *i915 = to_i915(connector->dev);
	struct intel_connector *intel_connector = to_intel_connector(connector);
	struct intel_dp *intel_dp = intel_connector->mst_port;

	if (!INTEL_DISPLAY_ENABLED(i915))
		return connector_status_disconnected;

	if (drm_connector_is_unregistered(connector))
		return connector_status_disconnected;

	return drm_dp_mst_detect_port(connector, ctx, &intel_dp->mst_mgr,
				      intel_connector->port);
}

static const struct drm_connector_helper_funcs intel_dp_mst_connector_helper_funcs = {
	.get_modes = intel_dp_mst_get_modes,
	.mode_valid_ctx = intel_dp_mst_mode_valid_ctx,
	.atomic_best_encoder = intel_mst_atomic_best_encoder,
	.atomic_check = intel_dp_mst_atomic_check,
	.detect_ctx = intel_dp_mst_detect,
};

static void intel_dp_mst_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(to_intel_encoder(encoder));

	drm_encoder_cleanup(encoder);
	kfree(intel_mst);
}

static const struct drm_encoder_funcs intel_dp_mst_enc_funcs = {
	.destroy = intel_dp_mst_encoder_destroy,
};

static bool intel_dp_mst_get_hw_state(struct intel_connector *connector)
{
	if (intel_attached_encoder(connector) && connector->base.state->crtc) {
		enum pipe pipe;

		if (!intel_attached_encoder(connector)->get_hw_state(intel_attached_encoder(connector), &pipe))
			return false;
		return true;
	}
	return false;
}

static int intel_dp_mst_add_properties(struct intel_dp *intel_dp,
				       struct drm_connector *connector,
				       const char *pathprop)
{
	struct drm_i915_private *i915 = to_i915(connector->dev);

	drm_object_attach_property(&connector->base,
				   i915->drm.mode_config.path_property, 0);
	drm_object_attach_property(&connector->base,
				   i915->drm.mode_config.tile_property, 0);

	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);

	/*
	 * Reuse the prop from the SST connector because we're
	 * not allowed to create new props after device registration.
	 */
	connector->max_bpc_property =
		intel_dp->attached_connector->base.max_bpc_property;
	if (connector->max_bpc_property)
		drm_connector_attach_max_bpc_property(connector, 6, 12);

	return drm_connector_set_path_property(connector, pathprop);
}

static struct drm_connector *intel_dp_add_mst_connector(struct drm_dp_mst_topology_mgr *mgr,
							 struct drm_dp_mst_port *port,
							 const char *pathprop)
{
	struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_device *dev = dig_port->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_connector *intel_connector;
	struct drm_connector *connector;
	enum pipe pipe;
	int ret;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		return NULL;

	intel_connector->get_hw_state = intel_dp_mst_get_hw_state;
	intel_connector->mst_port = intel_dp;
	intel_connector->port = port;
	drm_dp_mst_get_port_malloc(port);

	connector = &intel_connector->base;
	ret = drm_connector_init(dev, connector, &intel_dp_mst_connector_funcs,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		drm_dp_mst_put_port_malloc(port);
		intel_connector_free(intel_connector);
		return NULL;
	}

	drm_connector_helper_add(connector, &intel_dp_mst_connector_helper_funcs);

	for_each_pipe(dev_priv, pipe) {
		struct drm_encoder *enc =
			&intel_dp->mst_encoders[pipe]->base.base;

		ret = drm_connector_attach_encoder(&intel_connector->base, enc);
		if (ret)
			goto err;
	}

	ret = intel_dp_mst_add_properties(intel_dp, connector, pathprop);
	if (ret)
		goto err;

	ret = intel_dp_hdcp_init(dig_port, intel_connector);
	if (ret)
		drm_dbg_kms(&dev_priv->drm, "[%s:%d] HDCP MST init failed, skipping.\n",
			    connector->name, connector->base.id);

	return connector;

err:
	drm_connector_cleanup(connector);
	return NULL;
}

static void
intel_dp_mst_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr)
{
	struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);

	intel_hpd_trigger_irq(dp_to_dig_port(intel_dp));
}

static const struct drm_dp_mst_topology_cbs mst_cbs = {
	.add_connector = intel_dp_add_mst_connector,
	.poll_hpd_irq = intel_dp_mst_poll_hpd_irq,
};

static struct intel_dp_mst_encoder *
intel_dp_create_fake_mst_encoder(struct intel_digital_port *dig_port, enum pipe pipe)
{
	struct intel_dp_mst_encoder *intel_mst;
	struct intel_encoder *intel_encoder;
	struct drm_device *dev = dig_port->base.base.dev;

	intel_mst = kzalloc(sizeof(*intel_mst), GFP_KERNEL);

	if (!intel_mst)
		return NULL;

	intel_mst->pipe = pipe;
	intel_encoder = &intel_mst->base;
	intel_mst->primary = dig_port;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_mst_enc_funcs,
			 DRM_MODE_ENCODER_DPMST, "DP-MST %c", pipe_name(pipe));

	intel_encoder->type = INTEL_OUTPUT_DP_MST;
	intel_encoder->power_domain = dig_port->base.power_domain;
	intel_encoder->port = dig_port->base.port;
	intel_encoder->cloneable = 0;
	/*
	 * This is wrong, but broken userspace uses the intersection
	 * of possible_crtcs of all the encoders of a given connector
	 * to figure out which crtcs can drive said connector. What
	 * should be used instead is the union of possible_crtcs.
	 * To keep such userspace functioning we must misconfigure
	 * this to make sure the intersection is not empty :(
	 */
	intel_encoder->pipe_mask = ~0;

	intel_encoder->compute_config = intel_dp_mst_compute_config;
	intel_encoder->compute_config_late = intel_dp_mst_compute_config_late;
	intel_encoder->disable = intel_mst_disable_dp;
	intel_encoder->post_disable = intel_mst_post_disable_dp;
	intel_encoder->post_pll_disable = intel_mst_post_pll_disable_dp;
	intel_encoder->update_pipe = intel_ddi_update_pipe;
	intel_encoder->pre_pll_enable = intel_mst_pre_pll_enable_dp;
	intel_encoder->pre_enable = intel_mst_pre_enable_dp;
	intel_encoder->enable = intel_mst_enable_dp;
	intel_encoder->get_hw_state = intel_dp_mst_enc_get_hw_state;
	intel_encoder->get_config = intel_dp_mst_enc_get_config;
	intel_encoder->initial_fastset_check = intel_dp_mst_initial_fastset_check;

	return intel_mst;
}

static bool
intel_dp_create_fake_mst_encoders(struct intel_digital_port *dig_port)
{
	struct intel_dp *intel_dp = &dig_port->dp;
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum pipe pipe;

	for_each_pipe(dev_priv, pipe)
		intel_dp->mst_encoders[pipe] = intel_dp_create_fake_mst_encoder(dig_port, pipe);
	return true;
}

int
intel_dp_mst_encoder_active_links(struct intel_digital_port *dig_port)
{
	return dig_port->dp.active_mst_links;
}

int
intel_dp_mst_encoder_init(struct intel_digital_port *dig_port, int conn_base_id)
{
	struct drm_i915_private *i915 = to_i915(dig_port->base.base.dev);
	struct intel_dp *intel_dp = &dig_port->dp;
	enum port port = dig_port->base.port;
	int ret;

	if (!HAS_DP_MST(i915) || intel_dp_is_edp(intel_dp))
		return 0;

	if (DISPLAY_VER(i915) < 12 && port == PORT_A)
		return 0;

	if (DISPLAY_VER(i915) < 11 && port == PORT_E)
		return 0;

	intel_dp->mst_mgr.cbs = &mst_cbs;

	/* create encoders */
	intel_dp_create_fake_mst_encoders(dig_port);
	ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst_mgr, &i915->drm,
					   &intel_dp->aux, 16, 3, conn_base_id);
	if (ret) {
		intel_dp->mst_mgr.cbs = NULL;
		return ret;
	}

	return 0;
}

bool intel_dp_mst_source_support(struct intel_dp *intel_dp)
{
	return intel_dp->mst_mgr.cbs;
}

void
intel_dp_mst_encoder_cleanup(struct intel_digital_port *dig_port)
{
	struct intel_dp *intel_dp = &dig_port->dp;

	if (!intel_dp_mst_source_support(intel_dp))
		return;

	drm_dp_mst_topology_mgr_destroy(&intel_dp->mst_mgr);
	/* encoders will get killed by normal cleanup */

	intel_dp->mst_mgr.cbs = NULL;
}

bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->mst_master_transcoder == crtc_state->cpu_transcoder;
}

bool intel_dp_mst_is_slave_trans(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->mst_master_transcoder != INVALID_TRANSCODER &&
	       crtc_state->mst_master_transcoder != crtc_state->cpu_transcoder;
}

/**
 * intel_dp_mst_add_topology_state_for_connector - add MST topology state for a connector
 * @state: atomic state
 * @connector: connector to add the state for
 * @crtc: the CRTC @connector is attached to
 *
 * Add the MST topology state for @connector to @state.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int
intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state *state,
					      struct intel_connector *connector,
					      struct intel_crtc *crtc)
{
	struct drm_dp_mst_topology_state *mst_state;

	if (!connector->mst_port)
		return 0;

	mst_state = drm_atomic_get_mst_topology_state(&state->base,
						      &connector->mst_port->mst_mgr);
	if (IS_ERR(mst_state))
		return PTR_ERR(mst_state);

	mst_state->pending_crtc_mask |= drm_crtc_mask(&crtc->base);

	return 0;
}

/**
 * intel_dp_mst_add_topology_state_for_crtc - add MST topology state for a CRTC
 * @state: atomic state
 * @crtc: CRTC to add the state for
 *
 * Add the MST topology state for @crtc to @state.
 *
 * Returns 0 on success, negative error code on failure.
 */
int intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state *state,
					     struct intel_crtc *crtc)
{
	struct drm_connector *_connector;
	struct drm_connector_state *conn_state;
	int i;

	for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
		struct intel_connector *connector = to_intel_connector(_connector);
		int ret;

		if (conn_state->crtc != &crtc->base)
			continue;

		ret = intel_dp_mst_add_topology_state_for_connector(state, connector, crtc);
		if (ret)
			return ret;
	}

	return 0;
}