1 /* 2 * Copyright 2011 Red Hat Inc. 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice shall be included in 12 * all copies or substantial portions of the Software. 13 * 14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 20 * OTHER DEALINGS IN THE SOFTWARE. 
 *
 * Authors: Ben Skeggs
 */
#include "disp.h"
#include "atom.h"
#include "core.h"
#include "head.h"
#include "wndw.h"

#include <linux/dma-mapping.h>
#include <linux/hdmi.h>
#include <linux/component.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_edid.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_scdc_helper.h>
#include <drm/drm_vblank.h>

#include <nvif/class.h>
#include <nvif/cl0002.h>
#include <nvif/cl5070.h>
#include <nvif/cl507d.h>
#include <nvif/event.h>
#include <nvif/timer.h>

#include "nouveau_drv.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_fence.h"
#include "nouveau_fbcon.h"

#include <subdev/bios/dp.h>

/******************************************************************************
 * Atomic state
 *****************************************************************************/

/* Per-encoder bookkeeping accumulated while building an atomic commit:
 * tracks which output paths need their control methods sent (set) or
 * torn down (clr) during this commit.
 */
struct nv50_outp_atom {
	struct list_head head;		/* entry in the commit's outp list */

	struct drm_encoder *encoder;
	bool flush_disable;		/* disable must land before core flush */

	union nv50_outp_atom_mask {
		struct {
			bool ctrl:1;	/* OR control method needs sending */
		};
		u8 mask;
	} set, clr;
};

/******************************************************************************
 * EVO channel
 *****************************************************************************/

/* Create a display (EVO) channel object, picking the first class from the
 * caller's preference-ordered list that the display engine also exposes.
 * On success the channel's user area is mapped so its registers can be
 * accessed directly.  Returns 0, or a negative error (-ENOSYS when no
 * class in the list is supported).
 */
static int
nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size,
		 struct nv50_chan *chan)
{
	struct nvif_sclass *sclass;
	int ret, i, n;

	chan->device = device;

	ret = n = nvif_object_sclass_get(disp, &sclass);
	if (ret < 0)
		return ret;

	/* oclass[] is ordered most- to least-preferred; use the first class
	 * that appears in the display object's supported-class list.
	 */
	while (oclass[0]) {
		for (i = 0; i < n; i++) {
			if (sclass[i].oclass == oclass[0]) {
				ret = nvif_object_init(disp, 0, oclass[0],
						       data, size, &chan->user);
				if (ret == 0)
					nvif_object_map(&chan->user, NULL, 0);
				nvif_object_sclass_put(&sclass);
				return ret;
			}
		}
		oclass++;
	}

	nvif_object_sclass_put(&sclass);
	return -ENOSYS;
}

static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

/* Tear down a DMA EVO channel: DMA objects first, then the channel
 * object itself, and finally the push buffer backing memory.
 */
void
nv50_dmac_destroy(struct nv50_dmac *dmac)
{
	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	nvif_mem_fini(&dmac->push);
}

/* Create a DMA EVO channel: allocate and map a 4KiB push buffer, create
 * the channel object, and (unless syncbuf is zero) DMA objects covering
 * the 4KiB window at syncbuf and all of user-visible VRAM.
 *
 * NOTE(review): on failure after partial setup, nothing is unwound here —
 * presumably callers are expected to run nv50_dmac_destroy(); confirm at
 * the call sites.
 */
int
nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_cli *cli = (void *)device->object.client;
	struct nv50_disp_core_channel_dma_v0 *args = data;
	u8 type = NVIF_MEM_COHERENT;
	int ret;

	mutex_init(&dmac->lock);

	/* Pascal added support for 47-bit physical addresses, but some
	 * parts of EVO still only accept 40-bit PAs.
	 *
	 * To avoid issues on systems with large amounts of RAM, and on
	 * systems where an IOMMU maps pages at a high address, we need
	 * to allocate push buffers in VRAM instead.
	 *
	 * This appears to match NVIDIA's behaviour on Pascal.
	 */
	if (device->info.family == NV_DEVICE_INFO_V0_PASCAL)
		type |= NVIF_MEM_VRAM;

	ret = nvif_mem_init_map(&cli->mmu, type, 0x1000, &dmac->push);
	if (ret)
		return ret;

	dmac->ptr = dmac->push.object.map.ptr;

	/* Hand the push buffer to the channel via the creation args. */
	args->pushbuf = nvif_handle(&dmac->push.object);

	ret = nv50_chan_create(device, disp, oclass, head, data, size,
			       &dmac->base);
	if (ret)
		return ret;

	if (!syncbuf)
		return 0;

	/* DMA object covering the 4KiB window at syncbuf. */
	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	/* DMA object spanning all of user-visible VRAM. */
	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = device->info.ram_user - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	return ret;
}

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
static void
evo_flush(struct nv50_dmac *dmac)
{
	/* Push buffer fetches are not coherent with BAR1, we need to ensure
	 * writes have been flushed right through to VRAM before writing PUT.
	 */
	if (dmac->push.type & NVIF_MEM_VRAM) {
		struct nvif_device *device = dmac->base.device;
		nvif_wr32(&device->object, 0x070000, 0x00000001);
		/* Poll (up to 2ms) for the flush to complete. */
		nvif_msec(device, 2000,
			if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
				break;
		);
	}
}

/* Reserve space for 'nr' words in the channel's push buffer.  When there
 * is not enough room before the end of the buffer, a wrap marker is
 * written and PUT is reset to 0 after waiting for the channel to drain.
 * Returns a pointer to write commands at, with dmac->lock held (released
 * later by evo_kick()), or NULL with the lock dropped if the channel
 * stalls.
 */
u32 *
evo_wait(struct nv50_dmac *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	struct nvif_device *device = dmac->base.device;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		/* Not enough room before the end of the 4KiB buffer:
		 * write 0x20000000 at PUT (NOTE(review): presumably an EVO
		 * jump-to-start method — confirm against EVO docs), reset
		 * PUT to 0, and wait for GET (offset 0x0004) to follow.
		 */
		dmac->ptr[put] = 0x20000000;
		evo_flush(dmac);

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (nvif_msec(device, 2000,
			if (!nvif_rd32(&dmac->base.user, 0x0004))
				break;
		) < 0) {
			mutex_unlock(&dmac->lock);
			pr_err("nouveau: evo channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

/* Submit the commands written after evo_wait(): flush the push buffer
 * and advance PUT to 'push', then drop the channel lock.
 */
void
evo_kick(u32 *push, struct nv50_dmac *evoc)
{
	struct nv50_dmac *dmac = evoc;

	evo_flush(dmac);

	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}

/******************************************************************************
 * Output path helpers
 *****************************************************************************/

/* Release the output resource (OR) previously acquired for this encoder
 * and clear the cached assignment.
 */
static void
nv50_outp_release(struct nouveau_encoder *nv_encoder)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct {
		struct nv50_disp_mthd_v1 base;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_RELEASE,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};

	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	nv_encoder->or = -1;
	nv_encoder->link = 0;
}

/* Ask the display firmware to assign an OR and link to this encoder;
 * caches the result in nv_encoder->or / nv_encoder->link.  Returns 0 or
 * a negative error.
 */
static int
nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
{
	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_acquire_v0 info;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	int ret;

	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	if (ret) {
		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
		return ret;
	}

	nv_encoder->or = args.info.or;
	nv_encoder->link = args.info.link;
	return 0;
}

/* Common encoder atomic_check helper: decide whether the panel's native
 * mode should replace the requested mode (fixed-panel scaling) and
 * whether the head's full scaler is needed; flags a modeset when the
 * adjusted mode changes as a result.  Always returns 0.
 */
static int
nv50_outp_atomic_check_view(struct drm_encoder *encoder,
			    struct drm_crtc_state *crtc_state,
			    struct drm_connector_state *conn_state,
			    struct drm_display_mode *native_mode)
{
	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
	struct drm_display_mode *mode = &crtc_state->mode;
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);

	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
	asyc->scaler.full = false;
	if (!native_mode)
		return 0;

	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
		switch (connector->connector_type) {
		case DRM_MODE_CONNECTOR_LVDS:
		case DRM_MODE_CONNECTOR_eDP:
			/* Don't force scaler for EDID modes with
			 * same size as the native one (e.g. different
			 * refresh rate)
			 */
			if (mode->hdisplay == native_mode->hdisplay &&
			    mode->vdisplay == native_mode->vdisplay &&
			    mode->type & DRM_MODE_TYPE_DRIVER)
				break;
			mode = native_mode;
			asyc->scaler.full = true;
			break;
		default:
			break;
		}
	} else {
		mode = native_mode;
	}

	if (!drm_mode_equal(adjusted_mode, mode)) {
		drm_mode_copy(adjusted_mode, mode);
		crtc_state->mode_changed = true;
	}

	return 0;
}

static int
nv50_outp_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct drm_connector *connector = conn_state->connector;
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
	int ret;

	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					  nv_connector->native_mode);
	if (ret)
		return ret;

	/* Only refresh the OR bpc when a modeset will actually happen. */
	if (crtc_state->mode_changed || crtc_state->connectors_changed)
		asyh->or.bpc = connector->display_info.bpc;

	return 0;
}

/******************************************************************************
 * DAC
 *****************************************************************************/

/* Disable a DAC: clear its OR control method (if it was driving a CRTC)
 * and release the output resource.
 */
static void
nv50_dac_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	if (nv_encoder->crtc)
		core->func->dac->ctrl(core, nv_encoder->or, 0x00000000, NULL);
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

/* Enable a DAC: acquire an OR and point it at the encoder's CRTC. */
static void
nv50_dac_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
	struct nv50_core *core =
		nv50_disp(encoder->dev)->core;

	nv50_outp_acquire(nv_encoder);

	core->func->dac->ctrl(core, nv_encoder->or, 1 << nv_crtc->index, asyh);
	asyh->or.depth = 0;

	nv_encoder->crtc = encoder->crtc;
}

/* DAC load detection: ask the firmware to perform a load test (using the
 * VBIOS-provided test value when available) and report whether a monitor
 * is attached.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	int ret;

	/* Prefer the VBIOS test value; fall back to 340 when it is unset. */
	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static const struct drm_encoder_helper_funcs
nv50_dac_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_dac_enable,
	.disable = nv50_dac_disable,
	.detect = nv50_dac_detect
};

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

/* Create a DRM encoder for a DAC output described by the VBIOS DCB entry
 * and attach it to the given connector.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;

	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
	if (bus)
		nv_encoder->i2c = &bus->i2c;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_dac_help);

	drm_connector_attach_encoder(connector, encoder);
	return 0;
}

/*
 * audio component binding for ELD notification
 */

/* Notify the bound HDA audio driver that the ELD for 'port' changed. */
static void
nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port)
{
	if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
		acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
						 port, -1);
}

/* Audio component .get_eld callback: copy the ELD of the connector
 * currently driven by CRTC index 'port' into 'buf'.  Returns the ELD
 * size in bytes (possibly larger than what was copied when max_bytes is
 * smaller), or 0 when no match / no audio support.
 */
static int
nv50_audio_component_get_eld(struct device *kdev, int port, int pipe,
			     bool *enabled, unsigned char *buf, int max_bytes)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_encoder *encoder;
	struct nouveau_encoder *nv_encoder;
	struct nouveau_connector *nv_connector;
	struct nouveau_crtc *nv_crtc;
	int ret = 0;

	*enabled = false;
	drm_for_each_encoder(encoder, drm->dev) {
		nv_encoder = nouveau_encoder(encoder);
		nv_connector = nouveau_encoder_connector_get(nv_encoder);
		nv_crtc = nouveau_crtc(encoder->crtc);
		if (!nv_connector || !nv_crtc || nv_crtc->index != port)
			continue;
		*enabled = drm_detect_monitor_audio(nv_connector->edid);
		if (*enabled) {
			ret = drm_eld_size(nv_connector->base.eld);
			memcpy(buf, nv_connector->base.eld,
			       min(max_bytes, ret));
		}
		break;
	}
	return ret;
}

static const struct drm_audio_component_ops nv50_audio_component_ops = {
	.get_eld = nv50_audio_component_get_eld,
};

static int
nv50_audio_component_bind(struct device *kdev, struct device *hda_kdev,
			  void *data)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_audio_component *acomp = data;

	/* Link the HDA device to the GPU so their PM states are coupled. */
	if (WARN_ON(!device_link_add(hda_kdev, kdev, DL_FLAG_STATELESS)))
		return -ENOMEM;

	drm_modeset_lock_all(drm_dev);
	acomp->ops = &nv50_audio_component_ops;
	acomp->dev = kdev;
	drm->audio.component = acomp;
	drm_modeset_unlock_all(drm_dev);
	return 0;
}

static void
nv50_audio_component_unbind(struct device *kdev, struct device *hda_kdev,
			    void *data)
{
	struct drm_device *drm_dev = dev_get_drvdata(kdev);
	struct nouveau_drm *drm = nouveau_drm(drm_dev);
	struct drm_audio_component *acomp = data;

	drm_modeset_lock_all(drm_dev);
	drm->audio.component = NULL;
	acomp->ops = NULL;
	acomp->dev = NULL;
	drm_modeset_unlock_all(drm_dev);
}

static const struct component_ops nv50_audio_component_bind_ops = {
	.bind = nv50_audio_component_bind,
	.unbind = nv50_audio_component_unbind,
};

static void
nv50_audio_component_init(struct nouveau_drm *drm)
{
	if (!component_add(drm->dev->dev, &nv50_audio_component_bind_ops))
		drm->audio.component_registered = true;
}

static void
nv50_audio_component_fini(struct nouveau_drm *drm)
{
	if (drm->audio.component_registered) {
		component_del(drm->dev->dev, &nv50_audio_component_bind_ops);
		drm->audio.component_registered = false;
	}
}

/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hda_eld_v0 eld;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	/* NOTE(review): no ELD payload is sent here, which presumably
	 * disables HDA audio on this SOR/head — confirm method semantics.
	 */
	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));

	nv50_audio_component_eld_notify(drm->audio.component, nv_crtc->index);
}

/* Program the monitor's ELD into the SOR (so the HDA codec can read it)
 * and notify the audio component.  No-op when the monitor's EDID does
 * not advertise audio support.
 */
static void
nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct __packed {
		struct {
			struct nv50_disp_mthd_v1 mthd;
			struct nv50_disp_sor_hda_eld_v0 eld;
		} base;
		u8 data[sizeof(nv_connector->base.eld)];
	} args = {
		.base.mthd.version = 1,
		.base.mthd.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
		.base.mthd.hasht = nv_encoder->dcb->hasht,
		.base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
				   (0x0100 << nv_crtc->index),
	};

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));

	/* Only the used portion of the ELD is sent to the firmware. */
	nvif_mthd(&disp->disp->object, 0, &args,
		  sizeof(args.base) + drm_eld_size(args.data));

	nv50_audio_component_eld_notify(drm->audio.component, nv_crtc->index);
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
	};

	/* pwr.state is left zero-initialized, i.e. HDMI power off. */
	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
}

/* Enable HDMI on the SOR: pack AVI and HDMI vendor infoframes, compute
 * the audio packet budget from the mode's blanking, program HDMI 2.0
 * scrambling / clock-ratio state, and enable audio.  No-op when the
 * monitor's EDID does not indicate HDMI.
 */
static void
nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
			      (0x0100 << nv_crtc->index),
		.pwr.state = 1,
		.pwr.rekey = 56, /* binary driver, and tegra, constant */
	};
	struct nouveau_connector *nv_connector;
	struct drm_hdmi_info *hdmi;
	u32 max_ac_packet;
	union hdmi_infoframe avi_frame;
	union hdmi_infoframe vendor_frame;
	bool high_tmds_clock_ratio = false, scrambling = false;
	u8 config;
	int ret;
	int size;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	hdmi = &nv_connector->base.display_info.hdmi;

	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi,
						       &nv_connector->base, mode);
	if (!ret) {
		/* We have an AVI InfoFrame, populate it to the display */
		args.pwr.avi_infoframe_length
			= hdmi_infoframe_pack(&avi_frame,
					      args.infoframes, 17);
	}

	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
							  &nv_connector->base, mode);
	if (!ret) {
		/* We have a Vendor InfoFrame, populate it to the display */
		args.pwr.vendor_infoframe_length
			= hdmi_infoframe_pack(&vendor_frame,
					      args.infoframes
					      + args.pwr.avi_infoframe_length,
					      17);
	}

	/* Audio packet budget: horizontal blanking minus the rekey window
	 * and a fixed margin, in units of 32 (constant from tegra).
	 */
	max_ac_packet = mode->htotal - mode->hdisplay;
	max_ac_packet -= args.pwr.rekey;
	max_ac_packet -= 18; /* constant from tegra */
	args.pwr.max_ac_packet = max_ac_packet / 32;

	/* HDMI 2.0: >340MHz needs the 1/40 clock ratio and scrambling;
	 * scrambling at lower rates only when the sink asks for it.
	 */
	if (hdmi->scdc.scrambling.supported) {
		high_tmds_clock_ratio = mode->clock > 340000;
		scrambling = high_tmds_clock_ratio ||
			hdmi->scdc.scrambling.low_rates;
	}

	args.pwr.scdc =
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_SCRAMBLE * scrambling |
		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_DIV_BY_4 * high_tmds_clock_ratio;

	/* Only send the infoframe bytes that were actually packed. */
	size = sizeof(args.base)
		+ sizeof(args.pwr)
		+ args.pwr.avi_infoframe_length
		+ args.pwr.vendor_infoframe_length;
	nvif_mthd(&disp->disp->object, 0, &args, size);

	nv50_audio_enable(encoder, mode);

	/* If SCDC is supported by the downstream monitor, update
	 * divider / scrambling settings to what we programmed above.
	 */
	if (!hdmi->scdc.scrambling.supported)
		return;

	ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
		return;
	}
	config &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
	config |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 * high_tmds_clock_ratio;
	config |= SCDC_SCRAMBLING_ENABLE * scrambling;
	ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, config);
	if (ret < 0)
		NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
			 config, ret);
}

/******************************************************************************
 * MST
 *****************************************************************************/
#define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
#define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
#define nv50_msto(p) container_of((p), struct nv50_msto, encoder)

/* DP-MST topology manager state for one physical DP output (SOR). */
struct nv50_mstm {
	struct nouveau_encoder *outp;	/* the real (SOR) encoder */

	struct drm_dp_mst_topology_mgr mgr;

	bool modified;	/* payloads changed; firmware needs reprogramming */
	bool disabled;	/* last link went down during this commit */
	int links;	/* number of active MST streams on this output */
};

/* DRM connector for one downstream port of an MST topology. */
struct nv50_mstc {
	struct nv50_mstm *mstm;
	struct drm_dp_mst_port *port;
	struct drm_connector connector;

	struct drm_display_mode *native;	/* cached native mode */
	struct edid *edid;
};

/* Stream encoder: one MST stream, tied to a single head. */
struct nv50_msto {
	struct drm_encoder encoder;

	struct nv50_head *head;
	struct nv50_mstc *mstc;		/* connector currently driven */
	bool disabled;			/* needs cleanup after this commit */
};

/* Find the drm_dp_payload entry matching this stream's VCPI, logging the
 * whole payload table along the way.  Returns NULL when no entry
 * matches.  Caller must hold mgr.payload_lock (asserted below).
 */
static struct drm_dp_payload *
nv50_msto_payload(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	int vcpi = mstc->port->vcpi.vcpi, i;

	WARN_ON(!mutex_is_locked(&mstm->mgr.payload_lock));

	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
			  mstm->outp->base.base.name, i, payload->vcpi,
			  payload->start_slot, payload->num_slots);
	}

	for (i = 0; i < mstm->mgr.max_payloads; i++) {
		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
		if (payload->vcpi == vcpi)
			return payload;
	}

	return NULL;
}

/* Post-commit cleanup for a disabled stream: return its VCPI to the
 * topology manager and detach it from the connector.
 */
static void
nv50_msto_cleanup(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;

	if (!msto->disabled)
		return;

	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);

	drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);

	msto->mstc = NULL;
	msto->disabled = false;
}

/* Program the firmware's VCPI slot assignment for this stream.  When the
 * port has no VCPI allocated, args.vcpi stays zeroed, clearing the
 * stream's slot assignment.
 */
static void
nv50_msto_prepare(struct nv50_msto *msto)
{
	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
	struct nv50_mstc *mstc = msto->mstc;
	struct nv50_mstm *mstm = mstc->mstm;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
		.base.hasht = mstm->outp->dcb->hasht,
		.base.hashm = (0xf0ff & mstm->outp->dcb->hashm) |
			      (0x0100 << msto->head->base.index),
	};

	mutex_lock(&mstm->mgr.payload_lock);

	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
	if (mstc->port->vcpi.vcpi > 0) {
		struct drm_dp_payload *payload = nv50_msto_payload(msto);
		if (payload) {
			args.vcpi.start_slot = payload->start_slot;
			args.vcpi.num_slots = payload->num_slots;
			args.vcpi.pbn = mstc->port->vcpi.pbn;
			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
		}
	}

	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
		  msto->encoder.name, msto->head->base.base.name,
		  args.vcpi.start_slot, args.vcpi.num_slots,
		  args.vcpi.pbn, args.vcpi.aligned_pbn);

	nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args));
	mutex_unlock(&mstm->mgr.payload_lock);
}

/* Encoder atomic_check for an MST stream: validate the view against the
 * cached native mode, then (unless restoring a duplicated state)
 * recompute the stream's PBN from the adjusted mode and reserve VCPI
 * slots for it.  Returns 0 or a negative error from slot allocation.
 */
static int
nv50_msto_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	struct drm_atomic_state *state = crtc_state->state;
	struct drm_connector *connector = conn_state->connector;
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct nv50_mstm *mstm = mstc->mstm;
	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
	int slots;
	int ret;

	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
					  mstc->native);
	if (ret)
		return ret;

	if (!crtc_state->mode_changed && !crtc_state->connectors_changed)
		return 0;

	/*
	 * When restoring duplicated states, we need to make sure that the bw
	 * remains the same and avoid recalculating it, as the connector's bpc
	 * may have changed after the state was duplicated
	 */
	if (!state->duplicated) {
		const int clock = crtc_state->adjusted_mode.clock;

		/*
		 * XXX: Since we don't use HDR in userspace quite yet, limit
		 * the bpc to 8 to save bandwidth on the topology.
In the 910 * future, we'll want to properly fix this by dynamically 911 * selecting the highest possible bpc that would fit in the 912 * topology 913 */ 914 asyh->or.bpc = min(connector->display_info.bpc, 8U); 915 asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3, false); 916 } 917 918 slots = drm_dp_atomic_find_vcpi_slots(state, &mstm->mgr, mstc->port, 919 asyh->dp.pbn, 0); 920 if (slots < 0) 921 return slots; 922 923 asyh->dp.tu = slots; 924 925 return 0; 926 } 927 928 static u8 929 nv50_dp_bpc_to_depth(unsigned int bpc) 930 { 931 switch (bpc) { 932 case 6: return 0x2; 933 case 8: return 0x5; 934 case 10: /* fall-through */ 935 default: return 0x6; 936 } 937 } 938 939 static void 940 nv50_msto_enable(struct drm_encoder *encoder) 941 { 942 struct nv50_head *head = nv50_head(encoder->crtc); 943 struct nv50_head_atom *armh = nv50_head_atom(head->base.base.state); 944 struct nv50_msto *msto = nv50_msto(encoder); 945 struct nv50_mstc *mstc = NULL; 946 struct nv50_mstm *mstm = NULL; 947 struct drm_connector *connector; 948 struct drm_connector_list_iter conn_iter; 949 u8 proto; 950 bool r; 951 952 drm_connector_list_iter_begin(encoder->dev, &conn_iter); 953 drm_for_each_connector_iter(connector, &conn_iter) { 954 if (connector->state->best_encoder == &msto->encoder) { 955 mstc = nv50_mstc(connector); 956 mstm = mstc->mstm; 957 break; 958 } 959 } 960 drm_connector_list_iter_end(&conn_iter); 961 962 if (WARN_ON(!mstc)) 963 return; 964 965 r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, armh->dp.pbn, 966 armh->dp.tu); 967 if (!r) 968 DRM_DEBUG_KMS("Failed to allocate VCPI\n"); 969 970 if (!mstm->links++) 971 nv50_outp_acquire(mstm->outp); 972 973 if (mstm->outp->link & 1) 974 proto = 0x8; 975 else 976 proto = 0x9; 977 978 mstm->outp->update(mstm->outp, head->base.index, armh, proto, 979 nv50_dp_bpc_to_depth(armh->or.bpc)); 980 981 msto->mstc = mstc; 982 mstm->modified = true; 983 } 984 985 static void 986 nv50_msto_disable(struct drm_encoder *encoder) 987 { 
988 struct nv50_msto *msto = nv50_msto(encoder); 989 struct nv50_mstc *mstc = msto->mstc; 990 struct nv50_mstm *mstm = mstc->mstm; 991 992 drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port); 993 994 mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0); 995 mstm->modified = true; 996 if (!--mstm->links) 997 mstm->disabled = true; 998 msto->disabled = true; 999 } 1000 1001 static const struct drm_encoder_helper_funcs 1002 nv50_msto_help = { 1003 .disable = nv50_msto_disable, 1004 .enable = nv50_msto_enable, 1005 .atomic_check = nv50_msto_atomic_check, 1006 }; 1007 1008 static void 1009 nv50_msto_destroy(struct drm_encoder *encoder) 1010 { 1011 struct nv50_msto *msto = nv50_msto(encoder); 1012 drm_encoder_cleanup(&msto->encoder); 1013 kfree(msto); 1014 } 1015 1016 static const struct drm_encoder_funcs 1017 nv50_msto = { 1018 .destroy = nv50_msto_destroy, 1019 }; 1020 1021 static struct nv50_msto * 1022 nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id) 1023 { 1024 struct nv50_msto *msto; 1025 int ret; 1026 1027 msto = kzalloc(sizeof(*msto), GFP_KERNEL); 1028 if (!msto) 1029 return ERR_PTR(-ENOMEM); 1030 1031 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto, 1032 DRM_MODE_ENCODER_DPMST, "mst-%d", id); 1033 if (ret) { 1034 kfree(msto); 1035 return ERR_PTR(ret); 1036 } 1037 1038 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help); 1039 msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base); 1040 msto->head = head; 1041 return msto; 1042 } 1043 1044 static struct drm_encoder * 1045 nv50_mstc_atomic_best_encoder(struct drm_connector *connector, 1046 struct drm_connector_state *connector_state) 1047 { 1048 struct nv50_mstc *mstc = nv50_mstc(connector); 1049 struct drm_crtc *crtc = connector_state->crtc; 1050 1051 if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc))) 1052 return NULL; 1053 1054 return &nv50_head(crtc)->msto->encoder; 1055 } 1056 1057 static enum drm_mode_status 1058 nv50_mstc_mode_valid(struct 
drm_connector *connector,
		     struct drm_display_mode *mode)
{
	/* No per-mode filtering here; link bandwidth is validated later
	 * during the encoder's atomic check.
	 */
	return MODE_OK;
}

static int
nv50_mstc_get_modes(struct drm_connector *connector)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret = 0;

	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
	drm_connector_update_edid_property(&mstc->connector, mstc->edid);
	if (mstc->edid)
		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);

	/* Fall back to 8bpc when the EDID doesn't report a depth. */
	if (!mstc->connector.display_info.bpc)
		mstc->connector.display_info.bpc = 8;

	/* Refresh the cached native mode for this sink. */
	if (mstc->native)
		drm_mode_destroy(mstc->connector.dev, mstc->native);
	mstc->native = nouveau_conn_native_mode(&mstc->connector);
	return ret;
}

static int
nv50_mstc_atomic_check(struct drm_connector *connector,
		       struct drm_atomic_state *state)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
	struct drm_connector_state *new_conn_state =
		drm_atomic_get_new_connector_state(state, connector);
	struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(state, connector);
	struct drm_crtc_state *crtc_state;
	struct drm_crtc *new_crtc = new_conn_state->crtc;

	if (!old_conn_state->crtc)
		return 0;

	/* We only want to free VCPI if this state disables the CRTC on this
	 * connector
	 */
	if (new_crtc) {
		crtc_state = drm_atomic_get_new_crtc_state(state, new_crtc);

		if (!crtc_state ||
		    !drm_atomic_crtc_needs_modeset(crtc_state) ||
		    crtc_state->enable)
			return 0;
	}

	return drm_dp_atomic_release_vcpi_slots(state, mgr, mstc->port);
}

static int
nv50_mstc_detect(struct drm_connector *connector,
		 struct drm_modeset_acquire_ctx *ctx, bool force)
{
	struct nv50_mstc *mstc = nv50_mstc(connector);
	int ret;

	if
(drm_connector_is_unregistered(connector)) 1123 return connector_status_disconnected; 1124 1125 ret = pm_runtime_get_sync(connector->dev->dev); 1126 if (ret < 0 && ret != -EACCES) 1127 return connector_status_disconnected; 1128 1129 ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr, 1130 mstc->port); 1131 1132 pm_runtime_mark_last_busy(connector->dev->dev); 1133 pm_runtime_put_autosuspend(connector->dev->dev); 1134 return ret; 1135 } 1136 1137 static const struct drm_connector_helper_funcs 1138 nv50_mstc_help = { 1139 .get_modes = nv50_mstc_get_modes, 1140 .mode_valid = nv50_mstc_mode_valid, 1141 .atomic_best_encoder = nv50_mstc_atomic_best_encoder, 1142 .atomic_check = nv50_mstc_atomic_check, 1143 .detect_ctx = nv50_mstc_detect, 1144 }; 1145 1146 static void 1147 nv50_mstc_destroy(struct drm_connector *connector) 1148 { 1149 struct nv50_mstc *mstc = nv50_mstc(connector); 1150 1151 drm_connector_cleanup(&mstc->connector); 1152 drm_dp_mst_put_port_malloc(mstc->port); 1153 1154 kfree(mstc); 1155 } 1156 1157 static const struct drm_connector_funcs 1158 nv50_mstc = { 1159 .reset = nouveau_conn_reset, 1160 .fill_modes = drm_helper_probe_single_connector_modes, 1161 .destroy = nv50_mstc_destroy, 1162 .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state, 1163 .atomic_destroy_state = nouveau_conn_atomic_destroy_state, 1164 .atomic_set_property = nouveau_conn_atomic_set_property, 1165 .atomic_get_property = nouveau_conn_atomic_get_property, 1166 }; 1167 1168 static int 1169 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port, 1170 const char *path, struct nv50_mstc **pmstc) 1171 { 1172 struct drm_device *dev = mstm->outp->base.base.dev; 1173 struct drm_crtc *crtc; 1174 struct nv50_mstc *mstc; 1175 int ret; 1176 1177 if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL))) 1178 return -ENOMEM; 1179 mstc->mstm = mstm; 1180 mstc->port = port; 1181 1182 ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc, 1183 
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		kfree(*pmstc);
		*pmstc = NULL;
		return ret;
	}

	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);

	mstc->connector.funcs->reset(&mstc->connector);
	nouveau_conn_attach_properties(&mstc->connector);

	/* Attach every head this output's DCB entry allows. */
	drm_for_each_crtc(crtc, dev) {
		if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
			continue;

		drm_connector_attach_encoder(&mstc->connector,
					     &nv50_head(crtc)->msto->encoder);
	}

	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
	drm_connector_set_path_property(&mstc->connector, path);
	drm_dp_mst_get_port_malloc(port);
	return 0;
}

/* Finish an MST payload update after the core channel has been kicked:
 * wait for ACT, send part 2 of the payload update, then clean up every
 * stream encoder that belongs to this topology.
 */
static void
nv50_mstm_cleanup(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
	/* NOTE(review): the return values below are ignored — ACT/payload
	 * failures are not surfaced to the caller; confirm intentional.
	 */
	ret = drm_dp_check_act_status(&mstm->mgr);

	ret = drm_dp_update_payload_part2(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_cleanup(msto);
		}
	}

	mstm->modified = false;
}

/* Send part 1 of the payload update and prepare every stream encoder on
 * this topology, before the core channel update is submitted.
 */
static void
nv50_mstm_prepare(struct nv50_mstm *mstm)
{
	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
	struct drm_encoder *encoder;
	int ret;

	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
	/* NOTE(review): return value ignored, as in nv50_mstm_cleanup(). */
	ret = drm_dp_update_payload_part1(&mstm->mgr);

	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
struct nv50_msto *msto = nv50_msto(encoder);
			struct nv50_mstc *mstc = msto->mstc;
			if (mstc && mstc->mstm == mstm)
				nv50_msto_prepare(msto);
		}
	}

	/* No streams left: release the physical OR this topology held. */
	if (mstm->disabled) {
		if (!mstm->links)
			nv50_outp_release(mstm->outp);
		mstm->disabled = false;
	}
}

static struct drm_connector *
nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			struct drm_dp_mst_port *port, const char *path)
{
	struct nv50_mstm *mstm = nv50_mstm(mgr);
	struct nv50_mstc *mstc;
	int ret;

	ret = nv50_mstc_new(mstm, port, path, &mstc);
	if (ret)
		return NULL;

	return &mstc->connector;
}

static const struct drm_dp_mst_topology_cbs
nv50_mstm = {
	.add_connector = nv50_mstm_add_connector,
};

/* Service an MST sink event: read the ESI registers, let the topology
 * manager handle the IRQ, and ack the handled events — looping while the
 * manager reports more work.
 */
void
nv50_mstm_service(struct nv50_mstm *mstm)
{
	struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
	bool handled = true;
	int ret;
	u8 esi[8] = {};

	if (!aux)
		return;

	while (handled) {
		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
		if (ret != 8) {
			/* Sink unreachable: tear the MST topology down. */
			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
			return;
		}

		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
		if (!handled)
			break;

		/* Ack the serviced event bits. */
		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
	}
}

void
nv50_mstm_remove(struct nv50_mstm *mstm)
{
	if (mstm)
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
}

/* Program both the sink (DP_MSTM_CTRL) and the display core method for
 * the requested MST link state.
 */
static int
nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
{
	struct nouveau_encoder *outp = mstm->outp;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_dp_mst_link_v0 mst;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
		.base.hasht = outp->dcb->hasht,
		.base.hashm = outp->dcb->hashm,
		.mst.state = state,
	};
	struct
nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nvif_object *disp = &drm->display->disp.object;
	int ret;

	/* MST requires DPCD 1.2+. */
	if (dpcd >= 0x12) {
		/* Even if we're enabling MST, start with disabling the
		 * branching unit to clear any sink-side MST topology state
		 * that wasn't set by us
		 */
		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, 0);
		if (ret < 0)
			return ret;

		if (state) {
			/* Now, start initializing */
			ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL,
						 DP_MST_EN);
			if (ret < 0)
				return ret;
		}
	}

	return nvif_mthd(disp, 0, &args, sizeof(args));
}

/* Decide whether the sink should run in MST mode and (de)activate the
 * topology manager accordingly.  Returns the new MST state (0/1) on
 * success, negative errno on probe failure.  May rewrite dpcd[0] to 0x11
 * when the sink lacks MST capability.
 */
int
nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
{
	struct drm_dp_aux *aux;
	int ret;
	bool old_state, new_state;
	u8 mstm_ctrl;

	if (!mstm)
		return 0;

	mutex_lock(&mstm->mgr.lock);

	old_state = mstm->mgr.mst_state;
	new_state = old_state;
	aux = mstm->mgr.aux;

	if (old_state) {
		/* Just check that the MST hub is still as we expect it */
		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CTRL, &mstm_ctrl);
		if (ret < 0 || !(mstm_ctrl & DP_MST_EN)) {
			DRM_DEBUG_KMS("Hub gone, disabling MST topology\n");
			new_state = false;
		}
	} else if (dpcd[0] >= 0x12) {
		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CAP, &dpcd[1]);
		if (ret < 0)
			goto probe_error;

		if (!(dpcd[1] & DP_MST_CAP))
			dpcd[0] = 0x11;
		else
			new_state = allow;
	}

	if (new_state == old_state) {
		mutex_unlock(&mstm->mgr.lock);
		return new_state;
	}

	ret = nv50_mstm_enable(mstm, dpcd[0], new_state);
	if (ret)
		goto probe_error;

	mutex_unlock(&mstm->mgr.lock);

	/* On manager failure, roll the sink/core back to non-MST. */
	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, new_state);
	if (ret)
		return nv50_mstm_enable(mstm, dpcd[0], 0);

	return new_state;

probe_error:
	mutex_unlock(&mstm->mgr.lock);
	return ret;
}

static void
nv50_mstm_fini(struct nv50_mstm *mstm)
{
	if (mstm && mstm->mgr.mst_state)
		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
}

static void
nv50_mstm_init(struct nv50_mstm *mstm, bool runtime)
{
	int ret;

	if (!mstm || !mstm->mgr.mst_state)
		return;

	ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime);
	if (ret == -1) {
		/* Topology vanished across suspend: drop MST and let
		 * userspace reprobe via a hotplug event.
		 */
		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
		drm_kms_helper_hotplug_event(mstm->mgr.dev);
	}
}

static void
nv50_mstm_del(struct nv50_mstm **pmstm)
{
	struct nv50_mstm *mstm = *pmstm;
	if (mstm) {
		drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
		kfree(*pmstm);
		*pmstm = NULL;
	}
}

/* Allocate and initialise the MST topology manager for a DP output.
 * max_payloads is bounded by the number of heads the DCB lets this
 * output drive.
 */
static int
nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
	      int conn_base_id, struct nv50_mstm **pmstm)
{
	const int max_payloads = hweight8(outp->dcb->heads);
	struct drm_device *dev = outp->base.base.dev;
	struct nv50_mstm *mstm;
	int ret;
	u8 dpcd;

	/* This is a workaround for some monitors not functioning
	 * correctly in MST mode on initial module load. I think
	 * some bad interaction with the VBIOS may be responsible.
	 *
	 * A good ol' off and on again seems to work here ;)
	 */
	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
	if (ret >= 0 && dpcd >= 0x12)
		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);

	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
		return -ENOMEM;
	mstm->outp = outp;
	mstm->mgr.cbs = &nv50_mstm;

	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
					   max_payloads, conn_base_id);
	if (ret)
		return ret;

	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
/* Update the SOR control word for a head attach (asyh != NULL) or detach
 * (asyh == NULL) and push it through the core channel.
 */
static void
nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
		struct nv50_head_atom *asyh, u8 proto, u8 depth)
{
	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
	struct nv50_core *core = disp->core;

	if (!asyh) {
		/* Detach: clear the head's bit; drop ctrl entirely once no
		 * heads (low nibble) remain owned by this SOR.
		 */
		nv_encoder->ctrl &= ~BIT(head);
		if (!(nv_encoder->ctrl & 0x0000000f))
			nv_encoder->ctrl = 0;
	} else {
		nv_encoder->ctrl |= proto << 8;
		nv_encoder->ctrl |= BIT(head);
		asyh->or.depth = depth;
	}

	core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh);
}

static void
nv50_sor_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		struct nvkm_i2c_aux *aux = nv_encoder->aux;
		u8 pwr;

		/* Put a DP sink into D3 before dropping the link. */
		if (aux) {
			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
			if (ret == 0) {
				pwr &= ~DP_SET_POWER_MASK;
				pwr |=  DP_SET_POWER_D3;
				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
			}
		}

		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
		nv50_audio_disable(encoder, nv_crtc);
		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
		nv50_outp_release(nv_encoder);
	}
}

/* Bring up the SOR for the mode in the head's atomic state, selecting the
 * protocol/depth from the DCB output type (TMDS, LVDS or DP).
 */
static void
nv50_sor_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_lvds_script_v0 lvds;
	} lvds = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
		.base.hasht = nv_encoder->dcb->hasht,
		.base.hashm = nv_encoder->dcb->hashm,
	};
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;
	nv50_outp_acquire(nv_encoder);

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->link & 1) {
			proto = 0x1;
			/* Only enable dual-link if:
			 *  - Need to (i.e. rate > 165MHz)
			 *  - DCB says we can
			 *  - Not an HDMI monitor, since there's no dual-link
			 *    on HDMI.
			 */
			if (mode->clock >= 165000 &&
			    nv_encoder->dcb->duallink_possible &&
			    !drm_detect_hdmi_monitor(nv_connector->edid))
				proto |= 0x4;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_enable(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* Select the VBIOS LVDS script flags: 0x0100 = dual-link,
		 * 0x0200 = 24-bit panel.
		 */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds.lvds.script |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds.lvds.script |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG EDID byte 121 encodes link count. */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds.lvds.script |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds.lvds.script |= 0x0100;
			}

			if (lvds.lvds.script & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds.lvds.script |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds.lvds.script |= 0x0200;
			}

			if (asyh->or.bpc == 8)
				lvds.lvds.script |= 0x0200;
		}

		nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		depth = nv50_dp_bpc_to_depth(asyh->or.bpc);

		/* Protocol selects which sublink pair the SOR drives. */
		if (nv_encoder->link & 1)
			proto = 0x8;
		else
			proto = 0x9;

		nv50_audio_enable(encoder, mode);
		break;
	default:
		BUG();
		break;
	}

	nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
}

static const struct drm_encoder_helper_funcs
nv50_sor_help = {
	.atomic_check = nv50_outp_atomic_check,
	.enable = nv50_sor_enable,
	.disable = nv50_sor_disable,
};

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	/* Tear down any MST state before freeing the encoder. */
	nv50_mstm_del(&nv_encoder->dp.mstm);
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

/* VBIOS DP table flag: does this board support MST? */
static bool nv50_has_mst(struct nouveau_drm *drm)
{
	struct nvkm_bios *bios = nvxx_bios(&drm->client.device);
	u32 data;
	u8 ver, hdr, cnt, len;

	data = nvbios_dp_table(bios, &ver, &hdr, &cnt, &len);
	return data && ver >= 0x40 && (nvbios_rd08(bios, data + 0x08) & 0x04);
}

static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_connector *nv_connector = nouveau_connector(connector);
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type, ret;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->update = nv50_sor_update;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_sor_help);

	drm_connector_attach_encoder(connector, encoder);

	if (dcbe->type == DCB_OUTPUT_DP) {
		struct nv50_disp *disp = nv50_disp(encoder->dev);
		struct nvkm_i2c_aux *aux =
			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
		if (aux) {
			if (disp->disp->object.oclass < GF110_DISP) {
				/* HW has no support for address-only
				 * transactions, so we're required to
				 * use custom I2C-over-AUX code.
				 */
				nv_encoder->i2c = &aux->i2c;
			} else {
				nv_encoder->i2c = &nv_connector->aux.ddc;
			}
			nv_encoder->aux = aux;
		}

		/* eDP panels can't be MST; otherwise enable it when the
		 * VBIOS says the board supports it.
		 */
		if (nv_connector->type != DCB_CONNECTOR_eDP &&
		    nv50_has_mst(drm)) {
			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux,
					    16, nv_connector->base.base.id,
					    &nv_encoder->dp.mstm);
			if (ret)
				return ret;
		}
	} else {
		struct nvkm_i2c_bus *bus =
			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
		if (bus)
			nv_encoder->i2c = &bus->i2c;
	}

	return 0;
}

/******************************************************************************
 * PIOR
 *****************************************************************************/
static int
nv50_pior_atomic_check(struct drm_encoder *encoder,
		       struct drm_crtc_state *crtc_state,
		       struct drm_connector_state *conn_state)
{
	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
	if (ret)
		return ret;
	/* PIOR pixels are clocked out at half rate; double the clock. */
	crtc_state->adjusted_mode.clock *= 2;
	return 0;
}

static void
nv50_pior_disable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	if (nv_encoder->crtc)
		core->func->pior->ctrl(core, nv_encoder->or, 0x00000000, NULL);
	nv_encoder->crtc = NULL;
	nv50_outp_release(nv_encoder);
}

static void
nv50_pior_enable(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
	struct nv50_core *core = nv50_disp(encoder->dev)->core;
	u8 owner = 1 << nv_crtc->index;
	u8 proto;

	nv50_outp_acquire(nv_encoder);

	switch (asyh->or.bpc) {
	case 10: asyh->or.depth = 0x6; break;
	case 8:
asyh->or.depth = 0x5; break;
	case 6: asyh->or.depth = 0x2; break;
	default: asyh->or.depth = 0x0; break;
	}

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
		proto = 0x0;
		break;
	default:
		BUG();
		break;
	}

	core->func->pior->ctrl(core, nv_encoder->or, (proto << 8) | owner, asyh);
	nv_encoder->crtc = encoder->crtc;
}

static const struct drm_encoder_helper_funcs
nv50_pior_help = {
	.atomic_check = nv50_pior_atomic_check,
	.enable = nv50_pior_enable,
	.disable = nv50_pior_disable,
};

static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_funcs
nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};

/* Create an encoder for an external (PIOR-driven) TMDS/DP transmitter. */
static int
nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
	struct nvkm_i2c_bus *bus = NULL;
	struct nvkm_i2c_aux *aux = NULL;
	struct i2c_adapter *ddc;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_TMDS:
		bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
		ddc = bus ? &bus->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	case DCB_OUTPUT_DP:
		aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
		ddc = aux ?
&aux->i2c : NULL;
		type = DRM_MODE_ENCODER_TMDS;
		break;
	default:
		return -ENODEV;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->i2c = ddc;
	nv_encoder->aux = aux;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
	drm_encoder_helper_add(encoder, &nv50_pior_help);

	drm_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Atomic
 *****************************************************************************/

/* Submit a core channel update, bracketing it with MST payload
 * prepare/cleanup for every modified topology, and wait on the core
 * notifier for completion.
 */
static void
nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock)
{
	struct nouveau_drm *drm = nouveau_drm(state->dev);
	struct nv50_disp *disp = nv50_disp(drm->dev);
	struct nv50_core *core = disp->core;
	struct nv50_mstm *mstm;
	struct drm_encoder *encoder;

	NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);

	/* Non-DPMST encoders are the physical SORs that own an mstm;
	 * their payload tables must be prepared before the core update.
	 */
	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
				nv50_mstm_prepare(mstm);
		}
	}

	core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
	core->func->update(core, interlock, true);
	if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
				       disp->core->chan.base.device))
		NV_ERROR(drm, "core notifier timeout\n");

	drm_for_each_encoder(encoder, drm->dev) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			mstm = nouveau_encoder(encoder)->dp.mstm;
			if (mstm && mstm->modified)
nv50_mstm_cleanup(mstm);
		}
	}
}

/* Kick the update method of every window whose channel is flagged in the
 * interlock mask.
 */
static void
nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock)
{
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	int i;

	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (interlock[wndw->interlock.type] & wndw->interlock.data) {
			if (wndw->func->update)
				wndw->func->update(wndw, interlock);
		}
	}
}

/* The heart of the atomic commit: push clr (disable) state, flush it,
 * then push set (enable) state, flush again, and finally wait for the
 * hardware to arm and deliver any pending vblank events.  Order between
 * heads, planes and output paths is significant throughout.
 */
static void
nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
{
	struct drm_device *dev = state->dev;
	struct drm_crtc_state *new_crtc_state, *old_crtc_state;
	struct drm_crtc *crtc;
	struct drm_plane_state *new_plane_state;
	struct drm_plane *plane;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_core *core = disp->core;
	struct nv50_outp_atom *outp, *outt;
	u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
	int i;

	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
	drm_atomic_helper_wait_for_fences(dev, state, false);
	drm_atomic_helper_wait_for_dependencies(state);
	drm_atomic_helper_update_legacy_modeset_state(dev, state);

	if (atom->lock_core)
		mutex_lock(&disp->mutex);

	/* Disable head(s).
	 */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
			  asyh->clr.mask, asyh->set.mask);

		/* Balance the RPM ref taken when the head became active. */
		if (old_crtc_state->active && !new_crtc_state->active) {
			pm_runtime_put_noidle(dev->dev);
			drm_crtc_vblank_off(crtc);
		}

		if (asyh->clr.mask) {
			nv50_head_flush_clr(head, asyh, atom->flush_disable);
			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
		}
	}

	/* Disable plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
			  asyw->clr.mask, asyw->set.mask);
		if (!asyw->clr.mask)
			continue;

		nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
	}

	/* Disable output path(s). */
	list_for_each_entry(outp, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
			  outp->clr.mask, outp->set.mask);

		if (outp->clr.mask) {
			help->disable(encoder);
			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
			/* MST teardown must hit the hardware immediately. */
			if (outp->flush_disable) {
				nv50_disp_atomic_commit_wndw(state, interlock);
				nv50_disp_atomic_commit_core(state, interlock);
				memset(interlock, 0x00, sizeof(interlock));
			}
		}
	}

	/* Flush disable.
	 */
	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
		if (atom->flush_disable) {
			nv50_disp_atomic_commit_wndw(state, interlock);
			nv50_disp_atomic_commit_core(state, interlock);
			memset(interlock, 0x00, sizeof(interlock));
		}
	}

	/* Update output path(s). */
	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		const struct drm_encoder_helper_funcs *help;
		struct drm_encoder *encoder;

		encoder = outp->encoder;
		help = encoder->helper_private;

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
			  outp->set.mask, outp->clr.mask);

		if (outp->set.mask) {
			help->enable(encoder);
			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
		}

		list_del(&outp->head);
		kfree(outp);
	}

	/* Update head(s). */
	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
		struct nv50_head *head = nv50_head(crtc);

		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
			  asyh->set.mask, asyh->clr.mask);

		if (asyh->set.mask) {
			nv50_head_flush_set(head, asyh);
			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
		}

		if (new_crtc_state->active) {
			/* Hold an RPM ref while any head is active. */
			if (!old_crtc_state->active) {
				drm_crtc_vblank_on(crtc);
				pm_runtime_get_noresume(dev->dev);
			}
			if (new_crtc_state->event)
				drm_crtc_vblank_get(crtc);
		}
	}

	/* Update window->head assignment.
	 *
	 * This has to happen in an update that's not interlocked with
	 * any window channels to avoid hitting HW error checks.
	 *
	 *TODO: Proper handling of window ownership (Turing apparently
	 * supports non-fixed mappings).
	 */
	if (core->assign_windows) {
		core->func->wndw.owner(core);
		core->func->update(core, interlock, false);
		core->assign_windows = false;
		interlock[NV50_DISP_INTERLOCK_CORE] = 0;
	}

	/* Update plane(s). */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
			  asyw->set.mask, asyw->clr.mask);
		if ( !asyw->set.mask &&
		    (!asyw->clr.mask || atom->flush_disable))
			continue;

		nv50_wndw_flush_set(wndw, interlock, asyw);
	}

	/* Flush update. */
	nv50_disp_atomic_commit_wndw(state, interlock);

	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
		/* Cursor-only legacy updates can skip the (slow) core
		 * notifier wait.
		 */
		if (interlock[NV50_DISP_INTERLOCK_BASE] ||
		    interlock[NV50_DISP_INTERLOCK_OVLY] ||
		    interlock[NV50_DISP_INTERLOCK_WNDW] ||
		    !atom->state.legacy_cursor_update)
			nv50_disp_atomic_commit_core(state, interlock);
		else
			disp->core->func->update(disp->core, interlock, false);
	}

	if (atom->lock_core)
		mutex_unlock(&disp->mutex);

	/* Wait for HW to signal completion.
*/ 2076 for_each_new_plane_in_state(state, plane, new_plane_state, i) { 2077 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state); 2078 struct nv50_wndw *wndw = nv50_wndw(plane); 2079 int ret = nv50_wndw_wait_armed(wndw, asyw); 2080 if (ret) 2081 NV_ERROR(drm, "%s: timeout\n", plane->name); 2082 } 2083 2084 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) { 2085 if (new_crtc_state->event) { 2086 unsigned long flags; 2087 /* Get correct count/ts if racing with vblank irq */ 2088 if (new_crtc_state->active) 2089 drm_crtc_accurate_vblank_count(crtc); 2090 spin_lock_irqsave(&crtc->dev->event_lock, flags); 2091 drm_crtc_send_vblank_event(crtc, new_crtc_state->event); 2092 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); 2093 2094 new_crtc_state->event = NULL; 2095 if (new_crtc_state->active) 2096 drm_crtc_vblank_put(crtc); 2097 } 2098 } 2099 2100 drm_atomic_helper_commit_hw_done(state); 2101 drm_atomic_helper_cleanup_planes(dev, state); 2102 drm_atomic_helper_commit_cleanup_done(state); 2103 drm_atomic_state_put(state); 2104 2105 /* Drop the RPM ref we got from nv50_disp_atomic_commit() */ 2106 pm_runtime_mark_last_busy(dev->dev); 2107 pm_runtime_put_autosuspend(dev->dev); 2108 } 2109 2110 static void 2111 nv50_disp_atomic_commit_work(struct work_struct *work) 2112 { 2113 struct drm_atomic_state *state = 2114 container_of(work, typeof(*state), commit_work); 2115 nv50_disp_atomic_commit_tail(state); 2116 } 2117 2118 static int 2119 nv50_disp_atomic_commit(struct drm_device *dev, 2120 struct drm_atomic_state *state, bool nonblock) 2121 { 2122 struct drm_plane_state *new_plane_state; 2123 struct drm_plane *plane; 2124 int ret, i; 2125 2126 ret = pm_runtime_get_sync(dev->dev); 2127 if (ret < 0 && ret != -EACCES) 2128 return ret; 2129 2130 ret = drm_atomic_helper_setup_commit(state, nonblock); 2131 if (ret) 2132 goto done; 2133 2134 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work); 2135 2136 ret = drm_atomic_helper_prepare_planes(dev, 
					       state);
	if (ret)
		goto done;

	if (!nonblock) {
		/* Blocking commit: wait for outstanding fences up front. */
		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
		if (ret)
			goto err_cleanup;
	}

	ret = drm_atomic_helper_swap_state(state, true);
	if (ret)
		goto err_cleanup;

	/* Enable completion notifiers on every window with a new image. */
	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
		struct nv50_wndw *wndw = nv50_wndw(plane);

		if (asyw->set.image)
			nv50_wndw_ntfy_enable(wndw, asyw);
	}

	drm_atomic_state_get(state);

	/*
	 * Grab another RPM ref for the commit tail, which will release the
	 * ref when it's finished
	 */
	pm_runtime_get_noresume(dev->dev);

	if (nonblock)
		queue_work(system_unbound_wq, &state->commit_work);
	else
		nv50_disp_atomic_commit_tail(state);

err_cleanup:
	/* Only reached with ret != 0 on the failure paths above; on the
	 * success path ret == 0 and cleanup is the commit tail's job.
	 */
	if (ret)
		drm_atomic_helper_cleanup_planes(dev, state);
done:
	pm_runtime_put_autosuspend(dev->dev);
	return ret;
}

/* Find the nv50_outp_atom tracking @encoder in this atomic state,
 * allocating, zero-initializing and registering a new one on first use.
 * Returns ERR_PTR(-ENOMEM) on allocation failure; freed by
 * nv50_disp_atomic_state_clear().
 */
static struct nv50_outp_atom *
nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
{
	struct nv50_outp_atom *outp;

	list_for_each_entry(outp, &atom->outp, head) {
		if (outp->encoder == encoder)
			return outp;
	}

	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
	if (!outp)
		return ERR_PTR(-ENOMEM);

	list_add(&outp->head, &atom->outp);
	outp->encoder = encoder;
	return outp;
}

/* Record encoders that must be torn down by this commit: the connector's
 * old CRTC was active and the new CRTC state requires a modeset.  MST
 * encoders additionally force a flush-disable of the whole atom.
 */
static int
nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
				struct drm_connector_state *old_connector_state)
{
	struct drm_encoder *encoder = old_connector_state->best_encoder;
	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = old_connector_state->crtc))
		return 0;

	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state,
						       crtc);
	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
			outp->flush_disable = true;
			atom->flush_disable = true;
		}
		outp->clr.ctrl = true;
		/* Teardown touches the core channel; serialize the commit. */
		atom->lock_core = true;
	}

	return 0;
}

/* Record encoders that must be (re)enabled by this commit: the new CRTC
 * state is active and requires a modeset.
 */
static int
nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
				struct drm_connector_state *connector_state)
{
	struct drm_encoder *encoder = connector_state->best_encoder;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	struct nv50_outp_atom *outp;

	if (!(crtc = connector_state->crtc))
		return 0;

	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
		outp = nv50_disp_outp_atomic_add(atom, encoder);
		if (IS_ERR(outp))
			return PTR_ERR(outp);

		outp->set.ctrl = true;
		atom->lock_core = true;
	}

	return 0;
}

/* drm_mode_config_funcs.atomic_check() implementation: runs the generic
 * DRM checks, then derives the per-encoder set/clr work and validates
 * MST bandwidth.
 */
static int
nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct drm_connector_state *old_connector_state, *new_connector_state;
	struct drm_connector *connector;
	struct drm_crtc_state *new_crtc_state;
	struct drm_crtc *crtc;
	int ret, i;

	/* We need to handle colour management on a per-plane basis.
	 */
	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
		if (new_crtc_state->color_mgmt_changed) {
			ret = drm_atomic_add_affected_planes(state, crtc);
			if (ret)
				return ret;
		}
	}

	ret = drm_atomic_helper_check(dev, state);
	if (ret)
		return ret;

	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
		if (ret)
			return ret;

		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
		if (ret)
			return ret;
	}

	ret = drm_dp_mst_atomic_check(state);
	if (ret)
		return ret;

	return 0;
}

/* Free the per-encoder atoms hung off the nv50_atom before handing the
 * rest of the state to the default clear helper.
 */
static void
nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	struct nv50_outp_atom *outp, *outt;

	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
		list_del(&outp->head);
		kfree(outp);
	}

	drm_atomic_state_default_clear(state);
}

/* Release and free an nv50_atom (drm_mode_config_funcs.atomic_state_free). */
static void
nv50_disp_atomic_state_free(struct drm_atomic_state *state)
{
	struct nv50_atom *atom = nv50_atom(state);
	drm_atomic_state_default_release(&atom->state);
	kfree(atom);
}

/* Allocate an nv50_atom wrapping the base drm_atomic_state; returns NULL
 * on allocation or init failure (kfree(NULL) is a no-op on that path).
 */
static struct drm_atomic_state *
nv50_disp_atomic_state_alloc(struct drm_device *dev)
{
	struct nv50_atom *atom;
	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
	    drm_atomic_state_init(dev, &atom->state) < 0) {
		kfree(atom);
		return NULL;
	}
	INIT_LIST_HEAD(&atom->outp);
	return &atom->state;
}

/* Mode-config vtable wiring the helpers above into DRM core. */
static const struct drm_mode_config_funcs
nv50_disp_func = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
	.atomic_check = nv50_disp_atomic_check,
	.atomic_commit = nv50_disp_atomic_commit,
	.atomic_state_alloc =
nv50_disp_atomic_state_alloc,
	.atomic_state_clear = nv50_disp_atomic_state_clear,
	.atomic_state_free = nv50_disp_atomic_state_free,
};

/******************************************************************************
 * Init
 *****************************************************************************/

/* Quiesce the display: halt all window planes, then tear down MST state
 * on every non-MST encoder's topology manager.
 */
static void
nv50_display_fini(struct drm_device *dev, bool suspend)
{
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		/* Skip planes that aren't nv50 window planes (funcs table
		 * identifies ours).
		 */
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_fini(wndw);
	}

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			nv_encoder = nouveau_encoder(encoder);
			nv50_mstm_fini(nv_encoder->dp.mstm);
		}
	}
}

/* Bring the display back up: init the core channel, restart MST on each
 * encoder, then re-init all window planes (reverse order of fini).
 */
static int
nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
{
	struct nv50_core *core = nv50_disp(dev)->core;
	struct drm_encoder *encoder;
	struct drm_plane *plane;

	core->func->init(core);

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
			struct nouveau_encoder *nv_encoder =
				nouveau_encoder(encoder);
			nv50_mstm_init(nv_encoder->dp.mstm, runtime);
		}
	}

	drm_for_each_plane(plane, dev) {
		struct nv50_wndw *wndw = nv50_wndw(plane);
		if (plane->funcs != &nv50_wndw)
			continue;
		nv50_wndw_init(wndw);
	}

	return 0;
}

/* Full teardown: audio component, core EVO channel, the shared sync
 * buffer (unmap/unpin/unref), then the nv50_disp itself.  Also used as
 * the error path of nv50_display_create(), so it must tolerate a
 * partially-constructed disp.
 */
static void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_audio_component_fini(nouveau_drm(dev));

	nv50_core_del(&disp->core);

	/* NOTE(review): unmap runs before the NULL check below — assumes
	 * nouveau_bo_unmap() tolerates a NULL bo; confirm in nouveau_bo.c.
	 */
	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}

/* Create the nv50+ display pipeline: shared notifier/semaphore buffer,
 * master EVO (core) channel, one CRTC per hardware head, and
 * encoders/connectors from the VBIOS DCB table.  On any failure the
 * error path tears everything down via nv50_display_destroy().
 * Returns 0 on success or a negative errno.
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->client.device;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;
	bool has_mst = nv50_has_mst(drm);

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	mutex_init(&disp->mutex);

	/* Hook our lifecycle callbacks into the generic nouveau display. */
	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->disp = &nouveau_display(dev)->disp;
	dev->mode_config.funcs = &nv50_disp_func;
	dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;
	dev->mode_config.normalize_zpos = true;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_core_new(drm, &disp->core);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (disp->disp->object.oclass >= GV100_DISP)
		crtcs = nvif_rd32(&device->object, 0x610060) & 0xff;
	else
	if (disp->disp->object.oclass >= GF110_DISP)
		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
	else
		crtcs = 0x3;

	/* crtcs is a bitmask of present heads; iterate set bits only. */
	for (i = 0; i < fls(crtcs); i++) {
		struct nv50_head *head;

		if (!(crtcs & (1 << i)))
			continue;

		head = nv50_head_create(dev, i);
		if (IS_ERR(head)) {
			ret = PTR_ERR(head);
			goto out;
		}

		if (has_mst) {
			head->msto = nv50_msto_new(dev, head, i);
			if (IS_ERR(head->msto)) {
				ret = PTR_ERR(head->msto);
				head->msto = NULL;
				goto out;
			}

			/*
			 * FIXME: This is a hack to workaround the following
			 * issues:
			 *
			 * https://gitlab.gnome.org/GNOME/mutter/issues/759
			 * https://gitlab.freedesktop.org/xorg/xserver/merge_requests/277
			 *
			 * Once these issues are closed, this should be
			 * removed
			 */
			head->msto->encoder.possible_crtcs = crtcs;
		}
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			/* Off-chip outputs hang off a PIOR. */
			ret = nv50_pior_create(connector, dcbe);
		}

		/* Encoder creation failures are non-fatal: warn and keep
		 * going with the remaining DCB entries.
		 */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				dcbe->location, dcbe->type,
				ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->possible_encoders)
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

	/* Disable vblank irqs aggressively for power-saving, safe on nv50+ */
	dev->vblank_disable_immediate = true;

	nv50_audio_component_init(drm);

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}