Lines Matching +full:gop +full:- +full:port +full:- +full:id
31 #include <linux/dma-mapping.h>
83 chan->device = device; in nv50_chan_create()
94 &chan->user); in nv50_chan_create()
96 nvif_object_map(&chan->user, NULL, 0); in nv50_chan_create()
105 return -ENOSYS; in nv50_chan_create()
111 nvif_object_dtor(&chan->user); in nv50_chan_destroy()
121 nvif_object_dtor(&dmac->vram); in nv50_dmac_destroy()
122 nvif_object_dtor(&dmac->sync); in nv50_dmac_destroy()
124 nv50_chan_destroy(&dmac->base); in nv50_dmac_destroy()
126 nvif_mem_dtor(&dmac->_push.mem); in nv50_dmac_destroy()
134 dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr; in nv50_dmac_kick()
135 if (dmac->put != dmac->cur) { in nv50_dmac_kick()
139 if (dmac->push->mem.type & NVIF_MEM_VRAM) { in nv50_dmac_kick()
140 struct nvif_device *device = dmac->base.device; in nv50_dmac_kick()
141 nvif_wr32(&device->object, 0x070000, 0x00000001); in nv50_dmac_kick()
143 if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002)) in nv50_dmac_kick()
148 NVIF_WV32(&dmac->base.user, NV507C, PUT, PTR, dmac->cur); in nv50_dmac_kick()
149 dmac->put = dmac->cur; in nv50_dmac_kick()
152 push->bgn = push->cur; in nv50_dmac_kick()
158 u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR); in nv50_dmac_free()
159 if (get > dmac->cur) /* NVIDIA stay 5 away from GET, do the same. */ in nv50_dmac_free()
160 return get - dmac->cur - 5; in nv50_dmac_free()
161 return dmac->max - dmac->cur; in nv50_dmac_free()
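
The nv50_dmac_free() lines above compute how much of the 4 KiB push buffer (0x1000/4 - 1 words, per the dmac->max setup later in the listing) is still writable: if the hardware GET pointer is ahead of the software cursor, the driver stops five words short of GET (mirroring NVIDIA's binary driver, per the comment); otherwise it can write up to the end of the ring. A minimal stand-alone sketch of that rule, with hypothetical names rather than the driver's structures:

    /* Illustrative only: free words in a ring given hardware GET and the software cursor.
     * The guard against small GET-cur gaps is added here for safety; the driver relies
     * on its own invariants instead. */
    static unsigned int ring_free_words(unsigned int get, unsigned int cur,
                                        unsigned int max /* last usable word index */)
    {
            if (get > cur)                  /* GET is ahead of us: stay 5 words short of it */
                    return get - cur > 5 ? get - cur - 5 : 0;
            return max - cur;               /* otherwise run to the end of the ring */
    }
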
170 u32 get = NVIF_RV32(&dmac->base.user, NV507C, GET, PTR); in nv50_dmac_wind()
172 /* Corner-case, HW idle, but non-committed work pending. */ in nv50_dmac_wind()
173 if (dmac->put == 0) in nv50_dmac_wind()
174 nv50_dmac_kick(dmac->push); in nv50_dmac_wind()
176 if (nvif_msec(dmac->base.device, 2000, in nv50_dmac_wind()
177 if (NVIF_TV32(&dmac->base.user, NV507C, GET, PTR, >, 0)) in nv50_dmac_wind()
180 return -ETIMEDOUT; in nv50_dmac_wind()
183 PUSH_RSVD(dmac->push, PUSH_JUMP(dmac->push, 0)); in nv50_dmac_wind()
184 dmac->cur = 0; in nv50_dmac_wind()
194 if (WARN_ON(size > dmac->max)) in nv50_dmac_wait()
195 return -EINVAL; in nv50_dmac_wait()
197 dmac->cur = push->cur - (u32 __iomem *)dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
198 if (dmac->cur + size >= dmac->max) { in nv50_dmac_wait()
203 push->cur = dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
204 push->cur = push->cur + dmac->cur; in nv50_dmac_wait()
208 if (nvif_msec(dmac->base.device, 2000, in nv50_dmac_wait()
213 return -ETIMEDOUT; in nv50_dmac_wait()
216 push->bgn = dmac->_push.mem.object.map.ptr; in nv50_dmac_wait()
217 push->bgn = push->bgn + dmac->cur; in nv50_dmac_wait()
218 push->cur = push->bgn; in nv50_dmac_wait()
219 push->end = push->cur + free; in nv50_dmac_wait()
224 static int nv50_dmac_vram_pushbuf = -1;
232 struct nouveau_cli *cli = (void *)device->object.client; in nv50_dmac_create()
237 mutex_init(&dmac->lock); in nv50_dmac_create()
239 /* Pascal added support for 47-bit physical addresses, but some in nv50_dmac_create()
240 * parts of EVO still only accept 40-bit PAs. in nv50_dmac_create()
249 (nv50_dmac_vram_pushbuf < 0 && device->info.family == NV_DEVICE_INFO_V0_PASCAL)) in nv50_dmac_create()
252 ret = nvif_mem_ctor_map(&cli->mmu, "kmsChanPush", type, 0x1000, in nv50_dmac_create()
253 &dmac->_push.mem); in nv50_dmac_create()
257 dmac->ptr = dmac->_push.mem.object.map.ptr; in nv50_dmac_create()
258 dmac->_push.wait = nv50_dmac_wait; in nv50_dmac_create()
259 dmac->_push.kick = nv50_dmac_kick; in nv50_dmac_create()
260 dmac->push = &dmac->_push; in nv50_dmac_create()
261 dmac->push->bgn = dmac->_push.mem.object.map.ptr; in nv50_dmac_create()
262 dmac->push->cur = dmac->push->bgn; in nv50_dmac_create()
263 dmac->push->end = dmac->push->bgn; in nv50_dmac_create()
264 dmac->max = 0x1000/4 - 1; in nv50_dmac_create()
269 if (disp->oclass < GV100_DISP) in nv50_dmac_create()
270 dmac->max -= 12; in nv50_dmac_create()
272 args->pushbuf = nvif_handle(&dmac->_push.mem.object); in nv50_dmac_create()
275 &dmac->base); in nv50_dmac_create()
282 ret = nvif_object_ctor(&dmac->base.user, "kmsSyncCtxDma", NV50_DISP_HANDLE_SYNCBUF, in nv50_dmac_create()
290 &dmac->sync); in nv50_dmac_create()
294 ret = nvif_object_ctor(&dmac->base.user, "kmsVramCtxDma", NV50_DISP_HANDLE_VRAM, in nv50_dmac_create()
300 .limit = device->info.ram_user - 1, in nv50_dmac_create()
302 &dmac->vram); in nv50_dmac_create()
317 outp->base.base.name, outp->caps.dp_interlace); in nv50_outp_dump_caps()
326 struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode; in nv50_outp_atomic_check_view()
327 struct drm_display_mode *mode = &crtc_state->mode; in nv50_outp_atomic_check_view()
328 struct drm_connector *connector = conn_state->connector; in nv50_outp_atomic_check_view()
330 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_outp_atomic_check_view()
332 NV_ATOMIC(drm, "%s atomic_check\n", encoder->name); in nv50_outp_atomic_check_view()
333 asyc->scaler.full = false; in nv50_outp_atomic_check_view()
337 if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) { in nv50_outp_atomic_check_view()
338 switch (connector->connector_type) { in nv50_outp_atomic_check_view()
345 if (mode->hdisplay == native_mode->hdisplay && in nv50_outp_atomic_check_view()
346 mode->vdisplay == native_mode->vdisplay && in nv50_outp_atomic_check_view()
347 mode->type & DRM_MODE_TYPE_DRIVER) in nv50_outp_atomic_check_view()
350 asyc->scaler.full = true; in nv50_outp_atomic_check_view()
361 crtc_state->mode_changed = true; in nv50_outp_atomic_check_view()
372 struct drm_display_mode *mode = &asyh->state.adjusted_mode; in nv50_outp_atomic_fix_depth()
375 switch (nv_encoder->dcb->type) { in nv50_outp_atomic_fix_depth()
377 max_rate = nv_encoder->dp.link_nr * nv_encoder->dp.link_bw; in nv50_outp_atomic_fix_depth()
380 asyh->or.bpc = min_t(u8, asyh->or.bpc, 10); in nv50_outp_atomic_fix_depth()
383 while (asyh->or.bpc > 6) { in nv50_outp_atomic_fix_depth()
384 mode_rate = DIV_ROUND_UP(mode->clock * asyh->or.bpc * 3, 8); in nv50_outp_atomic_fix_depth()
388 asyh->or.bpc -= 2; in nv50_outp_atomic_fix_depth()
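
The nv50_outp_atomic_fix_depth() lines above cap DP colour depth at 10 bpc and then step it down two bits at a time until the mode's data rate (clock x bpc x 3 components / 8, rounded up) fits the link's lane count x per-lane rate. A rough stand-alone sketch of that loop, assuming the comparison elided from the listing simply breaks out once the rate fits:

    /* Illustrative only: highest bpc in {10, 8, 6} whose rate fits the link.
     * link_rate is assumed to be lane count x per-lane rate, in the same units
     * as mode_rate below. */
    static unsigned int fit_bpc_to_link(unsigned int clock_khz, unsigned int link_rate)
    {
            unsigned int bpc = 10;

            while (bpc > 6) {
                    /* bytes needed per unit time: clock * bpc * 3 / 8, rounded up */
                    unsigned int mode_rate = (clock_khz * bpc * 3 + 7) / 8;

                    if (mode_rate <= link_rate)
                            break;
                    bpc -= 2;
            }
            return bpc;
    }
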
401 struct drm_connector *connector = conn_state->connector; in nv50_outp_atomic_check()
407 nv_connector->native_mode); in nv50_outp_atomic_check()
411 if (crtc_state->mode_changed || crtc_state->connectors_changed) in nv50_outp_atomic_check()
412 asyh->or.bpc = connector->display_info.bpc; in nv50_outp_atomic_check()
429 if (connector_state->best_encoder == encoder) in nv50_outp_get_new_connector()
445 if (connector_state->best_encoder == encoder) in nv50_outp_get_old_connector()
457 const u32 mask = drm_encoder_mask(&outp->base.base); in nv50_outp_get_new_crtc()
461 if (crtc_state->encoder_mask & mask) in nv50_outp_get_new_crtc()
475 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_dac_atomic_disable()
478 core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL); in nv50_dac_atomic_disable()
479 nv_encoder->crtc = NULL; in nv50_dac_atomic_disable()
480 nvif_outp_release(&nv_encoder->outp); in nv50_dac_atomic_disable()
489 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_dac_atomic_enable()
490 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_dac_atomic_enable()
493 switch (nv_crtc->index) { in nv50_dac_atomic_enable()
505 nvif_outp_acquire_rgb_crt(&nv_encoder->outp); in nv50_dac_atomic_enable()
507 core->func->dac->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh); in nv50_dac_atomic_enable()
508 asyh->or.depth = 0; in nv50_dac_atomic_enable()
510 nv_encoder->crtc = &nv_crtc->base; in nv50_dac_atomic_enable()
520 loadval = nouveau_drm(encoder->dev)->vbios.dactestval; in nv50_dac_detect()
524 ret = nvif_outp_load_detect(&nv_encoder->outp, loadval); in nv50_dac_detect()
544 nvif_outp_dtor(&nv_encoder->outp); in nv50_dac_destroy()
558 struct nouveau_drm *drm = nouveau_drm(connector->dev); in nv50_dac_create()
559 struct nv50_disp *disp = nv50_disp(connector->dev); in nv50_dac_create()
560 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_dac_create()
568 return -ENOMEM; in nv50_dac_create()
569 nv_encoder->dcb = dcbe; in nv50_dac_create()
571 bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index); in nv50_dac_create()
573 nv_encoder->i2c = &bus->i2c; in nv50_dac_create()
576 encoder->possible_crtcs = dcbe->heads; in nv50_dac_create()
577 encoder->possible_clones = 0; in nv50_dac_create()
578 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type, in nv50_dac_create()
579 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_dac_create()
583 return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp); in nv50_dac_create()
590 nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port, in nv50_audio_component_eld_notify() argument
593 if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify) in nv50_audio_component_eld_notify()
594 acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr, in nv50_audio_component_eld_notify()
595 port, dev_id); in nv50_audio_component_eld_notify()
599 nv50_audio_component_get_eld(struct device *kdev, int port, int dev_id, in nv50_audio_component_get_eld() argument
611 mutex_lock(&drm->audio.lock); in nv50_audio_component_get_eld()
613 drm_for_each_encoder(encoder, drm->dev) { in nv50_audio_component_get_eld()
616 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) in nv50_audio_component_get_eld()
620 nv_connector = nouveau_connector(nv_encoder->audio.connector); in nv50_audio_component_get_eld()
621 nv_crtc = nouveau_crtc(nv_encoder->crtc); in nv50_audio_component_get_eld()
623 if (!nv_crtc || nv_encoder->outp.or.id != port || nv_crtc->index != dev_id) in nv50_audio_component_get_eld()
626 *enabled = nv_encoder->audio.enabled; in nv50_audio_component_get_eld()
628 ret = drm_eld_size(nv_connector->base.eld); in nv50_audio_component_get_eld()
629 memcpy(buf, nv_connector->base.eld, in nv50_audio_component_get_eld()
635 mutex_unlock(&drm->audio.lock); in nv50_audio_component_get_eld()
653 return -ENOMEM; in nv50_audio_component_bind()
656 acomp->ops = &nv50_audio_component_ops; in nv50_audio_component_bind()
657 acomp->dev = kdev; in nv50_audio_component_bind()
658 drm->audio.component = acomp; in nv50_audio_component_bind()
672 drm->audio.component = NULL; in nv50_audio_component_unbind()
673 acomp->ops = NULL; in nv50_audio_component_unbind()
674 acomp->dev = NULL; in nv50_audio_component_unbind()
686 if (component_add(drm->dev->dev, &nv50_audio_component_bind_ops)) in nv50_audio_component_init()
689 drm->audio.component_registered = true; in nv50_audio_component_init()
690 mutex_init(&drm->audio.lock); in nv50_audio_component_init()
696 if (!drm->audio.component_registered) in nv50_audio_component_fini()
699 component_del(drm->dev->dev, &nv50_audio_component_bind_ops); in nv50_audio_component_fini()
700 drm->audio.component_registered = false; in nv50_audio_component_fini()
701 mutex_destroy(&drm->audio.lock); in nv50_audio_component_fini()
710 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_audio_supported()
712 if (disp->disp->object.oclass <= GT200_DISP || in nv50_audio_supported()
713 disp->disp->object.oclass == GT206_DISP) in nv50_audio_supported()
722 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_audio_disable()
724 struct nvif_outp *outp = &nv_encoder->outp; in nv50_audio_disable()
729 mutex_lock(&drm->audio.lock); in nv50_audio_disable()
730 if (nv_encoder->audio.enabled) { in nv50_audio_disable()
731 nv_encoder->audio.enabled = false; in nv50_audio_disable()
732 nv_encoder->audio.connector = NULL; in nv50_audio_disable()
733 nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, NULL, 0); in nv50_audio_disable()
735 mutex_unlock(&drm->audio.lock); in nv50_audio_disable()
737 nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index); in nv50_audio_disable()
745 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_audio_enable()
747 struct nvif_outp *outp = &nv_encoder->outp; in nv50_audio_enable()
749 if (!nv50_audio_supported(encoder) || !drm_detect_monitor_audio(nv_connector->edid)) in nv50_audio_enable()
752 mutex_lock(&drm->audio.lock); in nv50_audio_enable()
754 nvif_outp_hda_eld(&nv_encoder->outp, nv_crtc->index, nv_connector->base.eld, in nv50_audio_enable()
755 drm_eld_size(nv_connector->base.eld)); in nv50_audio_enable()
756 nv_encoder->audio.enabled = true; in nv50_audio_enable()
757 nv_encoder->audio.connector = &nv_connector->base; in nv50_audio_enable()
759 mutex_unlock(&drm->audio.lock); in nv50_audio_enable()
761 nv50_audio_component_eld_notify(drm->audio.component, outp->or.id, nv_crtc->index); in nv50_audio_enable()
772 struct nouveau_drm *drm = nouveau_drm(encoder->dev); in nv50_hdmi_enable()
774 struct drm_hdmi_info *hdmi = &nv_connector->base.display_info.hdmi; in nv50_hdmi_enable()
785 max_ac_packet = mode->htotal - mode->hdisplay; in nv50_hdmi_enable()
786 max_ac_packet -= rekey; in nv50_hdmi_enable()
787 max_ac_packet -= 18; /* constant from tegra */ in nv50_hdmi_enable()
790 if (hdmi->scdc.scrambling.supported) { in nv50_hdmi_enable()
791 const bool high_tmds_clock_ratio = mode->clock > 340000; in nv50_hdmi_enable()
793 ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &scdc); in nv50_hdmi_enable()
800 if (high_tmds_clock_ratio || hdmi->scdc.scrambling.low_rates) in nv50_hdmi_enable()
805 ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, scdc); in nv50_hdmi_enable()
811 ret = nvif_outp_acquire_tmds(&nv_encoder->outp, nv_crtc->index, true, in nv50_hdmi_enable()
818 args.infoframe.head = nv_crtc->index; in nv50_hdmi_enable()
820 if (!drm_hdmi_avi_infoframe_from_display_mode(&infoframe.avi, &nv_connector->base, mode)) { in nv50_hdmi_enable()
821 drm_hdmi_avi_infoframe_quant_range(&infoframe.avi, &nv_connector->base, mode, in nv50_hdmi_enable()
829 nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_AVI, &args.infoframe, size); in nv50_hdmi_enable()
834 &nv_connector->base, mode)) in nv50_hdmi_enable()
839 nvif_outp_infoframe(&nv_encoder->outp, NVIF_OUTP_INFOFRAME_V0_VSI, &args.infoframe, size); in nv50_hdmi_enable()
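
For the max_ac_packet value computed near the top of nv50_hdmi_enable() above, a worked example with illustrative timings (none of these numbers come from the listing): a 1920x1080 CEA mode with htotal = 2200 leaves 2200 - 1920 = 280 pixels of horizontal blanking; subtracting a hypothetical rekey of 56 and the fixed 18-pixel allowance noted as inherited from the Tegra driver gives

    max_ac_packet = (2200 - 1920) - 56 - 18 = 206
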
853 struct drm_dp_mst_port *port; member
874 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) in nv50_real_outp()
878 if (!msto->mstc) in nv50_real_outp()
880 return msto->mstc->mstm->outp; in nv50_real_outp()
889 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); in nv50_msto_cleanup()
891 drm_atomic_get_mst_payload_state(mst_state, msto->mstc->port); in nv50_msto_cleanup()
893 NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name); in nv50_msto_cleanup()
895 if (msto->disabled) { in nv50_msto_cleanup()
896 msto->mstc = NULL; in nv50_msto_cleanup()
897 msto->disabled = false; in nv50_msto_cleanup()
898 } else if (msto->enabled) { in nv50_msto_cleanup()
900 msto->enabled = false; in nv50_msto_cleanup()
910 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev); in nv50_msto_prepare()
911 struct nv50_mstc *mstc = msto->mstc; in nv50_msto_prepare()
912 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_prepare()
916 NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name); in nv50_msto_prepare()
920 payload = drm_atomic_get_mst_payload_state(mst_state, mstc->port); in nv50_msto_prepare()
921 old_payload = drm_atomic_get_mst_payload_state(old_mst_state, mstc->port); in nv50_msto_prepare()
924 if (msto->disabled) { in nv50_msto_prepare()
927 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, 0, 0, 0, 0); in nv50_msto_prepare()
929 if (msto->enabled) in nv50_msto_prepare()
932 nvif_outp_dp_mst_vcpi(&mstm->outp->outp, msto->head->base.index, in nv50_msto_prepare()
933 payload->vc_start_slot, payload->time_slots, in nv50_msto_prepare()
934 payload->pbn, payload->time_slots * mst_state->pbn_div); in nv50_msto_prepare()
943 struct drm_atomic_state *state = crtc_state->state; in nv50_msto_atomic_check()
944 struct drm_connector *connector = conn_state->connector; in nv50_msto_atomic_check()
947 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_atomic_check()
953 mstc->native); in nv50_msto_atomic_check()
965 if (!state->duplicated) { in nv50_msto_atomic_check()
966 const int clock = crtc_state->adjusted_mode.clock; in nv50_msto_atomic_check()
968 asyh->or.bpc = connector->display_info.bpc; in nv50_msto_atomic_check()
969 asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3 << 4); in nv50_msto_atomic_check()
972 mst_state = drm_atomic_get_mst_topology_state(state, &mstm->mgr); in nv50_msto_atomic_check()
976 if (!mst_state->pbn_div) { in nv50_msto_atomic_check()
977 struct nouveau_encoder *outp = mstc->mstm->outp; in nv50_msto_atomic_check()
979 mst_state->pbn_div = drm_dp_get_vc_payload_bw(&mstm->mgr, in nv50_msto_atomic_check()
980 outp->dp.link_bw, outp->dp.link_nr); in nv50_msto_atomic_check()
983 slots = drm_dp_atomic_find_time_slots(state, &mstm->mgr, mstc->port, asyh->dp.pbn); in nv50_msto_atomic_check()
987 asyh->dp.tu = slots; in nv50_msto_atomic_check()
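
The asyh->dp.pbn value requested above is what the MST topology manager uses to allocate time slots. As a rough illustration only (the exact rounding lives in drm_dp_calc_pbn_mode(), whose bpp argument here is asyh->or.bpc * 3 in 1/16-bpp fixed point): an 8-bpc 1920x1080@60 stream at 148500 kHz carries about 148500 x 24 / 8 = 445500 kB/s of pixel data, and applying the MST 64/54 slot ratio plus the spec's roughly 0.6% margin gives

    445500 * 64/54 * 1.006 / 1000 ≈ 531 PBN
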
1007 struct nv50_head *head = msto->head; in nv50_msto_atomic_enable()
1009 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &head->base.base)); in nv50_msto_atomic_enable()
1016 drm_connector_list_iter_begin(encoder->dev, &conn_iter); in nv50_msto_atomic_enable()
1018 if (connector->state->best_encoder == &msto->encoder) { in nv50_msto_atomic_enable()
1020 mstm = mstc->mstm; in nv50_msto_atomic_enable()
1029 if (!mstm->links++) { in nv50_msto_atomic_enable()
1031 nvif_outp_acquire_dp(&mstm->outp->outp, mstm->outp->dp.dpcd, 0, 0, false, true); in nv50_msto_atomic_enable()
1034 if (mstm->outp->outp.or.link & 1) in nv50_msto_atomic_enable()
1039 mstm->outp->update(mstm->outp, head->base.index, asyh, proto, in nv50_msto_atomic_enable()
1040 nv50_dp_bpc_to_depth(asyh->or.bpc)); in nv50_msto_atomic_enable()
1042 msto->mstc = mstc; in nv50_msto_atomic_enable()
1043 msto->enabled = true; in nv50_msto_atomic_enable()
1044 mstm->modified = true; in nv50_msto_atomic_enable()
1051 struct nv50_mstc *mstc = msto->mstc; in nv50_msto_atomic_disable()
1052 struct nv50_mstm *mstm = mstc->mstm; in nv50_msto_atomic_disable()
1054 mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0); in nv50_msto_atomic_disable()
1055 mstm->modified = true; in nv50_msto_atomic_disable()
1056 if (!--mstm->links) in nv50_msto_atomic_disable()
1057 mstm->disabled = true; in nv50_msto_atomic_disable()
1058 msto->disabled = true; in nv50_msto_atomic_disable()
1072 drm_encoder_cleanup(&msto->encoder); in nv50_msto_destroy()
1082 nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id) in nv50_msto_new() argument
1089 return ERR_PTR(-ENOMEM); in nv50_msto_new()
1091 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto, in nv50_msto_new()
1092 DRM_MODE_ENCODER_DPMST, "mst-%d", id); in nv50_msto_new()
1098 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help); in nv50_msto_new()
1099 msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base); in nv50_msto_new()
1100 msto->head = head; in nv50_msto_new()
1111 struct drm_crtc *crtc = connector_state->crtc; in nv50_mstc_atomic_best_encoder()
1113 if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc))) in nv50_mstc_atomic_best_encoder()
1116 return &nv50_head(crtc)->msto->encoder; in nv50_mstc_atomic_best_encoder()
1124 struct nouveau_encoder *outp = mstc->mstm->outp; in nv50_mstc_mode_valid()
1139 mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port); in nv50_mstc_get_modes()
1140 drm_connector_update_edid_property(&mstc->connector, mstc->edid); in nv50_mstc_get_modes()
1141 if (mstc->edid) in nv50_mstc_get_modes()
1142 ret = drm_add_edid_modes(&mstc->connector, mstc->edid); in nv50_mstc_get_modes()
1150 if (connector->display_info.bpc) in nv50_mstc_get_modes()
1151 connector->display_info.bpc = in nv50_mstc_get_modes()
1152 clamp(connector->display_info.bpc, 6U, 8U); in nv50_mstc_get_modes()
1154 connector->display_info.bpc = 8; in nv50_mstc_get_modes()
1156 if (mstc->native) in nv50_mstc_get_modes()
1157 drm_mode_destroy(mstc->connector.dev, mstc->native); in nv50_mstc_get_modes()
1158 mstc->native = nouveau_conn_native_mode(&mstc->connector); in nv50_mstc_get_modes()
1167 struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr; in nv50_mstc_atomic_check()
1169 return drm_dp_atomic_release_time_slots(state, mgr, mstc->port); in nv50_mstc_atomic_check()
1182 ret = pm_runtime_get_sync(connector->dev->dev); in nv50_mstc_detect()
1183 if (ret < 0 && ret != -EACCES) { in nv50_mstc_detect()
1184 pm_runtime_put_autosuspend(connector->dev->dev); in nv50_mstc_detect()
1188 ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr, in nv50_mstc_detect()
1189 mstc->port); in nv50_mstc_detect()
1194 pm_runtime_mark_last_busy(connector->dev->dev); in nv50_mstc_detect()
1195 pm_runtime_put_autosuspend(connector->dev->dev); in nv50_mstc_detect()
1213 drm_connector_cleanup(&mstc->connector); in nv50_mstc_destroy()
1214 drm_dp_mst_put_port_malloc(mstc->port); in nv50_mstc_destroy()
1231 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port, in nv50_mstc_new() argument
1234 struct drm_device *dev = mstm->outp->base.base.dev; in nv50_mstc_new()
1240 return -ENOMEM; in nv50_mstc_new()
1241 mstc->mstm = mstm; in nv50_mstc_new()
1242 mstc->port = port; in nv50_mstc_new()
1244 ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc, in nv50_mstc_new()
1252 drm_connector_helper_add(&mstc->connector, &nv50_mstc_help); in nv50_mstc_new()
1254 mstc->connector.funcs->reset(&mstc->connector); in nv50_mstc_new()
1255 nouveau_conn_attach_properties(&mstc->connector); in nv50_mstc_new()
1258 if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc))) in nv50_mstc_new()
1261 drm_connector_attach_encoder(&mstc->connector, in nv50_mstc_new()
1262 &nv50_head(crtc)->msto->encoder); in nv50_mstc_new()
1265 drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0); in nv50_mstc_new()
1266 drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0); in nv50_mstc_new()
1267 drm_connector_set_path_property(&mstc->connector, path); in nv50_mstc_new()
1268 drm_dp_mst_get_port_malloc(port); in nv50_mstc_new()
1277 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); in nv50_mstm_cleanup()
1280 NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name); in nv50_mstm_cleanup()
1281 drm_dp_check_act_status(&mstm->mgr); in nv50_mstm_cleanup()
1283 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_cleanup()
1284 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_cleanup()
1286 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_cleanup()
1287 if (mstc && mstc->mstm == mstm) in nv50_mstm_cleanup()
1288 nv50_msto_cleanup(state, mst_state, &mstm->mgr, msto); in nv50_mstm_cleanup()
1292 mstm->modified = false; in nv50_mstm_cleanup()
1300 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev); in nv50_mstm_prepare()
1303 NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name); in nv50_mstm_prepare()
1306 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_prepare()
1307 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_prepare()
1309 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_prepare()
1310 if (mstc && mstc->mstm == mstm && msto->disabled) in nv50_mstm_prepare()
1311 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto); in nv50_mstm_prepare()
1318 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) { in nv50_mstm_prepare()
1319 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_mstm_prepare()
1321 struct nv50_mstc *mstc = msto->mstc; in nv50_mstm_prepare()
1322 if (mstc && mstc->mstm == mstm && !msto->disabled) in nv50_mstm_prepare()
1323 nv50_msto_prepare(state, mst_state, &mstm->mgr, msto); in nv50_mstm_prepare()
1327 if (mstm->disabled) { in nv50_mstm_prepare()
1328 if (!mstm->links) in nv50_mstm_prepare()
1329 nvif_outp_release(&mstm->outp->outp); in nv50_mstm_prepare()
1330 mstm->disabled = false; in nv50_mstm_prepare()
1336 struct drm_dp_mst_port *port, const char *path) in nv50_mstm_add_connector() argument
1342 ret = nv50_mstc_new(mstm, port, path, &mstc); in nv50_mstm_add_connector()
1346 return &mstc->connector; in nv50_mstm_add_connector()
1359 struct drm_dp_aux *aux = &nv_connector->aux; in nv50_mstm_service()
1373 drm_dp_mst_hpd_irq_handle_event(&mstm->mgr, esi, ack, &handled); in nv50_mstm_service()
1384 drm_dp_mst_hpd_irq_send_new_request(&mstm->mgr); in nv50_mstm_service()
1389 nv_connector->base.name, rc); in nv50_mstm_service()
1397 mstm->is_mst = false; in nv50_mstm_remove()
1398 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false); in nv50_mstm_remove()
1404 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_detect()
1408 if (!mstm || !mstm->can_mst) in nv50_mstm_detect()
1411 aux = mstm->mgr.aux; in nv50_mstm_detect()
1421 ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, true); in nv50_mstm_detect()
1425 mstm->is_mst = true; in nv50_mstm_detect()
1432 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_fini()
1439 * path to protect mstm->is_mst without potentially deadlocking in nv50_mstm_fini()
1441 mutex_lock(&outp->dp.hpd_irq_lock); in nv50_mstm_fini()
1442 mstm->suspended = true; in nv50_mstm_fini()
1443 mutex_unlock(&outp->dp.hpd_irq_lock); in nv50_mstm_fini()
1445 if (mstm->is_mst) in nv50_mstm_fini()
1446 drm_dp_mst_topology_mgr_suspend(&mstm->mgr); in nv50_mstm_fini()
1452 struct nv50_mstm *mstm = outp->dp.mstm; in nv50_mstm_init()
1458 if (mstm->is_mst) { in nv50_mstm_init()
1459 ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime); in nv50_mstm_init()
1460 if (ret == -1) in nv50_mstm_init()
1464 mutex_lock(&outp->dp.hpd_irq_lock); in nv50_mstm_init()
1465 mstm->suspended = false; in nv50_mstm_init()
1466 mutex_unlock(&outp->dp.hpd_irq_lock); in nv50_mstm_init()
1468 if (ret == -1) in nv50_mstm_init()
1469 drm_kms_helper_hotplug_event(mstm->mgr.dev); in nv50_mstm_init()
1477 drm_dp_mst_topology_mgr_destroy(&mstm->mgr); in nv50_mstm_del()
1487 const int max_payloads = hweight8(outp->dcb->heads); in nv50_mstm_new()
1488 struct drm_device *dev = outp->base.base.dev; in nv50_mstm_new()
1493 return -ENOMEM; in nv50_mstm_new()
1494 mstm->outp = outp; in nv50_mstm_new()
1495 mstm->mgr.cbs = &nv50_mstm; in nv50_mstm_new()
1497 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max, in nv50_mstm_new()
1512 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev); in nv50_sor_update()
1513 struct nv50_core *core = disp->core; in nv50_sor_update()
1516 nv_encoder->ctrl &= ~BIT(head); in nv50_sor_update()
1517 if (NVDEF_TEST(nv_encoder->ctrl, NV507D, SOR_SET_CONTROL, OWNER, ==, NONE)) in nv50_sor_update()
1518 nv_encoder->ctrl = 0; in nv50_sor_update()
1520 nv_encoder->ctrl |= NVVAL(NV507D, SOR_SET_CONTROL, PROTOCOL, proto); in nv50_sor_update()
1521 nv_encoder->ctrl |= BIT(head); in nv50_sor_update()
1522 asyh->or.depth = depth; in nv50_sor_update()
1525 core->func->sor->ctrl(core, nv_encoder->outp.or.id, nv_encoder->ctrl, asyh); in nv50_sor_update()
1528 /* TODO: Should we extend this to PWM-only backlights?
1537 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc); in nv50_sor_atomic_disable()
1540 struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev); in nv50_sor_atomic_disable()
1541 struct nouveau_backlight *backlight = nv_connector->backlight; in nv50_sor_atomic_disable()
1543 struct drm_dp_aux *aux = &nv_connector->aux; in nv50_sor_atomic_disable()
1548 if (backlight && backlight->uses_dpcd) { in nv50_sor_atomic_disable()
1549 ret = drm_edp_backlight_disable(aux, &backlight->edp_info); in nv50_sor_atomic_disable()
1552 nv_connector->base.base.id, nv_connector->base.name, ret); in nv50_sor_atomic_disable()
1556 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) { in nv50_sor_atomic_disable()
1566 nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0); in nv50_sor_atomic_disable()
1568 nvif_outp_release(&nv_encoder->outp); in nv50_sor_atomic_disable()
1569 nv_encoder->crtc = NULL; in nv50_sor_atomic_disable()
1578 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_sor_atomic_enable()
1579 struct drm_display_mode *mode = &asyh->state.adjusted_mode; in nv50_sor_atomic_enable()
1580 struct nv50_disp *disp = nv50_disp(encoder->dev); in nv50_sor_atomic_enable()
1581 struct nvif_outp *outp = &nv_encoder->outp; in nv50_sor_atomic_enable()
1582 struct drm_device *dev = encoder->dev; in nv50_sor_atomic_enable()
1588 struct nvbios *bios = &drm->vbios; in nv50_sor_atomic_enable()
1594 nv_encoder->crtc = &nv_crtc->base; in nv50_sor_atomic_enable()
1596 if ((disp->disp->object.oclass == GT214_DISP || in nv50_sor_atomic_enable()
1597 disp->disp->object.oclass >= GF110_DISP) && in nv50_sor_atomic_enable()
1598 drm_detect_monitor_audio(nv_connector->edid)) in nv50_sor_atomic_enable()
1601 switch (nv_encoder->dcb->type) { in nv50_sor_atomic_enable()
1603 if (disp->disp->object.oclass == NV50_DISP || in nv50_sor_atomic_enable()
1604 !drm_detect_hdmi_monitor(nv_connector->edid)) in nv50_sor_atomic_enable()
1605 nvif_outp_acquire_tmds(outp, nv_crtc->index, false, 0, 0, 0, false); in nv50_sor_atomic_enable()
1609 if (nv_encoder->outp.or.link & 1) { in nv50_sor_atomic_enable()
1611 /* Only enable dual-link if: in nv50_sor_atomic_enable()
1612 * - Need to (i.e. rate > 165MHz) in nv50_sor_atomic_enable()
1613 * - DCB says we can in nv50_sor_atomic_enable()
1614 * - Not an HDMI monitor, since there's no dual-link in nv50_sor_atomic_enable()
1617 if (mode->clock >= 165000 && in nv50_sor_atomic_enable()
1618 nv_encoder->dcb->duallink_possible && in nv50_sor_atomic_enable()
1619 !drm_detect_hdmi_monitor(nv_connector->edid)) in nv50_sor_atomic_enable()
1628 if (bios->fp_no_ddc) { in nv50_sor_atomic_enable()
1629 lvds_dual = bios->fp.dual_link; in nv50_sor_atomic_enable()
1630 lvds_8bpc = bios->fp.if_is_24bit; in nv50_sor_atomic_enable()
1632 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) { in nv50_sor_atomic_enable()
1633 if (((u8 *)nv_connector->edid)[121] == 2) in nv50_sor_atomic_enable()
1636 if (mode->clock >= bios->fp.duallink_transition_clk) { in nv50_sor_atomic_enable()
1641 if (bios->fp.strapless_is_24bit & 2) in nv50_sor_atomic_enable()
1644 if (bios->fp.strapless_is_24bit & 1) in nv50_sor_atomic_enable()
1648 if (asyh->or.bpc == 8) in nv50_sor_atomic_enable()
1652 nvif_outp_acquire_lvds(&nv_encoder->outp, lvds_dual, lvds_8bpc); in nv50_sor_atomic_enable()
1655 nvif_outp_acquire_dp(&nv_encoder->outp, nv_encoder->dp.dpcd, 0, 0, hda, false); in nv50_sor_atomic_enable()
1656 depth = nv50_dp_bpc_to_depth(asyh->or.bpc); in nv50_sor_atomic_enable()
1658 if (nv_encoder->outp.or.link & 1) in nv50_sor_atomic_enable()
1666 backlight = nv_connector->backlight; in nv50_sor_atomic_enable()
1667 if (backlight && backlight->uses_dpcd) in nv50_sor_atomic_enable()
1668 drm_edp_backlight_enable(&nv_connector->aux, &backlight->edp_info, in nv50_sor_atomic_enable()
1669 (u16)backlight->dev->props.brightness); in nv50_sor_atomic_enable()
1678 nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth); in nv50_sor_atomic_enable()
1693 nvif_outp_dtor(&nv_encoder->outp); in nv50_sor_destroy()
1695 nv50_mstm_del(&nv_encoder->dp.mstm); in nv50_sor_destroy()
1698 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) in nv50_sor_destroy()
1699 mutex_destroy(&nv_encoder->dp.hpd_irq_lock); in nv50_sor_destroy()
1711 struct nvkm_bios *bios = nvxx_bios(&drm->client.device); in nv50_has_mst()
1723 struct nouveau_drm *drm = nouveau_drm(connector->dev); in nv50_sor_create()
1724 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_sor_create()
1727 struct nv50_disp *disp = nv50_disp(connector->dev); in nv50_sor_create()
1730 switch (dcbe->type) { in nv50_sor_create()
1741 return -ENOMEM; in nv50_sor_create()
1742 nv_encoder->dcb = dcbe; in nv50_sor_create()
1743 nv_encoder->update = nv50_sor_update; in nv50_sor_create()
1746 encoder->possible_crtcs = dcbe->heads; in nv50_sor_create()
1747 encoder->possible_clones = 0; in nv50_sor_create()
1748 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type, in nv50_sor_create()
1749 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_sor_create()
1754 disp->core->func->sor->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1); in nv50_sor_create()
1757 if (dcbe->type == DCB_OUTPUT_DP) { in nv50_sor_create()
1759 nvkm_i2c_aux_find(i2c, dcbe->i2c_index); in nv50_sor_create()
1761 mutex_init(&nv_encoder->dp.hpd_irq_lock); in nv50_sor_create()
1764 if (disp->disp->object.oclass < GF110_DISP) { in nv50_sor_create()
1765 /* HW has no support for address-only in nv50_sor_create()
1767 * use custom I2C-over-AUX code. in nv50_sor_create()
1769 nv_encoder->i2c = &aux->i2c; in nv50_sor_create()
1771 nv_encoder->i2c = &nv_connector->aux.ddc; in nv50_sor_create()
1773 nv_encoder->aux = aux; in nv50_sor_create()
1776 if (nv_connector->type != DCB_CONNECTOR_eDP && in nv50_sor_create()
1778 ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, in nv50_sor_create()
1779 16, nv_connector->base.base.id, in nv50_sor_create()
1780 &nv_encoder->dp.mstm); in nv50_sor_create()
1786 nvkm_i2c_bus_find(i2c, dcbe->i2c_index); in nv50_sor_create()
1788 nv_encoder->i2c = &bus->i2c; in nv50_sor_create()
1791 return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp); in nv50_sor_create()
1805 crtc_state->adjusted_mode.clock *= 2; in nv50_pior_atomic_check()
1813 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_pior_atomic_disable()
1816 core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, NULL); in nv50_pior_atomic_disable()
1817 nv_encoder->crtc = NULL; in nv50_pior_atomic_disable()
1818 nvif_outp_release(&nv_encoder->outp); in nv50_pior_atomic_disable()
1827 nv50_head_atom(drm_atomic_get_new_crtc_state(state, &nv_crtc->base)); in nv50_pior_atomic_enable()
1828 struct nv50_core *core = nv50_disp(encoder->dev)->core; in nv50_pior_atomic_enable()
1831 switch (nv_crtc->index) { in nv50_pior_atomic_enable()
1839 switch (asyh->or.bpc) { in nv50_pior_atomic_enable()
1840 case 10: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_30_444; break; in nv50_pior_atomic_enable()
1841 case 8: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_24_444; break; in nv50_pior_atomic_enable()
1842 case 6: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_BPP_18_444; break; in nv50_pior_atomic_enable()
1843 default: asyh->or.depth = NV837D_PIOR_SET_CONTROL_PIXEL_DEPTH_DEFAULT; break; in nv50_pior_atomic_enable()
1846 switch (nv_encoder->dcb->type) { in nv50_pior_atomic_enable()
1849 nvif_outp_acquire_tmds(&nv_encoder->outp, false, false, 0, 0, 0, false); in nv50_pior_atomic_enable()
1853 nvif_outp_acquire_dp(&nv_encoder->outp, nv_encoder->dp.dpcd, 0, 0, false, false); in nv50_pior_atomic_enable()
1860 core->func->pior->ctrl(core, nv_encoder->outp.or.id, ctrl, asyh); in nv50_pior_atomic_enable()
1861 nv_encoder->crtc = &nv_crtc->base; in nv50_pior_atomic_enable()
1876 nvif_outp_dtor(&nv_encoder->outp); in nv50_pior_destroy()
1880 mutex_destroy(&nv_encoder->dp.hpd_irq_lock); in nv50_pior_destroy()
1892 struct drm_device *dev = connector->dev; in nv50_pior_create()
1895 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device); in nv50_pior_create()
1903 switch (dcbe->type) { in nv50_pior_create()
1905 bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev)); in nv50_pior_create()
1906 ddc = bus ? &bus->i2c : NULL; in nv50_pior_create()
1910 aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev)); in nv50_pior_create()
1911 ddc = aux ? &aux->i2c : NULL; in nv50_pior_create()
1915 return -ENODEV; in nv50_pior_create()
1920 return -ENOMEM; in nv50_pior_create()
1921 nv_encoder->dcb = dcbe; in nv50_pior_create()
1922 nv_encoder->i2c = ddc; in nv50_pior_create()
1923 nv_encoder->aux = aux; in nv50_pior_create()
1925 mutex_init(&nv_encoder->dp.hpd_irq_lock); in nv50_pior_create()
1928 encoder->possible_crtcs = dcbe->heads; in nv50_pior_create()
1929 encoder->possible_clones = 0; in nv50_pior_create()
1930 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type, in nv50_pior_create()
1931 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm); in nv50_pior_create()
1936 disp->core->func->pior->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1); in nv50_pior_create()
1939 return nvif_outp_ctor(disp->disp, nv_encoder->base.base.name, dcbe->id, &nv_encoder->outp); in nv50_pior_create()
1951 struct nouveau_drm *drm = nouveau_drm(state->dev); in nv50_disp_atomic_commit_core()
1952 struct nv50_disp *disp = nv50_disp(drm->dev); in nv50_disp_atomic_commit_core()
1953 struct nv50_core *core = disp->core; in nv50_disp_atomic_commit_core()
1961 if (mstm->modified) in nv50_disp_atomic_commit_core()
1965 core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY); in nv50_disp_atomic_commit_core()
1966 core->func->update(core, interlock, true); in nv50_disp_atomic_commit_core()
1967 if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY, in nv50_disp_atomic_commit_core()
1968 disp->core->chan.base.device)) in nv50_disp_atomic_commit_core()
1973 if (mstm->modified) in nv50_disp_atomic_commit_core()
1987 if (interlock[wndw->interlock.type] & wndw->interlock.data) { in nv50_disp_atomic_commit_wndw()
1988 if (wndw->func->update) in nv50_disp_atomic_commit_wndw()
1989 wndw->func->update(wndw, interlock); in nv50_disp_atomic_commit_wndw()
1997 struct drm_device *dev = state->dev; in nv50_disp_atomic_commit_tail()
2005 struct nv50_core *core = disp->core; in nv50_disp_atomic_commit_tail()
2011 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2019 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
2020 mutex_lock(&disp->mutex); in nv50_disp_atomic_commit_tail()
2027 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2028 asyh->clr.mask, asyh->set.mask); in nv50_disp_atomic_commit_tail()
2030 if (old_crtc_state->active && !new_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2031 pm_runtime_put_noidle(dev->dev); in nv50_disp_atomic_commit_tail()
2035 if (asyh->clr.mask) { in nv50_disp_atomic_commit_tail()
2036 nv50_head_flush_clr(head, asyh, atom->flush_disable); in nv50_disp_atomic_commit_tail()
2046 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name, in nv50_disp_atomic_commit_tail()
2047 asyw->clr.mask, asyw->set.mask); in nv50_disp_atomic_commit_tail()
2048 if (!asyw->clr.mask) in nv50_disp_atomic_commit_tail()
2051 nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw); in nv50_disp_atomic_commit_tail()
2055 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2059 encoder = outp->encoder; in nv50_disp_atomic_commit_tail()
2060 help = encoder->helper_private; in nv50_disp_atomic_commit_tail()
2062 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name, in nv50_disp_atomic_commit_tail()
2063 outp->clr.mask, outp->set.mask); in nv50_disp_atomic_commit_tail()
2065 if (outp->clr.mask) { in nv50_disp_atomic_commit_tail()
2066 help->atomic_disable(encoder, state); in nv50_disp_atomic_commit_tail()
2068 if (outp->flush_disable) { in nv50_disp_atomic_commit_tail()
2080 if (atom->flush_disable) { in nv50_disp_atomic_commit_tail()
2094 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_commit_tail()
2098 encoder = outp->encoder; in nv50_disp_atomic_commit_tail()
2099 help = encoder->helper_private; in nv50_disp_atomic_commit_tail()
2101 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name, in nv50_disp_atomic_commit_tail()
2102 outp->set.mask, outp->clr.mask); in nv50_disp_atomic_commit_tail()
2104 if (outp->set.mask) { in nv50_disp_atomic_commit_tail()
2105 help->atomic_enable(encoder, state); in nv50_disp_atomic_commit_tail()
2109 list_del(&outp->head); in nv50_disp_atomic_commit_tail()
2118 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2119 asyh->set.mask, asyh->clr.mask); in nv50_disp_atomic_commit_tail()
2121 if (asyh->set.mask) { in nv50_disp_atomic_commit_tail()
2126 if (new_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2127 if (!old_crtc_state->active) { in nv50_disp_atomic_commit_tail()
2129 pm_runtime_get_noresume(dev->dev); in nv50_disp_atomic_commit_tail()
2131 if (new_crtc_state->event) in nv50_disp_atomic_commit_tail()
2136 /* Update window->head assignment. in nv50_disp_atomic_commit_tail()
2142 * supports non-fixed mappings). in nv50_disp_atomic_commit_tail()
2144 if (core->assign_windows) { in nv50_disp_atomic_commit_tail()
2145 core->func->wndw.owner(core); in nv50_disp_atomic_commit_tail()
2147 core->assign_windows = false; in nv50_disp_atomic_commit_tail()
2156 * The EFI GOP driver on newer GPUs configures window channels with a in nv50_disp_atomic_commit_tail()
2169 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name, in nv50_disp_atomic_commit_tail()
2170 asyh->set.mask, asyh->clr.mask); in nv50_disp_atomic_commit_tail()
2172 if (asyh->set.mask) { in nv50_disp_atomic_commit_tail()
2183 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name, in nv50_disp_atomic_commit_tail()
2184 asyw->set.mask, asyw->clr.mask); in nv50_disp_atomic_commit_tail()
2185 if ( !asyw->set.mask && in nv50_disp_atomic_commit_tail()
2186 (!asyw->clr.mask || atom->flush_disable)) in nv50_disp_atomic_commit_tail()
2199 !atom->state.legacy_cursor_update) in nv50_disp_atomic_commit_tail()
2202 disp->core->func->update(disp->core, interlock, false); in nv50_disp_atomic_commit_tail()
2205 if (atom->lock_core) in nv50_disp_atomic_commit_tail()
2206 mutex_unlock(&disp->mutex); in nv50_disp_atomic_commit_tail()
2214 NV_ERROR(drm, "%s: timeout\n", plane->name); in nv50_disp_atomic_commit_tail()
2218 if (new_crtc_state->event) { in nv50_disp_atomic_commit_tail()
2221 if (new_crtc_state->active) in nv50_disp_atomic_commit_tail()
2223 spin_lock_irqsave(&crtc->dev->event_lock, flags); in nv50_disp_atomic_commit_tail()
2224 drm_crtc_send_vblank_event(crtc, new_crtc_state->event); in nv50_disp_atomic_commit_tail()
2225 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); in nv50_disp_atomic_commit_tail()
2227 new_crtc_state->event = NULL; in nv50_disp_atomic_commit_tail()
2228 if (new_crtc_state->active) in nv50_disp_atomic_commit_tail()
2243 pm_runtime_mark_last_busy(dev->dev); in nv50_disp_atomic_commit_tail()
2244 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit_tail()
2263 ret = pm_runtime_get_sync(dev->dev); in nv50_disp_atomic_commit()
2264 if (ret < 0 && ret != -EACCES) { in nv50_disp_atomic_commit()
2265 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit()
2273 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work); in nv50_disp_atomic_commit()
2293 if (asyw->set.image) in nv50_disp_atomic_commit()
2303 pm_runtime_get_noresume(dev->dev); in nv50_disp_atomic_commit()
2306 queue_work(system_unbound_wq, &state->commit_work); in nv50_disp_atomic_commit()
2314 pm_runtime_put_autosuspend(dev->dev); in nv50_disp_atomic_commit()
2323 list_for_each_entry(outp, &atom->outp, head) { in nv50_disp_outp_atomic_add()
2324 if (outp->encoder == encoder) in nv50_disp_outp_atomic_add()
2330 return ERR_PTR(-ENOMEM); in nv50_disp_outp_atomic_add()
2332 list_add(&outp->head, &atom->outp); in nv50_disp_outp_atomic_add()
2333 outp->encoder = encoder; in nv50_disp_outp_atomic_add()
2341 struct drm_encoder *encoder = old_connector_state->best_encoder; in nv50_disp_outp_atomic_check_clr()
2346 if (!(crtc = old_connector_state->crtc)) in nv50_disp_outp_atomic_check_clr()
2349 old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2350 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_clr()
2351 if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) { in nv50_disp_outp_atomic_check_clr()
2356 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) { in nv50_disp_outp_atomic_check_clr()
2357 outp->flush_disable = true; in nv50_disp_outp_atomic_check_clr()
2358 atom->flush_disable = true; in nv50_disp_outp_atomic_check_clr()
2360 outp->clr.ctrl = true; in nv50_disp_outp_atomic_check_clr()
2361 atom->lock_core = true; in nv50_disp_outp_atomic_check_clr()
2371 struct drm_encoder *encoder = connector_state->best_encoder; in nv50_disp_outp_atomic_check_set()
2376 if (!(crtc = connector_state->crtc)) in nv50_disp_outp_atomic_check_set()
2379 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc); in nv50_disp_outp_atomic_check_set()
2380 if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) { in nv50_disp_outp_atomic_check_set()
2385 outp->set.ctrl = true; in nv50_disp_outp_atomic_check_set()
2386 atom->lock_core = true; in nv50_disp_outp_atomic_check_set()
2396 struct nv50_core *core = nv50_disp(dev)->core; in nv50_disp_atomic_check()
2405 if (core->assign_windows && core->func->head->static_wndw_map) { in nv50_disp_atomic_check()
2414 core->func->head->static_wndw_map(head, asyh); in nv50_disp_atomic_check()
2418 /* We need to handle colour management on a per-plane basis. */ in nv50_disp_atomic_check()
2420 if (new_crtc_state->color_mgmt_changed) { in nv50_disp_atomic_check()
2456 list_for_each_entry_safe(outp, outt, &atom->outp, head) { in nv50_disp_atomic_state_clear()
2457 list_del(&outp->head); in nv50_disp_atomic_state_clear()
2468 drm_atomic_state_default_release(&atom->state); in nv50_disp_atomic_state_free()
2477 drm_atomic_state_init(dev, &atom->state) < 0) { in nv50_disp_atomic_state_alloc()
2481 INIT_LIST_HEAD(&atom->outp); in nv50_disp_atomic_state_alloc()
2482 return &atom->state; in nv50_disp_atomic_state_alloc()
2511 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in nv50_display_fini()
2512 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) in nv50_display_fini()
2517 cancel_work_sync(&drm->hpd_work); in nv50_display_fini()
2523 struct nv50_core *core = nv50_disp(dev)->core; in nv50_display_init()
2527 core->func->init(core); in nv50_display_init()
2529 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in nv50_display_init()
2530 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) { in nv50_display_init()
2547 nvif_object_unmap(&disp->caps); in nv50_display_destroy()
2548 nvif_object_dtor(&disp->caps); in nv50_display_destroy()
2549 nv50_core_del(&disp->core); in nv50_display_destroy()
2551 nouveau_bo_unmap(disp->sync); in nv50_display_destroy()
2552 if (disp->sync) in nv50_display_destroy()
2553 nouveau_bo_unpin(disp->sync); in nv50_display_destroy()
2554 nouveau_bo_ref(NULL, &disp->sync); in nv50_display_destroy()
2556 nouveau_display(dev)->priv = NULL; in nv50_display_destroy()
2563 struct nvif_device *device = &nouveau_drm(dev)->client.device; in nv50_display_create()
2565 struct dcb_table *dcb = &drm->vbios.dcb; in nv50_display_create()
2574 return -ENOMEM; in nv50_display_create()
2576 mutex_init(&disp->mutex); in nv50_display_create()
2578 nouveau_display(dev)->priv = disp; in nv50_display_create()
2579 nouveau_display(dev)->dtor = nv50_display_destroy; in nv50_display_create()
2580 nouveau_display(dev)->init = nv50_display_init; in nv50_display_create()
2581 nouveau_display(dev)->fini = nv50_display_fini; in nv50_display_create()
2582 disp->disp = &nouveau_display(dev)->disp; in nv50_display_create()
2583 dev->mode_config.funcs = &nv50_disp_func; in nv50_display_create()
2584 dev->mode_config.helper_private = &nv50_disp_helper_func; in nv50_display_create()
2585 dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true; in nv50_display_create()
2586 dev->mode_config.normalize_zpos = true; in nv50_display_create()
2589 ret = nouveau_bo_new(&drm->client, 4096, 0x1000, in nv50_display_create()
2591 0, 0x0000, NULL, NULL, &disp->sync); in nv50_display_create()
2593 ret = nouveau_bo_pin(disp->sync, NOUVEAU_GEM_DOMAIN_VRAM, true); in nv50_display_create()
2595 ret = nouveau_bo_map(disp->sync); in nv50_display_create()
2597 nouveau_bo_unpin(disp->sync); in nv50_display_create()
2600 nouveau_bo_ref(NULL, &disp->sync); in nv50_display_create()
2607 ret = nv50_core_new(drm, &disp->core); in nv50_display_create()
2611 disp->core->func->init(disp->core); in nv50_display_create()
2612 if (disp->core->func->caps_init) { in nv50_display_create()
2613 ret = disp->core->func->caps_init(drm, disp); in nv50_display_create()
2619 if (disp->disp->object.oclass >= TU102_DISP) in nv50_display_create()
2620 nouveau_display(dev)->format_modifiers = wndwc57e_modifiers; in nv50_display_create()
2622 if (drm->client.device.info.family >= NV_DEVICE_INFO_V0_FERMI) in nv50_display_create()
2623 nouveau_display(dev)->format_modifiers = disp90xx_modifiers; in nv50_display_create()
2625 nouveau_display(dev)->format_modifiers = disp50xx_modifiers; in nv50_display_create()
2632 * But until then, just limit cursors to 128x128 - which is small enough to avoid ever using in nv50_display_create()
2635 if (disp->disp->object.oclass >= GM107_DISP) { in nv50_display_create()
2636 dev->mode_config.cursor_width = 256; in nv50_display_create()
2637 dev->mode_config.cursor_height = 256; in nv50_display_create()
2638 } else if (disp->disp->object.oclass >= GK104_DISP) { in nv50_display_create()
2639 dev->mode_config.cursor_width = 128; in nv50_display_create()
2640 dev->mode_config.cursor_height = 128; in nv50_display_create()
2642 dev->mode_config.cursor_width = 64; in nv50_display_create()
2643 dev->mode_config.cursor_height = 64; in nv50_display_create()
2647 if (disp->disp->object.oclass >= GV100_DISP) in nv50_display_create()
2648 crtcs = nvif_rd32(&device->object, 0x610060) & 0xff; in nv50_display_create()
2650 if (disp->disp->object.oclass >= GF110_DISP) in nv50_display_create()
2651 crtcs = nvif_rd32(&device->object, 0x612004) & 0xf; in nv50_display_create()
2668 head->msto = nv50_msto_new(dev, head, i); in nv50_display_create()
2669 if (IS_ERR(head->msto)) { in nv50_display_create()
2670 ret = PTR_ERR(head->msto); in nv50_display_create()
2671 head->msto = NULL; in nv50_display_create()
2685 head->msto->encoder.possible_crtcs = crtcs; in nv50_display_create()
2690 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) { in nv50_display_create()
2695 if (dcbe->location == DCB_LOC_ON_CHIP) { in nv50_display_create()
2696 switch (dcbe->type) { in nv50_display_create()
2706 ret = -ENODEV; in nv50_display_create()
2715 dcbe->location, dcbe->type, in nv50_display_create()
2716 ffs(dcbe->or) - 1, ret); in nv50_display_create()
2722 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) { in nv50_display_create()
2723 if (connector->possible_encoders) in nv50_display_create()
2727 connector->name); in nv50_display_create()
2728 connector->funcs->destroy(connector); in nv50_display_create()
2731 /* Disable vblank irqs aggressively for power-saving, safe on nv50+ */ in nv50_display_create()
2732 dev->vblank_disable_immediate = true; in nv50_display_create()
2747 * Log2(block height) ----------------------------+ *
2748 * Page Kind ----------------------------------+ | *
2749 * Gob Height/Page Kind Generation ------+ | | *
2750 * Sector layout -------+ | | | *
2751 * Compression ------+ | | | | */
2776 * Log2(block height) ----------------------------+ *
2777 * Page Kind ----------------------------------+ | *
2778 * Gob Height/Page Kind Generation ------+ | | *
2779 * Sector layout -------+ | | | *
2780 * Compression ------+ | | | | */
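
The two ASCII diagrams above describe the field layout packed into the NVIDIA block-linear DRM format modifiers used by these modifier tables: compression, sector layout, GOB-height/page-kind generation, page kind, and log2(block height). As a hypothetical re-expression of that layout (the field positions below are assumptions for illustration; the authoritative encoding is DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D() in include/uapi/drm/drm_fourcc.h), a packing helper might look like:

    #include <stdint.h>

    /* Sketch only: pack the fields named in the comments above into a modifier value.
     * Vendor bits and exact widths are omitted/assumed here. */
    static uint64_t nv_blocklinear_mod(uint64_t compression, uint64_t sector_layout,
                                       uint64_t gob_kind_gen, uint64_t page_kind,
                                       uint64_t log2_block_height)
    {
            return 0x10 |                         /* block-linear marker */
                   (log2_block_height & 0xf) |    /* Log2(block height) */
                   (page_kind     << 12) |        /* Page Kind */
                   (gob_kind_gen  << 20) |        /* Gob Height/Page Kind Generation */
                   (sector_layout << 22) |        /* Sector layout */
                   (compression   << 23);         /* Compression */
    }

Packing these layout parameters into the modifier lets userspace and the kernel agree on tiling without out-of-band metadata, which is why the same diagram appears before each modifier table in the source.
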