1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24 #include "disp.h"
25 #include "atom.h"
26 #include "core.h"
27 #include "head.h"
28 #include "wndw.h"
29 
30 #include <linux/dma-mapping.h>
31 #include <linux/hdmi.h>
32 #include <linux/component.h>
33 
34 #include <drm/drm_atomic_helper.h>
35 #include <drm/drm_dp_helper.h>
36 #include <drm/drm_edid.h>
37 #include <drm/drm_fb_helper.h>
38 #include <drm/drm_plane_helper.h>
39 #include <drm/drm_probe_helper.h>
40 #include <drm/drm_scdc_helper.h>
41 #include <drm/drm_vblank.h>
42 
43 #include <nvif/class.h>
44 #include <nvif/cl0002.h>
45 #include <nvif/cl5070.h>
46 #include <nvif/cl507d.h>
47 #include <nvif/event.h>
48 
49 #include "nouveau_drv.h"
50 #include "nouveau_dma.h"
51 #include "nouveau_gem.h"
52 #include "nouveau_connector.h"
53 #include "nouveau_encoder.h"
54 #include "nouveau_fence.h"
55 #include "nouveau_fbcon.h"
56 
57 #include <subdev/bios/dp.h>
58 
59 /******************************************************************************
60  * Atomic state
61  *****************************************************************************/
62 
63 struct nv50_outp_atom {
64 	struct list_head head;
65 
66 	struct drm_encoder *encoder;
67 	bool flush_disable;
68 
69 	union nv50_outp_atom_mask {
70 		struct {
71 			bool ctrl:1;
72 		};
73 		u8 mask;
74 	} set, clr;
75 };
76 
77 /******************************************************************************
78  * EVO channel
79  *****************************************************************************/
80 
81 static int
82 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
83 		 const s32 *oclass, u8 head, void *data, u32 size,
84 		 struct nv50_chan *chan)
85 {
86 	struct nvif_sclass *sclass;
87 	int ret, i, n;
88 
89 	chan->device = device;
90 
91 	ret = n = nvif_object_sclass_get(disp, &sclass);
92 	if (ret < 0)
93 		return ret;
94 
95 	while (oclass[0]) {
96 		for (i = 0; i < n; i++) {
97 			if (sclass[i].oclass == oclass[0]) {
98 				ret = nvif_object_init(disp, 0, oclass[0],
99 						       data, size, &chan->user);
100 				if (ret == 0)
101 					nvif_object_map(&chan->user, NULL, 0);
102 				nvif_object_sclass_put(&sclass);
103 				return ret;
104 			}
105 		}
106 		oclass++;
107 	}
108 
109 	nvif_object_sclass_put(&sclass);
110 	return -ENOSYS;
111 }
112 
113 static void
114 nv50_chan_destroy(struct nv50_chan *chan)
115 {
116 	nvif_object_fini(&chan->user);
117 }
118 
119 /******************************************************************************
120  * DMA EVO channel
121  *****************************************************************************/
122 
123 void
124 nv50_dmac_destroy(struct nv50_dmac *dmac)
125 {
126 	nvif_object_fini(&dmac->vram);
127 	nvif_object_fini(&dmac->sync);
128 
129 	nv50_chan_destroy(&dmac->base);
130 
131 	nvif_mem_fini(&dmac->push);
132 }
133 
134 int
135 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
136 		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
137 		 struct nv50_dmac *dmac)
138 {
139 	struct nouveau_cli *cli = (void *)device->object.client;
140 	struct nv50_disp_core_channel_dma_v0 *args = data;
141 	u8 type = NVIF_MEM_COHERENT;
142 	int ret;
143 
144 	mutex_init(&dmac->lock);
145 
146 	/* Pascal added support for 47-bit physical addresses, but some
147 	 * parts of EVO still only accept 40-bit PAs.
148 	 *
149 	 * To avoid issues on systems with large amounts of RAM, and on
150 	 * systems where an IOMMU maps pages at a high address, we need
151 	 * to allocate push buffers in VRAM instead.
152 	 *
153 	 * This appears to match NVIDIA's behaviour on Pascal.
154 	 */
155 	if (device->info.family == NV_DEVICE_INFO_V0_PASCAL)
156 		type |= NVIF_MEM_VRAM;
157 
158 	ret = nvif_mem_init_map(&cli->mmu, type, 0x1000, &dmac->push);
159 	if (ret)
160 		return ret;
161 
162 	dmac->ptr = dmac->push.object.map.ptr;
163 
164 	args->pushbuf = nvif_handle(&dmac->push.object);
165 
166 	ret = nv50_chan_create(device, disp, oclass, head, data, size,
167 			       &dmac->base);
168 	if (ret)
169 		return ret;
170 
171 	if (!syncbuf)
172 		return 0;
173 
174 	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
175 			       &(struct nv_dma_v0) {
176 					.target = NV_DMA_V0_TARGET_VRAM,
177 					.access = NV_DMA_V0_ACCESS_RDWR,
178 					.start = syncbuf + 0x0000,
179 					.limit = syncbuf + 0x0fff,
180 			       }, sizeof(struct nv_dma_v0),
181 			       &dmac->sync);
182 	if (ret)
183 		return ret;
184 
185 	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
186 			       &(struct nv_dma_v0) {
187 					.target = NV_DMA_V0_TARGET_VRAM,
188 					.access = NV_DMA_V0_ACCESS_RDWR,
189 					.start = 0,
190 					.limit = device->info.ram_user - 1,
191 			       }, sizeof(struct nv_dma_v0),
192 			       &dmac->vram);
193 	if (ret)
194 		return ret;
195 
196 	return ret;
197 }
198 
199 /******************************************************************************
200  * EVO channel helpers
201  *****************************************************************************/
202 static void
203 evo_flush(struct nv50_dmac *dmac)
204 {
205 	/* Push buffer fetches are not coherent with BAR1, we need to ensure
206 	 * writes have been flushed right through to VRAM before writing PUT.
207 	 */
208 	if (dmac->push.type & NVIF_MEM_VRAM) {
209 		struct nvif_device *device = dmac->base.device;
210 		nvif_wr32(&device->object, 0x070000, 0x00000001);
211 		nvif_msec(device, 2000,
212 			if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
213 				break;
214 		);
215 	}
216 }
217 
218 u32 *
219 evo_wait(struct nv50_dmac *evoc, int nr)
220 {
221 	struct nv50_dmac *dmac = evoc;
222 	struct nvif_device *device = dmac->base.device;
223 	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
224 
225 	mutex_lock(&dmac->lock);
226 	if (put + nr >= (PAGE_SIZE / 4) - 8) {
227 		dmac->ptr[put] = 0x20000000;
228 		evo_flush(dmac);
229 
230 		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
231 		if (nvif_msec(device, 2000,
232 			if (!nvif_rd32(&dmac->base.user, 0x0004))
233 				break;
234 		) < 0) {
235 			mutex_unlock(&dmac->lock);
236 			pr_err("nouveau: evo channel stalled\n");
237 			return NULL;
238 		}
239 
240 		put = 0;
241 	}
242 
243 	return dmac->ptr + put;
244 }
245 
246 void
247 evo_kick(u32 *push, struct nv50_dmac *evoc)
248 {
249 	struct nv50_dmac *dmac = evoc;
250 
251 	evo_flush(dmac);
252 
253 	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
254 	mutex_unlock(&dmac->lock);
255 }
256 
257 /******************************************************************************
258  * Output path helpers
259  *****************************************************************************/
260 static void
261 nv50_outp_release(struct nouveau_encoder *nv_encoder)
262 {
263 	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
264 	struct {
265 		struct nv50_disp_mthd_v1 base;
266 	} args = {
267 		.base.version = 1,
268 		.base.method = NV50_DISP_MTHD_V1_RELEASE,
269 		.base.hasht  = nv_encoder->dcb->hasht,
270 		.base.hashm  = nv_encoder->dcb->hashm,
271 	};
272 
273 	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
274 	nv_encoder->or = -1;
275 	nv_encoder->link = 0;
276 }
277 
278 static int
279 nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
280 {
281 	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
282 	struct nv50_disp *disp = nv50_disp(drm->dev);
283 	struct {
284 		struct nv50_disp_mthd_v1 base;
285 		struct nv50_disp_acquire_v0 info;
286 	} args = {
287 		.base.version = 1,
288 		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
289 		.base.hasht  = nv_encoder->dcb->hasht,
290 		.base.hashm  = nv_encoder->dcb->hashm,
291 	};
292 	int ret;
293 
294 	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
295 	if (ret) {
296 		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
297 		return ret;
298 	}
299 
300 	nv_encoder->or = args.info.or;
301 	nv_encoder->link = args.info.link;
302 	return 0;
303 }
304 
305 static int
306 nv50_outp_atomic_check_view(struct drm_encoder *encoder,
307 			    struct drm_crtc_state *crtc_state,
308 			    struct drm_connector_state *conn_state,
309 			    struct drm_display_mode *native_mode)
310 {
311 	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
312 	struct drm_display_mode *mode = &crtc_state->mode;
313 	struct drm_connector *connector = conn_state->connector;
314 	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
315 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
316 
317 	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
318 	asyc->scaler.full = false;
319 	if (!native_mode)
320 		return 0;
321 
322 	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
323 		switch (connector->connector_type) {
324 		case DRM_MODE_CONNECTOR_LVDS:
325 		case DRM_MODE_CONNECTOR_eDP:
326 			/* Don't force scaler for EDID modes with
327 			 * same size as the native one (e.g. different
328 			 * refresh rate)
329 			 */
330 			if (mode->hdisplay == native_mode->hdisplay &&
331 			    mode->vdisplay == native_mode->vdisplay &&
332 			    mode->type & DRM_MODE_TYPE_DRIVER)
333 				break;
334 			mode = native_mode;
335 			asyc->scaler.full = true;
336 			break;
337 		default:
338 			break;
339 		}
340 	} else {
341 		mode = native_mode;
342 	}
343 
344 	if (!drm_mode_equal(adjusted_mode, mode)) {
345 		drm_mode_copy(adjusted_mode, mode);
346 		crtc_state->mode_changed = true;
347 	}
348 
349 	return 0;
350 }
351 
352 static int
353 nv50_outp_atomic_check(struct drm_encoder *encoder,
354 		       struct drm_crtc_state *crtc_state,
355 		       struct drm_connector_state *conn_state)
356 {
357 	struct drm_connector *connector = conn_state->connector;
358 	struct nouveau_connector *nv_connector = nouveau_connector(connector);
359 	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
360 	int ret;
361 
362 	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
363 					  nv_connector->native_mode);
364 	if (ret)
365 		return ret;
366 
367 	if (crtc_state->mode_changed || crtc_state->connectors_changed)
368 		asyh->or.bpc = connector->display_info.bpc;
369 
370 	return 0;
371 }
372 
373 /******************************************************************************
374  * DAC
375  *****************************************************************************/
376 static void
377 nv50_dac_disable(struct drm_encoder *encoder)
378 {
379 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
380 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
381 	if (nv_encoder->crtc)
382 		core->func->dac->ctrl(core, nv_encoder->or, 0x00000000, NULL);
383 	nv_encoder->crtc = NULL;
384 	nv50_outp_release(nv_encoder);
385 }
386 
387 static void
388 nv50_dac_enable(struct drm_encoder *encoder)
389 {
390 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
391 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
392 	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
393 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
394 
395 	nv50_outp_acquire(nv_encoder);
396 
397 	core->func->dac->ctrl(core, nv_encoder->or, 1 << nv_crtc->index, asyh);
398 	asyh->or.depth = 0;
399 
400 	nv_encoder->crtc = encoder->crtc;
401 }
402 
403 static enum drm_connector_status
404 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
405 {
406 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
407 	struct nv50_disp *disp = nv50_disp(encoder->dev);
408 	struct {
409 		struct nv50_disp_mthd_v1 base;
410 		struct nv50_disp_dac_load_v0 load;
411 	} args = {
412 		.base.version = 1,
413 		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
414 		.base.hasht  = nv_encoder->dcb->hasht,
415 		.base.hashm  = nv_encoder->dcb->hashm,
416 	};
417 	int ret;
418 
419 	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
420 	if (args.load.data == 0)
421 		args.load.data = 340;
422 
423 	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
424 	if (ret || !args.load.load)
425 		return connector_status_disconnected;
426 
427 	return connector_status_connected;
428 }
429 
430 static const struct drm_encoder_helper_funcs
431 nv50_dac_help = {
432 	.atomic_check = nv50_outp_atomic_check,
433 	.enable = nv50_dac_enable,
434 	.disable = nv50_dac_disable,
435 	.detect = nv50_dac_detect
436 };
437 
438 static void
439 nv50_dac_destroy(struct drm_encoder *encoder)
440 {
441 	drm_encoder_cleanup(encoder);
442 	kfree(encoder);
443 }
444 
445 static const struct drm_encoder_funcs
446 nv50_dac_func = {
447 	.destroy = nv50_dac_destroy,
448 };
449 
450 static int
451 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
452 {
453 	struct nouveau_drm *drm = nouveau_drm(connector->dev);
454 	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
455 	struct nvkm_i2c_bus *bus;
456 	struct nouveau_encoder *nv_encoder;
457 	struct drm_encoder *encoder;
458 	int type = DRM_MODE_ENCODER_DAC;
459 
460 	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
461 	if (!nv_encoder)
462 		return -ENOMEM;
463 	nv_encoder->dcb = dcbe;
464 
465 	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
466 	if (bus)
467 		nv_encoder->i2c = &bus->i2c;
468 
469 	encoder = to_drm_encoder(nv_encoder);
470 	encoder->possible_crtcs = dcbe->heads;
471 	encoder->possible_clones = 0;
472 	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
473 			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
474 	drm_encoder_helper_add(encoder, &nv50_dac_help);
475 
476 	drm_connector_attach_encoder(connector, encoder);
477 	return 0;
478 }
479 
480 /*
481  * audio component binding for ELD notification
482  */
483 static void
484 nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port)
485 {
486 	if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
487 		acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
488 						 port, -1);
489 }
490 
491 static int
492 nv50_audio_component_get_eld(struct device *kdev, int port, int pipe,
493 			     bool *enabled, unsigned char *buf, int max_bytes)
494 {
495 	struct drm_device *drm_dev = dev_get_drvdata(kdev);
496 	struct nouveau_drm *drm = nouveau_drm(drm_dev);
497 	struct drm_encoder *encoder;
498 	struct nouveau_encoder *nv_encoder;
499 	struct nouveau_connector *nv_connector;
500 	struct nouveau_crtc *nv_crtc;
501 	int ret = 0;
502 
503 	*enabled = false;
504 	drm_for_each_encoder(encoder, drm->dev) {
505 		nv_encoder = nouveau_encoder(encoder);
506 		nv_connector = nouveau_encoder_connector_get(nv_encoder);
507 		nv_crtc = nouveau_crtc(encoder->crtc);
508 		if (!nv_connector || !nv_crtc || nv_crtc->index != port)
509 			continue;
510 		*enabled = drm_detect_monitor_audio(nv_connector->edid);
511 		if (*enabled) {
512 			ret = drm_eld_size(nv_connector->base.eld);
513 			memcpy(buf, nv_connector->base.eld,
514 			       min(max_bytes, ret));
515 		}
516 		break;
517 	}
518 	return ret;
519 }
520 
521 static const struct drm_audio_component_ops nv50_audio_component_ops = {
522 	.get_eld = nv50_audio_component_get_eld,
523 };
524 
525 static int
526 nv50_audio_component_bind(struct device *kdev, struct device *hda_kdev,
527 			  void *data)
528 {
529 	struct drm_device *drm_dev = dev_get_drvdata(kdev);
530 	struct nouveau_drm *drm = nouveau_drm(drm_dev);
531 	struct drm_audio_component *acomp = data;
532 
533 	if (WARN_ON(!device_link_add(hda_kdev, kdev, DL_FLAG_STATELESS)))
534 		return -ENOMEM;
535 
536 	drm_modeset_lock_all(drm_dev);
537 	acomp->ops = &nv50_audio_component_ops;
538 	acomp->dev = kdev;
539 	drm->audio.component = acomp;
540 	drm_modeset_unlock_all(drm_dev);
541 	return 0;
542 }
543 
544 static void
545 nv50_audio_component_unbind(struct device *kdev, struct device *hda_kdev,
546 			    void *data)
547 {
548 	struct drm_device *drm_dev = dev_get_drvdata(kdev);
549 	struct nouveau_drm *drm = nouveau_drm(drm_dev);
550 	struct drm_audio_component *acomp = data;
551 
552 	drm_modeset_lock_all(drm_dev);
553 	drm->audio.component = NULL;
554 	acomp->ops = NULL;
555 	acomp->dev = NULL;
556 	drm_modeset_unlock_all(drm_dev);
557 }
558 
559 static const struct component_ops nv50_audio_component_bind_ops = {
560 	.bind   = nv50_audio_component_bind,
561 	.unbind = nv50_audio_component_unbind,
562 };
563 
564 static void
565 nv50_audio_component_init(struct nouveau_drm *drm)
566 {
567 	if (!component_add(drm->dev->dev, &nv50_audio_component_bind_ops))
568 		drm->audio.component_registered = true;
569 }
570 
571 static void
572 nv50_audio_component_fini(struct nouveau_drm *drm)
573 {
574 	if (drm->audio.component_registered) {
575 		component_del(drm->dev->dev, &nv50_audio_component_bind_ops);
576 		drm->audio.component_registered = false;
577 	}
578 }
579 
580 /******************************************************************************
581  * Audio
582  *****************************************************************************/
583 static void
584 nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
585 {
586 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
587 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
588 	struct nv50_disp *disp = nv50_disp(encoder->dev);
589 	struct {
590 		struct nv50_disp_mthd_v1 base;
591 		struct nv50_disp_sor_hda_eld_v0 eld;
592 	} args = {
593 		.base.version = 1,
594 		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
595 		.base.hasht   = nv_encoder->dcb->hasht,
596 		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
597 				(0x0100 << nv_crtc->index),
598 	};
599 
600 	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
601 
602 	nv50_audio_component_eld_notify(drm->audio.component, nv_crtc->index);
603 }
604 
605 static void
606 nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
607 {
608 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
609 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
610 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
611 	struct nouveau_connector *nv_connector;
612 	struct nv50_disp *disp = nv50_disp(encoder->dev);
613 	struct __packed {
614 		struct {
615 			struct nv50_disp_mthd_v1 mthd;
616 			struct nv50_disp_sor_hda_eld_v0 eld;
617 		} base;
618 		u8 data[sizeof(nv_connector->base.eld)];
619 	} args = {
620 		.base.mthd.version = 1,
621 		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
622 		.base.mthd.hasht   = nv_encoder->dcb->hasht,
623 		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
624 				     (0x0100 << nv_crtc->index),
625 	};
626 
627 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
628 	if (!drm_detect_monitor_audio(nv_connector->edid))
629 		return;
630 
631 	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
632 
633 	nvif_mthd(&disp->disp->object, 0, &args,
634 		  sizeof(args.base) + drm_eld_size(args.data));
635 
636 	nv50_audio_component_eld_notify(drm->audio.component, nv_crtc->index);
637 }
638 
639 /******************************************************************************
640  * HDMI
641  *****************************************************************************/
642 static void
643 nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
644 {
645 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
646 	struct nv50_disp *disp = nv50_disp(encoder->dev);
647 	struct {
648 		struct nv50_disp_mthd_v1 base;
649 		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
650 	} args = {
651 		.base.version = 1,
652 		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
653 		.base.hasht  = nv_encoder->dcb->hasht,
654 		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
655 			       (0x0100 << nv_crtc->index),
656 	};
657 
658 	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
659 }
660 
661 static void
662 nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
663 {
664 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
665 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
666 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
667 	struct nv50_disp *disp = nv50_disp(encoder->dev);
668 	struct {
669 		struct nv50_disp_mthd_v1 base;
670 		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
671 		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
672 	} args = {
673 		.base.version = 1,
674 		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
675 		.base.hasht  = nv_encoder->dcb->hasht,
676 		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
677 			       (0x0100 << nv_crtc->index),
678 		.pwr.state = 1,
679 		.pwr.rekey = 56, /* binary driver, and tegra, constant */
680 	};
681 	struct nouveau_connector *nv_connector;
682 	struct drm_hdmi_info *hdmi;
683 	u32 max_ac_packet;
684 	union hdmi_infoframe avi_frame;
685 	union hdmi_infoframe vendor_frame;
686 	bool high_tmds_clock_ratio = false, scrambling = false;
687 	u8 config;
688 	int ret;
689 	int size;
690 
691 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
692 	if (!drm_detect_hdmi_monitor(nv_connector->edid))
693 		return;
694 
695 	hdmi = &nv_connector->base.display_info.hdmi;
696 
697 	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi,
698 						       &nv_connector->base, mode);
699 	if (!ret) {
700 		/* We have an AVI InfoFrame, populate it to the display */
701 		args.pwr.avi_infoframe_length
702 			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
703 	}
704 
705 	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
706 							  &nv_connector->base, mode);
707 	if (!ret) {
708 		/* We have a Vendor InfoFrame, populate it to the display */
709 		args.pwr.vendor_infoframe_length
710 			= hdmi_infoframe_pack(&vendor_frame,
711 					      args.infoframes
712 					      + args.pwr.avi_infoframe_length,
713 					      17);
714 	}
715 
716 	max_ac_packet  = mode->htotal - mode->hdisplay;
717 	max_ac_packet -= args.pwr.rekey;
718 	max_ac_packet -= 18; /* constant from tegra */
719 	args.pwr.max_ac_packet = max_ac_packet / 32;
720 
721 	if (hdmi->scdc.scrambling.supported) {
722 		high_tmds_clock_ratio = mode->clock > 340000;
723 		scrambling = high_tmds_clock_ratio ||
724 			hdmi->scdc.scrambling.low_rates;
725 	}
726 
727 	args.pwr.scdc =
728 		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_SCRAMBLE * scrambling |
729 		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_DIV_BY_4 * high_tmds_clock_ratio;
730 
731 	size = sizeof(args.base)
732 		+ sizeof(args.pwr)
733 		+ args.pwr.avi_infoframe_length
734 		+ args.pwr.vendor_infoframe_length;
735 	nvif_mthd(&disp->disp->object, 0, &args, size);
736 
737 	nv50_audio_enable(encoder, mode);
738 
739 	/* If SCDC is supported by the downstream monitor, update
740 	 * divider / scrambling settings to what we programmed above.
741 	 */
742 	if (!hdmi->scdc.scrambling.supported)
743 		return;
744 
745 	ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &config);
746 	if (ret < 0) {
747 		NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
748 		return;
749 	}
750 	config &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
751 	config |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 * high_tmds_clock_ratio;
752 	config |= SCDC_SCRAMBLING_ENABLE * scrambling;
753 	ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, config);
754 	if (ret < 0)
755 		NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
756 			 config, ret);
757 }
758 
759 /******************************************************************************
760  * MST
761  *****************************************************************************/
762 #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
763 #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
764 #define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
765 
766 struct nv50_mstm {
767 	struct nouveau_encoder *outp;
768 
769 	struct drm_dp_mst_topology_mgr mgr;
770 
771 	bool modified;
772 	bool disabled;
773 	int links;
774 };
775 
776 struct nv50_mstc {
777 	struct nv50_mstm *mstm;
778 	struct drm_dp_mst_port *port;
779 	struct drm_connector connector;
780 
781 	struct drm_display_mode *native;
782 	struct edid *edid;
783 };
784 
785 struct nv50_msto {
786 	struct drm_encoder encoder;
787 
788 	struct nv50_head *head;
789 	struct nv50_mstc *mstc;
790 	bool disabled;
791 };
792 
793 static struct drm_dp_payload *
794 nv50_msto_payload(struct nv50_msto *msto)
795 {
796 	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
797 	struct nv50_mstc *mstc = msto->mstc;
798 	struct nv50_mstm *mstm = mstc->mstm;
799 	int vcpi = mstc->port->vcpi.vcpi, i;
800 
801 	WARN_ON(!mutex_is_locked(&mstm->mgr.payload_lock));
802 
803 	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
804 	for (i = 0; i < mstm->mgr.max_payloads; i++) {
805 		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
806 		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
807 			  mstm->outp->base.base.name, i, payload->vcpi,
808 			  payload->start_slot, payload->num_slots);
809 	}
810 
811 	for (i = 0; i < mstm->mgr.max_payloads; i++) {
812 		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
813 		if (payload->vcpi == vcpi)
814 			return payload;
815 	}
816 
817 	return NULL;
818 }
819 
820 static void
821 nv50_msto_cleanup(struct nv50_msto *msto)
822 {
823 	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
824 	struct nv50_mstc *mstc = msto->mstc;
825 	struct nv50_mstm *mstm = mstc->mstm;
826 
827 	if (!msto->disabled)
828 		return;
829 
830 	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
831 
832 	drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
833 
834 	msto->mstc = NULL;
835 	msto->disabled = false;
836 }
837 
838 static void
839 nv50_msto_prepare(struct nv50_msto *msto)
840 {
841 	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
842 	struct nv50_mstc *mstc = msto->mstc;
843 	struct nv50_mstm *mstm = mstc->mstm;
844 	struct {
845 		struct nv50_disp_mthd_v1 base;
846 		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
847 	} args = {
848 		.base.version = 1,
849 		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
850 		.base.hasht  = mstm->outp->dcb->hasht,
851 		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
852 			       (0x0100 << msto->head->base.index),
853 	};
854 
855 	mutex_lock(&mstm->mgr.payload_lock);
856 
857 	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
858 	if (mstc->port->vcpi.vcpi > 0) {
859 		struct drm_dp_payload *payload = nv50_msto_payload(msto);
860 		if (payload) {
861 			args.vcpi.start_slot = payload->start_slot;
862 			args.vcpi.num_slots = payload->num_slots;
863 			args.vcpi.pbn = mstc->port->vcpi.pbn;
864 			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
865 		}
866 	}
867 
868 	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
869 		  msto->encoder.name, msto->head->base.base.name,
870 		  args.vcpi.start_slot, args.vcpi.num_slots,
871 		  args.vcpi.pbn, args.vcpi.aligned_pbn);
872 
873 	nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args));
874 	mutex_unlock(&mstm->mgr.payload_lock);
875 }
876 
877 static int
878 nv50_msto_atomic_check(struct drm_encoder *encoder,
879 		       struct drm_crtc_state *crtc_state,
880 		       struct drm_connector_state *conn_state)
881 {
882 	struct drm_atomic_state *state = crtc_state->state;
883 	struct drm_connector *connector = conn_state->connector;
884 	struct nv50_mstc *mstc = nv50_mstc(connector);
885 	struct nv50_mstm *mstm = mstc->mstm;
886 	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
887 	int slots;
888 	int ret;
889 
890 	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
891 					  mstc->native);
892 	if (ret)
893 		return ret;
894 
895 	if (!crtc_state->mode_changed && !crtc_state->connectors_changed)
896 		return 0;
897 
898 	/*
899 	 * When restoring duplicated states, we need to make sure that the bw
900 	 * remains the same and avoid recalculating it, as the connector's bpc
901 	 * may have changed after the state was duplicated
902 	 */
903 	if (!state->duplicated) {
904 		const int clock = crtc_state->adjusted_mode.clock;
905 
906 		/*
907 		 * XXX: Since we don't use HDR in userspace quite yet, limit
908 		 * the bpc to 8 to save bandwidth on the topology. In the
909 		 * future, we'll want to properly fix this by dynamically
910 		 * selecting the highest possible bpc that would fit in the
911 		 * topology
912 		 */
913 		asyh->or.bpc = min(connector->display_info.bpc, 8U);
914 		asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3, false);
915 	}
916 
917 	slots = drm_dp_atomic_find_vcpi_slots(state, &mstm->mgr, mstc->port,
918 					      asyh->dp.pbn, 0);
919 	if (slots < 0)
920 		return slots;
921 
922 	asyh->dp.tu = slots;
923 
924 	return 0;
925 }
926 
927 static u8
928 nv50_dp_bpc_to_depth(unsigned int bpc)
929 {
930 	switch (bpc) {
931 	case  6: return 0x2;
932 	case  8: return 0x5;
933 	case 10: /* fall-through */
934 	default: return 0x6;
935 	}
936 }
937 
938 static void
939 nv50_msto_enable(struct drm_encoder *encoder)
940 {
941 	struct nv50_head *head = nv50_head(encoder->crtc);
942 	struct nv50_head_atom *armh = nv50_head_atom(head->base.base.state);
943 	struct nv50_msto *msto = nv50_msto(encoder);
944 	struct nv50_mstc *mstc = NULL;
945 	struct nv50_mstm *mstm = NULL;
946 	struct drm_connector *connector;
947 	struct drm_connector_list_iter conn_iter;
948 	u8 proto;
949 	bool r;
950 
951 	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
952 	drm_for_each_connector_iter(connector, &conn_iter) {
953 		if (connector->state->best_encoder == &msto->encoder) {
954 			mstc = nv50_mstc(connector);
955 			mstm = mstc->mstm;
956 			break;
957 		}
958 	}
959 	drm_connector_list_iter_end(&conn_iter);
960 
961 	if (WARN_ON(!mstc))
962 		return;
963 
964 	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, armh->dp.pbn,
965 				     armh->dp.tu);
966 	if (!r)
967 		DRM_DEBUG_KMS("Failed to allocate VCPI\n");
968 
969 	if (!mstm->links++)
970 		nv50_outp_acquire(mstm->outp);
971 
972 	if (mstm->outp->link & 1)
973 		proto = 0x8;
974 	else
975 		proto = 0x9;
976 
977 	mstm->outp->update(mstm->outp, head->base.index, armh, proto,
978 			   nv50_dp_bpc_to_depth(armh->or.bpc));
979 
980 	msto->mstc = mstc;
981 	mstm->modified = true;
982 }
983 
984 static void
985 nv50_msto_disable(struct drm_encoder *encoder)
986 {
987 	struct nv50_msto *msto = nv50_msto(encoder);
988 	struct nv50_mstc *mstc = msto->mstc;
989 	struct nv50_mstm *mstm = mstc->mstm;
990 
991 	drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);
992 
993 	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
994 	mstm->modified = true;
995 	if (!--mstm->links)
996 		mstm->disabled = true;
997 	msto->disabled = true;
998 }
999 
1000 static const struct drm_encoder_helper_funcs
1001 nv50_msto_help = {
1002 	.disable = nv50_msto_disable,
1003 	.enable = nv50_msto_enable,
1004 	.atomic_check = nv50_msto_atomic_check,
1005 };
1006 
1007 static void
1008 nv50_msto_destroy(struct drm_encoder *encoder)
1009 {
1010 	struct nv50_msto *msto = nv50_msto(encoder);
1011 	drm_encoder_cleanup(&msto->encoder);
1012 	kfree(msto);
1013 }
1014 
1015 static const struct drm_encoder_funcs
1016 nv50_msto = {
1017 	.destroy = nv50_msto_destroy,
1018 };
1019 
1020 static struct nv50_msto *
1021 nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id)
1022 {
1023 	struct nv50_msto *msto;
1024 	int ret;
1025 
1026 	msto = kzalloc(sizeof(*msto), GFP_KERNEL);
1027 	if (!msto)
1028 		return ERR_PTR(-ENOMEM);
1029 
1030 	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
1031 			       DRM_MODE_ENCODER_DPMST, "mst-%d", id);
1032 	if (ret) {
1033 		kfree(msto);
1034 		return ERR_PTR(ret);
1035 	}
1036 
1037 	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
1038 	msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base);
1039 	msto->head = head;
1040 	return msto;
1041 }
1042 
1043 static struct drm_encoder *
1044 nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
1045 			      struct drm_connector_state *connector_state)
1046 {
1047 	struct nv50_mstc *mstc = nv50_mstc(connector);
1048 	struct drm_crtc *crtc = connector_state->crtc;
1049 
1050 	if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1051 		return NULL;
1052 
1053 	return &nv50_head(crtc)->msto->encoder;
1054 }
1055 
1056 static enum drm_mode_status
1057 nv50_mstc_mode_valid(struct drm_connector *connector,
1058 		     struct drm_display_mode *mode)
1059 {
1060 	return MODE_OK;
1061 }
1062 
1063 static int
1064 nv50_mstc_get_modes(struct drm_connector *connector)
1065 {
1066 	struct nv50_mstc *mstc = nv50_mstc(connector);
1067 	int ret = 0;
1068 
1069 	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
1070 	drm_connector_update_edid_property(&mstc->connector, mstc->edid);
1071 	if (mstc->edid)
1072 		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
1073 
1074 	if (!mstc->connector.display_info.bpc)
1075 		mstc->connector.display_info.bpc = 8;
1076 
1077 	if (mstc->native)
1078 		drm_mode_destroy(mstc->connector.dev, mstc->native);
1079 	mstc->native = nouveau_conn_native_mode(&mstc->connector);
1080 	return ret;
1081 }
1082 
1083 static int
1084 nv50_mstc_atomic_check(struct drm_connector *connector,
1085 		       struct drm_atomic_state *state)
1086 {
1087 	struct nv50_mstc *mstc = nv50_mstc(connector);
1088 	struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
1089 	struct drm_connector_state *new_conn_state =
1090 		drm_atomic_get_new_connector_state(state, connector);
1091 	struct drm_connector_state *old_conn_state =
1092 		drm_atomic_get_old_connector_state(state, connector);
1093 	struct drm_crtc_state *crtc_state;
1094 	struct drm_crtc *new_crtc = new_conn_state->crtc;
1095 
1096 	if (!old_conn_state->crtc)
1097 		return 0;
1098 
1099 	/* We only want to free VCPI if this state disables the CRTC on this
1100 	 * connector
1101 	 */
1102 	if (new_crtc) {
1103 		crtc_state = drm_atomic_get_new_crtc_state(state, new_crtc);
1104 
1105 		if (!crtc_state ||
1106 		    !drm_atomic_crtc_needs_modeset(crtc_state) ||
1107 		    crtc_state->enable)
1108 			return 0;
1109 	}
1110 
1111 	return drm_dp_atomic_release_vcpi_slots(state, mgr, mstc->port);
1112 }
1113 
1114 static int
1115 nv50_mstc_detect(struct drm_connector *connector,
1116 		 struct drm_modeset_acquire_ctx *ctx, bool force)
1117 {
1118 	struct nv50_mstc *mstc = nv50_mstc(connector);
1119 	int ret;
1120 
1121 	if (drm_connector_is_unregistered(connector))
1122 		return connector_status_disconnected;
1123 
1124 	ret = pm_runtime_get_sync(connector->dev->dev);
1125 	if (ret < 0 && ret != -EACCES)
1126 		return connector_status_disconnected;
1127 
1128 	ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr,
1129 				     mstc->port);
1130 
1131 	pm_runtime_mark_last_busy(connector->dev->dev);
1132 	pm_runtime_put_autosuspend(connector->dev->dev);
1133 	return ret;
1134 }
1135 
1136 static const struct drm_connector_helper_funcs
1137 nv50_mstc_help = {
1138 	.get_modes = nv50_mstc_get_modes,
1139 	.mode_valid = nv50_mstc_mode_valid,
1140 	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
1141 	.atomic_check = nv50_mstc_atomic_check,
1142 	.detect_ctx = nv50_mstc_detect,
1143 };
1144 
1145 static void
1146 nv50_mstc_destroy(struct drm_connector *connector)
1147 {
1148 	struct nv50_mstc *mstc = nv50_mstc(connector);
1149 
1150 	drm_connector_cleanup(&mstc->connector);
1151 	drm_dp_mst_put_port_malloc(mstc->port);
1152 
1153 	kfree(mstc);
1154 }
1155 
1156 static const struct drm_connector_funcs
1157 nv50_mstc = {
1158 	.reset = nouveau_conn_reset,
1159 	.fill_modes = drm_helper_probe_single_connector_modes,
1160 	.destroy = nv50_mstc_destroy,
1161 	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
1162 	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
1163 	.atomic_set_property = nouveau_conn_atomic_set_property,
1164 	.atomic_get_property = nouveau_conn_atomic_get_property,
1165 };
1166 
1167 static int
1168 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
1169 	      const char *path, struct nv50_mstc **pmstc)
1170 {
1171 	struct drm_device *dev = mstm->outp->base.base.dev;
1172 	struct drm_crtc *crtc;
1173 	struct nv50_mstc *mstc;
1174 	int ret;
1175 
1176 	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
1177 		return -ENOMEM;
1178 	mstc->mstm = mstm;
1179 	mstc->port = port;
1180 
1181 	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
1182 				 DRM_MODE_CONNECTOR_DisplayPort);
1183 	if (ret) {
1184 		kfree(*pmstc);
1185 		*pmstc = NULL;
1186 		return ret;
1187 	}
1188 
1189 	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
1190 
1191 	mstc->connector.funcs->reset(&mstc->connector);
1192 	nouveau_conn_attach_properties(&mstc->connector);
1193 
1194 	drm_for_each_crtc(crtc, dev) {
1195 		if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1196 			continue;
1197 
1198 		drm_connector_attach_encoder(&mstc->connector,
1199 					     &nv50_head(crtc)->msto->encoder);
1200 	}
1201 
1202 	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
1203 	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
1204 	drm_connector_set_path_property(&mstc->connector, path);
1205 	drm_dp_mst_get_port_malloc(port);
1206 	return 0;
1207 }
1208 
1209 static void
1210 nv50_mstm_cleanup(struct nv50_mstm *mstm)
1211 {
1212 	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1213 	struct drm_encoder *encoder;
1214 	int ret;
1215 
1216 	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
1217 	ret = drm_dp_check_act_status(&mstm->mgr);
1218 
1219 	ret = drm_dp_update_payload_part2(&mstm->mgr);
1220 
1221 	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1222 		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1223 			struct nv50_msto *msto = nv50_msto(encoder);
1224 			struct nv50_mstc *mstc = msto->mstc;
1225 			if (mstc && mstc->mstm == mstm)
1226 				nv50_msto_cleanup(msto);
1227 		}
1228 	}
1229 
1230 	mstm->modified = false;
1231 }
1232 
1233 static void
1234 nv50_mstm_prepare(struct nv50_mstm *mstm)
1235 {
1236 	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1237 	struct drm_encoder *encoder;
1238 	int ret;
1239 
1240 	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
1241 	ret = drm_dp_update_payload_part1(&mstm->mgr);
1242 
1243 	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1244 		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1245 			struct nv50_msto *msto = nv50_msto(encoder);
1246 			struct nv50_mstc *mstc = msto->mstc;
1247 			if (mstc && mstc->mstm == mstm)
1248 				nv50_msto_prepare(msto);
1249 		}
1250 	}
1251 
1252 	if (mstm->disabled) {
1253 		if (!mstm->links)
1254 			nv50_outp_release(mstm->outp);
1255 		mstm->disabled = false;
1256 	}
1257 }
1258 
1259 static struct drm_connector *
1260 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1261 			struct drm_dp_mst_port *port, const char *path)
1262 {
1263 	struct nv50_mstm *mstm = nv50_mstm(mgr);
1264 	struct nv50_mstc *mstc;
1265 	int ret;
1266 
1267 	ret = nv50_mstc_new(mstm, port, path, &mstc);
1268 	if (ret)
1269 		return NULL;
1270 
1271 	return &mstc->connector;
1272 }
1273 
1274 static const struct drm_dp_mst_topology_cbs
1275 nv50_mstm = {
1276 	.add_connector = nv50_mstm_add_connector,
1277 };
1278 
1279 void
1280 nv50_mstm_service(struct nv50_mstm *mstm)
1281 {
1282 	struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
1283 	bool handled = true;
1284 	int ret;
1285 	u8 esi[8] = {};
1286 
1287 	if (!aux)
1288 		return;
1289 
1290 	while (handled) {
1291 		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
1292 		if (ret != 8) {
1293 			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1294 			return;
1295 		}
1296 
1297 		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
1298 		if (!handled)
1299 			break;
1300 
1301 		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
1302 	}
1303 }
1304 
1305 void
1306 nv50_mstm_remove(struct nv50_mstm *mstm)
1307 {
1308 	if (mstm)
1309 		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1310 }
1311 
1312 static int
1313 nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
1314 {
1315 	struct nouveau_encoder *outp = mstm->outp;
1316 	struct {
1317 		struct nv50_disp_mthd_v1 base;
1318 		struct nv50_disp_sor_dp_mst_link_v0 mst;
1319 	} args = {
1320 		.base.version = 1,
1321 		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
1322 		.base.hasht = outp->dcb->hasht,
1323 		.base.hashm = outp->dcb->hashm,
1324 		.mst.state = state,
1325 	};
1326 	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
1327 	struct nvif_object *disp = &drm->display->disp.object;
1328 	int ret;
1329 
1330 	if (dpcd >= 0x12) {
1331 		/* Even if we're enabling MST, start with disabling the
1332 		 * branching unit to clear any sink-side MST topology state
1333 		 * that wasn't set by us
1334 		 */
1335 		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, 0);
1336 		if (ret < 0)
1337 			return ret;
1338 
1339 		if (state) {
1340 			/* Now, start initializing */
1341 			ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL,
1342 						 DP_MST_EN);
1343 			if (ret < 0)
1344 				return ret;
1345 		}
1346 	}
1347 
1348 	return nvif_mthd(disp, 0, &args, sizeof(args));
1349 }
1350 
1351 int
1352 nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
1353 {
1354 	struct drm_dp_aux *aux;
1355 	int ret;
1356 	bool old_state, new_state;
1357 	u8 mstm_ctrl;
1358 
1359 	if (!mstm)
1360 		return 0;
1361 
1362 	mutex_lock(&mstm->mgr.lock);
1363 
1364 	old_state = mstm->mgr.mst_state;
1365 	new_state = old_state;
1366 	aux = mstm->mgr.aux;
1367 
1368 	if (old_state) {
1369 		/* Just check that the MST hub is still as we expect it */
1370 		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CTRL, &mstm_ctrl);
1371 		if (ret < 0 || !(mstm_ctrl & DP_MST_EN)) {
1372 			DRM_DEBUG_KMS("Hub gone, disabling MST topology\n");
1373 			new_state = false;
1374 		}
1375 	} else if (dpcd[0] >= 0x12) {
1376 		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CAP, &dpcd[1]);
1377 		if (ret < 0)
1378 			goto probe_error;
1379 
1380 		if (!(dpcd[1] & DP_MST_CAP))
1381 			dpcd[0] = 0x11;
1382 		else
1383 			new_state = allow;
1384 	}
1385 
1386 	if (new_state == old_state) {
1387 		mutex_unlock(&mstm->mgr.lock);
1388 		return new_state;
1389 	}
1390 
1391 	ret = nv50_mstm_enable(mstm, dpcd[0], new_state);
1392 	if (ret)
1393 		goto probe_error;
1394 
1395 	mutex_unlock(&mstm->mgr.lock);
1396 
1397 	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, new_state);
1398 	if (ret)
1399 		return nv50_mstm_enable(mstm, dpcd[0], 0);
1400 
1401 	return new_state;
1402 
1403 probe_error:
1404 	mutex_unlock(&mstm->mgr.lock);
1405 	return ret;
1406 }
1407 
1408 static void
1409 nv50_mstm_fini(struct nv50_mstm *mstm)
1410 {
1411 	if (mstm && mstm->mgr.mst_state)
1412 		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
1413 }
1414 
1415 static void
1416 nv50_mstm_init(struct nv50_mstm *mstm, bool runtime)
1417 {
1418 	int ret;
1419 
1420 	if (!mstm || !mstm->mgr.mst_state)
1421 		return;
1422 
1423 	ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime);
1424 	if (ret == -1) {
1425 		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1426 		drm_kms_helper_hotplug_event(mstm->mgr.dev);
1427 	}
1428 }
1429 
1430 static void
1431 nv50_mstm_del(struct nv50_mstm **pmstm)
1432 {
1433 	struct nv50_mstm *mstm = *pmstm;
1434 	if (mstm) {
1435 		drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
1436 		kfree(*pmstm);
1437 		*pmstm = NULL;
1438 	}
1439 }
1440 
1441 static int
1442 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
1443 	      int conn_base_id, struct nv50_mstm **pmstm)
1444 {
1445 	const int max_payloads = hweight8(outp->dcb->heads);
1446 	struct drm_device *dev = outp->base.base.dev;
1447 	struct nv50_mstm *mstm;
1448 	int ret;
1449 	u8 dpcd;
1450 
1451 	/* This is a workaround for some monitors not functioning
1452 	 * correctly in MST mode on initial module load.  I think
1453 	 * some bad interaction with the VBIOS may be responsible.
1454 	 *
1455 	 * A good ol' off and on again seems to work here ;)
1456 	 */
1457 	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
1458 	if (ret >= 0 && dpcd >= 0x12)
1459 		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
1460 
1461 	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
1462 		return -ENOMEM;
1463 	mstm->outp = outp;
1464 	mstm->mgr.cbs = &nv50_mstm;
1465 
1466 	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
1467 					   max_payloads, conn_base_id);
1468 	if (ret)
1469 		return ret;
1470 
1471 	return 0;
1472 }
1473 
1474 /******************************************************************************
1475  * SOR
1476  *****************************************************************************/
1477 static void
1478 nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
1479 		struct nv50_head_atom *asyh, u8 proto, u8 depth)
1480 {
1481 	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
1482 	struct nv50_core *core = disp->core;
1483 
1484 	if (!asyh) {
1485 		nv_encoder->ctrl &= ~BIT(head);
1486 		if (!(nv_encoder->ctrl & 0x0000000f))
1487 			nv_encoder->ctrl = 0;
1488 	} else {
1489 		nv_encoder->ctrl |= proto << 8;
1490 		nv_encoder->ctrl |= BIT(head);
1491 		asyh->or.depth = depth;
1492 	}
1493 
1494 	core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh);
1495 }
1496 
1497 static void
1498 nv50_sor_disable(struct drm_encoder *encoder)
1499 {
1500 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1501 	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1502 
1503 	nv_encoder->crtc = NULL;
1504 
1505 	if (nv_crtc) {
1506 		struct nvkm_i2c_aux *aux = nv_encoder->aux;
1507 		u8 pwr;
1508 
1509 		if (aux) {
1510 			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
1511 			if (ret == 0) {
1512 				pwr &= ~DP_SET_POWER_MASK;
1513 				pwr |=  DP_SET_POWER_D3;
1514 				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
1515 			}
1516 		}
1517 
1518 		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
1519 		nv50_audio_disable(encoder, nv_crtc);
1520 		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
1521 		nv50_outp_release(nv_encoder);
1522 	}
1523 }
1524 
1525 static void
1526 nv50_sor_enable(struct drm_encoder *encoder)
1527 {
1528 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1529 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1530 	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
1531 	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
1532 	struct {
1533 		struct nv50_disp_mthd_v1 base;
1534 		struct nv50_disp_sor_lvds_script_v0 lvds;
1535 	} lvds = {
1536 		.base.version = 1,
1537 		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
1538 		.base.hasht   = nv_encoder->dcb->hasht,
1539 		.base.hashm   = nv_encoder->dcb->hashm,
1540 	};
1541 	struct nv50_disp *disp = nv50_disp(encoder->dev);
1542 	struct drm_device *dev = encoder->dev;
1543 	struct nouveau_drm *drm = nouveau_drm(dev);
1544 	struct nouveau_connector *nv_connector;
1545 	struct nvbios *bios = &drm->vbios;
1546 	u8 proto = 0xf;
1547 	u8 depth = 0x0;
1548 
1549 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1550 	nv_encoder->crtc = encoder->crtc;
1551 	nv50_outp_acquire(nv_encoder);
1552 
1553 	switch (nv_encoder->dcb->type) {
1554 	case DCB_OUTPUT_TMDS:
1555 		if (nv_encoder->link & 1) {
1556 			proto = 0x1;
1557 			/* Only enable dual-link if:
1558 			 *  - Need to (i.e. rate > 165MHz)
1559 			 *  - DCB says we can
1560 			 *  - Not an HDMI monitor, since there's no dual-link
1561 			 *    on HDMI.
1562 			 */
1563 			if (mode->clock >= 165000 &&
1564 			    nv_encoder->dcb->duallink_possible &&
1565 			    !drm_detect_hdmi_monitor(nv_connector->edid))
1566 				proto |= 0x4;
1567 		} else {
1568 			proto = 0x2;
1569 		}
1570 
1571 		nv50_hdmi_enable(&nv_encoder->base.base, mode);
1572 		break;
1573 	case DCB_OUTPUT_LVDS:
1574 		proto = 0x0;
1575 
1576 		if (bios->fp_no_ddc) {
1577 			if (bios->fp.dual_link)
1578 				lvds.lvds.script |= 0x0100;
1579 			if (bios->fp.if_is_24bit)
1580 				lvds.lvds.script |= 0x0200;
1581 		} else {
1582 			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1583 				if (((u8 *)nv_connector->edid)[121] == 2)
1584 					lvds.lvds.script |= 0x0100;
1585 			} else
1586 			if (mode->clock >= bios->fp.duallink_transition_clk) {
1587 				lvds.lvds.script |= 0x0100;
1588 			}
1589 
1590 			if (lvds.lvds.script & 0x0100) {
1591 				if (bios->fp.strapless_is_24bit & 2)
1592 					lvds.lvds.script |= 0x0200;
1593 			} else {
1594 				if (bios->fp.strapless_is_24bit & 1)
1595 					lvds.lvds.script |= 0x0200;
1596 			}
1597 
1598 			if (asyh->or.bpc == 8)
1599 				lvds.lvds.script |= 0x0200;
1600 		}
1601 
1602 		nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds));
1603 		break;
1604 	case DCB_OUTPUT_DP:
1605 		depth = nv50_dp_bpc_to_depth(asyh->or.bpc);
1606 
1607 		if (nv_encoder->link & 1)
1608 			proto = 0x8;
1609 		else
1610 			proto = 0x9;
1611 
1612 		nv50_audio_enable(encoder, mode);
1613 		break;
1614 	default:
1615 		BUG();
1616 		break;
1617 	}
1618 
1619 	nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
1620 }
1621 
1622 static const struct drm_encoder_helper_funcs
1623 nv50_sor_help = {
1624 	.atomic_check = nv50_outp_atomic_check,
1625 	.enable = nv50_sor_enable,
1626 	.disable = nv50_sor_disable,
1627 };
1628 
1629 static void
1630 nv50_sor_destroy(struct drm_encoder *encoder)
1631 {
1632 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1633 	nv50_mstm_del(&nv_encoder->dp.mstm);
1634 	drm_encoder_cleanup(encoder);
1635 	kfree(encoder);
1636 }
1637 
1638 static const struct drm_encoder_funcs
1639 nv50_sor_func = {
1640 	.destroy = nv50_sor_destroy,
1641 };
1642 
1643 static bool nv50_has_mst(struct nouveau_drm *drm)
1644 {
1645 	struct nvkm_bios *bios = nvxx_bios(&drm->client.device);
1646 	u32 data;
1647 	u8 ver, hdr, cnt, len;
1648 
1649 	data = nvbios_dp_table(bios, &ver, &hdr, &cnt, &len);
1650 	return data && ver >= 0x40 && (nvbios_rd08(bios, data + 0x08) & 0x04);
1651 }
1652 
1653 static int
1654 nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1655 {
1656 	struct nouveau_connector *nv_connector = nouveau_connector(connector);
1657 	struct nouveau_drm *drm = nouveau_drm(connector->dev);
1658 	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1659 	struct nouveau_encoder *nv_encoder;
1660 	struct drm_encoder *encoder;
1661 	int type, ret;
1662 
1663 	switch (dcbe->type) {
1664 	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
1665 	case DCB_OUTPUT_TMDS:
1666 	case DCB_OUTPUT_DP:
1667 	default:
1668 		type = DRM_MODE_ENCODER_TMDS;
1669 		break;
1670 	}
1671 
1672 	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1673 	if (!nv_encoder)
1674 		return -ENOMEM;
1675 	nv_encoder->dcb = dcbe;
1676 	nv_encoder->update = nv50_sor_update;
1677 
1678 	encoder = to_drm_encoder(nv_encoder);
1679 	encoder->possible_crtcs = dcbe->heads;
1680 	encoder->possible_clones = 0;
1681 	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
1682 			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
1683 	drm_encoder_helper_add(encoder, &nv50_sor_help);
1684 
1685 	drm_connector_attach_encoder(connector, encoder);
1686 
1687 	if (dcbe->type == DCB_OUTPUT_DP) {
1688 		struct nv50_disp *disp = nv50_disp(encoder->dev);
1689 		struct nvkm_i2c_aux *aux =
1690 			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
1691 		if (aux) {
1692 			if (disp->disp->object.oclass < GF110_DISP) {
1693 				/* HW has no support for address-only
1694 				 * transactions, so we're required to
1695 				 * use custom I2C-over-AUX code.
1696 				 */
1697 				nv_encoder->i2c = &aux->i2c;
1698 			} else {
1699 				nv_encoder->i2c = &nv_connector->aux.ddc;
1700 			}
1701 			nv_encoder->aux = aux;
1702 		}
1703 
1704 		if (nv_connector->type != DCB_CONNECTOR_eDP &&
1705 		    nv50_has_mst(drm)) {
1706 			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux,
1707 					    16, nv_connector->base.base.id,
1708 					    &nv_encoder->dp.mstm);
1709 			if (ret)
1710 				return ret;
1711 		}
1712 	} else {
1713 		struct nvkm_i2c_bus *bus =
1714 			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
1715 		if (bus)
1716 			nv_encoder->i2c = &bus->i2c;
1717 	}
1718 
1719 	return 0;
1720 }
1721 
1722 /******************************************************************************
1723  * PIOR
1724  *****************************************************************************/
1725 static int
1726 nv50_pior_atomic_check(struct drm_encoder *encoder,
1727 		       struct drm_crtc_state *crtc_state,
1728 		       struct drm_connector_state *conn_state)
1729 {
1730 	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
1731 	if (ret)
1732 		return ret;
1733 	crtc_state->adjusted_mode.clock *= 2;
1734 	return 0;
1735 }
1736 
1737 static void
1738 nv50_pior_disable(struct drm_encoder *encoder)
1739 {
1740 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1741 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
1742 	if (nv_encoder->crtc)
1743 		core->func->pior->ctrl(core, nv_encoder->or, 0x00000000, NULL);
1744 	nv_encoder->crtc = NULL;
1745 	nv50_outp_release(nv_encoder);
1746 }
1747 
1748 static void
1749 nv50_pior_enable(struct drm_encoder *encoder)
1750 {
1751 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1752 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1753 	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
1754 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
1755 	u8 owner = 1 << nv_crtc->index;
1756 	u8 proto;
1757 
1758 	nv50_outp_acquire(nv_encoder);
1759 
1760 	switch (asyh->or.bpc) {
1761 	case 10: asyh->or.depth = 0x6; break;
1762 	case  8: asyh->or.depth = 0x5; break;
1763 	case  6: asyh->or.depth = 0x2; break;
1764 	default: asyh->or.depth = 0x0; break;
1765 	}
1766 
1767 	switch (nv_encoder->dcb->type) {
1768 	case DCB_OUTPUT_TMDS:
1769 	case DCB_OUTPUT_DP:
1770 		proto = 0x0;
1771 		break;
1772 	default:
1773 		BUG();
1774 		break;
1775 	}
1776 
1777 	core->func->pior->ctrl(core, nv_encoder->or, (proto << 8) | owner, asyh);
1778 	nv_encoder->crtc = encoder->crtc;
1779 }
1780 
1781 static const struct drm_encoder_helper_funcs
1782 nv50_pior_help = {
1783 	.atomic_check = nv50_pior_atomic_check,
1784 	.enable = nv50_pior_enable,
1785 	.disable = nv50_pior_disable,
1786 };
1787 
1788 static void
1789 nv50_pior_destroy(struct drm_encoder *encoder)
1790 {
1791 	drm_encoder_cleanup(encoder);
1792 	kfree(encoder);
1793 }
1794 
1795 static const struct drm_encoder_funcs
1796 nv50_pior_func = {
1797 	.destroy = nv50_pior_destroy,
1798 };
1799 
1800 static int
1801 nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
1802 {
1803 	struct nouveau_drm *drm = nouveau_drm(connector->dev);
1804 	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1805 	struct nvkm_i2c_bus *bus = NULL;
1806 	struct nvkm_i2c_aux *aux = NULL;
1807 	struct i2c_adapter *ddc;
1808 	struct nouveau_encoder *nv_encoder;
1809 	struct drm_encoder *encoder;
1810 	int type;
1811 
1812 	switch (dcbe->type) {
1813 	case DCB_OUTPUT_TMDS:
1814 		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
1815 		ddc  = bus ? &bus->i2c : NULL;
1816 		type = DRM_MODE_ENCODER_TMDS;
1817 		break;
1818 	case DCB_OUTPUT_DP:
1819 		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
1820 		ddc  = aux ? &aux->i2c : NULL;
1821 		type = DRM_MODE_ENCODER_TMDS;
1822 		break;
1823 	default:
1824 		return -ENODEV;
1825 	}
1826 
1827 	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1828 	if (!nv_encoder)
1829 		return -ENOMEM;
1830 	nv_encoder->dcb = dcbe;
1831 	nv_encoder->i2c = ddc;
1832 	nv_encoder->aux = aux;
1833 
1834 	encoder = to_drm_encoder(nv_encoder);
1835 	encoder->possible_crtcs = dcbe->heads;
1836 	encoder->possible_clones = 0;
1837 	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
1838 			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
1839 	drm_encoder_helper_add(encoder, &nv50_pior_help);
1840 
1841 	drm_connector_attach_encoder(connector, encoder);
1842 	return 0;
1843 }
1844 
1845 /******************************************************************************
1846  * Atomic
1847  *****************************************************************************/
1848 
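/* Submit an update on the core channel: prepare any modified MST managers,
 * kick the update (interlocked against the given channel mask), wait for the
 * core notifier to signal completion, then finish MST cleanup.
 */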
1849 static void
1850 nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock)
1851 {
1852 	struct nouveau_drm *drm = nouveau_drm(state->dev);
1853 	struct nv50_disp *disp = nv50_disp(drm->dev);
1854 	struct nv50_core *core = disp->core;
1855 	struct nv50_mstm *mstm;
1856 	struct drm_encoder *encoder;
1857 
1858 	NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);
1859 
1860 	drm_for_each_encoder(encoder, drm->dev) {
1861 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
1862 			mstm = nouveau_encoder(encoder)->dp.mstm;
1863 			if (mstm && mstm->modified)
1864 				nv50_mstm_prepare(mstm);
1865 		}
1866 	}
1867 
1868 	core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
1869 	core->func->update(core, interlock, true);
1870 	if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
1871 				       disp->core->chan.base.device))
1872 		NV_ERROR(drm, "core notifier timeout\n");
1873 
1874 	drm_for_each_encoder(encoder, drm->dev) {
1875 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
1876 			mstm = nouveau_encoder(encoder)->dp.mstm;
1877 			if (mstm && mstm->modified)
1878 				nv50_mstm_cleanup(mstm);
1879 		}
1880 	}
1881 }
1882 
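/* Kick an update on every window channel referenced by the interlock mask. */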
1883 static void
1884 nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock)
1885 {
1886 	struct drm_plane_state *new_plane_state;
1887 	struct drm_plane *plane;
1888 	int i;
1889 
1890 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1891 		struct nv50_wndw *wndw = nv50_wndw(plane);
1892 		if (interlock[wndw->interlock.type] & wndw->interlock.data) {
1893 			if (wndw->func->update)
1894 				wndw->func->update(wndw, interlock);
1895 		}
1896 	}
1897 }
1898 
1899 static void
1900 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
1901 {
1902 	struct drm_device *dev = state->dev;
1903 	struct drm_crtc_state *new_crtc_state, *old_crtc_state;
1904 	struct drm_crtc *crtc;
1905 	struct drm_plane_state *new_plane_state;
1906 	struct drm_plane *plane;
1907 	struct nouveau_drm *drm = nouveau_drm(dev);
1908 	struct nv50_disp *disp = nv50_disp(dev);
1909 	struct nv50_atom *atom = nv50_atom(state);
1910 	struct nv50_core *core = disp->core;
1911 	struct nv50_outp_atom *outp, *outt;
1912 	u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
1913 	int i;
1914 
1915 	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
1916 	drm_atomic_helper_wait_for_fences(dev, state, false);
1917 	drm_atomic_helper_wait_for_dependencies(state);
1918 	drm_atomic_helper_update_legacy_modeset_state(dev, state);
1919 
1920 	if (atom->lock_core)
1921 		mutex_lock(&disp->mutex);
1922 
1923 	/* Disable head(s). */
1924 	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
1925 		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
1926 		struct nv50_head *head = nv50_head(crtc);
1927 
1928 		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
1929 			  asyh->clr.mask, asyh->set.mask);
1930 
1931 		if (old_crtc_state->active && !new_crtc_state->active) {
1932 			pm_runtime_put_noidle(dev->dev);
1933 			drm_crtc_vblank_off(crtc);
1934 		}
1935 
1936 		if (asyh->clr.mask) {
1937 			nv50_head_flush_clr(head, asyh, atom->flush_disable);
1938 			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
1939 		}
1940 	}
1941 
1942 	/* Disable plane(s). */
1943 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1944 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
1945 		struct nv50_wndw *wndw = nv50_wndw(plane);
1946 
1947 		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
1948 			  asyw->clr.mask, asyw->set.mask);
1949 		if (!asyw->clr.mask)
1950 			continue;
1951 
1952 		nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
1953 	}
1954 
1955 	/* Disable output path(s). */
1956 	list_for_each_entry(outp, &atom->outp, head) {
1957 		const struct drm_encoder_helper_funcs *help;
1958 		struct drm_encoder *encoder;
1959 
1960 		encoder = outp->encoder;
1961 		help = encoder->helper_private;
1962 
1963 		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
1964 			  outp->clr.mask, outp->set.mask);
1965 
1966 		if (outp->clr.mask) {
1967 			help->disable(encoder);
1968 			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
1969 			if (outp->flush_disable) {
1970 				nv50_disp_atomic_commit_wndw(state, interlock);
1971 				nv50_disp_atomic_commit_core(state, interlock);
1972 				memset(interlock, 0x00, sizeof(interlock));
1973 			}
1974 		}
1975 	}
1976 
1977 	/* Flush disable. */
1978 	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
1979 		if (atom->flush_disable) {
1980 			nv50_disp_atomic_commit_wndw(state, interlock);
1981 			nv50_disp_atomic_commit_core(state, interlock);
1982 			memset(interlock, 0x00, sizeof(interlock));
1983 		}
1984 	}
1985 
1986 	/* Update output path(s). */
1987 	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
1988 		const struct drm_encoder_helper_funcs *help;
1989 		struct drm_encoder *encoder;
1990 
1991 		encoder = outp->encoder;
1992 		help = encoder->helper_private;
1993 
1994 		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
1995 			  outp->set.mask, outp->clr.mask);
1996 
1997 		if (outp->set.mask) {
1998 			help->enable(encoder);
1999 			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2000 		}
2001 
2002 		list_del(&outp->head);
2003 		kfree(outp);
2004 	}
2005 
2006 	/* Update head(s). */
2007 	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2008 		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2009 		struct nv50_head *head = nv50_head(crtc);
2010 
2011 		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2012 			  asyh->set.mask, asyh->clr.mask);
2013 
2014 		if (asyh->set.mask) {
2015 			nv50_head_flush_set(head, asyh);
2016 			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2017 		}
2018 
2019 		if (new_crtc_state->active) {
2020 			if (!old_crtc_state->active) {
2021 				drm_crtc_vblank_on(crtc);
2022 				pm_runtime_get_noresume(dev->dev);
2023 			}
2024 			if (new_crtc_state->event)
2025 				drm_crtc_vblank_get(crtc);
2026 		}
2027 	}
2028 
2029 	/* Update window->head assignment.
2030 	 *
2031 	 * This has to happen in an update that's not interlocked with
2032 	 * any window channels to avoid hitting HW error checks.
2033 	 *
2034 	 * TODO: Proper handling of window ownership (Turing apparently
2035 	 *       supports non-fixed mappings).
2036 	 */
2037 	if (core->assign_windows) {
2038 		core->func->wndw.owner(core);
2039 		core->func->update(core, interlock, false);
2040 		core->assign_windows = false;
2041 		interlock[NV50_DISP_INTERLOCK_CORE] = 0;
2042 	}
2043 
2044 	/* Update plane(s). */
2045 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2046 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2047 		struct nv50_wndw *wndw = nv50_wndw(plane);
2048 
2049 		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
2050 			  asyw->set.mask, asyw->clr.mask);
2051 		if ( !asyw->set.mask &&
2052 		    (!asyw->clr.mask || atom->flush_disable))
2053 			continue;
2054 
2055 		nv50_wndw_flush_set(wndw, interlock, asyw);
2056 	}
2057 
2058 	/* Flush update. */
2059 	nv50_disp_atomic_commit_wndw(state, interlock);
2060 
2061 	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2062 		if (interlock[NV50_DISP_INTERLOCK_BASE] ||
2063 		    interlock[NV50_DISP_INTERLOCK_OVLY] ||
2064 		    interlock[NV50_DISP_INTERLOCK_WNDW] ||
2065 		    !atom->state.legacy_cursor_update)
2066 			nv50_disp_atomic_commit_core(state, interlock);
2067 		else
2068 			disp->core->func->update(disp->core, interlock, false);
2069 	}
2070 
2071 	if (atom->lock_core)
2072 		mutex_unlock(&disp->mutex);
2073 
2074 	/* Wait for HW to signal completion. */
2075 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2076 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2077 		struct nv50_wndw *wndw = nv50_wndw(plane);
2078 		int ret = nv50_wndw_wait_armed(wndw, asyw);
2079 		if (ret)
2080 			NV_ERROR(drm, "%s: timeout\n", plane->name);
2081 	}
2082 
2083 	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2084 		if (new_crtc_state->event) {
2085 			unsigned long flags;
2086 			/* Get the correct vblank count/timestamp if racing with the vblank irq */
2087 			if (new_crtc_state->active)
2088 				drm_crtc_accurate_vblank_count(crtc);
2089 			spin_lock_irqsave(&crtc->dev->event_lock, flags);
2090 			drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
2091 			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
2092 
2093 			new_crtc_state->event = NULL;
2094 			if (new_crtc_state->active)
2095 				drm_crtc_vblank_put(crtc);
2096 		}
2097 	}
2098 
2099 	drm_atomic_helper_commit_hw_done(state);
2100 	drm_atomic_helper_cleanup_planes(dev, state);
2101 	drm_atomic_helper_commit_cleanup_done(state);
2102 	drm_atomic_state_put(state);
2103 
2104 	/* Drop the RPM ref we got from nv50_disp_atomic_commit() */
2105 	pm_runtime_mark_last_busy(dev->dev);
2106 	pm_runtime_put_autosuspend(dev->dev);
2107 }
2108 
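/* Non-blocking commits run the commit tail from the unbound system workqueue. */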
2109 static void
2110 nv50_disp_atomic_commit_work(struct work_struct *work)
2111 {
2112 	struct drm_atomic_state *state =
2113 		container_of(work, typeof(*state), commit_work);
2114 	nv50_disp_atomic_commit_tail(state);
2115 }
2116 
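/* Top-level atomic commit: take a runtime PM reference, prepare the planes,
 * swap in the new state, enable completion notifiers for windows that get a
 * new image, then run the commit tail directly or from a worker depending on
 * whether the commit is non-blocking.
 */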
2117 static int
2118 nv50_disp_atomic_commit(struct drm_device *dev,
2119 			struct drm_atomic_state *state, bool nonblock)
2120 {
2121 	struct drm_plane_state *new_plane_state;
2122 	struct drm_plane *plane;
2123 	int ret, i;
2124 
2125 	ret = pm_runtime_get_sync(dev->dev);
2126 	if (ret < 0 && ret != -EACCES)
2127 		return ret;
2128 
2129 	ret = drm_atomic_helper_setup_commit(state, nonblock);
2130 	if (ret)
2131 		goto done;
2132 
2133 	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);
2134 
2135 	ret = drm_atomic_helper_prepare_planes(dev, state);
2136 	if (ret)
2137 		goto done;
2138 
2139 	if (!nonblock) {
2140 		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
2141 		if (ret)
2142 			goto err_cleanup;
2143 	}
2144 
2145 	ret = drm_atomic_helper_swap_state(state, true);
2146 	if (ret)
2147 		goto err_cleanup;
2148 
2149 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2150 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2151 		struct nv50_wndw *wndw = nv50_wndw(plane);
2152 
2153 		if (asyw->set.image)
2154 			nv50_wndw_ntfy_enable(wndw, asyw);
2155 	}
2156 
2157 	drm_atomic_state_get(state);
2158 
2159 	/*
2160 	 * Grab another RPM reference for the commit tail;
2161 	 * nv50_disp_atomic_commit_tail() drops it once the commit completes.
2162 	 */
2163 	pm_runtime_get_noresume(dev->dev);
2164 
2165 	if (nonblock)
2166 		queue_work(system_unbound_wq, &state->commit_work);
2167 	else
2168 		nv50_disp_atomic_commit_tail(state);
2169 
2170 err_cleanup:
2171 	if (ret)
2172 		drm_atomic_helper_cleanup_planes(dev, state);
2173 done:
2174 	pm_runtime_put_autosuspend(dev->dev);
2175 	return ret;
2176 }
2177 
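/* Look up the per-encoder state tracked for this atomic commit, allocating
 * it on first use.
 */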
2178 static struct nv50_outp_atom *
2179 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
2180 {
2181 	struct nv50_outp_atom *outp;
2182 
2183 	list_for_each_entry(outp, &atom->outp, head) {
2184 		if (outp->encoder == encoder)
2185 			return outp;
2186 	}
2187 
2188 	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
2189 	if (!outp)
2190 		return ERR_PTR(-ENOMEM);
2191 
2192 	list_add(&outp->head, &atom->outp);
2193 	outp->encoder = encoder;
2194 	return outp;
2195 }
2196 
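/* If the connector's old CRTC was active and is undergoing a full modeset,
 * mark the encoder's output control for clearing.  MST encoders also force
 * the disable to be flushed out before the rest of the commit.
 */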
2197 static int
2198 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
2199 				struct drm_connector_state *old_connector_state)
2200 {
2201 	struct drm_encoder *encoder = old_connector_state->best_encoder;
2202 	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
2203 	struct drm_crtc *crtc;
2204 	struct nv50_outp_atom *outp;
2205 
2206 	if (!(crtc = old_connector_state->crtc))
2207 		return 0;
2208 
2209 	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
2210 	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2211 	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2212 		outp = nv50_disp_outp_atomic_add(atom, encoder);
2213 		if (IS_ERR(outp))
2214 			return PTR_ERR(outp);
2215 
2216 		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
2217 			outp->flush_disable = true;
2218 			atom->flush_disable = true;
2219 		}
2220 		outp->clr.ctrl = true;
2221 		atom->lock_core = true;
2222 	}
2223 
2224 	return 0;
2225 }
2226 
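/* If the connector's new CRTC is active and undergoing a full modeset,
 * mark the encoder's output control for (re)programming.
 */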
2227 static int
2228 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
2229 				struct drm_connector_state *connector_state)
2230 {
2231 	struct drm_encoder *encoder = connector_state->best_encoder;
2232 	struct drm_crtc_state *new_crtc_state;
2233 	struct drm_crtc *crtc;
2234 	struct nv50_outp_atom *outp;
2235 
2236 	if (!(crtc = connector_state->crtc))
2237 		return 0;
2238 
2239 	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2240 	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2241 		outp = nv50_disp_outp_atomic_add(atom, encoder);
2242 		if (IS_ERR(outp))
2243 			return PTR_ERR(outp);
2244 
2245 		outp->set.ctrl = true;
2246 		atom->lock_core = true;
2247 	}
2248 
2249 	return 0;
2250 }
2251 
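/* Driver atomic_check: expand colour-management changes to the affected
 * planes, run the core helpers, record which encoder controls need setting
 * or clearing, and validate the MST state.
 */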
2252 static int
2253 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
2254 {
2255 	struct nv50_atom *atom = nv50_atom(state);
2256 	struct drm_connector_state *old_connector_state, *new_connector_state;
2257 	struct drm_connector *connector;
2258 	struct drm_crtc_state *new_crtc_state;
2259 	struct drm_crtc *crtc;
2260 	int ret, i;
2261 
2262 	/* We need to handle colour management on a per-plane basis. */
2263 	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2264 		if (new_crtc_state->color_mgmt_changed) {
2265 			ret = drm_atomic_add_affected_planes(state, crtc);
2266 			if (ret)
2267 				return ret;
2268 		}
2269 	}
2270 
2271 	ret = drm_atomic_helper_check(dev, state);
2272 	if (ret)
2273 		return ret;
2274 
2275 	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
2276 		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
2277 		if (ret)
2278 			return ret;
2279 
2280 		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
2281 		if (ret)
2282 			return ret;
2283 	}
2284 
2285 	ret = drm_dp_mst_atomic_check(state);
2286 	if (ret)
2287 		return ret;
2288 
2289 	return 0;
2290 }
2291 
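/* Release any per-encoder state still attached to this atomic state before
 * clearing it back to its defaults.
 */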
2292 static void
2293 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
2294 {
2295 	struct nv50_atom *atom = nv50_atom(state);
2296 	struct nv50_outp_atom *outp, *outt;
2297 
2298 	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2299 		list_del(&outp->head);
2300 		kfree(outp);
2301 	}
2302 
2303 	drm_atomic_state_default_clear(state);
2304 }
2305 
2306 static void
2307 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
2308 {
2309 	struct nv50_atom *atom = nv50_atom(state);
2310 	drm_atomic_state_default_release(&atom->state);
2311 	kfree(atom);
2312 }
2313 
2314 static struct drm_atomic_state *
2315 nv50_disp_atomic_state_alloc(struct drm_device *dev)
2316 {
2317 	struct nv50_atom *atom;
2318 	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
2319 	    drm_atomic_state_init(dev, &atom->state) < 0) {
2320 		kfree(atom);
2321 		return NULL;
2322 	}
2323 	INIT_LIST_HEAD(&atom->outp);
2324 	return &atom->state;
2325 }
2326 
2327 static const struct drm_mode_config_funcs
2328 nv50_disp_func = {
2329 	.fb_create = nouveau_user_framebuffer_create,
2330 	.output_poll_changed = nouveau_fbcon_output_poll_changed,
2331 	.atomic_check = nv50_disp_atomic_check,
2332 	.atomic_commit = nv50_disp_atomic_commit,
2333 	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
2334 	.atomic_state_clear = nv50_disp_atomic_state_clear,
2335 	.atomic_state_free = nv50_disp_atomic_state_free,
2336 };
2337 
2338 /******************************************************************************
2339  * Init
2340  *****************************************************************************/
2341 
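/* Suspend/teardown path: runs nv50_wndw_fini() on each window plane and
 * nv50_mstm_fini() on each MST-capable encoder before the display goes down.
 */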
2342 static void
2343 nv50_display_fini(struct drm_device *dev, bool suspend)
2344 {
2345 	struct nouveau_encoder *nv_encoder;
2346 	struct drm_encoder *encoder;
2347 	struct drm_plane *plane;
2348 
2349 	drm_for_each_plane(plane, dev) {
2350 		struct nv50_wndw *wndw = nv50_wndw(plane);
2351 		if (plane->funcs != &nv50_wndw)
2352 			continue;
2353 		nv50_wndw_fini(wndw);
2354 	}
2355 
2356 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2357 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2358 			nv_encoder = nouveau_encoder(encoder);
2359 			nv50_mstm_fini(nv_encoder->dp.mstm);
2360 		}
2361 	}
2362 }
2363 
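/* Resume/init path: reinitialise the core channel, resume the MST managers,
 * and bring each window plane back up.
 */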
2364 static int
2365 nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
2366 {
2367 	struct nv50_core *core = nv50_disp(dev)->core;
2368 	struct drm_encoder *encoder;
2369 	struct drm_plane *plane;
2370 
2371 	core->func->init(core);
2372 
2373 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2374 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2375 			struct nouveau_encoder *nv_encoder =
2376 				nouveau_encoder(encoder);
2377 			nv50_mstm_init(nv_encoder->dp.mstm, runtime);
2378 		}
2379 	}
2380 
2381 	drm_for_each_plane(plane, dev) {
2382 		struct nv50_wndw *wndw = nv50_wndw(plane);
2383 		if (plane->funcs != &nv50_wndw)
2384 			continue;
2385 		nv50_wndw_init(wndw);
2386 	}
2387 
2388 	return 0;
2389 }
2390 
2391 static void
2392 nv50_display_destroy(struct drm_device *dev)
2393 {
2394 	struct nv50_disp *disp = nv50_disp(dev);
2395 
2396 	nv50_audio_component_fini(nouveau_drm(dev));
2397 
2398 	nv50_core_del(&disp->core);
2399 
2400 	nouveau_bo_unmap(disp->sync);
2401 	if (disp->sync)
2402 		nouveau_bo_unpin(disp->sync);
2403 	nouveau_bo_ref(NULL, &disp->sync);
2404 
2405 	nouveau_display(dev)->priv = NULL;
2406 	kfree(disp);
2407 }
2408 
2409 int
2410 nv50_display_create(struct drm_device *dev)
2411 {
2412 	struct nvif_device *device = &nouveau_drm(dev)->client.device;
2413 	struct nouveau_drm *drm = nouveau_drm(dev);
2414 	struct dcb_table *dcb = &drm->vbios.dcb;
2415 	struct drm_connector *connector, *tmp;
2416 	struct nv50_disp *disp;
2417 	struct dcb_output *dcbe;
2418 	int crtcs, ret, i;
2419 	bool has_mst = nv50_has_mst(drm);
2420 
2421 	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2422 	if (!disp)
2423 		return -ENOMEM;
2424 
2425 	mutex_init(&disp->mutex);
2426 
2427 	nouveau_display(dev)->priv = disp;
2428 	nouveau_display(dev)->dtor = nv50_display_destroy;
2429 	nouveau_display(dev)->init = nv50_display_init;
2430 	nouveau_display(dev)->fini = nv50_display_fini;
2431 	disp->disp = &nouveau_display(dev)->disp;
2432 	dev->mode_config.funcs = &nv50_disp_func;
2433 	dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;
2434 	dev->mode_config.normalize_zpos = true;
2435 
2436 	/* small shared memory area we use for notifiers and semaphores */
2437 	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2438 			     0, 0x0000, NULL, NULL, &disp->sync);
2439 	if (!ret) {
2440 		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
2441 		if (!ret) {
2442 			ret = nouveau_bo_map(disp->sync);
2443 			if (ret)
2444 				nouveau_bo_unpin(disp->sync);
2445 		}
2446 		if (ret)
2447 			nouveau_bo_ref(NULL, &disp->sync);
2448 	}
2449 
2450 	if (ret)
2451 		goto out;
2452 
2453 	/* allocate master evo channel */
2454 	ret = nv50_core_new(drm, &disp->core);
2455 	if (ret)
2456 		goto out;
2457 
2458 	/* create crtc objects to represent the hw heads */
2459 	if (disp->disp->object.oclass >= GV100_DISP)
2460 		crtcs = nvif_rd32(&device->object, 0x610060) & 0xff;
2461 	else
2462 	if (disp->disp->object.oclass >= GF110_DISP)
2463 		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
2464 	else
2465 		crtcs = 0x3;
2466 
2467 	for (i = 0; i < fls(crtcs); i++) {
2468 		struct nv50_head *head;
2469 
2470 		if (!(crtcs & (1 << i)))
2471 			continue;
2472 
2473 		head = nv50_head_create(dev, i);
2474 		if (IS_ERR(head)) {
2475 			ret = PTR_ERR(head);
2476 			goto out;
2477 		}
2478 
2479 		if (has_mst) {
2480 			head->msto = nv50_msto_new(dev, head, i);
2481 			if (IS_ERR(head->msto)) {
2482 				ret = PTR_ERR(head->msto);
2483 				head->msto = NULL;
2484 				goto out;
2485 			}
2486 
2487 			/*
2488 			 * FIXME: This is a hack to workaround the following
2489 			 * issues:
2490 			 *
2491 			 * https://gitlab.gnome.org/GNOME/mutter/issues/759
2492 			 * https://gitlab.freedesktop.org/xorg/xserver/merge_requests/277
2493 			 *
2494 			 * Once these issues are closed, this should be
2495 			 * removed
2496 			 */
2497 			head->msto->encoder.possible_crtcs = crtcs;
2498 		}
2499 	}
2500 
2501 	/* create encoder/connector objects based on VBIOS DCB table */
2502 	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2503 		connector = nouveau_connector_create(dev, dcbe);
2504 		if (IS_ERR(connector))
2505 			continue;
2506 
2507 		if (dcbe->location == DCB_LOC_ON_CHIP) {
2508 			switch (dcbe->type) {
2509 			case DCB_OUTPUT_TMDS:
2510 			case DCB_OUTPUT_LVDS:
2511 			case DCB_OUTPUT_DP:
2512 				ret = nv50_sor_create(connector, dcbe);
2513 				break;
2514 			case DCB_OUTPUT_ANALOG:
2515 				ret = nv50_dac_create(connector, dcbe);
2516 				break;
2517 			default:
2518 				ret = -ENODEV;
2519 				break;
2520 			}
2521 		} else {
2522 			ret = nv50_pior_create(connector, dcbe);
2523 		}
2524 
2525 		if (ret) {
2526 			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2527 				     dcbe->location, dcbe->type,
2528 				     ffs(dcbe->or) - 1, ret);
2529 			ret = 0;
2530 		}
2531 	}
2532 
2533 	/* cull any connectors we created that don't have an encoder */
2534 	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2535 		if (connector->possible_encoders)
2536 			continue;
2537 
2538 		NV_WARN(drm, "%s has no encoders, removing\n",
2539 			connector->name);
2540 		connector->funcs->destroy(connector);
2541 	}
2542 
2543 	/* Disable vblank irqs aggressively for power-saving, safe on nv50+ */
2544 	dev->vblank_disable_immediate = true;
2545 
2546 	nv50_audio_component_init(drm);
2547 
2548 out:
2549 	if (ret)
2550 		nv50_display_destroy(dev);
2551 	return ret;
2552 }
2553