1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24 #include "disp.h"
25 #include "atom.h"
26 #include "core.h"
27 #include "head.h"
28 #include "wndw.h"
29 
30 #include <linux/dma-mapping.h>
31 #include <linux/hdmi.h>
32 #include <linux/component.h>
33 
34 #include <drm/drm_atomic_helper.h>
35 #include <drm/drm_dp_helper.h>
36 #include <drm/drm_edid.h>
37 #include <drm/drm_fb_helper.h>
38 #include <drm/drm_plane_helper.h>
39 #include <drm/drm_probe_helper.h>
40 #include <drm/drm_scdc_helper.h>
41 #include <drm/drm_vblank.h>
42 
43 #include <nvif/class.h>
44 #include <nvif/cl0002.h>
45 #include <nvif/cl5070.h>
46 #include <nvif/cl507d.h>
47 #include <nvif/event.h>
48 #include <nvif/timer.h>
49 
50 #include "nouveau_drv.h"
51 #include "nouveau_dma.h"
52 #include "nouveau_gem.h"
53 #include "nouveau_connector.h"
54 #include "nouveau_encoder.h"
55 #include "nouveau_fence.h"
56 #include "nouveau_fbcon.h"
57 
58 #include <subdev/bios/dp.h>
59 
60 /******************************************************************************
61  * Atomic state
62  *****************************************************************************/
63 
64 struct nv50_outp_atom {
65 	struct list_head head;
66 
67 	struct drm_encoder *encoder;
68 	bool flush_disable;
69 
70 	union nv50_outp_atom_mask {
71 		struct {
72 			bool ctrl:1;
73 		};
74 		u8 mask;
75 	} set, clr;
76 };
77 
78 /******************************************************************************
79  * EVO channel
80  *****************************************************************************/
81 
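/* An EVO channel is created by listing the classes supported by the
 * display object and binding the first match from the caller's oclass
 * table (which is ordered most- to least-preferred), then mapping the
 * channel's user area so its registers can be poked directly.
 */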
82 static int
83 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
84 		 const s32 *oclass, u8 head, void *data, u32 size,
85 		 struct nv50_chan *chan)
86 {
87 	struct nvif_sclass *sclass;
88 	int ret, i, n;
89 
90 	chan->device = device;
91 
92 	ret = n = nvif_object_sclass_get(disp, &sclass);
93 	if (ret < 0)
94 		return ret;
95 
96 	while (oclass[0]) {
97 		for (i = 0; i < n; i++) {
98 			if (sclass[i].oclass == oclass[0]) {
99 				ret = nvif_object_init(disp, 0, oclass[0],
100 						       data, size, &chan->user);
101 				if (ret == 0)
102 					nvif_object_map(&chan->user, NULL, 0);
103 				nvif_object_sclass_put(&sclass);
104 				return ret;
105 			}
106 		}
107 		oclass++;
108 	}
109 
110 	nvif_object_sclass_put(&sclass);
111 	return -ENOSYS;
112 }
113 
114 static void
115 nv50_chan_destroy(struct nv50_chan *chan)
116 {
117 	nvif_object_fini(&chan->user);
118 }
119 
120 /******************************************************************************
121  * DMA EVO channel
122  *****************************************************************************/
123 
124 void
125 nv50_dmac_destroy(struct nv50_dmac *dmac)
126 {
127 	nvif_object_fini(&dmac->vram);
128 	nvif_object_fini(&dmac->sync);
129 
130 	nv50_chan_destroy(&dmac->base);
131 
132 	nvif_mem_fini(&dmac->push);
133 }
134 
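/* A DMA EVO channel pairs the channel itself with a page-sized push
 * buffer, plus two DMA objects: one covering the 4KiB sync (notifier)
 * area at 'syncbuf', and one covering the whole of VRAM.
 */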
135 int
136 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
137 		 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
138 		 struct nv50_dmac *dmac)
139 {
140 	struct nouveau_cli *cli = (void *)device->object.client;
141 	struct nv50_disp_core_channel_dma_v0 *args = data;
142 	u8 type = NVIF_MEM_COHERENT;
143 	int ret;
144 
145 	mutex_init(&dmac->lock);
146 
147 	/* Pascal added support for 47-bit physical addresses, but some
148 	 * parts of EVO still only accept 40-bit PAs.
149 	 *
150 	 * To avoid issues on systems with large amounts of RAM, and on
151 	 * systems where an IOMMU maps pages at a high address, we need
152 	 * to allocate push buffers in VRAM instead.
153 	 *
154 	 * This appears to match NVIDIA's behaviour on Pascal.
155 	 */
156 	if (device->info.family == NV_DEVICE_INFO_V0_PASCAL)
157 		type |= NVIF_MEM_VRAM;
158 
159 	ret = nvif_mem_init_map(&cli->mmu, type, 0x1000, &dmac->push);
160 	if (ret)
161 		return ret;
162 
163 	dmac->ptr = dmac->push.object.map.ptr;
164 
165 	args->pushbuf = nvif_handle(&dmac->push.object);
166 
167 	ret = nv50_chan_create(device, disp, oclass, head, data, size,
168 			       &dmac->base);
169 	if (ret)
170 		return ret;
171 
172 	if (!syncbuf)
173 		return 0;
174 
175 	ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
176 			       &(struct nv_dma_v0) {
177 					.target = NV_DMA_V0_TARGET_VRAM,
178 					.access = NV_DMA_V0_ACCESS_RDWR,
179 					.start = syncbuf + 0x0000,
180 					.limit = syncbuf + 0x0fff,
181 			       }, sizeof(struct nv_dma_v0),
182 			       &dmac->sync);
183 	if (ret)
184 		return ret;
185 
186 	ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
187 			       &(struct nv_dma_v0) {
188 					.target = NV_DMA_V0_TARGET_VRAM,
189 					.access = NV_DMA_V0_ACCESS_RDWR,
190 					.start = 0,
191 					.limit = device->info.ram_user - 1,
192 			       }, sizeof(struct nv_dma_v0),
193 			       &dmac->vram);
194 	if (ret)
195 		return ret;
196 
197 	return ret;
198 }
199 
200 /******************************************************************************
201  * EVO channel helpers
202  *****************************************************************************/
203 static void
204 evo_flush(struct nv50_dmac *dmac)
205 {
206 	/* Push buffer fetches are not coherent with BAR1, so we need to ensure
207 	 * writes have been flushed all the way to VRAM before updating PUT.
208 	 */
209 	if (dmac->push.type & NVIF_MEM_VRAM) {
210 		struct nvif_device *device = dmac->base.device;
211 		nvif_wr32(&device->object, 0x070000, 0x00000001);
212 		nvif_msec(device, 2000,
213 			if (!(nvif_rd32(&device->object, 0x070000) & 0x00000002))
214 				break;
215 		);
216 	}
217 }
218 
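/* evo_wait()/evo_kick() bracket writes to the channel's push buffer.
 * evo_wait() reserves room for 'nr' dwords; if the request would run off
 * the end of the page it writes a jump back to the start (0x20000000),
 * resets PUT and waits for GET to return to zero before wrapping.
 * evo_kick() flushes the new methods (see evo_flush()) and advances PUT
 * so the hardware fetches them, then drops the channel mutex.
 */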
219 u32 *
220 evo_wait(struct nv50_dmac *evoc, int nr)
221 {
222 	struct nv50_dmac *dmac = evoc;
223 	struct nvif_device *device = dmac->base.device;
224 	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
225 
226 	mutex_lock(&dmac->lock);
227 	if (put + nr >= (PAGE_SIZE / 4) - 8) {
228 		dmac->ptr[put] = 0x20000000;
229 		evo_flush(dmac);
230 
231 		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
232 		if (nvif_msec(device, 2000,
233 			if (!nvif_rd32(&dmac->base.user, 0x0004))
234 				break;
235 		) < 0) {
236 			mutex_unlock(&dmac->lock);
237 			pr_err("nouveau: evo channel stalled\n");
238 			return NULL;
239 		}
240 
241 		put = 0;
242 	}
243 
244 	return dmac->ptr + put;
245 }
246 
247 void
248 evo_kick(u32 *push, struct nv50_dmac *evoc)
249 {
250 	struct nv50_dmac *dmac = evoc;
251 
252 	evo_flush(dmac);
253 
254 	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
255 	mutex_unlock(&dmac->lock);
256 }
257 
258 /******************************************************************************
259  * Output path helpers
260  *****************************************************************************/
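/* Before an encoder can drive a head, the output resource (OR) behind it
 * must be acquired from NVKM; the ACQUIRE method reports which OR and
 * link were assigned.  RELEASE hands the OR back once the encoder has
 * been disabled.
 */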
261 static void
262 nv50_outp_release(struct nouveau_encoder *nv_encoder)
263 {
264 	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
265 	struct {
266 		struct nv50_disp_mthd_v1 base;
267 	} args = {
268 		.base.version = 1,
269 		.base.method = NV50_DISP_MTHD_V1_RELEASE,
270 		.base.hasht  = nv_encoder->dcb->hasht,
271 		.base.hashm  = nv_encoder->dcb->hashm,
272 	};
273 
274 	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
275 	nv_encoder->or = -1;
276 	nv_encoder->link = 0;
277 }
278 
279 static int
280 nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
281 {
282 	struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
283 	struct nv50_disp *disp = nv50_disp(drm->dev);
284 	struct {
285 		struct nv50_disp_mthd_v1 base;
286 		struct nv50_disp_acquire_v0 info;
287 	} args = {
288 		.base.version = 1,
289 		.base.method = NV50_DISP_MTHD_V1_ACQUIRE,
290 		.base.hasht  = nv_encoder->dcb->hasht,
291 		.base.hashm  = nv_encoder->dcb->hashm,
292 	};
293 	int ret;
294 
295 	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
296 	if (ret) {
297 		NV_ERROR(drm, "error acquiring output path: %d\n", ret);
298 		return ret;
299 	}
300 
301 	nv_encoder->or = args.info.or;
302 	nv_encoder->link = args.info.link;
303 	return 0;
304 }
305 
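/* Decide whether the head's scaler is needed for this mode.  With
 * scaling disabled, panel outputs (LVDS/eDP) pass EDID modes matching
 * the native resolution straight through and fall back to the native
 * mode (with full scaling) for everything else; with scaling enabled,
 * the native mode is always programmed and the scaler does the rest.
 */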
306 static int
307 nv50_outp_atomic_check_view(struct drm_encoder *encoder,
308 			    struct drm_crtc_state *crtc_state,
309 			    struct drm_connector_state *conn_state,
310 			    struct drm_display_mode *native_mode)
311 {
312 	struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
313 	struct drm_display_mode *mode = &crtc_state->mode;
314 	struct drm_connector *connector = conn_state->connector;
315 	struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
316 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
317 
318 	NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
319 	asyc->scaler.full = false;
320 	if (!native_mode)
321 		return 0;
322 
323 	if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
324 		switch (connector->connector_type) {
325 		case DRM_MODE_CONNECTOR_LVDS:
326 		case DRM_MODE_CONNECTOR_eDP:
327 			/* Don't force the scaler for EDID modes with
328 			 * the same size as the native one (e.g. a
329 			 * different refresh rate).
330 			 */
331 			if (mode->hdisplay == native_mode->hdisplay &&
332 			    mode->vdisplay == native_mode->vdisplay &&
333 			    mode->type & DRM_MODE_TYPE_DRIVER)
334 				break;
335 			mode = native_mode;
336 			asyc->scaler.full = true;
337 			break;
338 		default:
339 			break;
340 		}
341 	} else {
342 		mode = native_mode;
343 	}
344 
345 	if (!drm_mode_equal(adjusted_mode, mode)) {
346 		drm_mode_copy(adjusted_mode, mode);
347 		crtc_state->mode_changed = true;
348 	}
349 
350 	return 0;
351 }
352 
353 static int
354 nv50_outp_atomic_check(struct drm_encoder *encoder,
355 		       struct drm_crtc_state *crtc_state,
356 		       struct drm_connector_state *conn_state)
357 {
358 	struct drm_connector *connector = conn_state->connector;
359 	struct nouveau_connector *nv_connector = nouveau_connector(connector);
360 	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
361 	int ret;
362 
363 	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
364 					  nv_connector->native_mode);
365 	if (ret)
366 		return ret;
367 
368 	if (crtc_state->mode_changed || crtc_state->connectors_changed)
369 		asyh->or.bpc = connector->display_info.bpc;
370 
371 	return 0;
372 }
373 
374 /******************************************************************************
375  * DAC
376  *****************************************************************************/
377 static void
378 nv50_dac_disable(struct drm_encoder *encoder)
379 {
380 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
381 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
382 	if (nv_encoder->crtc)
383 		core->func->dac->ctrl(core, nv_encoder->or, 0x00000000, NULL);
384 	nv_encoder->crtc = NULL;
385 	nv50_outp_release(nv_encoder);
386 }
387 
388 static void
389 nv50_dac_enable(struct drm_encoder *encoder)
390 {
391 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
392 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
393 	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
394 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
395 
396 	nv50_outp_acquire(nv_encoder);
397 
398 	core->func->dac->ctrl(core, nv_encoder->or, 1 << nv_crtc->index, asyh);
399 	asyh->or.depth = 0;
400 
401 	nv_encoder->crtc = encoder->crtc;
402 }
403 
404 static enum drm_connector_status
405 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
406 {
407 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
408 	struct nv50_disp *disp = nv50_disp(encoder->dev);
409 	struct {
410 		struct nv50_disp_mthd_v1 base;
411 		struct nv50_disp_dac_load_v0 load;
412 	} args = {
413 		.base.version = 1,
414 		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
415 		.base.hasht  = nv_encoder->dcb->hasht,
416 		.base.hashm  = nv_encoder->dcb->hashm,
417 	};
418 	int ret;
419 
420 	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
421 	if (args.load.data == 0)
422 		args.load.data = 340;
423 
424 	ret = nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
425 	if (ret || !args.load.load)
426 		return connector_status_disconnected;
427 
428 	return connector_status_connected;
429 }
430 
431 static const struct drm_encoder_helper_funcs
432 nv50_dac_help = {
433 	.atomic_check = nv50_outp_atomic_check,
434 	.enable = nv50_dac_enable,
435 	.disable = nv50_dac_disable,
436 	.detect = nv50_dac_detect
437 };
438 
439 static void
440 nv50_dac_destroy(struct drm_encoder *encoder)
441 {
442 	drm_encoder_cleanup(encoder);
443 	kfree(encoder);
444 }
445 
446 static const struct drm_encoder_funcs
447 nv50_dac_func = {
448 	.destroy = nv50_dac_destroy,
449 };
450 
451 static int
452 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
453 {
454 	struct nouveau_drm *drm = nouveau_drm(connector->dev);
455 	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
456 	struct nvkm_i2c_bus *bus;
457 	struct nouveau_encoder *nv_encoder;
458 	struct drm_encoder *encoder;
459 	int type = DRM_MODE_ENCODER_DAC;
460 
461 	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
462 	if (!nv_encoder)
463 		return -ENOMEM;
464 	nv_encoder->dcb = dcbe;
465 
466 	bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
467 	if (bus)
468 		nv_encoder->i2c = &bus->i2c;
469 
470 	encoder = to_drm_encoder(nv_encoder);
471 	encoder->possible_crtcs = dcbe->heads;
472 	encoder->possible_clones = 0;
473 	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
474 			 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
475 	drm_encoder_helper_add(encoder, &nv50_dac_help);
476 
477 	drm_connector_attach_encoder(connector, encoder);
478 	return 0;
479 }
480 
481 /******************************************************************************
482  * Audio component binding for ELD notification
483  *****************************************************************************/
484 static void
485 nv50_audio_component_eld_notify(struct drm_audio_component *acomp, int port,
486 				int dev_id)
487 {
488 	if (acomp && acomp->audio_ops && acomp->audio_ops->pin_eld_notify)
489 		acomp->audio_ops->pin_eld_notify(acomp->audio_ops->audio_ptr,
490 						 port, dev_id);
491 }
492 
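/* Glue for the ALSA HDA audio component: the HDA driver calls get_eld()
 * with an OR index ('port') and head index ('dev_id') to fetch the ELD
 * cached on the matching connector, and is notified via eld_notify()
 * whenever audio on that path is enabled or disabled.
 */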
493 static int
494 nv50_audio_component_get_eld(struct device *kdev, int port, int dev_id,
495 			     bool *enabled, unsigned char *buf, int max_bytes)
496 {
497 	struct drm_device *drm_dev = dev_get_drvdata(kdev);
498 	struct nouveau_drm *drm = nouveau_drm(drm_dev);
499 	struct drm_encoder *encoder;
500 	struct nouveau_encoder *nv_encoder;
501 	struct nouveau_connector *nv_connector;
502 	struct nouveau_crtc *nv_crtc;
503 	int ret = 0;
504 
505 	*enabled = false;
506 	drm_for_each_encoder(encoder, drm->dev) {
507 		nv_encoder = nouveau_encoder(encoder);
508 		nv_connector = nouveau_encoder_connector_get(nv_encoder);
509 		nv_crtc = nouveau_crtc(encoder->crtc);
510 		if (!nv_connector || !nv_crtc || nv_encoder->or != port ||
511 		    nv_crtc->index != dev_id)
512 			continue;
513 		*enabled = drm_detect_monitor_audio(nv_connector->edid);
514 		if (*enabled) {
515 			ret = drm_eld_size(nv_connector->base.eld);
516 			memcpy(buf, nv_connector->base.eld,
517 			       min(max_bytes, ret));
518 		}
519 		break;
520 	}
521 	return ret;
522 }
523 
524 static const struct drm_audio_component_ops nv50_audio_component_ops = {
525 	.get_eld = nv50_audio_component_get_eld,
526 };
527 
528 static int
529 nv50_audio_component_bind(struct device *kdev, struct device *hda_kdev,
530 			  void *data)
531 {
532 	struct drm_device *drm_dev = dev_get_drvdata(kdev);
533 	struct nouveau_drm *drm = nouveau_drm(drm_dev);
534 	struct drm_audio_component *acomp = data;
535 
536 	if (WARN_ON(!device_link_add(hda_kdev, kdev, DL_FLAG_STATELESS)))
537 		return -ENOMEM;
538 
539 	drm_modeset_lock_all(drm_dev);
540 	acomp->ops = &nv50_audio_component_ops;
541 	acomp->dev = kdev;
542 	drm->audio.component = acomp;
543 	drm_modeset_unlock_all(drm_dev);
544 	return 0;
545 }
546 
547 static void
548 nv50_audio_component_unbind(struct device *kdev, struct device *hda_kdev,
549 			    void *data)
550 {
551 	struct drm_device *drm_dev = dev_get_drvdata(kdev);
552 	struct nouveau_drm *drm = nouveau_drm(drm_dev);
553 	struct drm_audio_component *acomp = data;
554 
555 	drm_modeset_lock_all(drm_dev);
556 	drm->audio.component = NULL;
557 	acomp->ops = NULL;
558 	acomp->dev = NULL;
559 	drm_modeset_unlock_all(drm_dev);
560 }
561 
562 static const struct component_ops nv50_audio_component_bind_ops = {
563 	.bind   = nv50_audio_component_bind,
564 	.unbind = nv50_audio_component_unbind,
565 };
566 
567 static void
568 nv50_audio_component_init(struct nouveau_drm *drm)
569 {
570 	if (!component_add(drm->dev->dev, &nv50_audio_component_bind_ops))
571 		drm->audio.component_registered = true;
572 }
573 
574 static void
575 nv50_audio_component_fini(struct nouveau_drm *drm)
576 {
577 	if (drm->audio.component_registered) {
578 		component_del(drm->dev->dev, &nv50_audio_component_bind_ops);
579 		drm->audio.component_registered = false;
580 	}
581 }
582 
583 /******************************************************************************
584  * Audio
585  *****************************************************************************/
586 static void
587 nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
588 {
589 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
590 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
591 	struct nv50_disp *disp = nv50_disp(encoder->dev);
592 	struct {
593 		struct nv50_disp_mthd_v1 base;
594 		struct nv50_disp_sor_hda_eld_v0 eld;
595 	} args = {
596 		.base.version = 1,
597 		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
598 		.base.hasht   = nv_encoder->dcb->hasht,
599 		.base.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
600 				(0x0100 << nv_crtc->index),
601 	};
602 
603 	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
604 
605 	nv50_audio_component_eld_notify(drm->audio.component, nv_encoder->or,
606 					nv_crtc->index);
607 }
608 
609 static void
610 nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
611 {
612 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
613 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
614 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
615 	struct nouveau_connector *nv_connector;
616 	struct nv50_disp *disp = nv50_disp(encoder->dev);
617 	struct __packed {
618 		struct {
619 			struct nv50_disp_mthd_v1 mthd;
620 			struct nv50_disp_sor_hda_eld_v0 eld;
621 		} base;
622 		u8 data[sizeof(nv_connector->base.eld)];
623 	} args = {
624 		.base.mthd.version = 1,
625 		.base.mthd.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
626 		.base.mthd.hasht   = nv_encoder->dcb->hasht,
627 		.base.mthd.hashm   = (0xf0ff & nv_encoder->dcb->hashm) |
628 				     (0x0100 << nv_crtc->index),
629 	};
630 
631 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
632 	if (!drm_detect_monitor_audio(nv_connector->edid))
633 		return;
634 
635 	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
636 
637 	nvif_mthd(&disp->disp->object, 0, &args,
638 		  sizeof(args.base) + drm_eld_size(args.data));
639 
640 	nv50_audio_component_eld_notify(drm->audio.component, nv_encoder->or,
641 					nv_crtc->index);
642 }
643 
644 /******************************************************************************
645  * HDMI
646  *****************************************************************************/
647 static void
648 nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
649 {
650 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
651 	struct nv50_disp *disp = nv50_disp(encoder->dev);
652 	struct {
653 		struct nv50_disp_mthd_v1 base;
654 		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
655 	} args = {
656 		.base.version = 1,
657 		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
658 		.base.hasht  = nv_encoder->dcb->hasht,
659 		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
660 			       (0x0100 << nv_crtc->index),
661 	};
662 
663 	nvif_mthd(&disp->disp->object, 0, &args, sizeof(args));
664 }
665 
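/* Program the SOR for HDMI output: build AVI and HDMI vendor infoframes
 * from the adjusted mode, size max_ac_packet from the horizontal
 * blanking minus the rekey value and a fixed 18-pixel constant, in units
 * of 32 pixels (e.g. a CEA 1080p timing with htotal 2200 gives
 * (2200 - 1920 - 56 - 18) / 32 = 6), and, for HDMI 2.0 sinks, configure
 * SCDC scrambling and the high TMDS clock ratio above 340MHz.
 */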
666 static void
667 nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
668 {
669 	struct nouveau_drm *drm = nouveau_drm(encoder->dev);
670 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
671 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
672 	struct nv50_disp *disp = nv50_disp(encoder->dev);
673 	struct {
674 		struct nv50_disp_mthd_v1 base;
675 		struct nv50_disp_sor_hdmi_pwr_v0 pwr;
676 		u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
677 	} args = {
678 		.base.version = 1,
679 		.base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
680 		.base.hasht  = nv_encoder->dcb->hasht,
681 		.base.hashm  = (0xf0ff & nv_encoder->dcb->hashm) |
682 			       (0x0100 << nv_crtc->index),
683 		.pwr.state = 1,
684 		.pwr.rekey = 56, /* constant used by both the binary driver and tegra */
685 	};
686 	struct nouveau_connector *nv_connector;
687 	struct drm_hdmi_info *hdmi;
688 	u32 max_ac_packet;
689 	union hdmi_infoframe avi_frame;
690 	union hdmi_infoframe vendor_frame;
691 	bool high_tmds_clock_ratio = false, scrambling = false;
692 	u8 config;
693 	int ret;
694 	int size;
695 
696 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
697 	if (!drm_detect_hdmi_monitor(nv_connector->edid))
698 		return;
699 
700 	hdmi = &nv_connector->base.display_info.hdmi;
701 
702 	ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi,
703 						       &nv_connector->base, mode);
704 	if (!ret) {
705 		/* We have an AVI InfoFrame, pass it to the display */
706 		args.pwr.avi_infoframe_length
707 			= hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
708 	}
709 
710 	ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
711 							  &nv_connector->base, mode);
712 	if (!ret) {
713 		/* We have a Vendor InfoFrame, pass it to the display */
714 		args.pwr.vendor_infoframe_length
715 			= hdmi_infoframe_pack(&vendor_frame,
716 					      args.infoframes
717 					      + args.pwr.avi_infoframe_length,
718 					      17);
719 	}
720 
721 	max_ac_packet  = mode->htotal - mode->hdisplay;
722 	max_ac_packet -= args.pwr.rekey;
723 	max_ac_packet -= 18; /* constant from tegra */
724 	args.pwr.max_ac_packet = max_ac_packet / 32;
725 
726 	if (hdmi->scdc.scrambling.supported) {
727 		high_tmds_clock_ratio = mode->clock > 340000;
728 		scrambling = high_tmds_clock_ratio ||
729 			hdmi->scdc.scrambling.low_rates;
730 	}
731 
732 	args.pwr.scdc =
733 		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_SCRAMBLE * scrambling |
734 		NV50_DISP_SOR_HDMI_PWR_V0_SCDC_DIV_BY_4 * high_tmds_clock_ratio;
735 
736 	size = sizeof(args.base)
737 		+ sizeof(args.pwr)
738 		+ args.pwr.avi_infoframe_length
739 		+ args.pwr.vendor_infoframe_length;
740 	nvif_mthd(&disp->disp->object, 0, &args, size);
741 
742 	nv50_audio_enable(encoder, mode);
743 
744 	/* If SCDC is supported by the downstream monitor, update
745 	 * divider / scrambling settings to what we programmed above.
746 	 */
747 	if (!hdmi->scdc.scrambling.supported)
748 		return;
749 
750 	ret = drm_scdc_readb(nv_encoder->i2c, SCDC_TMDS_CONFIG, &config);
751 	if (ret < 0) {
752 		NV_ERROR(drm, "Failure to read SCDC_TMDS_CONFIG: %d\n", ret);
753 		return;
754 	}
755 	config &= ~(SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 | SCDC_SCRAMBLING_ENABLE);
756 	config |= SCDC_TMDS_BIT_CLOCK_RATIO_BY_40 * high_tmds_clock_ratio;
757 	config |= SCDC_SCRAMBLING_ENABLE * scrambling;
758 	ret = drm_scdc_writeb(nv_encoder->i2c, SCDC_TMDS_CONFIG, config);
759 	if (ret < 0)
760 		NV_ERROR(drm, "Failure to write SCDC_TMDS_CONFIG = 0x%02x: %d\n",
761 			 config, ret);
762 }
763 
764 /******************************************************************************
765  * MST
766  *****************************************************************************/
767 #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
768 #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
769 #define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
770 
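/* DP MST is modelled with three objects: nv50_mstm wraps the topology
 * manager for a physical DP output, nv50_mstc is the DRM connector
 * created for each port discovered in the topology, and nv50_msto is
 * the per-head DPMST encoder an mstc gets routed through.
 */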
771 struct nv50_mstm {
772 	struct nouveau_encoder *outp;
773 
774 	struct drm_dp_mst_topology_mgr mgr;
775 
776 	bool modified;
777 	bool disabled;
778 	int links;
779 };
780 
781 struct nv50_mstc {
782 	struct nv50_mstm *mstm;
783 	struct drm_dp_mst_port *port;
784 	struct drm_connector connector;
785 
786 	struct drm_display_mode *native;
787 	struct edid *edid;
788 };
789 
790 struct nv50_msto {
791 	struct drm_encoder encoder;
792 
793 	struct nv50_head *head;
794 	struct nv50_mstc *mstc;
795 	bool disabled;
796 };
797 
798 static struct drm_dp_payload *
799 nv50_msto_payload(struct nv50_msto *msto)
800 {
801 	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
802 	struct nv50_mstc *mstc = msto->mstc;
803 	struct nv50_mstm *mstm = mstc->mstm;
804 	int vcpi = mstc->port->vcpi.vcpi, i;
805 
806 	WARN_ON(!mutex_is_locked(&mstm->mgr.payload_lock));
807 
808 	NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
809 	for (i = 0; i < mstm->mgr.max_payloads; i++) {
810 		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
811 		NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
812 			  mstm->outp->base.base.name, i, payload->vcpi,
813 			  payload->start_slot, payload->num_slots);
814 	}
815 
816 	for (i = 0; i < mstm->mgr.max_payloads; i++) {
817 		struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
818 		if (payload->vcpi == vcpi)
819 			return payload;
820 	}
821 
822 	return NULL;
823 }
824 
825 static void
826 nv50_msto_cleanup(struct nv50_msto *msto)
827 {
828 	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
829 	struct nv50_mstc *mstc = msto->mstc;
830 	struct nv50_mstm *mstm = mstc->mstm;
831 
832 	if (!msto->disabled)
833 		return;
834 
835 	NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
836 
837 	drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
838 
839 	msto->mstc = NULL;
840 	msto->disabled = false;
841 }
842 
843 static void
844 nv50_msto_prepare(struct nv50_msto *msto)
845 {
846 	struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
847 	struct nv50_mstc *mstc = msto->mstc;
848 	struct nv50_mstm *mstm = mstc->mstm;
849 	struct {
850 		struct nv50_disp_mthd_v1 base;
851 		struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
852 	} args = {
853 		.base.version = 1,
854 		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
855 		.base.hasht  = mstm->outp->dcb->hasht,
856 		.base.hashm  = (0xf0ff & mstm->outp->dcb->hashm) |
857 			       (0x0100 << msto->head->base.index),
858 	};
859 
860 	mutex_lock(&mstm->mgr.payload_lock);
861 
862 	NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
863 	if (mstc->port->vcpi.vcpi > 0) {
864 		struct drm_dp_payload *payload = nv50_msto_payload(msto);
865 		if (payload) {
866 			args.vcpi.start_slot = payload->start_slot;
867 			args.vcpi.num_slots = payload->num_slots;
868 			args.vcpi.pbn = mstc->port->vcpi.pbn;
869 			args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
870 		}
871 	}
872 
873 	NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
874 		  msto->encoder.name, msto->head->base.base.name,
875 		  args.vcpi.start_slot, args.vcpi.num_slots,
876 		  args.vcpi.pbn, args.vcpi.aligned_pbn);
877 
878 	nvif_mthd(&drm->display->disp.object, 0, &args, sizeof(args));
879 	mutex_unlock(&mstm->mgr.payload_lock);
880 }
881 
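/* Validate an MST stream: fix up the view for the mstc's native mode,
 * convert the adjusted mode's bandwidth into PBN, and atomically reserve
 * VCPI time slots for it; the slot count is carried in the head state
 * (dp.tu) for nv50_msto_enable() to allocate later.
 */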
882 static int
883 nv50_msto_atomic_check(struct drm_encoder *encoder,
884 		       struct drm_crtc_state *crtc_state,
885 		       struct drm_connector_state *conn_state)
886 {
887 	struct drm_atomic_state *state = crtc_state->state;
888 	struct drm_connector *connector = conn_state->connector;
889 	struct nv50_mstc *mstc = nv50_mstc(connector);
890 	struct nv50_mstm *mstm = mstc->mstm;
891 	struct nv50_head_atom *asyh = nv50_head_atom(crtc_state);
892 	int slots;
893 	int ret;
894 
895 	ret = nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
896 					  mstc->native);
897 	if (ret)
898 		return ret;
899 
900 	if (!crtc_state->mode_changed && !crtc_state->connectors_changed)
901 		return 0;
902 
903 	/*
904 	 * When restoring duplicated states, we need to make sure that the
905 	 * bandwidth remains the same and avoid recalculating it, as the
906 	 * connector's bpc may have changed after the state was duplicated.
907 	 */
908 	if (!state->duplicated) {
909 		const int clock = crtc_state->adjusted_mode.clock;
910 
911 		asyh->or.bpc = connector->display_info.bpc;
912 		asyh->dp.pbn = drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3,
913 						    false);
914 	}
915 
916 	slots = drm_dp_atomic_find_vcpi_slots(state, &mstm->mgr, mstc->port,
917 					      asyh->dp.pbn, 0);
918 	if (slots < 0)
919 		return slots;
920 
921 	asyh->dp.tu = slots;
922 
923 	return 0;
924 }
925 
926 static u8
927 nv50_dp_bpc_to_depth(unsigned int bpc)
928 {
929 	switch (bpc) {
930 	case  6: return 0x2;
931 	case  8: return 0x5;
932 	case 10: /* fall-through */
933 	default: return 0x6;
934 	}
935 }
936 
937 static void
938 nv50_msto_enable(struct drm_encoder *encoder)
939 {
940 	struct nv50_head *head = nv50_head(encoder->crtc);
941 	struct nv50_head_atom *armh = nv50_head_atom(head->base.base.state);
942 	struct nv50_msto *msto = nv50_msto(encoder);
943 	struct nv50_mstc *mstc = NULL;
944 	struct nv50_mstm *mstm = NULL;
945 	struct drm_connector *connector;
946 	struct drm_connector_list_iter conn_iter;
947 	u8 proto;
948 	bool r;
949 
950 	drm_connector_list_iter_begin(encoder->dev, &conn_iter);
951 	drm_for_each_connector_iter(connector, &conn_iter) {
952 		if (connector->state->best_encoder == &msto->encoder) {
953 			mstc = nv50_mstc(connector);
954 			mstm = mstc->mstm;
955 			break;
956 		}
957 	}
958 	drm_connector_list_iter_end(&conn_iter);
959 
960 	if (WARN_ON(!mstc))
961 		return;
962 
963 	r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, armh->dp.pbn,
964 				     armh->dp.tu);
965 	if (!r)
966 		DRM_DEBUG_KMS("Failed to allocate VCPI\n");
967 
968 	if (!mstm->links++)
969 		nv50_outp_acquire(mstm->outp);
970 
971 	if (mstm->outp->link & 1)
972 		proto = 0x8;
973 	else
974 		proto = 0x9;
975 
976 	mstm->outp->update(mstm->outp, head->base.index, armh, proto,
977 			   nv50_dp_bpc_to_depth(armh->or.bpc));
978 
979 	msto->mstc = mstc;
980 	mstm->modified = true;
981 }
982 
983 static void
984 nv50_msto_disable(struct drm_encoder *encoder)
985 {
986 	struct nv50_msto *msto = nv50_msto(encoder);
987 	struct nv50_mstc *mstc = msto->mstc;
988 	struct nv50_mstm *mstm = mstc->mstm;
989 
990 	drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);
991 
992 	mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
993 	mstm->modified = true;
994 	if (!--mstm->links)
995 		mstm->disabled = true;
996 	msto->disabled = true;
997 }
998 
999 static const struct drm_encoder_helper_funcs
1000 nv50_msto_help = {
1001 	.disable = nv50_msto_disable,
1002 	.enable = nv50_msto_enable,
1003 	.atomic_check = nv50_msto_atomic_check,
1004 };
1005 
1006 static void
1007 nv50_msto_destroy(struct drm_encoder *encoder)
1008 {
1009 	struct nv50_msto *msto = nv50_msto(encoder);
1010 	drm_encoder_cleanup(&msto->encoder);
1011 	kfree(msto);
1012 }
1013 
1014 static const struct drm_encoder_funcs
1015 nv50_msto = {
1016 	.destroy = nv50_msto_destroy,
1017 };
1018 
1019 static struct nv50_msto *
1020 nv50_msto_new(struct drm_device *dev, struct nv50_head *head, int id)
1021 {
1022 	struct nv50_msto *msto;
1023 	int ret;
1024 
1025 	msto = kzalloc(sizeof(*msto), GFP_KERNEL);
1026 	if (!msto)
1027 		return ERR_PTR(-ENOMEM);
1028 
1029 	ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
1030 			       DRM_MODE_ENCODER_DPMST, "mst-%d", id);
1031 	if (ret) {
1032 		kfree(msto);
1033 		return ERR_PTR(ret);
1034 	}
1035 
1036 	drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
1037 	msto->encoder.possible_crtcs = drm_crtc_mask(&head->base.base);
1038 	msto->head = head;
1039 	return msto;
1040 }
1041 
1042 static struct drm_encoder *
1043 nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
1044 			      struct drm_connector_state *connector_state)
1045 {
1046 	struct nv50_mstc *mstc = nv50_mstc(connector);
1047 	struct drm_crtc *crtc = connector_state->crtc;
1048 
1049 	if (!(mstc->mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1050 		return NULL;
1051 
1052 	return &nv50_head(crtc)->msto->encoder;
1053 }
1054 
1055 static enum drm_mode_status
1056 nv50_mstc_mode_valid(struct drm_connector *connector,
1057 		     struct drm_display_mode *mode)
1058 {
1059 	struct nv50_mstc *mstc = nv50_mstc(connector);
1060 	struct nouveau_encoder *outp = mstc->mstm->outp;
1061 
1062 	/* TODO: calculate the PBN from the dotclock and validate against the
1063 	 * MSTB's max possible PBN
1064 	 */
1065 
1066 	return nv50_dp_mode_valid(connector, outp, mode, NULL);
1067 }
1068 
1069 static int
1070 nv50_mstc_get_modes(struct drm_connector *connector)
1071 {
1072 	struct nv50_mstc *mstc = nv50_mstc(connector);
1073 	int ret = 0;
1074 
1075 	mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
1076 	drm_connector_update_edid_property(&mstc->connector, mstc->edid);
1077 	if (mstc->edid)
1078 		ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
1079 
1080 	/*
1081 	 * XXX: Since we don't use HDR in userspace quite yet, limit the bpc
1082 	 * to 8 to save bandwidth on the topology. In the future, we'll want
1083 	 * to properly fix this by dynamically selecting the highest possible
1084 	 * bpc that would fit in the topology
1085 	 */
1086 	if (connector->display_info.bpc)
1087 		connector->display_info.bpc =
1088 			clamp(connector->display_info.bpc, 6U, 8U);
1089 	else
1090 		connector->display_info.bpc = 8;
1091 
1092 	if (mstc->native)
1093 		drm_mode_destroy(mstc->connector.dev, mstc->native);
1094 	mstc->native = nouveau_conn_native_mode(&mstc->connector);
1095 	return ret;
1096 }
1097 
1098 static int
1099 nv50_mstc_atomic_check(struct drm_connector *connector,
1100 		       struct drm_atomic_state *state)
1101 {
1102 	struct nv50_mstc *mstc = nv50_mstc(connector);
1103 	struct drm_dp_mst_topology_mgr *mgr = &mstc->mstm->mgr;
1104 	struct drm_connector_state *new_conn_state =
1105 		drm_atomic_get_new_connector_state(state, connector);
1106 	struct drm_connector_state *old_conn_state =
1107 		drm_atomic_get_old_connector_state(state, connector);
1108 	struct drm_crtc_state *crtc_state;
1109 	struct drm_crtc *new_crtc = new_conn_state->crtc;
1110 
1111 	if (!old_conn_state->crtc)
1112 		return 0;
1113 
1114 	/* We only want to free VCPI if this state disables the CRTC on this
1115 	 * connector
1116 	 */
1117 	if (new_crtc) {
1118 		crtc_state = drm_atomic_get_new_crtc_state(state, new_crtc);
1119 
1120 		if (!crtc_state ||
1121 		    !drm_atomic_crtc_needs_modeset(crtc_state) ||
1122 		    crtc_state->enable)
1123 			return 0;
1124 	}
1125 
1126 	return drm_dp_atomic_release_vcpi_slots(state, mgr, mstc->port);
1127 }
1128 
1129 static int
1130 nv50_mstc_detect(struct drm_connector *connector,
1131 		 struct drm_modeset_acquire_ctx *ctx, bool force)
1132 {
1133 	struct nv50_mstc *mstc = nv50_mstc(connector);
1134 	int ret;
1135 
1136 	if (drm_connector_is_unregistered(connector))
1137 		return connector_status_disconnected;
1138 
1139 	ret = pm_runtime_get_sync(connector->dev->dev);
1140 	if (ret < 0 && ret != -EACCES) {
1141 		pm_runtime_put_autosuspend(connector->dev->dev);
1142 		return connector_status_disconnected;
1143 	}
1144 
1145 	ret = drm_dp_mst_detect_port(connector, ctx, mstc->port->mgr,
1146 				     mstc->port);
1147 
1148 	pm_runtime_mark_last_busy(connector->dev->dev);
1149 	pm_runtime_put_autosuspend(connector->dev->dev);
1150 	return ret;
1151 }
1152 
1153 static const struct drm_connector_helper_funcs
1154 nv50_mstc_help = {
1155 	.get_modes = nv50_mstc_get_modes,
1156 	.mode_valid = nv50_mstc_mode_valid,
1157 	.atomic_best_encoder = nv50_mstc_atomic_best_encoder,
1158 	.atomic_check = nv50_mstc_atomic_check,
1159 	.detect_ctx = nv50_mstc_detect,
1160 };
1161 
1162 static void
1163 nv50_mstc_destroy(struct drm_connector *connector)
1164 {
1165 	struct nv50_mstc *mstc = nv50_mstc(connector);
1166 
1167 	drm_connector_cleanup(&mstc->connector);
1168 	drm_dp_mst_put_port_malloc(mstc->port);
1169 
1170 	kfree(mstc);
1171 }
1172 
1173 static const struct drm_connector_funcs
1174 nv50_mstc = {
1175 	.reset = nouveau_conn_reset,
1176 	.fill_modes = drm_helper_probe_single_connector_modes,
1177 	.destroy = nv50_mstc_destroy,
1178 	.atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
1179 	.atomic_destroy_state = nouveau_conn_atomic_destroy_state,
1180 	.atomic_set_property = nouveau_conn_atomic_set_property,
1181 	.atomic_get_property = nouveau_conn_atomic_get_property,
1182 };
1183 
1184 static int
1185 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
1186 	      const char *path, struct nv50_mstc **pmstc)
1187 {
1188 	struct drm_device *dev = mstm->outp->base.base.dev;
1189 	struct drm_crtc *crtc;
1190 	struct nv50_mstc *mstc;
1191 	int ret;
1192 
1193 	if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
1194 		return -ENOMEM;
1195 	mstc->mstm = mstm;
1196 	mstc->port = port;
1197 
1198 	ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
1199 				 DRM_MODE_CONNECTOR_DisplayPort);
1200 	if (ret) {
1201 		kfree(*pmstc);
1202 		*pmstc = NULL;
1203 		return ret;
1204 	}
1205 
1206 	drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
1207 
1208 	mstc->connector.funcs->reset(&mstc->connector);
1209 	nouveau_conn_attach_properties(&mstc->connector);
1210 
1211 	drm_for_each_crtc(crtc, dev) {
1212 		if (!(mstm->outp->dcb->heads & drm_crtc_mask(crtc)))
1213 			continue;
1214 
1215 		drm_connector_attach_encoder(&mstc->connector,
1216 					     &nv50_head(crtc)->msto->encoder);
1217 	}
1218 
1219 	drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
1220 	drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
1221 	drm_connector_set_path_property(&mstc->connector, path);
1222 	drm_dp_mst_get_port_malloc(port);
1223 	return 0;
1224 }
1225 
1226 static void
1227 nv50_mstm_cleanup(struct nv50_mstm *mstm)
1228 {
1229 	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1230 	struct drm_encoder *encoder;
1231 	int ret;
1232 
1233 	NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
1234 	ret = drm_dp_check_act_status(&mstm->mgr);
1235 
1236 	ret = drm_dp_update_payload_part2(&mstm->mgr);
1237 
1238 	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1239 		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1240 			struct nv50_msto *msto = nv50_msto(encoder);
1241 			struct nv50_mstc *mstc = msto->mstc;
1242 			if (mstc && mstc->mstm == mstm)
1243 				nv50_msto_cleanup(msto);
1244 		}
1245 	}
1246 
1247 	mstm->modified = false;
1248 }
1249 
1250 static void
1251 nv50_mstm_prepare(struct nv50_mstm *mstm)
1252 {
1253 	struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
1254 	struct drm_encoder *encoder;
1255 	int ret;
1256 
1257 	NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
1258 	ret = drm_dp_update_payload_part1(&mstm->mgr);
1259 
1260 	drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
1261 		if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
1262 			struct nv50_msto *msto = nv50_msto(encoder);
1263 			struct nv50_mstc *mstc = msto->mstc;
1264 			if (mstc && mstc->mstm == mstm)
1265 				nv50_msto_prepare(msto);
1266 		}
1267 	}
1268 
1269 	if (mstm->disabled) {
1270 		if (!mstm->links)
1271 			nv50_outp_release(mstm->outp);
1272 		mstm->disabled = false;
1273 	}
1274 }
1275 
1276 static struct drm_connector *
1277 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1278 			struct drm_dp_mst_port *port, const char *path)
1279 {
1280 	struct nv50_mstm *mstm = nv50_mstm(mgr);
1281 	struct nv50_mstc *mstc;
1282 	int ret;
1283 
1284 	ret = nv50_mstc_new(mstm, port, path, &mstc);
1285 	if (ret)
1286 		return NULL;
1287 
1288 	return &mstc->connector;
1289 }
1290 
1291 static const struct drm_dp_mst_topology_cbs
1292 nv50_mstm = {
1293 	.add_connector = nv50_mstm_add_connector,
1294 };
1295 
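/* Handle an ESI ("IRQ_HPD") interrupt from an MST branch device: read
 * the sink count/ESI bytes, let the topology manager process them, and
 * ack whatever was handled; if the DPCD read fails, the topology is torn
 * down.
 */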
1296 void
1297 nv50_mstm_service(struct nv50_mstm *mstm)
1298 {
1299 	struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
1300 	bool handled = true;
1301 	int ret;
1302 	u8 esi[8] = {};
1303 
1304 	if (!aux)
1305 		return;
1306 
1307 	while (handled) {
1308 		ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
1309 		if (ret != 8) {
1310 			drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1311 			return;
1312 		}
1313 
1314 		drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
1315 		if (!handled)
1316 			break;
1317 
1318 		drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
1319 	}
1320 }
1321 
1322 void
1323 nv50_mstm_remove(struct nv50_mstm *mstm)
1324 {
1325 	if (mstm)
1326 		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1327 }
1328 
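/* Switch the sink and source between SST and MST.  For DPCD 1.2+ sinks
 * the branch device's MSTM_CTRL register is cleared first (dropping any
 * topology state we didn't set) and re-enabled if MST is being turned
 * on, before NVKM is told about the new link state.
 */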
1329 static int
1330 nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
1331 {
1332 	struct nouveau_encoder *outp = mstm->outp;
1333 	struct {
1334 		struct nv50_disp_mthd_v1 base;
1335 		struct nv50_disp_sor_dp_mst_link_v0 mst;
1336 	} args = {
1337 		.base.version = 1,
1338 		.base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
1339 		.base.hasht = outp->dcb->hasht,
1340 		.base.hashm = outp->dcb->hashm,
1341 		.mst.state = state,
1342 	};
1343 	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
1344 	struct nvif_object *disp = &drm->display->disp.object;
1345 	int ret;
1346 
1347 	if (dpcd >= 0x12) {
1348 		/* Even if we're enabling MST, start with disabling the
1349 		 * branching unit to clear any sink-side MST topology state
1350 		 * that wasn't set by us
1351 		 */
1352 		ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, 0);
1353 		if (ret < 0)
1354 			return ret;
1355 
1356 		if (state) {
1357 			/* Now, start initializing */
1358 			ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL,
1359 						 DP_MST_EN);
1360 			if (ret < 0)
1361 				return ret;
1362 		}
1363 	}
1364 
1365 	return nvif_mthd(disp, 0, &args, sizeof(args));
1366 }
1367 
1368 int
1369 nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
1370 {
1371 	struct drm_dp_aux *aux;
1372 	int ret;
1373 	bool old_state, new_state;
1374 	u8 mstm_ctrl;
1375 
1376 	if (!mstm)
1377 		return 0;
1378 
1379 	mutex_lock(&mstm->mgr.lock);
1380 
1381 	old_state = mstm->mgr.mst_state;
1382 	new_state = old_state;
1383 	aux = mstm->mgr.aux;
1384 
1385 	if (old_state) {
1386 		/* Just check that the MST hub is still as we expect it */
1387 		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CTRL, &mstm_ctrl);
1388 		if (ret < 0 || !(mstm_ctrl & DP_MST_EN)) {
1389 			DRM_DEBUG_KMS("Hub gone, disabling MST topology\n");
1390 			new_state = false;
1391 		}
1392 	} else if (dpcd[0] >= 0x12) {
1393 		ret = drm_dp_dpcd_readb(aux, DP_MSTM_CAP, &dpcd[1]);
1394 		if (ret < 0)
1395 			goto probe_error;
1396 
1397 		if (!(dpcd[1] & DP_MST_CAP))
1398 			dpcd[0] = 0x11;
1399 		else
1400 			new_state = allow;
1401 	}
1402 
1403 	if (new_state == old_state) {
1404 		mutex_unlock(&mstm->mgr.lock);
1405 		return new_state;
1406 	}
1407 
1408 	ret = nv50_mstm_enable(mstm, dpcd[0], new_state);
1409 	if (ret)
1410 		goto probe_error;
1411 
1412 	mutex_unlock(&mstm->mgr.lock);
1413 
1414 	ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, new_state);
1415 	if (ret)
1416 		return nv50_mstm_enable(mstm, dpcd[0], 0);
1417 
1418 	return new_state;
1419 
1420 probe_error:
1421 	mutex_unlock(&mstm->mgr.lock);
1422 	return ret;
1423 }
1424 
1425 static void
1426 nv50_mstm_fini(struct nv50_mstm *mstm)
1427 {
1428 	if (mstm && mstm->mgr.mst_state)
1429 		drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
1430 }
1431 
1432 static void
1433 nv50_mstm_init(struct nv50_mstm *mstm, bool runtime)
1434 {
1435 	int ret;
1436 
1437 	if (!mstm || !mstm->mgr.mst_state)
1438 		return;
1439 
1440 	ret = drm_dp_mst_topology_mgr_resume(&mstm->mgr, !runtime);
1441 	if (ret == -1) {
1442 		drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
1443 		drm_kms_helper_hotplug_event(mstm->mgr.dev);
1444 	}
1445 }
1446 
1447 static void
1448 nv50_mstm_del(struct nv50_mstm **pmstm)
1449 {
1450 	struct nv50_mstm *mstm = *pmstm;
1451 	if (mstm) {
1452 		drm_dp_mst_topology_mgr_destroy(&mstm->mgr);
1453 		kfree(*pmstm);
1454 		*pmstm = NULL;
1455 	}
1456 }
1457 
1458 static int
1459 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
1460 	      int conn_base_id, struct nv50_mstm **pmstm)
1461 {
1462 	const int max_payloads = hweight8(outp->dcb->heads);
1463 	struct drm_device *dev = outp->base.base.dev;
1464 	struct nv50_mstm *mstm;
1465 	int ret;
1466 	u8 dpcd;
1467 
1468 	/* This is a workaround for some monitors not functioning
1469 	 * correctly in MST mode on initial module load.  I think
1470 	 * some bad interaction with the VBIOS may be responsible.
1471 	 *
1472 	 * A good ol' off and on again seems to work here ;)
1473 	 */
1474 	ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
1475 	if (ret >= 0 && dpcd >= 0x12)
1476 		drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
1477 
1478 	if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
1479 		return -ENOMEM;
1480 	mstm->outp = outp;
1481 	mstm->mgr.cbs = &nv50_mstm;
1482 
1483 	ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
1484 					   max_payloads, conn_base_id);
1485 	if (ret)
1486 		return ret;
1487 
1488 	return 0;
1489 }
1490 
1491 /******************************************************************************
1492  * SOR
1493  *****************************************************************************/
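/* nv_encoder->ctrl caches the SOR control word: the low bits select
 * which heads are being driven, bits 8 and up the protocol.  Passing a
 * NULL head state detaches the head; the word is then written via the
 * core channel's SOR ctrl method.
 */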
1494 static void
1495 nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
1496 		struct nv50_head_atom *asyh, u8 proto, u8 depth)
1497 {
1498 	struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
1499 	struct nv50_core *core = disp->core;
1500 
1501 	if (!asyh) {
1502 		nv_encoder->ctrl &= ~BIT(head);
1503 		if (!(nv_encoder->ctrl & 0x0000000f))
1504 			nv_encoder->ctrl = 0;
1505 	} else {
1506 		nv_encoder->ctrl |= proto << 8;
1507 		nv_encoder->ctrl |= BIT(head);
1508 		asyh->or.depth = depth;
1509 	}
1510 
1511 	core->func->sor->ctrl(core, nv_encoder->or, nv_encoder->ctrl, asyh);
1512 }
1513 
1514 static void
1515 nv50_sor_disable(struct drm_encoder *encoder)
1516 {
1517 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1518 	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1519 
1520 	nv_encoder->crtc = NULL;
1521 
1522 	if (nv_crtc) {
1523 		struct nvkm_i2c_aux *aux = nv_encoder->aux;
1524 		u8 pwr;
1525 
1526 		if (aux) {
1527 			int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
1528 			if (ret == 0) {
1529 				pwr &= ~DP_SET_POWER_MASK;
1530 				pwr |=  DP_SET_POWER_D3;
1531 				nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
1532 			}
1533 		}
1534 
1535 		nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
1536 		nv50_audio_disable(encoder, nv_crtc);
1537 		nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
1538 		nv50_outp_release(nv_encoder);
1539 	}
1540 }
1541 
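/* Light up the SOR.  The protocol value selects how the OR drives the
 * output: 0x0 LVDS, 0x1/0x2 single-link TMDS (0x4 ORed in for
 * dual-link) and 0x8/0x9 DisplayPort, with the TMDS/DP variant chosen
 * by which sublink the output uses.  LVDS panels additionally have the
 * VBIOS LVDS script run, with flags for dual-link and 24-bit panels.
 */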
1542 static void
1543 nv50_sor_enable(struct drm_encoder *encoder)
1544 {
1545 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1546 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1547 	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
1548 	struct drm_display_mode *mode = &asyh->state.adjusted_mode;
1549 	struct {
1550 		struct nv50_disp_mthd_v1 base;
1551 		struct nv50_disp_sor_lvds_script_v0 lvds;
1552 	} lvds = {
1553 		.base.version = 1,
1554 		.base.method  = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
1555 		.base.hasht   = nv_encoder->dcb->hasht,
1556 		.base.hashm   = nv_encoder->dcb->hashm,
1557 	};
1558 	struct nv50_disp *disp = nv50_disp(encoder->dev);
1559 	struct drm_device *dev = encoder->dev;
1560 	struct nouveau_drm *drm = nouveau_drm(dev);
1561 	struct nouveau_connector *nv_connector;
1562 	struct nvbios *bios = &drm->vbios;
1563 	u8 proto = 0xf;
1564 	u8 depth = 0x0;
1565 
1566 	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1567 	nv_encoder->crtc = encoder->crtc;
1568 	nv50_outp_acquire(nv_encoder);
1569 
1570 	switch (nv_encoder->dcb->type) {
1571 	case DCB_OUTPUT_TMDS:
1572 		if (nv_encoder->link & 1) {
1573 			proto = 0x1;
1574 			/* Only enable dual-link if:
1575 			 *  - we need to (i.e. pixel clock >= 165MHz)
1576 			 *  - the DCB says we can
1577 			 *  - it's not an HDMI monitor, since there's no
1578 			 *    dual-link on HDMI.
1579 			 */
1580 			if (mode->clock >= 165000 &&
1581 			    nv_encoder->dcb->duallink_possible &&
1582 			    !drm_detect_hdmi_monitor(nv_connector->edid))
1583 				proto |= 0x4;
1584 		} else {
1585 			proto = 0x2;
1586 		}
1587 
1588 		nv50_hdmi_enable(&nv_encoder->base.base, mode);
1589 		break;
1590 	case DCB_OUTPUT_LVDS:
1591 		proto = 0x0;
1592 
1593 		if (bios->fp_no_ddc) {
1594 			if (bios->fp.dual_link)
1595 				lvds.lvds.script |= 0x0100;
1596 			if (bios->fp.if_is_24bit)
1597 				lvds.lvds.script |= 0x0200;
1598 		} else {
1599 			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1600 				if (((u8 *)nv_connector->edid)[121] == 2)
1601 					lvds.lvds.script |= 0x0100;
1602 			} else
1603 			if (mode->clock >= bios->fp.duallink_transition_clk) {
1604 				lvds.lvds.script |= 0x0100;
1605 			}
1606 
1607 			if (lvds.lvds.script & 0x0100) {
1608 				if (bios->fp.strapless_is_24bit & 2)
1609 					lvds.lvds.script |= 0x0200;
1610 			} else {
1611 				if (bios->fp.strapless_is_24bit & 1)
1612 					lvds.lvds.script |= 0x0200;
1613 			}
1614 
1615 			if (asyh->or.bpc == 8)
1616 				lvds.lvds.script |= 0x0200;
1617 		}
1618 
1619 		nvif_mthd(&disp->disp->object, 0, &lvds, sizeof(lvds));
1620 		break;
1621 	case DCB_OUTPUT_DP:
1622 		depth = nv50_dp_bpc_to_depth(asyh->or.bpc);
1623 
1624 		if (nv_encoder->link & 1)
1625 			proto = 0x8;
1626 		else
1627 			proto = 0x9;
1628 
1629 		nv50_audio_enable(encoder, mode);
1630 		break;
1631 	default:
1632 		BUG();
1633 		break;
1634 	}
1635 
1636 	nv_encoder->update(nv_encoder, nv_crtc->index, asyh, proto, depth);
1637 }
1638 
1639 static const struct drm_encoder_helper_funcs
1640 nv50_sor_help = {
1641 	.atomic_check = nv50_outp_atomic_check,
1642 	.enable = nv50_sor_enable,
1643 	.disable = nv50_sor_disable,
1644 };
1645 
1646 static void
1647 nv50_sor_destroy(struct drm_encoder *encoder)
1648 {
1649 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1650 	nv50_mstm_del(&nv_encoder->dp.mstm);
1651 	drm_encoder_cleanup(encoder);
1652 	kfree(encoder);
1653 }
1654 
1655 static const struct drm_encoder_funcs
1656 nv50_sor_func = {
1657 	.destroy = nv50_sor_destroy,
1658 };
1659 
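/* MST is only advertised if the VBIOS DP table is version 0x40 or newer
 * and has the MST capability bit (0x04 at byte 0x08) set.
 */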
1660 static bool nv50_has_mst(struct nouveau_drm *drm)
1661 {
1662 	struct nvkm_bios *bios = nvxx_bios(&drm->client.device);
1663 	u32 data;
1664 	u8 ver, hdr, cnt, len;
1665 
1666 	data = nvbios_dp_table(bios, &ver, &hdr, &cnt, &len);
1667 	return data && ver >= 0x40 && (nvbios_rd08(bios, data + 0x08) & 0x04);
1668 }
1669 
1670 static int
1671 nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1672 {
1673 	struct nouveau_connector *nv_connector = nouveau_connector(connector);
1674 	struct nouveau_drm *drm = nouveau_drm(connector->dev);
1675 	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1676 	struct nouveau_encoder *nv_encoder;
1677 	struct drm_encoder *encoder;
1678 	struct nv50_disp *disp = nv50_disp(connector->dev);
1679 	int type, ret;
1680 
1681 	switch (dcbe->type) {
1682 	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
1683 	case DCB_OUTPUT_TMDS:
1684 	case DCB_OUTPUT_DP:
1685 	default:
1686 		type = DRM_MODE_ENCODER_TMDS;
1687 		break;
1688 	}
1689 
1690 	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1691 	if (!nv_encoder)
1692 		return -ENOMEM;
1693 	nv_encoder->dcb = dcbe;
1694 	nv_encoder->update = nv50_sor_update;
1695 
1696 	encoder = to_drm_encoder(nv_encoder);
1697 	encoder->possible_crtcs = dcbe->heads;
1698 	encoder->possible_clones = 0;
1699 	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
1700 			 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
1701 	drm_encoder_helper_add(encoder, &nv50_sor_help);
1702 
1703 	drm_connector_attach_encoder(connector, encoder);
1704 
1705 	disp->core->func->sor->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
1706 
1707 	if (dcbe->type == DCB_OUTPUT_DP) {
1708 		struct nvkm_i2c_aux *aux =
1709 			nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
1710 
1711 		if (aux) {
1712 			if (disp->disp->object.oclass < GF110_DISP) {
1713 				/* HW has no support for address-only
1714 				 * transactions, so we're required to
1715 				 * use custom I2C-over-AUX code.
1716 				 */
1717 				nv_encoder->i2c = &aux->i2c;
1718 			} else {
1719 				nv_encoder->i2c = &nv_connector->aux.ddc;
1720 			}
1721 			nv_encoder->aux = aux;
1722 		}
1723 
1724 		if (nv_connector->type != DCB_CONNECTOR_eDP &&
1725 		    nv50_has_mst(drm)) {
1726 			ret = nv50_mstm_new(nv_encoder, &nv_connector->aux,
1727 					    16, nv_connector->base.base.id,
1728 					    &nv_encoder->dp.mstm);
1729 			if (ret)
1730 				return ret;
1731 		}
1732 	} else {
1733 		struct nvkm_i2c_bus *bus =
1734 			nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
1735 		if (bus)
1736 			nv_encoder->i2c = &bus->i2c;
1737 	}
1738 
1739 	return 0;
1740 }
1741 
1742 /******************************************************************************
1743  * PIOR
1744  *****************************************************************************/
1745 static int
1746 nv50_pior_atomic_check(struct drm_encoder *encoder,
1747 		       struct drm_crtc_state *crtc_state,
1748 		       struct drm_connector_state *conn_state)
1749 {
1750 	int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
1751 	if (ret)
1752 		return ret;
1753 	crtc_state->adjusted_mode.clock *= 2;
1754 	return 0;
1755 }
1756 
1757 static void
1758 nv50_pior_disable(struct drm_encoder *encoder)
1759 {
1760 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1761 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
1762 	if (nv_encoder->crtc)
1763 		core->func->pior->ctrl(core, nv_encoder->or, 0x00000000, NULL);
1764 	nv_encoder->crtc = NULL;
1765 	nv50_outp_release(nv_encoder);
1766 }
1767 
1768 static void
1769 nv50_pior_enable(struct drm_encoder *encoder)
1770 {
1771 	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1772 	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1773 	struct nv50_head_atom *asyh = nv50_head_atom(nv_crtc->base.state);
1774 	struct nv50_core *core = nv50_disp(encoder->dev)->core;
1775 	u8 owner = 1 << nv_crtc->index;
1776 	u8 proto;
1777 
1778 	nv50_outp_acquire(nv_encoder);
1779 
1780 	switch (asyh->or.bpc) {
1781 	case 10: asyh->or.depth = 0x6; break;
1782 	case  8: asyh->or.depth = 0x5; break;
1783 	case  6: asyh->or.depth = 0x2; break;
1784 	default: asyh->or.depth = 0x0; break;
1785 	}
1786 
1787 	switch (nv_encoder->dcb->type) {
1788 	case DCB_OUTPUT_TMDS:
1789 	case DCB_OUTPUT_DP:
1790 		proto = 0x0;
1791 		break;
1792 	default:
1793 		BUG();
1794 		break;
1795 	}
1796 
1797 	core->func->pior->ctrl(core, nv_encoder->or, (proto << 8) | owner, asyh);
1798 	nv_encoder->crtc = encoder->crtc;
1799 }
1800 
1801 static const struct drm_encoder_helper_funcs
1802 nv50_pior_help = {
1803 	.atomic_check = nv50_pior_atomic_check,
1804 	.enable = nv50_pior_enable,
1805 	.disable = nv50_pior_disable,
1806 };
1807 
1808 static void
1809 nv50_pior_destroy(struct drm_encoder *encoder)
1810 {
1811 	drm_encoder_cleanup(encoder);
1812 	kfree(encoder);
1813 }
1814 
1815 static const struct drm_encoder_funcs
1816 nv50_pior_func = {
1817 	.destroy = nv50_pior_destroy,
1818 };
1819 
1820 static int
1821 nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
1822 {
1823 	struct drm_device *dev = connector->dev;
1824 	struct nouveau_drm *drm = nouveau_drm(dev);
1825 	struct nv50_disp *disp = nv50_disp(dev);
1826 	struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
1827 	struct nvkm_i2c_bus *bus = NULL;
1828 	struct nvkm_i2c_aux *aux = NULL;
1829 	struct i2c_adapter *ddc;
1830 	struct nouveau_encoder *nv_encoder;
1831 	struct drm_encoder *encoder;
1832 	int type;
1833 
1834 	switch (dcbe->type) {
1835 	case DCB_OUTPUT_TMDS:
1836 		bus  = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
1837 		ddc  = bus ? &bus->i2c : NULL;
1838 		type = DRM_MODE_ENCODER_TMDS;
1839 		break;
1840 	case DCB_OUTPUT_DP:
1841 		aux  = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
1842 		ddc  = aux ? &aux->i2c : NULL;
1843 		type = DRM_MODE_ENCODER_TMDS;
1844 		break;
1845 	default:
1846 		return -ENODEV;
1847 	}
1848 
1849 	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1850 	if (!nv_encoder)
1851 		return -ENOMEM;
1852 	nv_encoder->dcb = dcbe;
1853 	nv_encoder->i2c = ddc;
1854 	nv_encoder->aux = aux;
1855 
1856 	encoder = to_drm_encoder(nv_encoder);
1857 	encoder->possible_crtcs = dcbe->heads;
1858 	encoder->possible_clones = 0;
1859 	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
1860 			 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
1861 	drm_encoder_helper_add(encoder, &nv50_pior_help);
1862 
1863 	drm_connector_attach_encoder(connector, encoder);
1864 
1865 	disp->core->func->pior->get_caps(disp, nv_encoder, ffs(dcbe->or) - 1);
1866 
1867 	return 0;
1868 }
1869 
1870 /******************************************************************************
1871  * Atomic
1872  *****************************************************************************/
1873 
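/* Push a core channel update: prepare any modified MST encoders, kick the
 * core update with the supplied interlock mask, wait on the core notifier
 * for the hardware to signal completion, then clean up MST state.
 */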
1874 static void
1875 nv50_disp_atomic_commit_core(struct drm_atomic_state *state, u32 *interlock)
1876 {
1877 	struct nouveau_drm *drm = nouveau_drm(state->dev);
1878 	struct nv50_disp *disp = nv50_disp(drm->dev);
1879 	struct nv50_core *core = disp->core;
1880 	struct nv50_mstm *mstm;
1881 	struct drm_encoder *encoder;
1882 
1883 	NV_ATOMIC(drm, "commit core %08x\n", interlock[NV50_DISP_INTERLOCK_BASE]);
1884 
1885 	drm_for_each_encoder(encoder, drm->dev) {
1886 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
1887 			mstm = nouveau_encoder(encoder)->dp.mstm;
1888 			if (mstm && mstm->modified)
1889 				nv50_mstm_prepare(mstm);
1890 		}
1891 	}
1892 
1893 	core->func->ntfy_init(disp->sync, NV50_DISP_CORE_NTFY);
1894 	core->func->update(core, interlock, true);
1895 	if (core->func->ntfy_wait_done(disp->sync, NV50_DISP_CORE_NTFY,
1896 				       disp->core->chan.base.device))
1897 		NV_ERROR(drm, "core notifier timeout\n");
1898 
1899 	drm_for_each_encoder(encoder, drm->dev) {
1900 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
1901 			mstm = nouveau_encoder(encoder)->dp.mstm;
1902 			if (mstm && mstm->modified)
1903 				nv50_mstm_cleanup(mstm);
1904 		}
1905 	}
1906 }
1907 
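/* Kick an update on every window channel referenced by the interlock mask
 * so its pending state is flushed along with the core update.
 */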
1908 static void
1909 nv50_disp_atomic_commit_wndw(struct drm_atomic_state *state, u32 *interlock)
1910 {
1911 	struct drm_plane_state *new_plane_state;
1912 	struct drm_plane *plane;
1913 	int i;
1914 
1915 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1916 		struct nv50_wndw *wndw = nv50_wndw(plane);
1917 		if (interlock[wndw->interlock.type] & wndw->interlock.data) {
1918 			if (wndw->func->update)
1919 				wndw->func->update(wndw, interlock);
1920 		}
1921 	}
1922 }
1923 
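/* Tail of an atomic commit.  Heads, planes and output paths being disabled
 * are flushed first, then output paths, heads and planes being enabled are
 * programmed, the result is flushed, and finally the window channels are
 * waited upon and any pending vblank events delivered.
 */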
1924 static void
1925 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
1926 {
1927 	struct drm_device *dev = state->dev;
1928 	struct drm_crtc_state *new_crtc_state, *old_crtc_state;
1929 	struct drm_crtc *crtc;
1930 	struct drm_plane_state *new_plane_state;
1931 	struct drm_plane *plane;
1932 	struct nouveau_drm *drm = nouveau_drm(dev);
1933 	struct nv50_disp *disp = nv50_disp(dev);
1934 	struct nv50_atom *atom = nv50_atom(state);
1935 	struct nv50_core *core = disp->core;
1936 	struct nv50_outp_atom *outp, *outt;
1937 	u32 interlock[NV50_DISP_INTERLOCK__SIZE] = {};
1938 	int i;
1939 
1940 	NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
1941 	drm_atomic_helper_wait_for_fences(dev, state, false);
1942 	drm_atomic_helper_wait_for_dependencies(state);
1943 	drm_atomic_helper_update_legacy_modeset_state(dev, state);
1944 
1945 	if (atom->lock_core)
1946 		mutex_lock(&disp->mutex);
1947 
1948 	/* Disable head(s). */
1949 	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
1950 		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
1951 		struct nv50_head *head = nv50_head(crtc);
1952 
1953 		NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
1954 			  asyh->clr.mask, asyh->set.mask);
1955 
1956 		if (old_crtc_state->active && !new_crtc_state->active) {
1957 			pm_runtime_put_noidle(dev->dev);
1958 			drm_crtc_vblank_off(crtc);
1959 		}
1960 
1961 		if (asyh->clr.mask) {
1962 			nv50_head_flush_clr(head, asyh, atom->flush_disable);
1963 			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
1964 		}
1965 	}
1966 
1967 	/* Disable plane(s). */
1968 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
1969 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
1970 		struct nv50_wndw *wndw = nv50_wndw(plane);
1971 
1972 		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
1973 			  asyw->clr.mask, asyw->set.mask);
1974 		if (!asyw->clr.mask)
1975 			continue;
1976 
1977 		nv50_wndw_flush_clr(wndw, interlock, atom->flush_disable, asyw);
1978 	}
1979 
1980 	/* Disable output path(s). */
1981 	list_for_each_entry(outp, &atom->outp, head) {
1982 		const struct drm_encoder_helper_funcs *help;
1983 		struct drm_encoder *encoder;
1984 
1985 		encoder = outp->encoder;
1986 		help = encoder->helper_private;
1987 
1988 		NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
1989 			  outp->clr.mask, outp->set.mask);
1990 
1991 		if (outp->clr.mask) {
1992 			help->disable(encoder);
1993 			interlock[NV50_DISP_INTERLOCK_CORE] |= 1;
1994 			if (outp->flush_disable) {
1995 				nv50_disp_atomic_commit_wndw(state, interlock);
1996 				nv50_disp_atomic_commit_core(state, interlock);
1997 				memset(interlock, 0x00, sizeof(interlock));
1998 			}
1999 		}
2000 	}
2001 
2002 	/* Flush disable. */
2003 	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2004 		if (atom->flush_disable) {
2005 			nv50_disp_atomic_commit_wndw(state, interlock);
2006 			nv50_disp_atomic_commit_core(state, interlock);
2007 			memset(interlock, 0x00, sizeof(interlock));
2008 		}
2009 	}
2010 
2011 	/* Update output path(s). */
2012 	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2013 		const struct drm_encoder_helper_funcs *help;
2014 		struct drm_encoder *encoder;
2015 
2016 		encoder = outp->encoder;
2017 		help = encoder->helper_private;
2018 
2019 		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
2020 			  outp->set.mask, outp->clr.mask);
2021 
2022 		if (outp->set.mask) {
2023 			help->enable(encoder);
2024 			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2025 		}
2026 
2027 		list_del(&outp->head);
2028 		kfree(outp);
2029 	}
2030 
2031 	/* Update head(s). */
2032 	for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
2033 		struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
2034 		struct nv50_head *head = nv50_head(crtc);
2035 
2036 		NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
2037 			  asyh->set.mask, asyh->clr.mask);
2038 
2039 		if (asyh->set.mask) {
2040 			nv50_head_flush_set(head, asyh);
2041 			interlock[NV50_DISP_INTERLOCK_CORE] = 1;
2042 		}
2043 
2044 		if (new_crtc_state->active) {
2045 			if (!old_crtc_state->active) {
2046 				drm_crtc_vblank_on(crtc);
2047 				pm_runtime_get_noresume(dev->dev);
2048 			}
2049 			if (new_crtc_state->event)
2050 				drm_crtc_vblank_get(crtc);
2051 		}
2052 	}
2053 
2054 	/* Update window->head assignment.
2055 	 *
2056 	 * This has to happen in an update that's not interlocked with
2057 	 * any window channels to avoid hitting HW error checks.
2058 	 *
	 * TODO: Proper handling of window ownership (Turing apparently
	 *       supports non-fixed mappings).
2061 	 */
2062 	if (core->assign_windows) {
2063 		core->func->wndw.owner(core);
2064 		core->func->update(core, interlock, false);
2065 		core->assign_windows = false;
2066 		interlock[NV50_DISP_INTERLOCK_CORE] = 0;
2067 	}
2068 
2069 	/* Update plane(s). */
2070 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2071 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2072 		struct nv50_wndw *wndw = nv50_wndw(plane);
2073 
2074 		NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
2075 			  asyw->set.mask, asyw->clr.mask);
2076 		if ( !asyw->set.mask &&
2077 		    (!asyw->clr.mask || atom->flush_disable))
2078 			continue;
2079 
2080 		nv50_wndw_flush_set(wndw, interlock, asyw);
2081 	}
2082 
2083 	/* Flush update. */
2084 	nv50_disp_atomic_commit_wndw(state, interlock);
2085 
2086 	if (interlock[NV50_DISP_INTERLOCK_CORE]) {
2087 		if (interlock[NV50_DISP_INTERLOCK_BASE] ||
2088 		    interlock[NV50_DISP_INTERLOCK_OVLY] ||
2089 		    interlock[NV50_DISP_INTERLOCK_WNDW] ||
2090 		    !atom->state.legacy_cursor_update)
2091 			nv50_disp_atomic_commit_core(state, interlock);
2092 		else
2093 			disp->core->func->update(disp->core, interlock, false);
2094 	}
2095 
2096 	if (atom->lock_core)
2097 		mutex_unlock(&disp->mutex);
2098 
2099 	/* Wait for HW to signal completion. */
2100 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2101 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2102 		struct nv50_wndw *wndw = nv50_wndw(plane);
2103 		int ret = nv50_wndw_wait_armed(wndw, asyw);
2104 		if (ret)
2105 			NV_ERROR(drm, "%s: timeout\n", plane->name);
2106 	}
2107 
2108 	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2109 		if (new_crtc_state->event) {
2110 			unsigned long flags;
2111 			/* Get correct count/ts if racing with vblank irq */
2112 			if (new_crtc_state->active)
2113 				drm_crtc_accurate_vblank_count(crtc);
2114 			spin_lock_irqsave(&crtc->dev->event_lock, flags);
2115 			drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
2116 			spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
2117 
2118 			new_crtc_state->event = NULL;
2119 			if (new_crtc_state->active)
2120 				drm_crtc_vblank_put(crtc);
2121 		}
2122 	}
2123 
2124 	drm_atomic_helper_commit_hw_done(state);
2125 	drm_atomic_helper_cleanup_planes(dev, state);
2126 	drm_atomic_helper_commit_cleanup_done(state);
2127 	drm_atomic_state_put(state);
2128 
2129 	/* Drop the RPM ref we got from nv50_disp_atomic_commit() */
2130 	pm_runtime_mark_last_busy(dev->dev);
2131 	pm_runtime_put_autosuspend(dev->dev);
2132 }
2133 
2134 static void
2135 nv50_disp_atomic_commit_work(struct work_struct *work)
2136 {
2137 	struct drm_atomic_state *state =
2138 		container_of(work, typeof(*state), commit_work);
2139 	nv50_disp_atomic_commit_tail(state);
2140 }
2141 
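/* Entry point for atomic commits: take a runtime PM reference, prepare the
 * planes, swap in the new state and either queue the commit tail on
 * system_unbound_wq (nonblocking) or run it directly.
 */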
2142 static int
2143 nv50_disp_atomic_commit(struct drm_device *dev,
2144 			struct drm_atomic_state *state, bool nonblock)
2145 {
2146 	struct drm_plane_state *new_plane_state;
2147 	struct drm_plane *plane;
2148 	int ret, i;
2149 
	ret = pm_runtime_get_sync(dev->dev);
	if (ret < 0 && ret != -EACCES) {
		/* The usage count is raised even on failure; drop it. */
		pm_runtime_put_autosuspend(dev->dev);
		return ret;
	}
2153 
2154 	ret = drm_atomic_helper_setup_commit(state, nonblock);
2155 	if (ret)
2156 		goto done;
2157 
2158 	INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);
2159 
2160 	ret = drm_atomic_helper_prepare_planes(dev, state);
2161 	if (ret)
2162 		goto done;
2163 
2164 	if (!nonblock) {
2165 		ret = drm_atomic_helper_wait_for_fences(dev, state, true);
2166 		if (ret)
2167 			goto err_cleanup;
2168 	}
2169 
2170 	ret = drm_atomic_helper_swap_state(state, true);
2171 	if (ret)
2172 		goto err_cleanup;
2173 
2174 	for_each_new_plane_in_state(state, plane, new_plane_state, i) {
2175 		struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
2176 		struct nv50_wndw *wndw = nv50_wndw(plane);
2177 
2178 		if (asyw->set.image)
2179 			nv50_wndw_ntfy_enable(wndw, asyw);
2180 	}
2181 
2182 	drm_atomic_state_get(state);
2183 
2184 	/*
2185 	 * Grab another RPM ref for the commit tail, which will release the
2186 	 * ref when it's finished
2187 	 */
2188 	pm_runtime_get_noresume(dev->dev);
2189 
2190 	if (nonblock)
2191 		queue_work(system_unbound_wq, &state->commit_work);
2192 	else
2193 		nv50_disp_atomic_commit_tail(state);
2194 
2195 err_cleanup:
2196 	if (ret)
2197 		drm_atomic_helper_cleanup_planes(dev, state);
2198 done:
2199 	pm_runtime_put_autosuspend(dev->dev);
2200 	return ret;
2201 }
2202 
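/* Find the nv50_outp_atom tracking this encoder in the current atomic
 * state, allocating and linking a new one the first time the encoder is
 * touched by the commit.
 */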
2203 static struct nv50_outp_atom *
2204 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
2205 {
2206 	struct nv50_outp_atom *outp;
2207 
2208 	list_for_each_entry(outp, &atom->outp, head) {
2209 		if (outp->encoder == encoder)
2210 			return outp;
2211 	}
2212 
2213 	outp = kzalloc(sizeof(*outp), GFP_KERNEL);
2214 	if (!outp)
2215 		return ERR_PTR(-ENOMEM);
2216 
2217 	list_add(&outp->head, &atom->outp);
2218 	outp->encoder = encoder;
2219 	return outp;
2220 }
2221 
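/* If the connector's old CRTC was active and the new state requires a full
 * modeset, mark the encoder's control method for disable.  MST encoders
 * additionally force the disable to be flushed before anything else is
 * programmed.
 */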
2222 static int
2223 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
2224 				struct drm_connector_state *old_connector_state)
2225 {
2226 	struct drm_encoder *encoder = old_connector_state->best_encoder;
2227 	struct drm_crtc_state *old_crtc_state, *new_crtc_state;
2228 	struct drm_crtc *crtc;
2229 	struct nv50_outp_atom *outp;
2230 
2231 	if (!(crtc = old_connector_state->crtc))
2232 		return 0;
2233 
2234 	old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
2235 	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2236 	if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2237 		outp = nv50_disp_outp_atomic_add(atom, encoder);
2238 		if (IS_ERR(outp))
2239 			return PTR_ERR(outp);
2240 
2241 		if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
2242 			outp->flush_disable = true;
2243 			atom->flush_disable = true;
2244 		}
2245 		outp->clr.ctrl = true;
2246 		atom->lock_core = true;
2247 	}
2248 
2249 	return 0;
2250 }
2251 
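/* Conversely, mark the encoder's control method for programming when its
 * new CRTC is active and undergoing a modeset.
 */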
2252 static int
2253 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
2254 				struct drm_connector_state *connector_state)
2255 {
2256 	struct drm_encoder *encoder = connector_state->best_encoder;
2257 	struct drm_crtc_state *new_crtc_state;
2258 	struct drm_crtc *crtc;
2259 	struct nv50_outp_atom *outp;
2260 
2261 	if (!(crtc = connector_state->crtc))
2262 		return 0;
2263 
2264 	new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
2265 	if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
2266 		outp = nv50_disp_outp_atomic_add(atom, encoder);
2267 		if (IS_ERR(outp))
2268 			return PTR_ERR(outp);
2269 
2270 		outp->set.ctrl = true;
2271 		atom->lock_core = true;
2272 	}
2273 
2274 	return 0;
2275 }
2276 
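/* Driver-wide atomic check.  Colour management is handled per-plane, so
 * any CRTC with colour state changes pulls its planes into the commit;
 * after the DRM helper checks, build the lists of output paths to disable
 * and enable, then run the DP MST bandwidth check.
 */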
2277 static int
2278 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
2279 {
2280 	struct nv50_atom *atom = nv50_atom(state);
2281 	struct drm_connector_state *old_connector_state, *new_connector_state;
2282 	struct drm_connector *connector;
2283 	struct drm_crtc_state *new_crtc_state;
2284 	struct drm_crtc *crtc;
2285 	int ret, i;
2286 
2287 	/* We need to handle colour management on a per-plane basis. */
2288 	for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
2289 		if (new_crtc_state->color_mgmt_changed) {
2290 			ret = drm_atomic_add_affected_planes(state, crtc);
2291 			if (ret)
2292 				return ret;
2293 		}
2294 	}
2295 
2296 	ret = drm_atomic_helper_check(dev, state);
2297 	if (ret)
2298 		return ret;
2299 
2300 	for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
2301 		ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
2302 		if (ret)
2303 			return ret;
2304 
2305 		ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
2306 		if (ret)
2307 			return ret;
2308 	}
2309 
2310 	ret = drm_dp_mst_atomic_check(state);
2311 	if (ret)
2312 		return ret;
2313 
2314 	return 0;
2315 }
2316 
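/* Free the per-encoder atoms accumulated during atomic check before the
 * default state clear runs.
 */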
2317 static void
2318 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
2319 {
2320 	struct nv50_atom *atom = nv50_atom(state);
2321 	struct nv50_outp_atom *outp, *outt;
2322 
2323 	list_for_each_entry_safe(outp, outt, &atom->outp, head) {
2324 		list_del(&outp->head);
2325 		kfree(outp);
2326 	}
2327 
2328 	drm_atomic_state_default_clear(state);
2329 }
2330 
2331 static void
2332 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
2333 {
2334 	struct nv50_atom *atom = nv50_atom(state);
2335 	drm_atomic_state_default_release(&atom->state);
2336 	kfree(atom);
2337 }
2338 
2339 static struct drm_atomic_state *
2340 nv50_disp_atomic_state_alloc(struct drm_device *dev)
2341 {
2342 	struct nv50_atom *atom;
2343 	if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
2344 	    drm_atomic_state_init(dev, &atom->state) < 0) {
2345 		kfree(atom);
2346 		return NULL;
2347 	}
2348 	INIT_LIST_HEAD(&atom->outp);
2349 	return &atom->state;
2350 }
2351 
2352 static const struct drm_mode_config_funcs
2353 nv50_disp_func = {
2354 	.fb_create = nouveau_user_framebuffer_create,
2355 	.output_poll_changed = nouveau_fbcon_output_poll_changed,
2356 	.atomic_check = nv50_disp_atomic_check,
2357 	.atomic_commit = nv50_disp_atomic_commit,
2358 	.atomic_state_alloc = nv50_disp_atomic_state_alloc,
2359 	.atomic_state_clear = nv50_disp_atomic_state_clear,
2360 	.atomic_state_free = nv50_disp_atomic_state_free,
2361 };
2362 
2363 /******************************************************************************
2364  * Init
2365  *****************************************************************************/
2366 
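/* Quiesce all window channels and tear down the MST topology managers
 * before the display is suspended or the driver unloaded.
 */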
2367 static void
2368 nv50_display_fini(struct drm_device *dev, bool suspend)
2369 {
2370 	struct nouveau_encoder *nv_encoder;
2371 	struct drm_encoder *encoder;
2372 	struct drm_plane *plane;
2373 
2374 	drm_for_each_plane(plane, dev) {
2375 		struct nv50_wndw *wndw = nv50_wndw(plane);
2376 		if (plane->funcs != &nv50_wndw)
2377 			continue;
2378 		nv50_wndw_fini(wndw);
2379 	}
2380 
2381 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2382 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2383 			nv_encoder = nouveau_encoder(encoder);
2384 			nv50_mstm_fini(nv_encoder->dp.mstm);
2385 		}
2386 	}
2387 }
2388 
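/* The core channel is initialised by nv50_display_create(), so it only
 * needs to be brought up again here when returning from (runtime) suspend;
 * MST state and the window channels are then restored.
 */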
2389 static int
2390 nv50_display_init(struct drm_device *dev, bool resume, bool runtime)
2391 {
2392 	struct nv50_core *core = nv50_disp(dev)->core;
2393 	struct drm_encoder *encoder;
2394 	struct drm_plane *plane;
2395 
2396 	if (resume || runtime)
2397 		core->func->init(core);
2398 
2399 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2400 		if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
2401 			struct nouveau_encoder *nv_encoder =
2402 				nouveau_encoder(encoder);
2403 			nv50_mstm_init(nv_encoder->dp.mstm, runtime);
2404 		}
2405 	}
2406 
2407 	drm_for_each_plane(plane, dev) {
2408 		struct nv50_wndw *wndw = nv50_wndw(plane);
2409 		if (plane->funcs != &nv50_wndw)
2410 			continue;
2411 		nv50_wndw_init(wndw);
2412 	}
2413 
2414 	return 0;
2415 }
2416 
2417 static void
2418 nv50_display_destroy(struct drm_device *dev)
2419 {
2420 	struct nv50_disp *disp = nv50_disp(dev);
2421 
2422 	nv50_audio_component_fini(nouveau_drm(dev));
2423 
2424 	nvif_object_unmap(&disp->caps);
2425 	nvif_object_fini(&disp->caps);
2426 	nv50_core_del(&disp->core);
2427 
2428 	nouveau_bo_unmap(disp->sync);
2429 	if (disp->sync)
2430 		nouveau_bo_unpin(disp->sync);
2431 	nouveau_bo_ref(NULL, &disp->sync);
2432 
2433 	nouveau_display(dev)->priv = NULL;
2434 	kfree(disp);
2435 }
2436 
2437 int
2438 nv50_display_create(struct drm_device *dev)
2439 {
2440 	struct nvif_device *device = &nouveau_drm(dev)->client.device;
2441 	struct nouveau_drm *drm = nouveau_drm(dev);
2442 	struct dcb_table *dcb = &drm->vbios.dcb;
2443 	struct drm_connector *connector, *tmp;
2444 	struct nv50_disp *disp;
2445 	struct dcb_output *dcbe;
2446 	int crtcs, ret, i;
2447 	bool has_mst = nv50_has_mst(drm);
2448 
2449 	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2450 	if (!disp)
2451 		return -ENOMEM;
2452 
2453 	mutex_init(&disp->mutex);
2454 
2455 	nouveau_display(dev)->priv = disp;
2456 	nouveau_display(dev)->dtor = nv50_display_destroy;
2457 	nouveau_display(dev)->init = nv50_display_init;
2458 	nouveau_display(dev)->fini = nv50_display_fini;
2459 	disp->disp = &nouveau_display(dev)->disp;
2460 	dev->mode_config.funcs = &nv50_disp_func;
2461 	dev->mode_config.quirk_addfb_prefer_xbgr_30bpp = true;
2462 	dev->mode_config.normalize_zpos = true;
2463 
2464 	/* small shared memory area we use for notifiers and semaphores */
2465 	ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2466 			     0, 0x0000, NULL, NULL, &disp->sync);
2467 	if (!ret) {
2468 		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
2469 		if (!ret) {
2470 			ret = nouveau_bo_map(disp->sync);
2471 			if (ret)
2472 				nouveau_bo_unpin(disp->sync);
2473 		}
2474 		if (ret)
2475 			nouveau_bo_ref(NULL, &disp->sync);
2476 	}
2477 
2478 	if (ret)
2479 		goto out;
2480 
2481 	/* allocate master evo channel */
2482 	ret = nv50_core_new(drm, &disp->core);
2483 	if (ret)
2484 		goto out;
2485 
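	/* Initialise the core channel now and, where supported, probe the
	 * display capabilities.
	 */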
2486 	disp->core->func->init(disp->core);
2487 	if (disp->core->func->caps_init) {
2488 		ret = disp->core->func->caps_init(drm, disp);
2489 		if (ret)
2490 			goto out;
2491 	}
2492 
2493 	/* Assign the correct format modifiers */
2494 	if (disp->disp->object.oclass >= TU102_DISP)
2495 		nouveau_display(dev)->format_modifiers = wndwc57e_modifiers;
2496 	else
2497 	if (disp->disp->object.oclass >= GF110_DISP)
2498 		nouveau_display(dev)->format_modifiers = disp90xx_modifiers;
2499 	else
2500 		nouveau_display(dev)->format_modifiers = disp50xx_modifiers;
2501 
2502 	/* create crtc objects to represent the hw heads */
2503 	if (disp->disp->object.oclass >= GV100_DISP)
2504 		crtcs = nvif_rd32(&device->object, 0x610060) & 0xff;
2505 	else
2506 	if (disp->disp->object.oclass >= GF110_DISP)
2507 		crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
2508 	else
2509 		crtcs = 0x3;
2510 
2511 	for (i = 0; i < fls(crtcs); i++) {
2512 		struct nv50_head *head;
2513 
2514 		if (!(crtcs & (1 << i)))
2515 			continue;
2516 
2517 		head = nv50_head_create(dev, i);
2518 		if (IS_ERR(head)) {
2519 			ret = PTR_ERR(head);
2520 			goto out;
2521 		}
2522 
2523 		if (has_mst) {
2524 			head->msto = nv50_msto_new(dev, head, i);
2525 			if (IS_ERR(head->msto)) {
2526 				ret = PTR_ERR(head->msto);
2527 				head->msto = NULL;
2528 				goto out;
2529 			}
2530 
2531 			/*
2532 			 * FIXME: This is a hack to workaround the following
2533 			 * issues:
2534 			 *
2535 			 * https://gitlab.gnome.org/GNOME/mutter/issues/759
2536 			 * https://gitlab.freedesktop.org/xorg/xserver/merge_requests/277
2537 			 *
2538 			 * Once these issues are closed, this should be
2539 			 * removed
2540 			 */
2541 			head->msto->encoder.possible_crtcs = crtcs;
2542 		}
2543 	}
2544 
2545 	/* create encoder/connector objects based on VBIOS DCB table */
2546 	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2547 		connector = nouveau_connector_create(dev, dcbe);
2548 		if (IS_ERR(connector))
2549 			continue;
2550 
2551 		if (dcbe->location == DCB_LOC_ON_CHIP) {
2552 			switch (dcbe->type) {
2553 			case DCB_OUTPUT_TMDS:
2554 			case DCB_OUTPUT_LVDS:
2555 			case DCB_OUTPUT_DP:
2556 				ret = nv50_sor_create(connector, dcbe);
2557 				break;
2558 			case DCB_OUTPUT_ANALOG:
2559 				ret = nv50_dac_create(connector, dcbe);
2560 				break;
2561 			default:
2562 				ret = -ENODEV;
2563 				break;
2564 			}
2565 		} else {
2566 			ret = nv50_pior_create(connector, dcbe);
2567 		}
2568 
2569 		if (ret) {
2570 			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2571 				     dcbe->location, dcbe->type,
2572 				     ffs(dcbe->or) - 1, ret);
2573 			ret = 0;
2574 		}
2575 	}
2576 
2577 	/* cull any connectors we created that don't have an encoder */
2578 	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2579 		if (connector->possible_encoders)
2580 			continue;
2581 
2582 		NV_WARN(drm, "%s has no encoders, removing\n",
2583 			connector->name);
2584 		connector->funcs->destroy(connector);
2585 	}
2586 
2587 	/* Disable vblank irqs aggressively for power-saving, safe on nv50+ */
2588 	dev->vblank_disable_immediate = true;
2589 
2590 	nv50_audio_component_init(drm);
2591 
2592 out:
2593 	if (ret)
2594 		nv50_display_destroy(dev);
2595 	return ret;
2596 }
2597 
2598 /******************************************************************************
2599  * Format modifiers
2600  *****************************************************************************/
2601 
2602 /****************************************************************
2603  *            Log2(block height) ----------------------------+  *
2604  *            Page Kind ----------------------------------+  |  *
2605  *            Gob Height/Page Kind Generation ------+     |  |  *
2606  *                          Sector layout -------+  |     |  |  *
2607  *                          Compression ------+  |  |     |  |  */
2608 const u64 disp50xx_modifiers[] = { /*         |  |  |     |  |  */
2609 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 0),
2610 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 1),
2611 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 2),
2612 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 3),
2613 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 4),
2614 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x7a, 5),
2615 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 0),
2616 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 1),
2617 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 2),
2618 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 3),
2619 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 4),
2620 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x78, 5),
2621 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 0),
2622 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 1),
2623 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 2),
2624 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 3),
2625 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 4),
2626 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 1, 0x70, 5),
2627 	DRM_FORMAT_MOD_LINEAR,
2628 	DRM_FORMAT_MOD_INVALID
2629 };
2630 
2631 /****************************************************************
2632  *            Log2(block height) ----------------------------+  *
2633  *            Page Kind ----------------------------------+  |  *
2634  *            Gob Height/Page Kind Generation ------+     |  |  *
2635  *                          Sector layout -------+  |     |  |  *
2636  *                          Compression ------+  |  |     |  |  */
2637 const u64 disp90xx_modifiers[] = { /*         |  |  |     |  |  */
2638 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 0),
2639 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 1),
2640 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 2),
2641 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 3),
2642 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 4),
2643 	DRM_FORMAT_MOD_NVIDIA_BLOCK_LINEAR_2D(0, 1, 0, 0xfe, 5),
2644 	DRM_FORMAT_MOD_LINEAR,
2645 	DRM_FORMAT_MOD_INVALID
2646 };
2647