xref: /openbmc/linux/drivers/gpu/drm/nouveau/nouveau_display.c (revision f43e47c090dc7fe32d5410d8740c3a004eb2676f)
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <acpi/video.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#include "nouveau_fbcon.h"
#include "nouveau_crtc.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include <nvif/class.h>
#include <nvif/if0013.h>
#include <nvif/event.h>
#include <dispnv50/crc.h>

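/* DRM vblank hooks: the core calls these when userspace starts or stops
 * waiting on vblank events for a CRTC.  They simply arm or disarm the
 * per-head NVIF vblank notifier, so vblank interrupts are only raised
 * while someone actually cares about them.
 */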
int
nouveau_display_vblank_enable(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_get(&nv_crtc->vblank);

	return 0;
}

void
nouveau_display_vblank_disable(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_put(&nv_crtc->vblank);
}

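/* Convert the raw scanline reported by hardware into the signed vpos
 * convention DRM expects: negative while inside the blanking region,
 * reaching 0 at the start of active scanout.
 *
 * Illustrative example (hypothetical timings): with blanks=1080,
 * blanke=1124 and total=1125, raw line 1100 lies in the blanking
 * interval and becomes 1100 - 1125 = -25, while line 500 is returned
 * unchanged as an active-area position.
 */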
static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

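/* Query the current scanout position of a head via the
 * NVIF_HEAD_V0_SCANOUTPOS method.  The returned vline can be 0 (not yet
 * valid), in which case the query is retried for up to 20 line periods
 * before giving up; the two timestamps sampled around the register read
 * are handed back so the DRM core can bound the measurement error.
 */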
static bool
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct drm_vblank_crtc *vblank = &crtc->dev->vblank[drm_crtc_index(crtc)];
	struct nvif_head *head = &nouveau_crtc(crtc)->head;
	struct nvif_head_scanoutpos_v0 args;
	int retry = 20;
	bool ret = false;

	args.version = 0;

	do {
		ret = nvif_mthd(&head->object, NVIF_HEAD_V0_SCANOUTPOS, &args, sizeof(args));
		if (ret != 0)
			return false;

		if (args.vline) {
			ret = true;
			break;
		}

		if (retry) ndelay(vblank->linedur_ns);
	} while (retry--);

	*hpos = args.hline;
	*vpos = calc(args.vblanks, args.vblanke, args.vtotal, args.vline);
	if (stime) *stime = ns_to_ktime(args.time[0]);
	if (etime) *etime = ns_to_ktime(args.time[1]);

	return ret;
}

bool
nouveau_display_scanoutpos(struct drm_crtc *crtc,
			   bool in_vblank_irq, int *vpos, int *hpos,
			   ktime_t *stime, ktime_t *etime,
			   const struct drm_display_mode *mode)
{
	return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
					       stime, etime);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = drm_gem_fb_destroy,
	.create_handle = drm_gem_fb_create_handle,
};

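/* Split an NVIDIA block-linear format modifier into the tile_mode and
 * kind values the rest of the driver works with.  Per drm_fourcc.h the
 * low nibble holds log2(block height in GOBs) and bits 12..19 hold the
 * memory "kind"; legacy 16BX2_BLOCK modifiers leave the kind field
 * zero, so it is filled in from the device's preferred modifier.  On
 * Fermi and newer (chipset >= 0xc0) the block height lives in the high
 * nibble of tile_mode, hence the final shift.
 */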
static void
nouveau_decode_mod(struct nouveau_drm *drm,
		   uint64_t modifier,
		   uint32_t *tile_mode,
		   uint8_t *kind)
{
	struct nouveau_display *disp = nouveau_display(drm->dev);
	BUG_ON(!tile_mode || !kind);

	if (modifier == DRM_FORMAT_MOD_LINEAR) {
		/* tile_mode will not be used in this case */
		*tile_mode = 0;
		*kind = 0;
	} else {
		/*
		 * Extract the block height and kind from the corresponding
		 * modifier fields.  See drm_fourcc.h for details.
		 */

		if ((modifier & (0xffull << 12)) == 0ull) {
			/* Legacy modifier.  Translate to this dev's 'kind.' */
			modifier |= disp->format_modifiers[0] & (0xffull << 12);
		}

		*tile_mode = (uint32_t)(modifier & 0xF);
		*kind = (uint8_t)((modifier >> 12) & 0xFF);

		if (drm->client.device.info.chipset >= 0xc0)
			*tile_mode <<= 4;
	}
}

void
nouveau_framebuffer_get_layout(struct drm_framebuffer *fb,
			       uint32_t *tile_mode,
			       uint8_t *kind)
{
	if (fb->flags & DRM_MODE_FB_MODIFIERS) {
		struct nouveau_drm *drm = nouveau_drm(fb->dev);

		nouveau_decode_mod(drm, fb->modifier, tile_mode, kind);
	} else {
		const struct nouveau_bo *nvbo = nouveau_gem_object(fb->obj[0]);

		*tile_mode = nvbo->mode;
		*kind = nvbo->kind;
	}
}

static const u64 legacy_modifiers[] = {
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(0),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(1),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(2),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(3),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(4),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(5),
	DRM_FORMAT_MOD_INVALID
};

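/* Accept a modifier only if it is either one of the modifiers this
 * display advertises or one of the legacy 16BX2_BLOCK modifiers above;
 * anything else (including any modifier on pre-NV50 hardware) is
 * rejected with -EINVAL before being decoded into tile_mode/kind.
 */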
static int
nouveau_validate_decode_mod(struct nouveau_drm *drm,
			    uint64_t modifier,
			    uint32_t *tile_mode,
			    uint8_t *kind)
{
	struct nouveau_display *disp = nouveau_display(drm->dev);
	int mod;

	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		return -EINVAL;
	}

	BUG_ON(!disp->format_modifiers);

	for (mod = 0;
	     (disp->format_modifiers[mod] != DRM_FORMAT_MOD_INVALID) &&
	     (disp->format_modifiers[mod] != modifier);
	     mod++);

	if (disp->format_modifiers[mod] == DRM_FORMAT_MOD_INVALID) {
		for (mod = 0;
		     (legacy_modifiers[mod] != DRM_FORMAT_MOD_INVALID) &&
		     (legacy_modifiers[mod] != modifier);
		     mod++);
		if (legacy_modifiers[mod] == DRM_FORMAT_MOD_INVALID)
			return -EINVAL;
	}

	nouveau_decode_mod(drm, modifier, tile_mode, kind);

	return 0;
}

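/* Block-linear layouts are built from 64-byte-wide GOBs grouped into
 * blocks; the two helpers below round a pitch (in bytes) and a height
 * (in lines) up to whole blocks.  For example, a 1000-byte pitch spans
 * ceil(1000 / 64) = 16 blocks horizontally.
 */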
static inline uint32_t
nouveau_get_width_in_blocks(uint32_t stride)
{
	/* GOBs per block in the x direction is always one, and GOBs are
	 * 64 bytes wide
	 */
	static const uint32_t log_block_width = 6;

	return (stride + (1 << log_block_width) - 1) >> log_block_width;
}

static inline uint32_t
nouveau_get_height_in_blocks(struct nouveau_drm *drm,
			     uint32_t height,
			     uint32_t log_block_height_in_gobs)
{
	uint32_t log_gob_height;
	uint32_t log_block_height;

	BUG_ON(drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA);

	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI)
		log_gob_height = 2;
	else
		log_gob_height = 3;

	log_block_height = log_block_height_in_gobs + log_gob_height;

	return (height + (1 << log_block_height) - 1) >> log_block_height;
}

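/* Verify that a block-linear plane actually fits inside its backing
 * buffer.  The block-linear footprint is
 *
 *   bl_size = width_in_blocks * height_in_blocks * gobs_per_block * gob_size
 *
 * with 256-byte GOBs on Tesla and 512-byte GOBs on Fermi and newer.
 * Hypothetical example: a 7680-byte pitch, 2160-line plane with a block
 * height of 16 GOBs on Fermi+ gives 120 * 17 * 16 * 512 bytes, roughly
 * 16 MiB, which must fit within the BO starting at 'offset'.
 */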
static int
nouveau_check_bl_size(struct nouveau_drm *drm, struct nouveau_bo *nvbo,
		      uint32_t offset, uint32_t stride, uint32_t h,
		      uint32_t tile_mode)
{
	uint32_t gob_size, bw, bh;
	uint64_t bl_size;

	BUG_ON(drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA);

	if (drm->client.device.info.chipset >= 0xc0) {
		if (tile_mode & 0xF)
			return -EINVAL;
		tile_mode >>= 4;
	}

	if (tile_mode & 0xFFFFFFF0)
		return -EINVAL;

	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI)
		gob_size = 256;
	else
		gob_size = 512;

	bw = nouveau_get_width_in_blocks(stride);
	bh = nouveau_get_height_in_blocks(drm, h, tile_mode);

	bl_size = bw * bh * (1 << tile_mode) * gob_size;

	DRM_DEBUG_KMS("offset=%u stride=%u h=%u tile_mode=0x%02x bw=%u bh=%u gob_size=%u bl_size=%llu size=%zu\n",
		      offset, stride, h, tile_mode, bw, bh, gob_size, bl_size,
		      nvbo->bo.base.size);

	if (bl_size + offset > nvbo->bo.base.size)
		return -ERANGE;

	return 0;
}

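/* Create a nouveau framebuffer around an existing GEM object.  The
 * function first rejects YUV layouts the pre-NV50 overlay cannot scan
 * out, then resolves tile_mode/kind either from an explicit modifier or
 * from the BO itself, and finally checks every plane (block-linear or
 * pitch-linear) against the size of the backing object before
 * registering the framebuffer with the DRM core.
 */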
int
nouveau_framebuffer_new(struct drm_device *dev,
			const struct drm_mode_fb_cmd2 *mode_cmd,
			struct drm_gem_object *gem,
			struct drm_framebuffer **pfb)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *nvbo = nouveau_gem_object(gem);
	struct drm_framebuffer *fb;
	const struct drm_format_info *info;
	unsigned int height, i;
	uint32_t tile_mode;
	uint8_t kind;
	int ret;

	/* YUV overlays have special requirements pre-NV50 */
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA &&
	    (mode_cmd->pixel_format == DRM_FORMAT_YUYV ||
	     mode_cmd->pixel_format == DRM_FORMAT_UYVY ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV12 ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV21) &&
	    (mode_cmd->pitches[0] & 0x3f || /* align 64 */
	     mode_cmd->pitches[0] >= 0x10000 || /* at most 64k pitch */
	     (mode_cmd->pitches[1] && /* pitches for planes must match */
	      mode_cmd->pitches[0] != mode_cmd->pitches[1]))) {
		DRM_DEBUG_KMS("Unsuitable framebuffer: format: %p4cc; pitches: 0x%x 0x%x\n",
			      &mode_cmd->pixel_format,
			      mode_cmd->pitches[0], mode_cmd->pitches[1]);
		return -EINVAL;
	}

	if (mode_cmd->flags & DRM_MODE_FB_MODIFIERS) {
		if (nouveau_validate_decode_mod(drm, mode_cmd->modifier[0],
						&tile_mode, &kind)) {
			DRM_DEBUG_KMS("Unsupported modifier: 0x%llx\n",
				      mode_cmd->modifier[0]);
			return -EINVAL;
		}
	} else {
		tile_mode = nvbo->mode;
		kind = nvbo->kind;
	}

	info = drm_get_format_info(dev, mode_cmd);

	for (i = 0; i < info->num_planes; i++) {
		height = drm_format_info_plane_height(info,
						      mode_cmd->height,
						      i);

		if (kind) {
			ret = nouveau_check_bl_size(drm, nvbo,
						    mode_cmd->offsets[i],
						    mode_cmd->pitches[i],
						    height, tile_mode);
			if (ret)
				return ret;
		} else {
			uint32_t size = mode_cmd->pitches[i] * height;

			if (size + mode_cmd->offsets[i] > nvbo->bo.base.size)
				return -ERANGE;
		}
	}

	if (!(fb = *pfb = kzalloc(sizeof(*fb), GFP_KERNEL)))
		return -ENOMEM;

	drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
	fb->obj[0] = gem;

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		kfree(fb);
	return ret;
}

struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct drm_framebuffer *fb;
	struct drm_gem_object *gem;
	int ret;

	gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	ret = nouveau_framebuffer_new(dev, mode_cmd, gem, &fb);
	if (ret == 0)
		return fb;

	drm_gem_object_put(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};


struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

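/* Create a DRM enum property containing only the entries whose gen_mask
 * covers the display generation passed in.  The mask is a bitfield over
 * gen 0 (pre-NV50), gen 1 (NV50) and gen 2 (GF110+); e.g. a mask of 6
 * (bits 1 and 2) exposes the entry on NV50 and newer only.
 */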
#define PROP_ENUM(p,gen,n,list) do {                                           \
	struct nouveau_drm_prop_enum_list *l = (list);                         \
	int c = 0;                                                             \
	while (l->gen_mask) {                                                  \
		if (l->gen_mask & (1 << (gen)))                                \
			c++;                                                   \
		l++;                                                           \
	}                                                                      \
	if (c) {                                                               \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);        \
		l = (list);                                                    \
		while (p && l->gen_mask) {                                     \
			if (l->gen_mask & (1 << (gen))) {                      \
				drm_property_add_enum(p, l->type, l->name);    \
			}                                                      \
			l++;                                                   \
		}                                                              \
	}                                                                      \
} while(0)

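/* Hotplug handling is split in two: other paths (connector hotplug
 * notifiers, resume) only record which connectors need attention in
 * drm->hpd_pending under hpd_lock and kick hpd_work, while the worker
 * below takes the runtime-PM reference, re-probes the flagged
 * connectors and sends the hotplug uevent if any connector actually
 * changed state.  hpd_resume marks every connector pending so a full
 * re-probe happens after suspend/resume.
 */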
void
nouveau_display_hpd_resume(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);

	spin_lock_irq(&drm->hpd_lock);
	drm->hpd_pending = ~0;
	spin_unlock_irq(&drm->hpd_lock);

	schedule_work(&drm->hpd_work);
}

static void
nouveau_display_hpd_work(struct work_struct *work)
{
	struct nouveau_drm *drm = container_of(work, typeof(*drm), hpd_work);
	struct drm_device *dev = drm->dev;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u32 pending;
	bool changed = false;

	pm_runtime_get_sync(dev->dev);

	spin_lock_irq(&drm->hpd_lock);
	pending = drm->hpd_pending;
	drm->hpd_pending = 0;
	spin_unlock_irq(&drm->hpd_lock);

	/* Nothing to do, exit early without updating the last busy counter */
	if (!pending)
		goto noop;

	mutex_lock(&dev->mode_config.mutex);
	drm_connector_list_iter_begin(dev, &conn_iter);

	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *nv_connector = nouveau_connector(connector);
		enum drm_connector_status old_status = connector->status;
		u64 bits, old_epoch_counter = connector->epoch_counter;

		if (!(pending & drm_connector_mask(connector)))
			continue;

		spin_lock_irq(&drm->hpd_lock);
		bits = nv_connector->hpd_pending;
		nv_connector->hpd_pending = 0;
		spin_unlock_irq(&drm->hpd_lock);

		drm_dbg_kms(dev, "[CONNECTOR:%d:%s] plug:%d unplug:%d irq:%d\n",
			    connector->base.id, connector->name,
			    !!(bits & NVIF_NOTIFY_CONN_V0_PLUG),
			    !!(bits & NVIF_NOTIFY_CONN_V0_UNPLUG),
			    !!(bits & NVIF_NOTIFY_CONN_V0_IRQ));

		if (bits & NVIF_NOTIFY_CONN_V0_IRQ) {
			if (nouveau_dp_link_check(nv_connector))
				continue;
		}

		connector->status = drm_helper_probe_detect(connector, NULL, false);
		if (old_epoch_counter == connector->epoch_counter)
			continue;

		changed = true;
		drm_dbg_kms(dev, "[CONNECTOR:%d:%s] status updated from %s to %s (epoch counter %llu->%llu)\n",
			    connector->base.id, connector->name,
			    drm_get_connector_status_name(old_status),
			    drm_get_connector_status_name(connector->status),
			    old_epoch_counter, connector->epoch_counter);
	}

	drm_connector_list_iter_end(&conn_iter);
	mutex_unlock(&dev->mode_config.mutex);

	if (changed)
		drm_kms_helper_hotplug_event(dev);

	pm_runtime_mark_last_busy(drm->dev->dev);
noop:
	pm_runtime_put_autosuspend(dev->dev);
}

#ifdef CONFIG_ACPI

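/* ACPI video notifier: on an ACPI_VIDEO_NOTIFY_PROBE event, decide from
 * the pm_runtime_get() return value whether the GPU is awake (it will
 * see the hotplug itself), already resuming (the resume path reprobes
 * connectors), or unreachable, and drop the reference accordingly.
 * Returning NOTIFY_BAD stops acpi-video from also emitting a keypress
 * for the event.
 */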
static int
nouveau_display_acpi_ntfy(struct notifier_block *nb, unsigned long val,
			  void *data)
{
	struct nouveau_drm *drm = container_of(nb, typeof(*drm), acpi_nb);
	struct acpi_bus_event *info = data;
	int ret;

	if (!strcmp(info->device_class, ACPI_VIDEO_CLASS)) {
		if (info->type == ACPI_VIDEO_NOTIFY_PROBE) {
			ret = pm_runtime_get(drm->dev->dev);
			if (ret == 1 || ret == -EACCES) {
				/* If the GPU is already awake, or in a state
				 * where we can't wake it up, it can handle
				 * its own hotplug events.
				 */
				pm_runtime_put_autosuspend(drm->dev->dev);
			} else if (ret == 0 || ret == -EINPROGRESS) {
				/* We've started resuming the GPU already, so
				 * it will handle scheduling a full reprobe
				 * itself
				 */
				NV_DEBUG(drm, "ACPI requested connector reprobe\n");
				pm_runtime_put_noidle(drm->dev->dev);
			} else {
				NV_WARN(drm, "Dropped ACPI reprobe event due to RPM error: %d\n",
					ret);
			}

			/* acpi-video should not generate keypresses for this */
			return NOTIFY_BAD;
		}
	}

	return NOTIFY_DONE;
}
#endif

int
nouveau_display_init(struct drm_device *dev, bool resume, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	int ret;

	/*
	 * Enable hotplug interrupts (done as early as possible, since we need
	 * them for MST)
	 */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	ret = disp->init(dev, resume, runtime);
	if (ret)
		return ret;

	/* enable connector detection and polling for connectors without HPD
	 * support
	 */
	drm_kms_helper_poll_enable(dev);

	return ret;
}

void
nouveau_display_fini(struct drm_device *dev, bool suspend, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;

	if (!suspend) {
		if (drm_drv_uses_atomic_modeset(dev))
			drm_atomic_helper_shutdown(dev);
		else
			drm_helper_force_disable_all(dev);
	}

	/* disable hotplug interrupts */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	if (!runtime)
		cancel_work_sync(&drm->hpd_work);

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev, runtime, suspend);
}

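/* Register the nouveau-specific connector properties.  The display
 * class is mapped onto a coarse generation index (0: pre-NV50,
 * 1: NV50/Tesla, 2: GF110 and later) which the PROP_ENUM tables above
 * use to decide which enum values each generation exposes; the vibrancy
 * properties only exist from gen 1 onwards.
 */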
static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.object.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.object.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	/* -90..+90 */
	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	/* -100..+100 */
	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}

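/* Top-level KMS setup: allocate the nouveau_display structure, set up
 * the DRM mode_config limits for the chipset generation, create the
 * NVIF display object and hand off to the NV04 or NV50 backend, then
 * initialise vblank support, CRC reporting (NV50+), and the hotplug
 * work/ACPI notifier.  The error labels unwind in the opposite order.
 */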
int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (drm->client.device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		ret = nvif_disp_ctor(&drm->client.device, "kmsDisp", 0,
				     &disp->disp);
		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.object.oclass < NV50_DISP) {
				dev->mode_config.fb_modifiers_not_supported = true;
				ret = nv04_display_create(dev);
			} else {
				ret = nv50_display_create(dev);
			}
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	drm_mode_config_reset(dev);

	if (dev->mode_config.num_crtc) {
		ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
		if (ret)
			goto vblank_err;

		if (disp->disp.object.oclass >= NV50_DISP)
			nv50_crc_init(dev);
	}

	INIT_WORK(&drm->hpd_work, nouveau_display_hpd_work);
	spin_lock_init(&drm->hpd_lock);
#ifdef CONFIG_ACPI
	drm->acpi_nb.notifier_call = nouveau_display_acpi_ntfy;
	register_acpi_notifier(&drm->acpi_nb);
#endif

	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);

#ifdef CONFIG_ACPI
	unregister_acpi_notifier(&drm->acpi_nb);
#endif

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_disp_dtor(&disp->disp);

	drm->display = NULL;
	kfree(disp);
}

int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);

	if (drm_drv_uses_atomic_modeset(dev)) {
		if (!runtime) {
			disp->suspend = drm_atomic_helper_suspend(dev);
			if (IS_ERR(disp->suspend)) {
				int ret = PTR_ERR(disp->suspend);
				disp->suspend = NULL;
				return ret;
			}
		}
	}

	nouveau_display_fini(dev, true, runtime);
	return 0;
}

void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);

	nouveau_display_init(dev, true, runtime);

	if (drm_drv_uses_atomic_modeset(dev)) {
		if (disp->suspend) {
			drm_atomic_helper_resume(dev, disp->suspend);
			disp->suspend = NULL;
		}
		return;
	}
}

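/* DUMB buffer allocation for userspace (e.g. unaccelerated fbdev or
 * modesetting clients).  The pitch is rounded up to 256 bytes and the
 * total size to a whole page: for a hypothetical 1920x1080 XRGB8888
 * request, pitch = roundup(1920 * 4, 256) = 7680 bytes and
 * size = roundup(7680 * 1080, PAGE_SIZE) = 8294400 bytes.  The BO is
 * placed in VRAM when the device has any, otherwise in GART.
 */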
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_cli *cli = nouveau_cli(file_priv);
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any; otherwise fall back to system memory */
	if (nouveau_drm(dev)->client.device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(cli, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->bo.base, &args->handle);
	drm_gem_object_put(&bo->bo.base);
	return ret;
}
847