/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * DOC: atomic modeset support
 *
 * The functions here implement the state management and hardware programming
 * dispatch required by the atomic modeset infrastructure.
 * See intel_atomic_plane.c for the plane-specific atomic functionality.
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_fourcc.h>

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_atomic.h"
#include "intel_cdclk.h"
#include "intel_display_types.h"
#include "intel_fb.h"
#include "intel_global_state.h"
#include "intel_hdcp.h"
#include "intel_psr.h"
#include "skl_universal_plane.h"

/**
 * intel_digital_connector_atomic_get_property - hook for connector->atomic_get_property.
 * @connector: Connector to get the property for.
 * @state: Connector state to retrieve the property from.
 * @property: Property to retrieve.
 * @val: Return value for the property.
 *
 * Returns the atomic property value for a digital connector.
 */
int intel_digital_connector_atomic_get_property(struct drm_connector *connector,
						const struct drm_connector_state *state,
						struct drm_property *property,
						u64 *val)
{
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(state);

	if (property == dev_priv->display.properties.force_audio)
		*val = intel_conn_state->force_audio;
	else if (property == dev_priv->display.properties.broadcast_rgb)
		*val = intel_conn_state->broadcast_rgb;
	else {
		drm_dbg_atomic(&dev_priv->drm,
			       "Unknown property [PROP:%d:%s]\n",
			       property->base.id, property->name);
		return -EINVAL;
	}

	return 0;
}

/**
 * intel_digital_connector_atomic_set_property - hook for connector->atomic_set_property.
 * @connector: Connector to set the property for.
 * @state: Connector state to set the property on.
 * @property: Property to set.
 * @val: New value for the property.
 *
 * Sets the atomic property value for a digital connector.
 */
int intel_digital_connector_atomic_set_property(struct drm_connector *connector,
						struct drm_connector_state *state,
						struct drm_property *property,
						u64 val)
{
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(state);

	if (property == dev_priv->display.properties.force_audio) {
		intel_conn_state->force_audio = val;
		return 0;
	}

	if (property == dev_priv->display.properties.broadcast_rgb) {
		intel_conn_state->broadcast_rgb = val;
		return 0;
	}

	drm_dbg_atomic(&dev_priv->drm, "Unknown property [PROP:%d:%s]\n",
		       property->base.id, property->name);
	return -EINVAL;
}

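/**
 * intel_digital_connector_atomic_check - hook for connector->atomic_check.
 * @conn: Connector to check.
 * @state: Atomic state containing the old and new connector state.
 *
 * Runs the HDCP atomic check and, if any of the digital connector
 * properties changed, marks the new CRTC state as mode_changed so the
 * change is recomputed; it may still be applied via fastset rather than
 * a full modeset.
 *
 * Returns: 0, this check cannot fail.
 */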
int intel_digital_connector_atomic_check(struct drm_connector *conn,
					 struct drm_atomic_state *state)
{
	struct drm_connector_state *new_state =
		drm_atomic_get_new_connector_state(state, conn);
	struct intel_digital_connector_state *new_conn_state =
		to_intel_digital_connector_state(new_state);
	struct drm_connector_state *old_state =
		drm_atomic_get_old_connector_state(state, conn);
	struct intel_digital_connector_state *old_conn_state =
		to_intel_digital_connector_state(old_state);
	struct drm_crtc_state *crtc_state;

	intel_hdcp_atomic_check(conn, old_state, new_state);

	if (!new_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_new_crtc_state(state, new_state->crtc);

	/*
	 * These properties are handled by fastset, and might not end
	 * up in a modeset.
	 */
	if (new_conn_state->force_audio != old_conn_state->force_audio ||
	    new_conn_state->broadcast_rgb != old_conn_state->broadcast_rgb ||
	    new_conn_state->base.colorspace != old_conn_state->base.colorspace ||
	    new_conn_state->base.picture_aspect_ratio != old_conn_state->base.picture_aspect_ratio ||
	    new_conn_state->base.content_type != old_conn_state->base.content_type ||
	    new_conn_state->base.scaling_mode != old_conn_state->base.scaling_mode ||
	    new_conn_state->base.privacy_screen_sw_state != old_conn_state->base.privacy_screen_sw_state ||
	    !drm_connector_atomic_hdr_metadata_equal(old_state, new_state))
		crtc_state->mode_changed = true;

	return 0;
}

/**
 * intel_digital_connector_duplicate_state - duplicate connector state
 * @connector: digital connector
 *
 * Allocates and returns a copy of the connector state (both common and
 * digital connector specific) for the specified connector.
 *
 * Returns: The newly allocated connector state, or NULL on failure.
 */
struct drm_connector_state *
intel_digital_connector_duplicate_state(struct drm_connector *connector)
{
	struct intel_digital_connector_state *state;

	state = kmemdup(connector->state, sizeof(*state), GFP_KERNEL);
	if (!state)
		return NULL;

	__drm_atomic_helper_connector_duplicate_state(connector, &state->base);
	return &state->base;
}

/**
 * intel_connector_needs_modeset - check if connector needs a modeset
 * @state: the atomic state corresponding to this modeset
 * @connector: the connector
 *
 * Returns: %true if the connector's CRTC changed, or if its new CRTC
 * needs a modeset; %false otherwise.
 */
bool
intel_connector_needs_modeset(struct intel_atomic_state *state,
			      struct drm_connector *connector)
{
	const struct drm_connector_state *old_conn_state, *new_conn_state;

	old_conn_state = drm_atomic_get_old_connector_state(&state->base, connector);
	new_conn_state = drm_atomic_get_new_connector_state(&state->base, connector);

	return old_conn_state->crtc != new_conn_state->crtc ||
	       (new_conn_state->crtc &&
		drm_atomic_crtc_needs_modeset(drm_atomic_get_new_crtc_state(&state->base,
									    new_conn_state->crtc)));
}

/**
 * intel_any_crtc_needs_modeset - check if any CRTC needs a modeset
 * @state: the atomic state corresponding to this modeset
 *
 * Returns true if any CRTC in @state needs a modeset.
 */
bool intel_any_crtc_needs_modeset(struct intel_atomic_state *state)
{
	struct intel_crtc *crtc;
	struct intel_crtc_state *crtc_state;
	int i;

	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
		if (intel_crtc_needs_modeset(crtc_state))
			return true;
	}

	return false;
}

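/**
 * intel_atomic_get_digital_connector_state - get digital connector state
 * @state: the atomic state
 * @connector: the digital connector
 *
 * Adds the connector state for @connector to @state, if it is not already
 * part of it, and returns it.
 *
 * Returns: The digital connector state, or an ERR_PTR on failure.
 */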
struct intel_digital_connector_state *
intel_atomic_get_digital_connector_state(struct intel_atomic_state *state,
					 struct intel_connector *connector)
{
	struct drm_connector_state *conn_state;

	conn_state = drm_atomic_get_connector_state(&state->base,
						    &connector->base);
	if (IS_ERR(conn_state))
		return ERR_CAST(conn_state);

	return to_intel_digital_connector_state(conn_state);
}

/**
 * intel_crtc_duplicate_state - duplicate crtc state
 * @crtc: drm crtc
 *
 * Allocates and returns a copy of the crtc state (both common and
 * Intel-specific) for the specified crtc.
 *
 * Returns: The newly allocated crtc state, or NULL on failure.
 */
struct drm_crtc_state *
intel_crtc_duplicate_state(struct drm_crtc *crtc)
{
	const struct intel_crtc_state *old_crtc_state = to_intel_crtc_state(crtc->state);
	struct intel_crtc_state *crtc_state;

	crtc_state = kmemdup(old_crtc_state, sizeof(*crtc_state), GFP_KERNEL);
	if (!crtc_state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &crtc_state->uapi);

	/* copy color blobs */
	if (crtc_state->hw.degamma_lut)
		drm_property_blob_get(crtc_state->hw.degamma_lut);
	if (crtc_state->hw.ctm)
		drm_property_blob_get(crtc_state->hw.ctm);
	if (crtc_state->hw.gamma_lut)
		drm_property_blob_get(crtc_state->hw.gamma_lut);

	if (crtc_state->pre_csc_lut)
		drm_property_blob_get(crtc_state->pre_csc_lut);
	if (crtc_state->post_csc_lut)
		drm_property_blob_get(crtc_state->post_csc_lut);

	crtc_state->update_pipe = false;
	crtc_state->disable_lp_wm = false;
	crtc_state->disable_cxsr = false;
	crtc_state->update_wm_pre = false;
	crtc_state->update_wm_post = false;
	crtc_state->fifo_changed = false;
	crtc_state->preload_luts = false;
	crtc_state->inherited = false;
	crtc_state->wm.need_postvbl_update = false;
	crtc_state->do_async_flip = false;
	crtc_state->fb_bits = 0;
	crtc_state->update_planes = 0;
	crtc_state->dsb = NULL;

	return &crtc_state->uapi;
}

static void intel_crtc_put_color_blobs(struct intel_crtc_state *crtc_state)
{
	drm_property_blob_put(crtc_state->hw.degamma_lut);
	drm_property_blob_put(crtc_state->hw.gamma_lut);
	drm_property_blob_put(crtc_state->hw.ctm);

	drm_property_blob_put(crtc_state->pre_csc_lut);
	drm_property_blob_put(crtc_state->post_csc_lut);
}

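/**
 * intel_crtc_free_hw_state - free the hw state owned by a crtc state
 * @crtc_state: the crtc state
 *
 * Drops the references to the color management property blobs held by
 * @crtc_state.
 */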
void intel_crtc_free_hw_state(struct intel_crtc_state *crtc_state)
{
	intel_crtc_put_color_blobs(crtc_state);
}

/**
 * intel_crtc_destroy_state - destroy crtc state
 * @crtc: drm crtc
 * @state: the state to destroy
 *
 * Destroys the crtc state (both common and Intel-specific) for the
 * specified crtc.
 */
void
intel_crtc_destroy_state(struct drm_crtc *crtc,
			 struct drm_crtc_state *state)
{
	struct intel_crtc_state *crtc_state = to_intel_crtc_state(state);

	drm_WARN_ON(crtc->dev, crtc_state->dsb);

	__drm_atomic_helper_crtc_destroy_state(&crtc_state->uapi);
	intel_crtc_free_hw_state(crtc_state);
	kfree(crtc_state);
}

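/*
 * Assign a hardware scaler to a single scaler user (the pipe's panel
 * fitter or a plane), pick the scaler mode based on the platform and the
 * framebuffer format, and check that the requested scaling factors are
 * within the scaler's limits.
 */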
static int intel_atomic_setup_scaler(struct intel_crtc_scaler_state *scaler_state,
				     int num_scalers_need, struct intel_crtc *intel_crtc,
				     const char *name, int idx,
				     struct intel_plane_state *plane_state,
				     int *scaler_id)
{
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	int j;
	u32 mode;

	if (*scaler_id < 0) {
		/* find a free scaler */
		for (j = 0; j < intel_crtc->num_scalers; j++) {
			if (scaler_state->scalers[j].in_use)
				continue;

			*scaler_id = j;
			scaler_state->scalers[*scaler_id].in_use = 1;
			break;
		}
	}

	if (drm_WARN(&dev_priv->drm, *scaler_id < 0,
		     "Cannot find scaler for %s:%d\n", name, idx))
		return -EINVAL;

	/* set scaler mode */
	if (plane_state && plane_state->hw.fb &&
	    plane_state->hw.fb->format->is_yuv &&
	    plane_state->hw.fb->format->num_planes > 1) {
		struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);

		if (DISPLAY_VER(dev_priv) == 9) {
			mode = SKL_PS_SCALER_MODE_NV12;
		} else if (icl_is_hdr_plane(dev_priv, plane->id)) {
			/*
			 * On gen11+'s HDR planes we only use the scaler for
			 * scaling. They have a dedicated chroma upsampler, so
			 * we don't need the scaler to upsample the UV plane.
			 */
			mode = PS_SCALER_MODE_NORMAL;
		} else {
			struct intel_plane *linked =
				plane_state->planar_linked_plane;

			mode = PS_SCALER_MODE_PLANAR;

			if (linked)
				mode |= PS_PLANE_Y_SEL(linked->id);
		}
	} else if (DISPLAY_VER(dev_priv) >= 10) {
		mode = PS_SCALER_MODE_NORMAL;
	} else if (num_scalers_need == 1 && intel_crtc->num_scalers > 1) {
		/*
		 * When only 1 scaler is in use on a pipe with 2 scalers,
		 * scaler 0 operates in high quality (HQ) mode. In this case
		 * use scaler 0 to take advantage of HQ mode.
		 */
		scaler_state->scalers[*scaler_id].in_use = 0;
		*scaler_id = 0;
		scaler_state->scalers[0].in_use = 1;
		mode = SKL_PS_SCALER_MODE_HQ;
	} else {
		mode = SKL_PS_SCALER_MODE_DYN;
	}

	/*
	 * FIXME: we should also check the scaler factors for pfit, so
	 * this shouldn't be tied directly to planes.
	 */
	if (plane_state && plane_state->hw.fb) {
		const struct drm_framebuffer *fb = plane_state->hw.fb;
		const struct drm_rect *src = &plane_state->uapi.src;
		const struct drm_rect *dst = &plane_state->uapi.dst;
		int hscale, vscale, max_vscale, max_hscale;

		/*
		 * FIXME: When two scalers are needed, but only one of
		 * them needs to downscale, we should make sure that
		 * the one that needs downscaling support is assigned
		 * as the first scaler, so we don't reject downscaling
		 * unnecessarily.
		 */

		if (DISPLAY_VER(dev_priv) >= 14) {
			/*
			 * On versions 14 and up, only the first
			 * scaler supports a vertical scaling factor
			 * of more than 1.0, while a horizontal
			 * scaling factor of 3.0 is supported.
			 */
			max_hscale = 0x30000 - 1;
			if (*scaler_id == 0)
				max_vscale = 0x30000 - 1;
			else
				max_vscale = 0x10000;

		} else if (DISPLAY_VER(dev_priv) >= 10 ||
			   !intel_format_info_is_yuv_semiplanar(fb->format, fb->modifier)) {
			max_hscale = 0x30000 - 1;
			max_vscale = 0x30000 - 1;
		} else {
			max_hscale = 0x20000 - 1;
			max_vscale = 0x20000 - 1;
		}

		/*
		 * FIXME: We should change the if-else block above to
		 * support HQ vs dynamic scaler properly.
		 */

		/* Check if required scaling is within limits */
		hscale = drm_rect_calc_hscale(src, dst, 1, max_hscale);
		vscale = drm_rect_calc_vscale(src, dst, 1, max_vscale);

		if (hscale < 0 || vscale < 0) {
			drm_dbg_kms(&dev_priv->drm,
				    "Scaler %d doesn't support required plane scaling\n",
				    *scaler_id);
			drm_rect_debug_print("src: ", src, true);
			drm_rect_debug_print("dst: ", dst, false);

			return -EINVAL;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "Attached scaler id %u.%u to %s:%d\n",
		    intel_crtc->pipe, *scaler_id, name, idx);
	scaler_state->scalers[*scaler_id].mode = mode;

	return 0;
}

/**
 * intel_atomic_setup_scalers() - setup scalers for crtc per staged requests
 * @dev_priv: i915 device
 * @intel_crtc: intel crtc
 * @crtc_state: incoming crtc_state to validate and setup scalers
 *
 * This function sets up scalers based on the staged scaling requests for
 * @intel_crtc and its planes. It is called from the crtc level check path.
 * If the request is supportable, it attaches scalers to the requested
 * planes and crtc.
 *
 * This function takes into account the current scaler(s) in use by any
 * planes that are not part of this atomic state.
 *
 * Returns:
 *         0 - scalers were set up successfully
 *         error code - otherwise
 */
int intel_atomic_setup_scalers(struct drm_i915_private *dev_priv,
			       struct intel_crtc *intel_crtc,
			       struct intel_crtc_state *crtc_state)
{
	struct drm_plane *plane = NULL;
	struct intel_plane *intel_plane;
	struct intel_plane_state *plane_state = NULL;
	struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	struct drm_atomic_state *drm_state = crtc_state->uapi.state;
	struct intel_atomic_state *intel_state = to_intel_atomic_state(drm_state);
	int num_scalers_need;
	int i;

	num_scalers_need = hweight32(scaler_state->scaler_users);

	/*
	 * High level flow:
	 * - staged scaler requests are already in scaler_state->scaler_users
	 * - check whether staged scaling requests can be supported
	 * - add planes using scalers that aren't in current transaction
	 * - assign scalers to requested users
	 * - as part of plane commit, scalers will be committed
	 *   (i.e., either attached or detached) to respective planes in hw
	 * - as part of crtc_commit, scaler will be either attached or detached
	 *   to crtc in hw
	 */

	/* fail if required scalers > available scalers */
	if (num_scalers_need > intel_crtc->num_scalers) {
		drm_dbg_kms(&dev_priv->drm,
			    "Too many scaling requests %d > %d\n",
			    num_scalers_need, intel_crtc->num_scalers);
		return -EINVAL;
	}

	/* walkthrough scaler_users bits and start assigning scalers */
	for (i = 0; i < sizeof(scaler_state->scaler_users) * 8; i++) {
		int *scaler_id;
		const char *name;
		int idx, ret;

		/* skip if scaler not required */
		if (!(scaler_state->scaler_users & (1 << i)))
			continue;

		if (i == SKL_CRTC_INDEX) {
			name = "CRTC";
			idx = intel_crtc->base.base.id;

			/* panel fitter case: assign as a crtc scaler */
			scaler_id = &scaler_state->scaler_id;
		} else {
			name = "PLANE";

			/* plane scaler case: assign as a plane scaler */
			/* find the plane that set the bit as scaler_user */
			plane = drm_state->planes[i].ptr;

			/*
			 * To enable/disable HQ mode, add planes that are using
			 * the scaler into this transaction.
			 */
			if (!plane) {
				struct drm_plane_state *state;

				/*
				 * GLK+ scalers don't have a HQ mode so it
				 * isn't necessary to change between HQ and dyn mode
				 * on those platforms.
				 */
				if (DISPLAY_VER(dev_priv) >= 10)
					continue;

				plane = drm_plane_from_index(&dev_priv->drm, i);
				state = drm_atomic_get_plane_state(drm_state, plane);
				if (IS_ERR(state)) {
					drm_dbg_kms(&dev_priv->drm,
						    "Failed to add [PLANE:%d] to drm_state\n",
						    plane->base.id);
					return PTR_ERR(state);
				}
			}

			intel_plane = to_intel_plane(plane);
			idx = plane->base.id;

			/* plane on different crtc cannot be a scaler user of this crtc */
			if (drm_WARN_ON(&dev_priv->drm,
					intel_plane->pipe != intel_crtc->pipe))
				continue;

			plane_state = intel_atomic_get_new_plane_state(intel_state,
								       intel_plane);
			scaler_id = &plane_state->scaler_id;
		}

		ret = intel_atomic_setup_scaler(scaler_state, num_scalers_need,
						intel_crtc, name, idx,
						plane_state, scaler_id);
		if (ret < 0)
			return ret;
	}

	return 0;
}

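/**
 * intel_atomic_state_alloc - allocate a new atomic state
 * @dev: drm device
 *
 * Allocates a new &struct intel_atomic_state and initializes its base
 * &struct drm_atomic_state.
 *
 * Returns: The newly allocated atomic state, or NULL on failure.
 */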
struct drm_atomic_state *
intel_atomic_state_alloc(struct drm_device *dev)
{
	struct intel_atomic_state *state = kzalloc(sizeof(*state), GFP_KERNEL);

	if (!state || drm_atomic_state_init(dev, &state->base) < 0) {
		kfree(state);
		return NULL;
	}

	return &state->base;
}

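/**
 * intel_atomic_state_free - free an atomic state
 * @_state: the atomic state to free
 *
 * Releases the base drm atomic state, frees the global object state array,
 * finalizes the commit_ready sw fence and then frees @_state itself.
 */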
void intel_atomic_state_free(struct drm_atomic_state *_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(_state);

	drm_atomic_state_default_release(&state->base);
	kfree(state->global_objs);

	i915_sw_fence_fini(&state->commit_ready);

	kfree(state);
}

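/**
 * intel_atomic_state_clear - clear an atomic state
 * @s: the atomic state to clear
 *
 * Clears the base drm atomic state and the global object state so that
 * the state can be reused, e.g. after a deadlock/backoff, and resets the
 * dpll_set and modeset flags.
 */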
void intel_atomic_state_clear(struct drm_atomic_state *s)
{
	struct intel_atomic_state *state = to_intel_atomic_state(s);

	drm_atomic_state_default_clear(&state->base);
	intel_atomic_clear_global_state(state);

	state->dpll_set = state->modeset = false;
}

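/**
 * intel_atomic_get_crtc_state - get the intel crtc state
 * @state: the atomic state
 * @crtc: the crtc
 *
 * Adds the crtc state for @crtc to @state, if it is not already part of
 * it, and returns it.
 *
 * Returns: The intel crtc state, or an ERR_PTR on failure.
 */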
struct intel_crtc_state *
intel_atomic_get_crtc_state(struct drm_atomic_state *state,
			    struct intel_crtc *crtc)
{
	struct drm_crtc_state *crtc_state;

	crtc_state = drm_atomic_get_crtc_state(state, &crtc->base);
	if (IS_ERR(crtc_state))
		return ERR_CAST(crtc_state);

	return to_intel_crtc_state(crtc_state);
}