xref: /openbmc/linux/drivers/gpu/drm/i915/display/intel_atomic.c (revision 817396dc9f6ab2481b94071de2e586aae876e89c)
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * DOC: atomic modeset support
 *
 * The functions here implement the state management and hardware programming
 * dispatch required by the atomic modeset infrastructure.
 * See intel_atomic_plane.c for the plane-specific atomic functionality.
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_plane_helper.h>

#include "intel_atomic.h"
#include "intel_display_types.h"
#include "intel_hdcp.h"
#include "intel_sprite.h"

/**
 * intel_digital_connector_atomic_get_property - hook for connector->atomic_get_property.
 * @connector: Connector to get the property for.
 * @state: Connector state to retrieve the property from.
 * @property: Property to retrieve.
 * @val: Return value for the property.
 *
 * Returns the atomic property value for a digital connector.
 */
int intel_digital_connector_atomic_get_property(struct drm_connector *connector,
						const struct drm_connector_state *state,
						struct drm_property *property,
						u64 *val)
{
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(state);

	if (property == dev_priv->force_audio_property)
		*val = intel_conn_state->force_audio;
	else if (property == dev_priv->broadcast_rgb_property)
		*val = intel_conn_state->broadcast_rgb;
	else {
		DRM_DEBUG_ATOMIC("Unknown property [PROP:%d:%s]\n",
				 property->base.id, property->name);
		return -EINVAL;
	}

	return 0;
}

/**
 * intel_digital_connector_atomic_set_property - hook for connector->atomic_set_property.
 * @connector: Connector to set the property for.
 * @state: Connector state to set the property on.
 * @property: Property to set.
 * @val: New value for the property.
 *
 * Sets the atomic property value for a digital connector.
 */
int intel_digital_connector_atomic_set_property(struct drm_connector *connector,
						struct drm_connector_state *state,
						struct drm_property *property,
						u64 val)
{
	struct drm_device *dev = connector->dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(state);

	if (property == dev_priv->force_audio_property) {
		intel_conn_state->force_audio = val;
		return 0;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		intel_conn_state->broadcast_rgb = val;
		return 0;
	}

	DRM_DEBUG_ATOMIC("Unknown property [PROP:%d:%s]\n",
			 property->base.id, property->name);
	return -EINVAL;
}

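/*
 * Usage sketch (illustrative only, not part of this file): digital connector
 * drivers such as intel_dp.c and intel_hdmi.c wire the property helpers above
 * into their drm_connector_funcs, roughly like so; "example_connector_funcs"
 * is a made-up name for illustration:
 *
 *	static const struct drm_connector_funcs example_connector_funcs = {
 *		...
 *		.atomic_get_property = intel_digital_connector_atomic_get_property,
 *		.atomic_set_property = intel_digital_connector_atomic_set_property,
 *		.atomic_duplicate_state = intel_digital_connector_duplicate_state,
 *		...
 *	};
 */
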
/* Two blobs are equal if both are NULL, or if both have identical contents. */
static bool blob_equal(const struct drm_property_blob *a,
		       const struct drm_property_blob *b)
{
	if (a && b)
		return a->length == b->length &&
			!memcmp(a->data, b->data, a->length);

	return !a == !b;
}

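/**
 * intel_digital_connector_atomic_check - hook for connector->atomic_check
 * @conn: Connector to check.
 * @state: Atomic state containing the new connector state.
 *
 * Runs the HDCP atomic check and, if any of the connector properties that are
 * handled by fastset have changed, flags the crtc state with mode_changed so
 * the change is re-evaluated (a full modeset may still be avoided by fastset).
 *
 * Returns: 0.
 */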
int intel_digital_connector_atomic_check(struct drm_connector *conn,
					 struct drm_atomic_state *state)
{
	struct drm_connector_state *new_state =
		drm_atomic_get_new_connector_state(state, conn);
	struct intel_digital_connector_state *new_conn_state =
		to_intel_digital_connector_state(new_state);
	struct drm_connector_state *old_state =
		drm_atomic_get_old_connector_state(state, conn);
	struct intel_digital_connector_state *old_conn_state =
		to_intel_digital_connector_state(old_state);
	struct drm_crtc_state *crtc_state;

	intel_hdcp_atomic_check(conn, old_state, new_state);

	if (!new_state->crtc)
		return 0;

	crtc_state = drm_atomic_get_new_crtc_state(state, new_state->crtc);

	/*
	 * These properties are handled by fastset, and might not end
	 * up in a modeset.
	 */
	if (new_conn_state->force_audio != old_conn_state->force_audio ||
	    new_conn_state->broadcast_rgb != old_conn_state->broadcast_rgb ||
	    new_conn_state->base.colorspace != old_conn_state->base.colorspace ||
	    new_conn_state->base.picture_aspect_ratio != old_conn_state->base.picture_aspect_ratio ||
	    new_conn_state->base.content_type != old_conn_state->base.content_type ||
	    new_conn_state->base.scaling_mode != old_conn_state->base.scaling_mode ||
	    !blob_equal(new_conn_state->base.hdr_output_metadata,
			old_conn_state->base.hdr_output_metadata))
		crtc_state->mode_changed = true;

	return 0;
}

/**
 * intel_digital_connector_duplicate_state - duplicate connector state
 * @connector: digital connector
 *
 * Allocates and returns a copy of the connector state (both common and
 * digital connector specific) for the specified connector.
 *
 * Returns: The newly allocated connector state, or NULL on failure.
 */
struct drm_connector_state *
intel_digital_connector_duplicate_state(struct drm_connector *connector)
{
	struct intel_digital_connector_state *state;

	state = kmemdup(connector->state, sizeof(*state), GFP_KERNEL);
	if (!state)
		return NULL;

	__drm_atomic_helper_connector_duplicate_state(connector, &state->base);
	return &state->base;
}

/**
 * intel_crtc_duplicate_state - duplicate crtc state
 * @crtc: drm crtc
 *
 * Allocates and returns a copy of the crtc state (both common and
 * Intel-specific) for the specified crtc.
 *
 * Returns: The newly allocated crtc state, or NULL on failure.
 */
struct drm_crtc_state *
intel_crtc_duplicate_state(struct drm_crtc *crtc)
{
	const struct intel_crtc_state *old_crtc_state = to_intel_crtc_state(crtc->state);
	struct intel_crtc_state *crtc_state;

	crtc_state = kmemdup(old_crtc_state, sizeof(*crtc_state), GFP_KERNEL);
	if (!crtc_state)
		return NULL;

	__drm_atomic_helper_crtc_duplicate_state(crtc, &crtc_state->uapi);

	/* copy color blobs */
	if (crtc_state->hw.degamma_lut)
		drm_property_blob_get(crtc_state->hw.degamma_lut);
	if (crtc_state->hw.ctm)
		drm_property_blob_get(crtc_state->hw.ctm);
	if (crtc_state->hw.gamma_lut)
		drm_property_blob_get(crtc_state->hw.gamma_lut);

	crtc_state->update_pipe = false;
	crtc_state->disable_lp_wm = false;
	crtc_state->disable_cxsr = false;
	crtc_state->update_wm_pre = false;
	crtc_state->update_wm_post = false;
	crtc_state->fifo_changed = false;
	crtc_state->preload_luts = false;
	crtc_state->wm.need_postvbl_update = false;
	crtc_state->fb_bits = 0;
	crtc_state->update_planes = 0;

	return &crtc_state->uapi;
}

static void intel_crtc_put_color_blobs(struct intel_crtc_state *crtc_state)
{
	drm_property_blob_put(crtc_state->hw.degamma_lut);
	drm_property_blob_put(crtc_state->hw.gamma_lut);
	drm_property_blob_put(crtc_state->hw.ctm);
}

void intel_crtc_free_hw_state(struct intel_crtc_state *crtc_state)
{
	intel_crtc_put_color_blobs(crtc_state);
}

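/* Copy the color management blobs from the uapi state into the hw state. */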
void intel_crtc_copy_color_blobs(struct intel_crtc_state *crtc_state)
{
	drm_property_replace_blob(&crtc_state->hw.degamma_lut,
				  crtc_state->uapi.degamma_lut);
	drm_property_replace_blob(&crtc_state->hw.gamma_lut,
				  crtc_state->uapi.gamma_lut);
	drm_property_replace_blob(&crtc_state->hw.ctm,
				  crtc_state->uapi.ctm);
}

/**
 * intel_crtc_destroy_state - destroy crtc state
 * @crtc: drm crtc
 * @state: the state to destroy
 *
 * Destroys the crtc state (both common and Intel-specific) for the
 * specified crtc.
 */
void
intel_crtc_destroy_state(struct drm_crtc *crtc,
			 struct drm_crtc_state *state)
{
	struct intel_crtc_state *crtc_state = to_intel_crtc_state(state);

	__drm_atomic_helper_crtc_destroy_state(&crtc_state->uapi);
	intel_crtc_free_hw_state(crtc_state);
	kfree(crtc_state);
}

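/*
 * Assign a free hardware scaler to the requested user (the crtc for panel
 * fitting, or a plane) and select the scaler mode based on the platform
 * generation and the plane's format.
 */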
static void intel_atomic_setup_scaler(struct intel_crtc_scaler_state *scaler_state,
				      int num_scalers_need, struct intel_crtc *intel_crtc,
				      const char *name, int idx,
				      struct intel_plane_state *plane_state,
				      int *scaler_id)
{
	struct drm_i915_private *dev_priv = to_i915(intel_crtc->base.dev);
	int j;
	u32 mode;

	if (*scaler_id < 0) {
		/* find a free scaler */
		for (j = 0; j < intel_crtc->num_scalers; j++) {
			if (scaler_state->scalers[j].in_use)
				continue;

			*scaler_id = j;
			scaler_state->scalers[*scaler_id].in_use = 1;
			break;
		}
	}

	if (WARN(*scaler_id < 0, "Cannot find scaler for %s:%d\n", name, idx))
		return;

	/* set scaler mode */
	if (plane_state && plane_state->hw.fb &&
	    plane_state->hw.fb->format->is_yuv &&
	    plane_state->hw.fb->format->num_planes > 1) {
		struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane);

		if (IS_GEN(dev_priv, 9) &&
		    !IS_GEMINILAKE(dev_priv)) {
			mode = SKL_PS_SCALER_MODE_NV12;
		} else if (icl_is_hdr_plane(dev_priv, plane->id)) {
			/*
			 * On gen11+'s HDR planes we only use the scaler for
			 * scaling. They have a dedicated chroma upsampler, so
			 * we don't need the scaler to upsample the UV plane.
			 */
			mode = PS_SCALER_MODE_NORMAL;
		} else {
			struct intel_plane *linked =
				plane_state->planar_linked_plane;

			mode = PS_SCALER_MODE_PLANAR;

			if (linked)
				mode |= PS_PLANE_Y_SEL(linked->id);
		}
	} else if (INTEL_GEN(dev_priv) > 9 || IS_GEMINILAKE(dev_priv)) {
		mode = PS_SCALER_MODE_NORMAL;
	} else if (num_scalers_need == 1 && intel_crtc->num_scalers > 1) {
		/*
		 * When only 1 scaler is in use on a pipe with 2 scalers,
		 * scaler 0 operates in high quality (HQ) mode.
		 * In this case use scaler 0 to take advantage of HQ mode.
		 */
		scaler_state->scalers[*scaler_id].in_use = 0;
		*scaler_id = 0;
		scaler_state->scalers[0].in_use = 1;
		mode = SKL_PS_SCALER_MODE_HQ;
	} else {
		mode = SKL_PS_SCALER_MODE_DYN;
	}

	DRM_DEBUG_KMS("Attached scaler id %u.%u to %s:%d\n",
		      intel_crtc->pipe, *scaler_id, name, idx);
	scaler_state->scalers[*scaler_id].mode = mode;
}

/**
 * intel_atomic_setup_scalers() - setup scalers for crtc per staged requests
 * @dev_priv: i915 device
 * @intel_crtc: intel crtc
 * @crtc_state: incoming crtc_state to validate and set up scalers for
 *
 * This function sets up scalers based on the staged scaling requests for
 * the crtc and its planes. It is called from the crtc-level check path.
 * If the request is supportable, it attaches scalers to the requested
 * planes and crtc.
 *
 * This function takes into account the current scaler(s) in use by any
 * planes that are not part of this atomic state.
 *
 * Returns:
 *   0 - scalers were set up successfully
 *   error code - otherwise
 */
int intel_atomic_setup_scalers(struct drm_i915_private *dev_priv,
			       struct intel_crtc *intel_crtc,
			       struct intel_crtc_state *crtc_state)
{
	struct drm_plane *plane = NULL;
	struct intel_plane *intel_plane;
	struct intel_plane_state *plane_state = NULL;
	struct intel_crtc_scaler_state *scaler_state =
		&crtc_state->scaler_state;
	struct drm_atomic_state *drm_state = crtc_state->uapi.state;
	struct intel_atomic_state *intel_state = to_intel_atomic_state(drm_state);
	int num_scalers_need;
	int i;

	num_scalers_need = hweight32(scaler_state->scaler_users);

	/*
	 * High level flow:
	 * - staged scaler requests are already in scaler_state->scaler_users
	 * - check whether staged scaling requests can be supported
	 * - add planes using scalers that aren't in current transaction
	 * - assign scalers to requested users
	 * - as part of plane commit, scalers will be committed
	 *   (i.e., either attached or detached) to respective planes in hw
	 * - as part of crtc_commit, scaler will be either attached or detached
	 *   to crtc in hw
	 */

	/* fail if required scalers > available scalers */
	if (num_scalers_need > intel_crtc->num_scalers) {
		DRM_DEBUG_KMS("Too many scaling requests %d > %d\n",
			num_scalers_need, intel_crtc->num_scalers);
		return -EINVAL;
	}

	/* walk through scaler_users bits and start assigning scalers */
	for (i = 0; i < sizeof(scaler_state->scaler_users) * 8; i++) {
		int *scaler_id;
		const char *name;
		int idx;

		/* skip if scaler not required */
		if (!(scaler_state->scaler_users & (1 << i)))
			continue;

		if (i == SKL_CRTC_INDEX) {
			name = "CRTC";
			idx = intel_crtc->base.base.id;

			/* panel fitter case: assign as a crtc scaler */
			scaler_id = &scaler_state->scaler_id;
		} else {
			name = "PLANE";

			/* plane scaler case: assign as a plane scaler */
			/* find the plane that set the bit as scaler_user */
			plane = drm_state->planes[i].ptr;

			/*
			 * To enable/disable HQ mode, planes that are using a
			 * scaler need to be added to this transaction.
			 */
			if (!plane) {
				struct drm_plane_state *state;

				/*
				 * GLK+ scalers don't have an HQ mode so it
				 * isn't necessary to change between HQ and dyn mode
				 * on those platforms.
				 */
				if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
					continue;

				plane = drm_plane_from_index(&dev_priv->drm, i);
				state = drm_atomic_get_plane_state(drm_state, plane);
				if (IS_ERR(state)) {
					DRM_DEBUG_KMS("Failed to add [PLANE:%d] to drm_state\n",
						plane->base.id);
					return PTR_ERR(state);
				}
			}

			intel_plane = to_intel_plane(plane);
			idx = plane->base.id;

			/* plane on different crtc cannot be a scaler user of this crtc */
			if (WARN_ON(intel_plane->pipe != intel_crtc->pipe))
				continue;

			plane_state = intel_atomic_get_new_plane_state(intel_state,
								       intel_plane);
			scaler_id = &plane_state->scaler_id;
		}

		intel_atomic_setup_scaler(scaler_state, num_scalers_need,
					  intel_crtc, name, idx,
					  plane_state, scaler_id);
	}

	return 0;
}

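/*
 * i915 subclasses drm_atomic_state with driver-private fields; this allocates
 * the subclassed state and initializes the embedded base state (used as the
 * driver's ->atomic_state_alloc hook).
 */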
struct drm_atomic_state *
intel_atomic_state_alloc(struct drm_device *dev)
{
	struct intel_atomic_state *state = kzalloc(sizeof(*state), GFP_KERNEL);

	if (!state || drm_atomic_state_init(dev, &state->base) < 0) {
		kfree(state);
		return NULL;
	}

	return &state->base;
}

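/*
 * The ->atomic_state_clear counterpart of intel_atomic_state_alloc(): perform
 * the default DRM clear and reset the i915-specific fields so the state can
 * be reused when a commit needs to be retried.
 */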
void intel_atomic_state_clear(struct drm_atomic_state *s)
{
	struct intel_atomic_state *state = to_intel_atomic_state(s);

	drm_atomic_state_default_clear(&state->base);
	state->dpll_set = state->modeset = false;
	state->global_state_changed = false;
	state->active_pipes = 0;
	memset(&state->min_cdclk, 0, sizeof(state->min_cdclk));
	memset(&state->min_voltage_level, 0, sizeof(state->min_voltage_level));
	memset(&state->cdclk.logical, 0, sizeof(state->cdclk.logical));
	memset(&state->cdclk.actual, 0, sizeof(state->cdclk.actual));
	state->cdclk.pipe = INVALID_PIPE;
}

struct intel_crtc_state *
intel_atomic_get_crtc_state(struct drm_atomic_state *state,
			    struct intel_crtc *crtc)
{
	struct drm_crtc_state *crtc_state;

	crtc_state = drm_atomic_get_crtc_state(state, &crtc->base);
	if (IS_ERR(crtc_state))
		return ERR_CAST(crtc_state);

	return to_intel_crtc_state(crtc_state);
}

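/*
 * Grab the modeset lock of every crtc: changes to global (not per-crtc)
 * state must be protected against any concurrent crtc update.
 */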
int intel_atomic_lock_global_state(struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	state->global_state_changed = true;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		int ret;

		ret = drm_modeset_lock(&crtc->base.mutex,
				       state->base.acquire_ctx);
		if (ret)
			return ret;
	}

	return 0;
}

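/*
 * Like intel_atomic_lock_global_state(), but also adds every crtc's state to
 * this commit, serializing the global state change against all in-flight
 * crtc commits.
 */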
int intel_atomic_serialize_global_state(struct intel_atomic_state *state)
{
	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
	struct intel_crtc *crtc;

	state->global_state_changed = true;

	for_each_intel_crtc(&dev_priv->drm, crtc) {
		struct intel_crtc_state *crtc_state;

		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);
	}

	return 0;
}