1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2021 Intel Corporation
4  */
5 
6 #include "i915_drv.h"
7 #include "intel_atomic.h"
8 #include "intel_de.h"
9 #include "intel_display_types.h"
10 #include "intel_drrs.h"
11 #include "intel_panel.h"
12 
13 /**
14  * DOC: Display Refresh Rate Switching (DRRS)
15  *
16  * Display Refresh Rate Switching (DRRS) is a power conservation feature
 * which enables switching between low and high refresh rates,
18  * dynamically, based on the usage scenario. This feature is applicable
19  * for internal panels.
20  *
21  * Indication that the panel supports DRRS is given by the panel EDID, which
22  * would list multiple refresh rates for one resolution.
23  *
24  * DRRS is of 2 types - static and seamless.
25  * Static DRRS involves changing refresh rate (RR) by doing a full modeset
26  * (may appear as a blink on screen) and is used in dock-undock scenario.
27  * Seamless DRRS involves changing RR without any visual effect to the user
28  * and can be used during normal system usage. This is done by programming
29  * certain registers.
30  *
31  * Support for static/seamless DRRS may be indicated in the VBT based on
32  * inputs from the panel spec.
33  *
34  * DRRS saves power by switching to low RR based on usage scenarios.
35  *
36  * The implementation is based on frontbuffer tracking implementation.  When
37  * there is a disturbance on the screen triggered by user activity or a periodic
38  * system activity, DRRS is disabled (RR is changed to high RR).  When there is
39  * no movement on screen, after a timeout of 1 second, a switch to low RR is
40  * made.
41  *
42  * For integration with frontbuffer tracking code, intel_drrs_invalidate()
43  * and intel_drrs_flush() are called.
44  *
45  * DRRS can be further extended to support other internal panels and also
46  * the scenario of video playback wherein RR is set based on the rate
47  * requested by userspace.
48  */
49 
50 static bool can_enable_drrs(struct intel_connector *connector,
51 			    const struct intel_crtc_state *pipe_config)
52 {
53 	const struct drm_i915_private *i915 = to_i915(connector->base.dev);
54 
55 	if (pipe_config->vrr.enable)
56 		return false;
57 
58 	/*
59 	 * DRRS and PSR can't be enable together, so giving preference to PSR
60 	 * as it allows more power-savings by complete shutting down display,
61 	 * so to guarantee this, intel_drrs_compute_config() must be called
62 	 * after intel_psr_compute_config().
63 	 */
64 	if (pipe_config->has_psr)
65 		return false;
66 
67 	return connector->panel.downclock_mode &&
68 		i915->drrs.type == SEAMLESS_DRRS_SUPPORT;
69 }
70 
/*
 * intel_drrs_compute_config - compute the downclocked link M/N values
 * @intel_dp: DP struct
 * @pipe_config: CRTC state being computed
 * @output_bpp: bits per pixel on the link
 * @constant_n: use a constant N in the M/N computation
 *
 * If DRRS can be enabled for this connector/state, sets has_drrs and fills
 * dp_m2_n2 with link parameters derived from the panel's downclock mode.
 * Otherwise zeroes dp_m2_n2 on transcoders that have M2/N2 registers,
 * presumably so the state matches the hardware readout — TODO confirm.
 */
void
intel_drrs_compute_config(struct intel_dp *intel_dp,
			  struct intel_crtc_state *pipe_config,
			  int output_bpp, bool constant_n)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	int pixel_clock;

	if (!can_enable_drrs(connector, pipe_config)) {
		if (intel_cpu_transcoder_has_m2_n2(i915, pipe_config->cpu_transcoder))
			intel_zero_m_n(&pipe_config->dp_m2_n2);
		return;
	}

	pipe_config->has_drrs = true;

	/* Each splitter link carries only a fraction of the pixel rate. */
	pixel_clock = connector->panel.downclock_mode->clock;
	if (pipe_config->splitter.enable)
		pixel_clock /= pipe_config->splitter.link_count;

	intel_link_compute_m_n(output_bpp, pipe_config->lane_count, pixel_clock,
			       pipe_config->port_clock, &pipe_config->dp_m2_n2,
			       constant_n, pipe_config->fec_enable);

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m2_n2.data_m *= pipe_config->splitter.link_count;
}
100 
101 static void
102 intel_drrs_set_refresh_rate_pipeconf(const struct intel_crtc_state *crtc_state,
103 				     enum drrs_refresh_rate_type refresh_type)
104 {
105 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
106 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
107 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
108 	u32 val, bit;
109 
110 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
111 		bit = PIPECONF_EDP_RR_MODE_SWITCH_VLV;
112 	else
113 		bit = PIPECONF_EDP_RR_MODE_SWITCH;
114 
115 	val = intel_de_read(dev_priv, PIPECONF(cpu_transcoder));
116 
117 	if (refresh_type == DRRS_LOW_RR)
118 		val |= bit;
119 	else
120 		val &= ~bit;
121 
122 	intel_de_write(dev_priv, PIPECONF(cpu_transcoder), val);
123 }
124 
125 static void
126 intel_drrs_set_refresh_rate_m_n(const struct intel_crtc_state *crtc_state,
127 				enum drrs_refresh_rate_type refresh_type)
128 {
129 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
130 
131 	intel_cpu_transcoder_set_m1_n1(crtc, crtc_state->cpu_transcoder,
132 				       refresh_type == DRRS_LOW_RR ?
133 				       &crtc_state->dp_m2_n2 : &crtc_state->dp_m_n);
134 }
135 
/*
 * intel_drrs_set_state - switch the panel between high and low refresh rate
 * @dev_priv: i915 device
 * @crtc_state: state of the CRTC driving the DRRS panel
 * @refresh_type: DRRS_HIGH_RR or DRRS_LOW_RR
 *
 * Programs either the transcoder M1/N1 values (display ver >= 8, except CHV)
 * or the PIPECONF RR switch bit (older platforms and VLV/CHV), then records
 * the new rate in dev_priv->drrs.refresh_rate_type.
 *
 * All callers in this file hold dev_priv->drrs.mutex while calling this.
 */
static void intel_drrs_set_state(struct drm_i915_private *dev_priv,
				 const struct intel_crtc_state *crtc_state,
				 enum drrs_refresh_rate_type refresh_type)
{
	struct intel_dp *intel_dp = dev_priv->drrs.dp;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_display_mode *mode;

	if (!intel_dp) {
		drm_dbg_kms(&dev_priv->drm, "DRRS not supported.\n");
		return;
	}

	if (!crtc) {
		drm_dbg_kms(&dev_priv->drm,
			    "DRRS: intel_crtc not initialized\n");
		return;
	}

	if (dev_priv->drrs.type < SEAMLESS_DRRS_SUPPORT) {
		drm_dbg_kms(&dev_priv->drm, "Only Seamless DRRS supported.\n");
		return;
	}

	/* Nothing to do if we're already at the requested rate. */
	if (refresh_type == dev_priv->drrs.refresh_rate_type)
		return;

	if (!crtc_state->hw.active) {
		drm_dbg_kms(&dev_priv->drm,
			    "eDP encoder disabled. CRTC not Active\n");
		return;
	}

	/* M/N reprogramming on ver >= 8 (except CHV), PIPECONF bit otherwise. */
	if (DISPLAY_VER(dev_priv) >= 8 && !IS_CHERRYVIEW(dev_priv))
		intel_drrs_set_refresh_rate_m_n(crtc_state, refresh_type);
	else if (DISPLAY_VER(dev_priv) > 6)
		intel_drrs_set_refresh_rate_pipeconf(crtc_state, refresh_type);

	dev_priv->drrs.refresh_rate_type = refresh_type;

	if (refresh_type == DRRS_LOW_RR)
		mode = intel_dp->attached_connector->panel.downclock_mode;
	else
		mode = intel_dp->attached_connector->panel.fixed_mode;
	drm_dbg_kms(&dev_priv->drm, "eDP Refresh Rate set to : %dHz\n",
		    drm_mode_vrefresh(mode));
}
183 
184 static void
185 intel_drrs_enable_locked(struct intel_dp *intel_dp)
186 {
187 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
188 
189 	dev_priv->drrs.busy_frontbuffer_bits = 0;
190 	dev_priv->drrs.dp = intel_dp;
191 }
192 
193 /**
194  * intel_drrs_enable - init drrs struct if supported
195  * @intel_dp: DP struct
196  * @crtc_state: A pointer to the active crtc state.
197  *
198  * Initializes frontbuffer_bits and drrs.dp
199  */
200 void intel_drrs_enable(struct intel_dp *intel_dp,
201 		       const struct intel_crtc_state *crtc_state)
202 {
203 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
204 
205 	if (!crtc_state->has_drrs)
206 		return;
207 
208 	drm_dbg_kms(&dev_priv->drm, "Enabling DRRS\n");
209 
210 	mutex_lock(&dev_priv->drrs.mutex);
211 
212 	if (dev_priv->drrs.dp) {
213 		drm_warn(&dev_priv->drm, "DRRS already enabled\n");
214 		goto unlock;
215 	}
216 
217 	intel_drrs_enable_locked(intel_dp);
218 
219 unlock:
220 	mutex_unlock(&dev_priv->drrs.mutex);
221 }
222 
223 static void
224 intel_drrs_disable_locked(struct intel_dp *intel_dp,
225 			  const struct intel_crtc_state *crtc_state)
226 {
227 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
228 
229 	intel_drrs_set_state(dev_priv, crtc_state, DRRS_HIGH_RR);
230 	dev_priv->drrs.dp = NULL;
231 }
232 
233 /**
234  * intel_drrs_disable - Disable DRRS
235  * @intel_dp: DP struct
236  * @old_crtc_state: Pointer to old crtc_state.
237  *
238  */
239 void intel_drrs_disable(struct intel_dp *intel_dp,
240 			const struct intel_crtc_state *old_crtc_state)
241 {
242 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
243 
244 	if (!old_crtc_state->has_drrs)
245 		return;
246 
247 	mutex_lock(&dev_priv->drrs.mutex);
248 	if (!dev_priv->drrs.dp) {
249 		mutex_unlock(&dev_priv->drrs.mutex);
250 		return;
251 	}
252 
253 	intel_drrs_disable_locked(intel_dp, old_crtc_state);
254 	mutex_unlock(&dev_priv->drrs.mutex);
255 
256 	cancel_delayed_work_sync(&dev_priv->drrs.work);
257 }
258 
259 /**
260  * intel_drrs_update - Update DRRS state
261  * @intel_dp: Intel DP
262  * @crtc_state: new CRTC state
263  *
264  * This function will update DRRS states, disabling or enabling DRRS when
265  * executing fastsets. For full modeset, intel_drrs_disable() and
266  * intel_drrs_enable() should be called instead.
267  */
268 void
269 intel_drrs_update(struct intel_dp *intel_dp,
270 		  const struct intel_crtc_state *crtc_state)
271 {
272 	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
273 
274 	if (dev_priv->drrs.type != SEAMLESS_DRRS_SUPPORT)
275 		return;
276 
277 	mutex_lock(&dev_priv->drrs.mutex);
278 
279 	/* New state matches current one? */
280 	if (crtc_state->has_drrs == !!dev_priv->drrs.dp)
281 		goto unlock;
282 
283 	if (crtc_state->has_drrs)
284 		intel_drrs_enable_locked(intel_dp);
285 	else
286 		intel_drrs_disable_locked(intel_dp, crtc_state);
287 
288 unlock:
289 	mutex_unlock(&dev_priv->drrs.mutex);
290 }
291 
/*
 * Delayed worker that drops the panel to the low refresh rate after the
 * idle timeout (scheduled from intel_drrs_frontbuffer_update()).
 */
static void intel_drrs_downclock_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, typeof(*dev_priv), drrs.work.work);
	struct intel_dp *intel_dp;
	struct drm_crtc *crtc;

	mutex_lock(&dev_priv->drrs.mutex);

	intel_dp = dev_priv->drrs.dp;

	/* DRRS may have been disabled since the work was scheduled. */
	if (!intel_dp)
		goto unlock;

	/*
	 * The delayed work can race with an invalidate hence we need to
	 * recheck.
	 */

	if (dev_priv->drrs.busy_frontbuffer_bits)
		goto unlock;

	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
	intel_drrs_set_state(dev_priv, to_intel_crtc(crtc)->config, DRRS_LOW_RR);

unlock:
	mutex_unlock(&dev_priv->drrs.mutex);
}
320 
321 static void intel_drrs_frontbuffer_update(struct drm_i915_private *dev_priv,
322 					  unsigned int frontbuffer_bits,
323 					  bool invalidate)
324 {
325 	struct intel_dp *intel_dp;
326 	struct drm_crtc *crtc;
327 	enum pipe pipe;
328 
329 	if (dev_priv->drrs.type == DRRS_NOT_SUPPORTED)
330 		return;
331 
332 	cancel_delayed_work(&dev_priv->drrs.work);
333 
334 	mutex_lock(&dev_priv->drrs.mutex);
335 
336 	intel_dp = dev_priv->drrs.dp;
337 	if (!intel_dp) {
338 		mutex_unlock(&dev_priv->drrs.mutex);
339 		return;
340 	}
341 
342 	crtc = dp_to_dig_port(intel_dp)->base.base.crtc;
343 	pipe = to_intel_crtc(crtc)->pipe;
344 
345 	frontbuffer_bits &= INTEL_FRONTBUFFER_ALL_MASK(pipe);
346 	if (invalidate)
347 		dev_priv->drrs.busy_frontbuffer_bits |= frontbuffer_bits;
348 	else
349 		dev_priv->drrs.busy_frontbuffer_bits &= ~frontbuffer_bits;
350 
351 	/* flush/invalidate means busy screen hence upclock */
352 	if (frontbuffer_bits)
353 		intel_drrs_set_state(dev_priv, to_intel_crtc(crtc)->config,
354 				     DRRS_HIGH_RR);
355 
356 	/*
357 	 * flush also means no more activity hence schedule downclock, if all
358 	 * other fbs are quiescent too
359 	 */
360 	if (!invalidate && !dev_priv->drrs.busy_frontbuffer_bits)
361 		schedule_delayed_work(&dev_priv->drrs.work,
362 				      msecs_to_jiffies(1000));
363 	mutex_unlock(&dev_priv->drrs.mutex);
364 }
365 
366 /**
367  * intel_drrs_invalidate - Disable Idleness DRRS
368  * @dev_priv: i915 device
369  * @frontbuffer_bits: frontbuffer plane tracking bits
370  *
371  * This function gets called everytime rendering on the given planes start.
372  * Hence DRRS needs to be Upclocked, i.e. (LOW_RR -> HIGH_RR).
373  *
374  * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
375  */
376 void intel_drrs_invalidate(struct drm_i915_private *dev_priv,
377 			   unsigned int frontbuffer_bits)
378 {
379 	intel_drrs_frontbuffer_update(dev_priv, frontbuffer_bits, true);
380 }
381 
382 /**
383  * intel_drrs_flush - Restart Idleness DRRS
384  * @dev_priv: i915 device
385  * @frontbuffer_bits: frontbuffer plane tracking bits
386  *
387  * This function gets called every time rendering on the given planes has
388  * completed or flip on a crtc is completed. So DRRS should be upclocked
389  * (LOW_RR -> HIGH_RR). And also Idleness detection should be started again,
390  * if no other planes are dirty.
391  *
392  * Dirty frontbuffers relevant to DRRS are tracked in busy_frontbuffer_bits.
393  */
394 void intel_drrs_flush(struct drm_i915_private *dev_priv,
395 		      unsigned int frontbuffer_bits)
396 {
397 	intel_drrs_frontbuffer_update(dev_priv, frontbuffer_bits, false);
398 }
399 
400 void intel_drrs_page_flip(struct intel_atomic_state *state,
401 			  struct intel_crtc *crtc)
402 {
403 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
404 	unsigned int frontbuffer_bits = INTEL_FRONTBUFFER_ALL_MASK(crtc->pipe);
405 
406 	intel_drrs_frontbuffer_update(dev_priv, frontbuffer_bits, false);
407 }
408 
409 /**
410  * intel_drrs_init - Init basic DRRS work and mutex.
411  * @connector: eDP connector
412  * @fixed_mode: preferred mode of panel
413  *
414  * This function is  called only once at driver load to initialize basic
415  * DRRS stuff.
416  *
417  * Returns:
418  * Downclock mode if panel supports it, else return NULL.
419  * DRRS support is determined by the presence of downclock mode (apart
420  * from VBT setting).
421  */
422 struct drm_display_mode *
423 intel_drrs_init(struct intel_connector *connector,
424 		struct drm_display_mode *fixed_mode)
425 {
426 	struct drm_i915_private *dev_priv = to_i915(connector->base.dev);
427 	struct intel_encoder *encoder = connector->encoder;
428 	struct drm_display_mode *downclock_mode = NULL;
429 
430 	INIT_DELAYED_WORK(&dev_priv->drrs.work, intel_drrs_downclock_work);
431 	mutex_init(&dev_priv->drrs.mutex);
432 
433 	if (DISPLAY_VER(dev_priv) <= 6) {
434 		drm_dbg_kms(&dev_priv->drm,
435 			    "DRRS supported for Gen7 and above\n");
436 		return NULL;
437 	}
438 
439 	if ((DISPLAY_VER(dev_priv) < 8 && !HAS_GMCH(dev_priv)) &&
440 	    encoder->port != PORT_A) {
441 		drm_dbg_kms(&dev_priv->drm,
442 			    "DRRS only supported on eDP port A\n");
443 		return NULL;
444 	}
445 
446 	if (dev_priv->vbt.drrs_type != SEAMLESS_DRRS_SUPPORT) {
447 		drm_dbg_kms(&dev_priv->drm, "VBT doesn't support DRRS\n");
448 		return NULL;
449 	}
450 
451 	downclock_mode = intel_panel_edid_downclock_mode(connector, fixed_mode);
452 	if (!downclock_mode) {
453 		drm_dbg_kms(&dev_priv->drm,
454 			    "Downclock mode is not found. DRRS not supported\n");
455 		return NULL;
456 	}
457 
458 	dev_priv->drrs.type = dev_priv->vbt.drrs_type;
459 
460 	dev_priv->drrs.refresh_rate_type = DRRS_HIGH_RR;
461 	drm_dbg_kms(&dev_priv->drm,
462 		    "seamless DRRS supported for eDP panel.\n");
463 	return downclock_mode;
464 }
465