1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2015-2018, 2020-2021 The Linux Foundation. All rights reserved.
4  */
5 
6 #define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
7 #include <linux/delay.h>
8 #include "dpu_encoder_phys.h"
9 #include "dpu_hw_interrupts.h"
10 #include "dpu_hw_pingpong.h"
11 #include "dpu_core_irq.h"
12 #include "dpu_formats.h"
13 #include "dpu_trace.h"
14 #include "disp/msm_disp_snapshot.h"
15 
16 #include <drm/drm_managed.h>
17 
18 #define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
19 		(e) && (e)->base.parent ? \
20 		(e)->base.parent->base.id : -1, \
21 		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)
22 
23 #define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
24 		(e) && (e)->base.parent ? \
25 		(e)->base.parent->base.id : -1, \
26 		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)
27 
28 #define to_dpu_encoder_phys_cmd(x) \
29 	container_of(x, struct dpu_encoder_phys_cmd, base)
30 
31 #define PP_TIMEOUT_MAX_TRIALS	10
32 
33 /*
34  * Tearcheck sync start and continue thresholds are empirically found
35  * based on common panels In the future, may want to allow panels to override
36  * these default values
37  */
38 #define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
39 #define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4
40 
41 static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc);
42 
dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys * phys_enc)43 static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
44 {
45 	return (phys_enc->split_role != ENC_ROLE_SLAVE);
46 }
47 
/*
 * _dpu_encoder_phys_cmd_update_intf_cfg - program CTL and INTF for command mode
 * @phys_enc: physical encoder to configure
 *
 * Writes the CTL interface configuration (stream select, 3D blend mode, DSC
 * usage), binds the pingpong block to the interface on hardware with active
 * CTL configuration, and programs the interface command-mode config, enabling
 * data compression whenever any DSC block is in use.
 */
static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };
	struct dpu_hw_intf_cmd_mode_cfg cmd_mode_cfg = {};

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	intf_cfg.dsc = dpu_encoder_helper_get_dsc(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);

	/* setup which pp blk will connect to this intf */
	if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) && phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				phys_enc->hw_pp->idx);

	/* a non-zero dsc mask means at least one DSC block is active */
	if (intf_cfg.dsc != 0)
		cmd_mode_cfg.data_compress = true;

	if (phys_enc->hw_intf->ops.program_intf_cmd_cfg)
		phys_enc->hw_intf->ops.program_intf_cmd_cfg(phys_enc->hw_intf, &cmd_mode_cfg);
}
80 
/*
 * dpu_encoder_phys_cmd_pp_tx_done_irq - pingpong frame-transfer-done IRQ handler
 * @arg: the struct dpu_encoder_phys registered with the callback
 *
 * Signals FRAME_EVENT_DONE to the parent encoder, decrements the pending
 * kickoff count and wakes any thread blocked on pending_kickoff_wq.
 */
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, event);

	/* decrement, but never below zero: a spurious IRQ must not underflow */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}
107 
/*
 * dpu_encoder_phys_cmd_te_rd_ptr_irq - tear-check read-pointer IRQ handler
 * @arg: the struct dpu_encoder_phys registered with the callback
 *
 * Serves as the vblank event for command mode: forwards the vblank callback
 * to the parent encoder and releases waiters on pending_vblank_wq.
 */
static void dpu_encoder_phys_cmd_te_rd_ptr_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	/* tear-check lives on the INTF or the PINGPONG, depending on HW */
	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf)
			return;
	} else {
		if (!phys_enc->hw_pp)
			return;
	}

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	dpu_encoder_vblank_callback(phys_enc->parent, phys_enc);

	/* decrement, but never below zero */
	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}
130 
/*
 * dpu_encoder_phys_cmd_ctl_start_irq - CTL_START IRQ handler
 * @arg: the struct dpu_encoder_phys registered with the callback
 *
 * Decrements the pending ctl-start count and wakes waiters on the shared
 * kickoff wait queue.
 */
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiting ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}
143 
dpu_encoder_phys_cmd_underrun_irq(void * arg)144 static void dpu_encoder_phys_cmd_underrun_irq(void *arg)
145 {
146 	struct dpu_encoder_phys *phys_enc = arg;
147 
148 	dpu_encoder_underrun_callback(phys_enc->parent, phys_enc);
149 }
150 
/*
 * dpu_encoder_phys_cmd_atomic_mode_set - cache the IRQ indices for this mode
 * @phys_enc: physical encoder being configured
 * @crtc_state: new CRTC state (unused here)
 * @conn_state: new connector state (unused here)
 *
 * The read-pointer (vblank) interrupt comes from the INTF tear-check block
 * when the hardware supports it, otherwise from the PINGPONG block.
 */
static void dpu_encoder_phys_cmd_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	phys_enc->irq[INTR_IDX_CTL_START] = phys_enc->hw_ctl->caps->intr_start;

	phys_enc->irq[INTR_IDX_PINGPONG] = phys_enc->hw_pp->caps->intr_done;

	if (phys_enc->has_intf_te)
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_intf->cap->intr_tear_rd_ptr;
	else
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_pp->caps->intr_rdptr;

	phys_enc->irq[INTR_IDX_UNDERRUN] = phys_enc->hw_intf->cap->intr_underrun;
}
167 
/*
 * _dpu_encoder_phys_cmd_handle_ppdone_timeout - recover from a missed
 * pingpong-done interrupt
 * @phys_enc: physical encoder that timed out
 *
 * Reports a frame error to the parent encoder (escalating to PANEL_DEAD
 * after PP_TIMEOUT_MAX_TRIALS consecutive timeouts), captures a display
 * snapshot on the logged occurrences, and flags the encoder so a CTL reset
 * is requested before the next kickoff.
 *
 * Return: -EINVAL if there is no pingpong block, otherwise -ETIMEDOUT.
 */
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;
	struct drm_encoder *drm_enc;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	drm_enc = phys_enc->parent;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		/* too many consecutive timeouts: assume the panel is gone */
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
			  DRMID(drm_enc),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
		msm_disp_snapshot_state(drm_enc->dev);
		/* stop read-pointer (vblank) reporting while the pipe is stuck */
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);
	}

	/* drop the kickoff that will never complete (never below zero) */
	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}
218 
/*
 * _dpu_encoder_phys_cmd_wait_for_idle - wait for the outstanding kickoff to
 * complete (pingpong-done interrupt)
 * @phys_enc: physical encoder to wait on
 *
 * On timeout, runs the ppdone timeout recovery path; on success, resets the
 * consecutive-timeout report counter.
 *
 * Return: 0 on success, negative error code on failure.
 */
static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_PINGPONG],
			dpu_encoder_phys_cmd_pp_tx_done_irq,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}
242 
/*
 * dpu_encoder_phys_cmd_control_vblank_irq - refcounted enable/disable of the
 * read-pointer (vblank) interrupt
 * @phys_enc: physical encoder
 * @enable: true to take a reference, false to drop one
 *
 * Only the master encoder registers the callback; slave encoders return
 * success without doing anything. The IRQ callback is registered on the
 * first reference and unregistered when the last reference is dropped.
 *
 * Return: 0 on success, negative error code on failure or refcount underflow.
 */
static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	/* register on first user, unregister when the last user goes away */
	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR],
				dpu_encoder_phys_cmd_te_rd_ptr_irq,
				phys_enc);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);

end:
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}
290 
/*
 * dpu_encoder_phys_cmd_irq_control - register/unregister all command-mode IRQ
 * callbacks
 * @phys_enc: physical encoder
 * @enable: true to register, false to unregister
 *
 * Handles the pingpong-done, underrun and vblank interrupts; the CTL_START
 * interrupt is only handled on the master encoder.
 */
static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			enable, atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_PINGPONG],
				dpu_encoder_phys_cmd_pp_tx_done_irq,
				phys_enc);
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_UNDERRUN],
				dpu_encoder_phys_cmd_underrun_irq,
				phys_enc);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_core_irq_register_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_CTL_START],
					dpu_encoder_phys_cmd_ctl_start_irq,
					phys_enc);
	} else {
		/* teardown runs in the reverse order of registration */
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_CTL_START]);

		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_UNDERRUN]);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_PINGPONG]);
	}
}
326 
/*
 * dpu_encoder_phys_cmd_tearcheck_config - program the hardware tear-check block
 * @phys_enc: physical encoder
 *
 * Derives the tear-check configuration from the cached display mode and the
 * vsync clock rate, then programs it into the INTF tear-check block when the
 * hardware supports it, otherwise into the PINGPONG block. Bails out silently
 * when tear-check is not supported or no vsync clock is available.
 */
static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	unsigned long vsync_hz;
	struct dpu_kms *dpu_kms;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf ||
		    !phys_enc->hw_intf->ops.enable_tearcheck) {
			DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
			return;
		}

		DPU_DEBUG_CMDENC(cmd_enc, "");
	} else {
		if (!phys_enc->hw_pp ||
		    !phys_enc->hw_pp->ops.enable_tearcheck) {
			DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
			return;
		}

		DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
	}

	mode = &phys_enc->cached_mode;

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: dsi byte clock calculated base on 70 fps;
	 * around 14 ms to complete a kickoff cycle if te disabled;
	 * vclk_line base on 60 fps; write is faster than read;
	 * init == start == rdptr;
	 *
	 * vsync_count is ratio of MDP VSYNC clock frequency to LCD panel
	 * frequency divided by the no. of rows (lines) in the LCDpanel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - no vsync clock\n");
		return;
	}

	tc_cfg.vsync_count = vsync_hz /
				(mode->vtotal * drm_mode_vrefresh(mode));

	/*
	 * Set the sync_cfg_height to twice vtotal so that if we lose a
	 * TE event coming from the display TE pin we won't stall immediately
	 */
	tc_cfg.hw_vsync_mode = 1;
	tc_cfg.sync_cfg_height = mode->vtotal * 2;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	/* fire the read-pointer irq just past the active region */
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;

	DPU_DEBUG_CMDENC(cmd_enc,
		"tc vsync_clk_speed_hz %lu vtotal %u vrefresh %u\n",
		vsync_hz, mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc enable %u start_pos %u rd_ptr_irq %u\n",
		tc_enable, tc_cfg.start_pos, tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		tc_cfg.hw_vsync_mode, tc_cfg.vsync_count,
		tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc cfgheight %u thresh_start %u thresh_cont %u\n",
		tc_cfg.sync_cfg_height, tc_cfg.sync_threshold_start,
		tc_cfg.sync_threshold_continue);

	if (phys_enc->has_intf_te)
		phys_enc->hw_intf->ops.enable_tearcheck(phys_enc->hw_intf, &tc_cfg);
	else
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, &tc_cfg);
}
410 
/*
 * _dpu_encoder_phys_cmd_pingpong_config - program the pingpong data path
 * @phys_enc: physical encoder
 *
 * Configures the CTL/INTF routing and the tear-check block for the cached
 * display mode.
 */
static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}
429 
dpu_encoder_phys_cmd_needs_single_flush(struct dpu_encoder_phys * phys_enc)430 static bool dpu_encoder_phys_cmd_needs_single_flush(
431 		struct dpu_encoder_phys *phys_enc)
432 {
433 	/**
434 	 * we do separate flush for each CTL and let
435 	 * CTL_START synchronize them
436 	 */
437 	return false;
438 }
439 
/*
 * dpu_encoder_phys_cmd_enable_helper - bring up the command-mode data path
 * @phys_enc: physical encoder
 *
 * Programs the split configuration, the CTL/INTF/pingpong path and queues an
 * interface flush. Also installed as the ->restore op, so it may be called
 * again to re-program the hardware.
 */
static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->hw_intf->idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	/* mark this interface as pending flush */
	ctl = phys_enc->hw_ctl;
	ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
}
457 
/*
 * dpu_encoder_phys_cmd_enable - enable the physical encoder
 * @phys_enc: physical encoder
 *
 * No-op (with an error message) if the encoder is already enabled; otherwise
 * programs the data path and transitions to DPU_ENC_ENABLED.
 */
static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}
478 
/*
 * _dpu_encoder_phys_cmd_connect_te - connect/disconnect the external TE signal
 * @phys_enc: physical encoder
 * @enable: true to connect the panel TE pin, false to disconnect it
 *
 * Routes through the INTF tear-check block when the hardware supports it,
 * otherwise through the PINGPONG block. Silently returns when the op is
 * unavailable.
 */
static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf || !phys_enc->hw_intf->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_intf->ops.connect_external_te(phys_enc->hw_intf, enable);
	} else {
		if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
	}
}
496 
/*
 * dpu_encoder_phys_cmd_prepare_idle_pc - disconnect the external TE signal
 * in preparation for idle power collapse, so a panel TE cannot trigger
 * activity while the encoder is idle.
 */
static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}
502 
dpu_encoder_phys_cmd_get_line_count(struct dpu_encoder_phys * phys_enc)503 static int dpu_encoder_phys_cmd_get_line_count(
504 		struct dpu_encoder_phys *phys_enc)
505 {
506 	struct dpu_hw_pingpong *hw_pp;
507 	struct dpu_hw_intf *hw_intf;
508 
509 	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
510 		return -EINVAL;
511 
512 	if (phys_enc->has_intf_te) {
513 		hw_intf = phys_enc->hw_intf;
514 		if (!hw_intf || !hw_intf->ops.get_line_count)
515 			return -EINVAL;
516 		return hw_intf->ops.get_line_count(hw_intf);
517 	}
518 
519 	hw_pp = phys_enc->hw_pp;
520 	if (!hw_pp || !hw_pp->ops.get_line_count)
521 		return -EINVAL;
522 	return hw_pp->ops.get_line_count(hw_pp);
523 }
524 
/*
 * dpu_encoder_phys_cmd_disable - disable the physical encoder
 * @phys_enc: physical encoder
 *
 * Disables tear-check (INTF or PINGPONG variant), detaches the pingpong
 * block from the interface, queues an interface flush for the change and
 * transitions to DPU_ENC_DISABLED.
 */
static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->has_intf_te) {
		DRM_DEBUG_KMS("id:%u intf:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_intf->idx - INTF_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_intf->ops.disable_tearcheck)
			phys_enc->hw_intf->ops.disable_tearcheck(phys_enc->hw_intf);
	} else {
		if (!phys_enc->hw_pp) {
			DPU_ERROR("invalid encoder\n");
			return;
		}

		DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_pp->idx - PINGPONG_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_pp->ops.disable_tearcheck)
			phys_enc->hw_pp->ops.disable_tearcheck(phys_enc->hw_pp);
	}

	/* unbind the pingpong block and flush the interface to apply it */
	if (phys_enc->hw_intf->ops.bind_pingpong_blk) {
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				PINGPONG_NONE);

		ctl = phys_enc->hw_ctl;
		ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}
568 
/*
 * dpu_encoder_phys_cmd_prepare_for_kickoff - get ready to issue a new kickoff
 * @phys_enc: physical encoder
 *
 * Waits for any outstanding kickoff to complete first. On failure the
 * pending count is forced to zero so the failed kickoff is discarded, and
 * the new kickoff proceeds anyway. Finally disables autorefresh (see
 * dpu_encoder_phys_cmd_enable_te()).
 */
static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark kickoff request as outstanding. If there are more than one,
	 * outstanding, then we have to wait for the previous one to complete
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	dpu_encoder_phys_cmd_enable_te(phys_enc);

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}
603 
/*
 * dpu_encoder_phys_cmd_enable_te - disable hardware autorefresh before kickoff
 * @phys_enc: physical encoder (may be NULL; slaves are ignored)
 *
 * Master encoder only. Disables autorefresh via the INTF tear-check block
 * when available, otherwise via the PINGPONG block, passing the display
 * height (vdisplay) of the cached mode to the helper.
 */
static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf->ops.disable_autorefresh)
			return;

		phys_enc->hw_intf->ops.disable_autorefresh(
				phys_enc->hw_intf,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	} else {
		if (!phys_enc->hw_pp ||
		    !phys_enc->hw_pp->ops.disable_autorefresh)
			return;

		phys_enc->hw_pp->ops.disable_autorefresh(
				phys_enc->hw_pp,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	}
}
630 
_dpu_encoder_phys_cmd_wait_for_ctl_start(struct dpu_encoder_phys * phys_enc)631 static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
632 		struct dpu_encoder_phys *phys_enc)
633 {
634 	struct dpu_encoder_phys_cmd *cmd_enc =
635 			to_dpu_encoder_phys_cmd(phys_enc);
636 	struct dpu_encoder_wait_info wait_info;
637 	int ret;
638 
639 	wait_info.wq = &phys_enc->pending_kickoff_wq;
640 	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
641 	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
642 
643 	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
644 			phys_enc->irq[INTR_IDX_CTL_START],
645 			dpu_encoder_phys_cmd_ctl_start_irq,
646 			&wait_info);
647 	if (ret == -ETIMEDOUT) {
648 		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
649 		ret = -EINVAL;
650 	} else if (!ret)
651 		ret = 0;
652 
653 	return ret;
654 }
655 
dpu_encoder_phys_cmd_wait_for_tx_complete(struct dpu_encoder_phys * phys_enc)656 static int dpu_encoder_phys_cmd_wait_for_tx_complete(
657 		struct dpu_encoder_phys *phys_enc)
658 {
659 	int rc;
660 
661 	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
662 	if (rc) {
663 		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
664 			  DRMID(phys_enc->parent), rc,
665 			  phys_enc->hw_intf->idx - INTF_0);
666 	}
667 
668 	return rc;
669 }
670 
/*
 * dpu_encoder_phys_cmd_wait_for_commit_done - wait until the commit has taken
 * effect
 * @phys_enc: physical encoder
 *
 * Slaves return immediately. On the master: if the CTL is already running,
 * wait for the frame transfer to finish; otherwise wait for the CTL_START
 * interrupt of the first frame.
 */
static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return 0;

	if (phys_enc->hw_ctl->ops.is_started(phys_enc->hw_ctl))
		return dpu_encoder_phys_cmd_wait_for_tx_complete(phys_enc);

	return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
}
683 
/*
 * dpu_encoder_phys_cmd_handle_post_kickoff - post-kickoff hook
 * @phys_enc: physical encoder
 */
static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/**
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}
693 
/*
 * dpu_encoder_phys_cmd_trigger_start - start the frame via the common
 * CTL-start helper.
 */
static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}
699 
/*
 * dpu_encoder_phys_cmd_init_ops - install the command-mode vtable
 * @ops: ops table to populate
 */
static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->atomic_mode_set = dpu_encoder_phys_cmd_atomic_mode_set;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_control = dpu_encoder_phys_cmd_irq_control;
	/* restore after idle power collapse re-runs the enable programming */
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}
719 
/**
 * dpu_encoder_phys_cmd_init - initialize a command-mode physical encoder
 * @dev: DRM device, used for DRM-managed (drmm) allocation
 * @p: initialization parameters forwarded to dpu_encoder_phys_init()
 *
 * Allocates the encoder with drmm_kzalloc() (freed automatically with the
 * DRM device), installs the command-mode ops, and detects whether the
 * interface provides its own tear-check block (DPU_INTF_TE).
 *
 * Return: the new physical encoder, or ERR_PTR(-ENOMEM) on allocation failure.
 */
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(struct drm_device *dev,
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;

	DPU_DEBUG("intf\n");

	cmd_enc = drmm_kzalloc(dev, sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(-ENOMEM);
	}
	phys_enc = &cmd_enc->base;

	dpu_encoder_phys_init(phys_enc, p);

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->intf_mode = INTF_MODE_CMD;
	cmd_enc->stream_sel = 0;

	phys_enc->has_intf_te = test_bit(DPU_INTF_TE,
					 &phys_enc->hw_intf->cap->features);

	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}
751