// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015-2018, 2020-2021 The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/delay.h>
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_hw_pingpong.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"
#include "disp/msm_disp_snapshot.h"

#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels
 * to override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc);

static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
{
	return (phys_enc->split_role != ENC_ROLE_SLAVE);
}

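/*
 * Program the CTL and INTF for command mode: select the interface and
 * stream, attach the pingpong block (on DPU_CTL_ACTIVE_CFG hardware) and
 * enable data compression whenever DSC is in use.
 */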
static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };
	struct dpu_hw_intf_cmd_mode_cfg cmd_mode_cfg = {};

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	intf_cfg.dsc = dpu_encoder_helper_get_dsc(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);

	/* setup which pp blk will connect to this intf */
	if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) && phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				phys_enc->hw_pp->idx);

	if (intf_cfg.dsc != 0)
		cmd_mode_cfg.data_compress = true;

	if (phys_enc->hw_intf->ops.program_intf_cmd_cfg)
		phys_enc->hw_intf->ops.program_intf_cmd_cfg(phys_enc->hw_intf, &cmd_mode_cfg);
}

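/*
 * PP_DONE interrupt handler: report frame completion to the parent
 * encoder, decrement the pending kickoff count and wake up any thread
 * blocked on pending_kickoff_wq.
 */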
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

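/*
 * TE read-pointer interrupt handler: report vblank to the parent encoder
 * and release any waiter in dpu_encoder_phys_cmd_wait_for_vblank().
 */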
static void dpu_encoder_phys_cmd_te_rd_ptr_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf)
			return;
	} else {
		if (!phys_enc->hw_pp)
			return;
	}

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	dpu_encoder_vblank_callback(phys_enc->parent, phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

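/*
 * CTL_START interrupt handler: the wait queue is shared with PP_DONE,
 * so waiters distinguish the two events via pending_ctlstart_cnt.
 */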
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiting ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	dpu_encoder_underrun_callback(phys_enc->parent, phys_enc);
}

static void dpu_encoder_phys_cmd_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	phys_enc->irq[INTR_IDX_CTL_START] = phys_enc->hw_ctl->caps->intr_start;

	phys_enc->irq[INTR_IDX_PINGPONG] = phys_enc->hw_pp->caps->intr_done;

	if (phys_enc->has_intf_te)
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_intf->cap->intr_tear_rd_ptr;
	else
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_pp->caps->intr_rdptr;

	phys_enc->irq[INTR_IDX_UNDERRUN] = phys_enc->hw_intf->cap->intr_underrun;
}

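/*
 * Handle a missed PP_DONE: log only the first timeout and the "panel
 * dead" threshold to avoid flooding, capture a display snapshot, drop
 * the stale kickoff and request a CTL reset before the next one.
 */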
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;
	struct drm_encoder *drm_enc;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	drm_enc = phys_enc->parent;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
			  DRMID(drm_enc),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
		msm_disp_snapshot_state(drm_enc->dev);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

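/*
 * Wait for the pending kickoff count to drop, i.e. for PP_DONE of the
 * previous frame; a timeout is escalated to
 * _dpu_encoder_phys_cmd_handle_ppdone_timeout().
 */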
static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_PINGPONG],
			dpu_encoder_phys_cmd_pp_tx_done_irq,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

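/*
 * Refcounted enable/disable of the TE read-pointer interrupt. Only the
 * master encoder reports vblank, so slaves return early without touching
 * the refcount.
 */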
static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR],
				dpu_encoder_phys_cmd_te_rd_ptr_irq,
				phys_enc);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);

end:
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			enable, atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_PINGPONG],
				dpu_encoder_phys_cmd_pp_tx_done_irq,
				phys_enc);
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_UNDERRUN],
				dpu_encoder_phys_cmd_underrun_irq,
				phys_enc);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_core_irq_register_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_CTL_START],
					dpu_encoder_phys_cmd_ctl_start_irq,
					phys_enc);
	} else {
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_CTL_START]);

		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_UNDERRUN]);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_PINGPONG]);
	}
}

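/*
 * Configure the tearcheck block, located on the INTF when the hardware
 * supports DPU_INTF_TE and on the pingpong otherwise, using the cached
 * mode and the vsync clock rate.
 */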
static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	unsigned long vsync_hz;
	struct dpu_kms *dpu_kms;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf ||
		    !phys_enc->hw_intf->ops.enable_tearcheck) {
			DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
			return;
		}

		DPU_DEBUG_CMDENC(cmd_enc, "");
	} else {
		if (!phys_enc->hw_pp ||
		    !phys_enc->hw_pp->ops.enable_tearcheck) {
			DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
			return;
		}

		DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);
	}

	mode = &phys_enc->cached_mode;

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: DSI byte clock calculated based on 70 fps;
	 * around 14 ms to complete a kickoff cycle if TE is disabled;
	 * vclk_line based on 60 fps; write is faster than read;
	 * init == start == rdptr.
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to the
	 * LCD panel frequency, divided by the number of rows (lines) in
	 * the LCD panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - no vsync clock\n");
		return;
	}

	tc_cfg.vsync_count = vsync_hz /
				(mode->vtotal * drm_mode_vrefresh(mode));

	/*
	 * Set the sync_cfg_height to twice vtotal so that if we lose a
	 * TE event coming from the display TE pin we won't stall immediately
	 */
	tc_cfg.hw_vsync_mode = 1;
	tc_cfg.sync_cfg_height = mode->vtotal * 2;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;

	DPU_DEBUG_CMDENC(cmd_enc,
		"tc vsync_clk_speed_hz %lu vtotal %u vrefresh %u\n",
		vsync_hz, mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc enable %u start_pos %u rd_ptr_irq %u\n",
		tc_enable, tc_cfg.start_pos, tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		tc_cfg.hw_vsync_mode, tc_cfg.vsync_count,
		tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc cfgheight %u thresh_start %u thresh_cont %u\n",
		tc_cfg.sync_cfg_height, tc_cfg.sync_threshold_start,
		tc_cfg.sync_threshold_continue);

	if (phys_enc->has_intf_te)
		phys_enc->hw_intf->ops.enable_tearcheck(phys_enc->hw_intf, &tc_cfg);
	else
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, &tc_cfg);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/**
	 * we do separate flush for each CTL and let
	 * CTL_START synchronize them
	 */
	return false;
}

static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->hw_intf->idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	ctl = phys_enc->hw_ctl;
	ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

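/*
 * Connect or disconnect the external TE pin from the tearcheck logic:
 * disconnected when preparing for idle power collapse and reconnected
 * after kickoff.
 */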
static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf || !phys_enc->hw_intf->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_intf->ops.connect_external_te(phys_enc->hw_intf, enable);
	} else {
		if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
	}
}

static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;
	struct dpu_hw_intf *hw_intf;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	if (phys_enc->has_intf_te) {
		hw_intf = phys_enc->hw_intf;
		if (!hw_intf || !hw_intf->ops.get_line_count)
			return -EINVAL;
		return hw_intf->ops.get_line_count(hw_intf);
	}

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp || !hw_pp->ops.get_line_count)
		return -EINVAL;
	return hw_pp->ops.get_line_count(hw_pp);
}

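/*
 * Disable the encoder: tear down tearcheck, detach the pingpong block
 * from the interface and mark the INTF for flush so the detach takes
 * effect on the next commit.
 */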
static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->has_intf_te) {
		DRM_DEBUG_KMS("id:%u intf:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_intf->idx - INTF_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_intf->ops.disable_tearcheck)
			phys_enc->hw_intf->ops.disable_tearcheck(phys_enc->hw_intf);
	} else {
		if (!phys_enc->hw_pp) {
			DPU_ERROR("invalid encoder\n");
			return;
		}

		DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_pp->idx - PINGPONG_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_pp->ops.disable_tearcheck)
			phys_enc->hw_pp->ops.disable_tearcheck(phys_enc->hw_pp);
	}

	if (phys_enc->hw_intf->ops.bind_pingpong_blk) {
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				PINGPONG_NONE);

		ctl = phys_enc->hw_ctl;
		ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	kfree(cmd_enc);
}

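/*
 * Prepare for kickoff: the previous frame must have been transferred
 * before a new one is queued, so any failure to go idle discards the
 * stale kickoff state.
 */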
static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark the kickoff request as outstanding. If there is more than one
	 * outstanding request, we have to wait for the previous one to
	 * complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	dpu_encoder_phys_cmd_enable_te(phys_enc);

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}

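/*
 * Despite its name, this disables hardware autorefresh on the master
 * encoder so that frame transfer is driven by software kickoffs again.
 */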
static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf->ops.disable_autorefresh)
			return;

		phys_enc->hw_intf->ops.disable_autorefresh(
				phys_enc->hw_intf,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	} else {
		if (!phys_enc->hw_pp ||
		    !phys_enc->hw_pp->ops.disable_autorefresh)
			return;

		phys_enc->hw_pp->ops.disable_autorefresh(
				phys_enc->hw_pp,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	}
}

static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_CTL_START],
			dpu_encoder_phys_cmd_ctl_start_irq,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	} else if (!ret)
		ret = 0;

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->hw_intf->idx - INTF_0);
	}

	return rc;
}

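/*
 * Commit-done wait for the master: wait for CTL_START on the first commit
 * after the CTL is programmed, and for frame transfer (PP_DONE) once the
 * CTL reports it has started.
 */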
static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return 0;

	if (phys_enc->hw_ctl->ops.is_started(phys_enc->hw_ctl))
		return dpu_encoder_phys_cmd_wait_for_tx_complete(phys_enc);

	return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
}

static int dpu_encoder_phys_cmd_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;
	struct dpu_encoder_wait_info wait_info;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return rc;

	wait_info.wq = &cmd_enc->pending_vblank_wq;
	wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	atomic_inc(&cmd_enc->pending_vblank_cnt);

	rc = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_RDPTR],
			dpu_encoder_phys_cmd_te_rd_ptr_irq,
			&wait_info);

	return rc;
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/**
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->atomic_mode_set = dpu_encoder_phys_cmd_atomic_mode_set;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->destroy = dpu_encoder_phys_cmd_destroy;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_control = dpu_encoder_phys_cmd_irq_control;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

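/**
 * dpu_encoder_phys_cmd_init - construct a new command-mode physical encoder
 * @p: Pointer to init params structure
 * Return: pointer to the new encoder, or an ERR_PTR on allocation failure
 */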
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;

	DPU_DEBUG("intf\n");

	cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(-ENOMEM);
	}
	phys_enc = &cmd_enc->base;

	dpu_encoder_phys_init(phys_enc, p);

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->intf_mode = INTF_MODE_CMD;
	cmd_enc->stream_sel = 0;

	phys_enc->has_intf_te = test_bit(DPU_INTF_TE,
					 &phys_enc->hw_intf->cap->features);

	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}