1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2022 Qualcomm Innovation Center, Inc. All rights reserved.
4  */
5 
6 #define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
7 
8 #include <linux/debugfs.h>
9 
10 #include <drm/drm_framebuffer.h>
11 
12 #include "dpu_encoder_phys.h"
13 #include "dpu_formats.h"
14 #include "dpu_hw_top.h"
15 #include "dpu_hw_wb.h"
16 #include "dpu_hw_lm.h"
17 #include "dpu_hw_merge3d.h"
18 #include "dpu_hw_interrupts.h"
19 #include "dpu_core_irq.h"
20 #include "dpu_vbif.h"
21 #include "dpu_crtc.h"
22 #include "disp/msm_disp_snapshot.h"
23 
24 #define to_dpu_encoder_phys_wb(x) \
25 	container_of(x, struct dpu_encoder_phys_wb, base)
26 
27 /**
28  * dpu_encoder_phys_wb_is_master - report wb always as master encoder
29  */
30 static bool dpu_encoder_phys_wb_is_master(struct dpu_encoder_phys *phys_enc)
31 {
32 	/* there is only one physical enc for dpu_writeback */
33 	return true;
34 }
35 
36 /**
37  * dpu_encoder_phys_wb_set_ot_limit - set OT limit for writeback interface
38  * @phys_enc:	Pointer to physical encoder
39  */
40 static void dpu_encoder_phys_wb_set_ot_limit(
41 		struct dpu_encoder_phys *phys_enc)
42 {
43 	struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
44 	struct dpu_vbif_set_ot_params ot_params;
45 
46 	memset(&ot_params, 0, sizeof(ot_params));
47 	ot_params.xin_id = hw_wb->caps->xin_id;
48 	ot_params.num = hw_wb->idx - WB_0;
49 	ot_params.width = phys_enc->cached_mode.hdisplay;
50 	ot_params.height = phys_enc->cached_mode.vdisplay;
51 	ot_params.is_wfd = true;
52 	ot_params.frame_rate = drm_mode_vrefresh(&phys_enc->cached_mode);
53 	ot_params.vbif_idx = hw_wb->caps->vbif_idx;
54 	ot_params.clk_ctrl = hw_wb->caps->clk_ctrl;
55 	ot_params.rd = false;
56 
57 	dpu_vbif_set_ot_limit(phys_enc->dpu_kms, &ot_params);
58 }
59 
60 /**
61  * dpu_encoder_phys_wb_set_qos_remap - set QoS remapper for writeback
62  * @phys_enc:	Pointer to physical encoder
63  */
64 static void dpu_encoder_phys_wb_set_qos_remap(
65 		struct dpu_encoder_phys *phys_enc)
66 {
67 	struct dpu_hw_wb *hw_wb;
68 	struct dpu_vbif_set_qos_params qos_params;
69 
70 	if (!phys_enc || !phys_enc->parent || !phys_enc->parent->crtc) {
71 		DPU_ERROR("invalid arguments\n");
72 		return;
73 	}
74 
75 	if (!phys_enc->hw_wb || !phys_enc->hw_wb->caps) {
76 		DPU_ERROR("invalid writeback hardware\n");
77 		return;
78 	}
79 
80 	hw_wb = phys_enc->hw_wb;
81 
82 	memset(&qos_params, 0, sizeof(qos_params));
83 	qos_params.vbif_idx = hw_wb->caps->vbif_idx;
84 	qos_params.xin_id = hw_wb->caps->xin_id;
85 	qos_params.clk_ctrl = hw_wb->caps->clk_ctrl;
86 	qos_params.num = hw_wb->idx - WB_0;
87 	qos_params.is_rt = false;
88 
89 	DPU_DEBUG("[qos_remap] wb:%d vbif:%d xin:%d is_rt:%d\n",
90 			qos_params.num,
91 			qos_params.vbif_idx,
92 			qos_params.xin_id, qos_params.is_rt);
93 
94 	dpu_vbif_set_qos_remap(phys_enc->dpu_kms, &qos_params);
95 }
96 
97 /**
98  * dpu_encoder_phys_wb_set_qos - set QoS/danger/safe LUTs for writeback
99  * @phys_enc:	Pointer to physical encoder
100  */
101 static void dpu_encoder_phys_wb_set_qos(struct dpu_encoder_phys *phys_enc)
102 {
103 	struct dpu_hw_wb *hw_wb;
104 	struct dpu_hw_wb_qos_cfg qos_cfg;
105 	const struct dpu_mdss_cfg *catalog;
106 	const struct dpu_qos_lut_tbl *qos_lut_tb;
107 
108 	if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) {
109 		DPU_ERROR("invalid parameter(s)\n");
110 		return;
111 	}
112 
113 	catalog = phys_enc->dpu_kms->catalog;
114 
115 	hw_wb = phys_enc->hw_wb;
116 
117 	memset(&qos_cfg, 0, sizeof(struct dpu_hw_wb_qos_cfg));
118 	qos_cfg.danger_safe_en = true;
119 	qos_cfg.danger_lut =
120 		catalog->perf->danger_lut_tbl[DPU_QOS_LUT_USAGE_NRT];
121 
122 	qos_cfg.safe_lut = catalog->perf->safe_lut_tbl[DPU_QOS_LUT_USAGE_NRT];
123 
124 	qos_lut_tb = &catalog->perf->qos_lut_tbl[DPU_QOS_LUT_USAGE_NRT];
125 	qos_cfg.creq_lut = _dpu_hw_get_qos_lut(qos_lut_tb, 0);
126 
127 	if (hw_wb->ops.setup_qos_lut)
128 		hw_wb->ops.setup_qos_lut(hw_wb, &qos_cfg);
129 }
130 
131 /**
132  * dpu_encoder_phys_wb_setup_fb - setup output framebuffer
133  * @phys_enc:	Pointer to physical encoder
134  * @fb:		Pointer to output framebuffer
135  */
136 static void dpu_encoder_phys_wb_setup_fb(struct dpu_encoder_phys *phys_enc,
137 		struct drm_framebuffer *fb)
138 {
139 	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
140 	struct dpu_hw_wb *hw_wb;
141 	struct dpu_hw_wb_cfg *wb_cfg;
142 	struct dpu_hw_cdp_cfg cdp_cfg;
143 
144 	if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) {
145 		DPU_ERROR("invalid encoder\n");
146 		return;
147 	}
148 
149 	hw_wb = phys_enc->hw_wb;
150 	wb_cfg = &wb_enc->wb_cfg;
151 
152 	wb_cfg->intf_mode = phys_enc->intf_mode;
153 	wb_cfg->roi.x1 = 0;
154 	wb_cfg->roi.x2 = phys_enc->cached_mode.hdisplay;
155 	wb_cfg->roi.y1 = 0;
156 	wb_cfg->roi.y2 = phys_enc->cached_mode.vdisplay;
157 
158 	if (hw_wb->ops.setup_roi)
159 		hw_wb->ops.setup_roi(hw_wb, wb_cfg);
160 
161 	if (hw_wb->ops.setup_outformat)
162 		hw_wb->ops.setup_outformat(hw_wb, wb_cfg);
163 
164 	if (hw_wb->ops.setup_cdp) {
165 		memset(&cdp_cfg, 0, sizeof(struct dpu_hw_cdp_cfg));
166 
167 		cdp_cfg.enable = phys_enc->dpu_kms->catalog->perf->cdp_cfg
168 				[DPU_PERF_CDP_USAGE_NRT].wr_enable;
169 		cdp_cfg.ubwc_meta_enable =
170 				DPU_FORMAT_IS_UBWC(wb_cfg->dest.format);
171 		cdp_cfg.tile_amortize_enable =
172 				DPU_FORMAT_IS_UBWC(wb_cfg->dest.format) ||
173 				DPU_FORMAT_IS_TILE(wb_cfg->dest.format);
174 		cdp_cfg.preload_ahead = DPU_WB_CDP_PRELOAD_AHEAD_64;
175 
176 		hw_wb->ops.setup_cdp(hw_wb, &cdp_cfg);
177 	}
178 
179 	if (hw_wb->ops.setup_outaddress)
180 		hw_wb->ops.setup_outaddress(hw_wb, wb_cfg);
181 }
182 
183 /**
184  * dpu_encoder_phys_wb_setup_cdp - setup chroma down prefetch block
185  * @phys_enc:Pointer to physical encoder
186  */
187 static void dpu_encoder_phys_wb_setup_cdp(struct dpu_encoder_phys *phys_enc)
188 {
189 	struct dpu_hw_wb *hw_wb;
190 	struct dpu_hw_ctl *ctl;
191 
192 	if (!phys_enc) {
193 		DPU_ERROR("invalid encoder\n");
194 		return;
195 	}
196 
197 	hw_wb = phys_enc->hw_wb;
198 	ctl = phys_enc->hw_ctl;
199 
200 	if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) &&
201 		(phys_enc->hw_ctl &&
202 		 phys_enc->hw_ctl->ops.setup_intf_cfg)) {
203 		struct dpu_hw_intf_cfg intf_cfg = {0};
204 		struct dpu_hw_pingpong *hw_pp = phys_enc->hw_pp;
205 		enum dpu_3d_blend_mode mode_3d;
206 
207 		mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
208 
209 		intf_cfg.intf = DPU_NONE;
210 		intf_cfg.wb = hw_wb->idx;
211 
212 		if (mode_3d && hw_pp && hw_pp->merge_3d)
213 			intf_cfg.merge_3d = hw_pp->merge_3d->idx;
214 
215 		if (phys_enc->hw_pp->merge_3d && phys_enc->hw_pp->merge_3d->ops.setup_3d_mode)
216 			phys_enc->hw_pp->merge_3d->ops.setup_3d_mode(phys_enc->hw_pp->merge_3d,
217 					mode_3d);
218 
219 		/* setup which pp blk will connect to this wb */
220 		if (hw_pp && phys_enc->hw_wb->ops.bind_pingpong_blk)
221 			phys_enc->hw_wb->ops.bind_pingpong_blk(phys_enc->hw_wb, true,
222 					phys_enc->hw_pp->idx);
223 
224 		phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg);
225 	} else if (phys_enc->hw_ctl && phys_enc->hw_ctl->ops.setup_intf_cfg) {
226 		struct dpu_hw_intf_cfg intf_cfg = {0};
227 
228 		intf_cfg.intf = DPU_NONE;
229 		intf_cfg.wb = hw_wb->idx;
230 		intf_cfg.mode_3d =
231 			dpu_encoder_helper_get_3d_blend_mode(phys_enc);
232 		phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg);
233 	}
234 }
235 
236 /**
237  * dpu_encoder_phys_wb_atomic_check - verify and fixup given atomic states
238  * @phys_enc:	Pointer to physical encoder
239  * @crtc_state:	Pointer to CRTC atomic state
240  * @conn_state:	Pointer to connector atomic state
241  */
242 static int dpu_encoder_phys_wb_atomic_check(
243 		struct dpu_encoder_phys *phys_enc,
244 		struct drm_crtc_state *crtc_state,
245 		struct drm_connector_state *conn_state)
246 {
247 	struct drm_framebuffer *fb;
248 	const struct drm_display_mode *mode = &crtc_state->mode;
249 
250 	DPU_DEBUG("[atomic_check:%d, \"%s\",%d,%d]\n",
251 			phys_enc->wb_idx, mode->name, mode->hdisplay, mode->vdisplay);
252 
253 	if (!conn_state || !conn_state->connector) {
254 		DPU_ERROR("invalid connector state\n");
255 		return -EINVAL;
256 	} else if (conn_state->connector->status !=
257 			connector_status_connected) {
258 		DPU_ERROR("connector not connected %d\n",
259 				conn_state->connector->status);
260 		return -EINVAL;
261 	}
262 
263 	if (!conn_state->writeback_job || !conn_state->writeback_job->fb)
264 		return 0;
265 
266 	fb = conn_state->writeback_job->fb;
267 
268 	DPU_DEBUG("[fb_id:%u][fb:%u,%u]\n", fb->base.id,
269 			fb->width, fb->height);
270 
271 	if (fb->width != mode->hdisplay) {
272 		DPU_ERROR("invalid fb w=%d, mode w=%d\n", fb->width,
273 				mode->hdisplay);
274 		return -EINVAL;
275 	} else if (fb->height != mode->vdisplay) {
276 		DPU_ERROR("invalid fb h=%d, mode h=%d\n", fb->height,
277 				  mode->vdisplay);
278 		return -EINVAL;
279 	} else if (fb->width > phys_enc->hw_wb->caps->maxlinewidth) {
280 		DPU_ERROR("invalid fb w=%d, maxlinewidth=%u\n",
281 				  fb->width, phys_enc->hw_wb->caps->maxlinewidth);
282 		return -EINVAL;
283 	}
284 
285 	return 0;
286 }
287 
288 
289 /**
290  * _dpu_encoder_phys_wb_update_flush - flush hardware update
291  * @phys_enc:	Pointer to physical encoder
292  */
293 static void _dpu_encoder_phys_wb_update_flush(struct dpu_encoder_phys *phys_enc)
294 {
295 	struct dpu_hw_wb *hw_wb;
296 	struct dpu_hw_ctl *hw_ctl;
297 	struct dpu_hw_pingpong *hw_pp;
298 	u32 pending_flush = 0;
299 
300 	if (!phys_enc)
301 		return;
302 
303 	hw_wb = phys_enc->hw_wb;
304 	hw_pp = phys_enc->hw_pp;
305 	hw_ctl = phys_enc->hw_ctl;
306 
307 	DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);
308 
309 	if (!hw_ctl) {
310 		DPU_DEBUG("[wb:%d] no ctl assigned\n", hw_wb->idx - WB_0);
311 		return;
312 	}
313 
314 	if (hw_ctl->ops.update_pending_flush_wb)
315 		hw_ctl->ops.update_pending_flush_wb(hw_ctl, hw_wb->idx);
316 
317 	if (hw_ctl->ops.update_pending_flush_merge_3d && hw_pp && hw_pp->merge_3d)
318 		hw_ctl->ops.update_pending_flush_merge_3d(hw_ctl,
319 				hw_pp->merge_3d->idx);
320 
321 	if (hw_ctl->ops.get_pending_flush)
322 		pending_flush = hw_ctl->ops.get_pending_flush(hw_ctl);
323 
324 	DPU_DEBUG("Pending flush mask for CTL_%d is 0x%x, WB %d\n",
325 			hw_ctl->idx - CTL_0, pending_flush,
326 			hw_wb->idx - WB_0);
327 }
328 
329 /**
330  * dpu_encoder_phys_wb_setup - setup writeback encoder
331  * @phys_enc:	Pointer to physical encoder
332  */
333 static void dpu_encoder_phys_wb_setup(
334 		struct dpu_encoder_phys *phys_enc)
335 {
336 	struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
337 	struct drm_display_mode mode = phys_enc->cached_mode;
338 	struct drm_framebuffer *fb = NULL;
339 
340 	DPU_DEBUG("[mode_set:%d, \"%s\",%d,%d]\n",
341 			hw_wb->idx - WB_0, mode.name,
342 			mode.hdisplay, mode.vdisplay);
343 
344 	dpu_encoder_phys_wb_set_ot_limit(phys_enc);
345 
346 	dpu_encoder_phys_wb_set_qos_remap(phys_enc);
347 
348 	dpu_encoder_phys_wb_set_qos(phys_enc);
349 
350 	dpu_encoder_phys_wb_setup_fb(phys_enc, fb);
351 
352 	dpu_encoder_phys_wb_setup_cdp(phys_enc);
353 
354 }
355 
/*
 * _dpu_encoder_phys_wb_frame_done_helper - common frame-done handling
 * @arg: opaque pointer to the struct dpu_encoder_phys
 *
 * Runs in interrupt context (called from the wb-done irq handler):
 * notifies the parent encoder, decrements the pending kickoff count,
 * signals writeback job completion and wakes any committing thread.
 */
static void _dpu_encoder_phys_wb_frame_done_helper(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);

	struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
	unsigned long lock_flags;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
				phys_enc, event);

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	/* decrement under the encoder spinlock; never drop below zero */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	/* complete the queued writeback job with status 0 (success) */
	if (wb_enc->wb_conn)
		drm_writeback_signal_completion(wb_enc->wb_conn, 0);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
}
385 
386 /**
387  * dpu_encoder_phys_wb_done_irq - writeback interrupt handler
388  * @arg:	Pointer to writeback encoder
389  * @irq_idx:	interrupt index
390  */
391 static void dpu_encoder_phys_wb_done_irq(void *arg, int irq_idx)
392 {
393 	_dpu_encoder_phys_wb_frame_done_helper(arg);
394 }
395 
396 /**
397  * dpu_encoder_phys_wb_irq_ctrl - irq control of WB
398  * @phys:	Pointer to physical encoder
399  * @enable:	indicates enable or disable interrupts
400  */
401 static void dpu_encoder_phys_wb_irq_ctrl(
402 		struct dpu_encoder_phys *phys, bool enable)
403 {
404 
405 	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys);
406 
407 	if (enable && atomic_inc_return(&wb_enc->wbirq_refcount) == 1)
408 		dpu_core_irq_register_callback(phys->dpu_kms,
409 				phys->irq[INTR_IDX_WB_DONE], dpu_encoder_phys_wb_done_irq, phys);
410 	else if (!enable &&
411 			atomic_dec_return(&wb_enc->wbirq_refcount) == 0)
412 		dpu_core_irq_unregister_callback(phys->dpu_kms, phys->irq[INTR_IDX_WB_DONE]);
413 }
414 
/*
 * dpu_encoder_phys_wb_atomic_mode_set - latch the wb-done irq number
 * @phys_enc:	Pointer to physical encoder
 * @crtc_state:	Pointer to CRTC atomic state (unused here)
 * @conn_state:	Pointer to connector atomic state (unused here)
 *
 * Only records the hardware wb-done interrupt index so irq_ctrl /
 * wait_for_commit_done can use it later.
 */
static void dpu_encoder_phys_wb_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{

	phys_enc->irq[INTR_IDX_WB_DONE] = phys_enc->hw_wb->caps->intr_wb_done;
}
423 
/*
 * _dpu_encoder_phys_wb_handle_wbdone_timeout - recover from a missed
 * wb-done interrupt
 * @phys_enc:	Pointer to physical encoder
 *
 * Reports the frame as errored, releases the pending kickoff count and
 * the queued writeback job, and flags the encoder for a CTL reset.
 */
static void _dpu_encoder_phys_wb_handle_wbdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;

	wb_enc->wb_done_timeout_cnt++;

	/* snapshot hardware state only on the first timeout in a row */
	if (wb_enc->wb_done_timeout_cnt == 1)
		msm_disp_snapshot_state(phys_enc->parent->dev);

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	/* complete the job anyway so userspace is not stuck waiting */
	if (wb_enc->wb_conn)
		drm_writeback_signal_completion(wb_enc->wb_conn, 0);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc, frame_event);
}
447 
448 /**
449  * dpu_encoder_phys_wb_wait_for_commit_done - wait until request is committed
450  * @phys_enc:	Pointer to physical encoder
451  */
452 static int dpu_encoder_phys_wb_wait_for_commit_done(
453 		struct dpu_encoder_phys *phys_enc)
454 {
455 	unsigned long ret;
456 	struct dpu_encoder_wait_info wait_info;
457 	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
458 
459 	wait_info.wq = &phys_enc->pending_kickoff_wq;
460 	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
461 	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;
462 
463 	ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_WB_DONE,
464 			dpu_encoder_phys_wb_done_irq, &wait_info);
465 	if (ret == -ETIMEDOUT)
466 		_dpu_encoder_phys_wb_handle_wbdone_timeout(phys_enc);
467 	else if (!ret)
468 		wb_enc->wb_done_timeout_cnt = 0;
469 
470 	return ret;
471 }
472 
473 /**
474  * dpu_encoder_phys_wb_prepare_for_kickoff - pre-kickoff processing
475  * @phys_enc:	Pointer to physical encoder
476  * Returns:	Zero on success
477  */
478 static void dpu_encoder_phys_wb_prepare_for_kickoff(
479 		struct dpu_encoder_phys *phys_enc)
480 {
481 	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
482 	struct drm_connector *drm_conn;
483 	struct drm_connector_state *state;
484 
485 	DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);
486 
487 	if (!wb_enc->wb_conn || !wb_enc->wb_job) {
488 		DPU_ERROR("invalid wb_conn or wb_job\n");
489 		return;
490 	}
491 
492 	drm_conn = &wb_enc->wb_conn->base;
493 	state = drm_conn->state;
494 
495 	if (wb_enc->wb_conn && wb_enc->wb_job)
496 		drm_writeback_queue_job(wb_enc->wb_conn, state);
497 
498 	dpu_encoder_phys_wb_setup(phys_enc);
499 
500 	_dpu_encoder_phys_wb_update_flush(phys_enc);
501 }
502 
503 /**
504  * dpu_encoder_phys_wb_needs_single_flush - trigger flush processing
505  * @phys_enc:	Pointer to physical encoder
506  */
507 static bool dpu_encoder_phys_wb_needs_single_flush(struct dpu_encoder_phys *phys_enc)
508 {
509 	DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);
510 	return false;
511 }
512 
513 /**
514  * dpu_encoder_phys_wb_handle_post_kickoff - post-kickoff processing
515  * @phys_enc:	Pointer to physical encoder
516  */
517 static void dpu_encoder_phys_wb_handle_post_kickoff(
518 		struct dpu_encoder_phys *phys_enc)
519 {
520 	DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);
521 
522 }
523 
524 /**
525  * dpu_encoder_phys_wb_enable - enable writeback encoder
526  * @phys_enc:	Pointer to physical encoder
527  */
528 static void dpu_encoder_phys_wb_enable(struct dpu_encoder_phys *phys_enc)
529 {
530 	DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);
531 	phys_enc->enable_state = DPU_ENC_ENABLED;
532 }
533 /**
534  * dpu_encoder_phys_wb_disable - disable writeback encoder
535  * @phys_enc:	Pointer to physical encoder
536  */
537 static void dpu_encoder_phys_wb_disable(struct dpu_encoder_phys *phys_enc)
538 {
539 	struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
540 	struct dpu_hw_ctl *hw_ctl = phys_enc->hw_ctl;
541 
542 	DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);
543 
544 	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
545 		DPU_ERROR("encoder is already disabled\n");
546 		return;
547 	}
548 
549 	/* reset h/w before final flush */
550 	if (phys_enc->hw_ctl->ops.clear_pending_flush)
551 		phys_enc->hw_ctl->ops.clear_pending_flush(phys_enc->hw_ctl);
552 
553 	/*
554 	 * New CTL reset sequence from 5.0 MDP onwards.
555 	 * If has_3d_merge_reset is not set, legacy reset
556 	 * sequence is executed.
557 	 *
558 	 * Legacy reset sequence has not been implemented yet.
559 	 * Any target earlier than SM8150 will need it and when
560 	 * WB support is added to those targets will need to add
561 	 * the legacy teardown sequence as well.
562 	 */
563 	if (hw_ctl->caps->features & BIT(DPU_CTL_ACTIVE_CFG))
564 		dpu_encoder_helper_phys_cleanup(phys_enc);
565 
566 	phys_enc->enable_state = DPU_ENC_DISABLED;
567 }
568 
569 /**
570  * dpu_encoder_phys_wb_destroy - destroy writeback encoder
571  * @phys_enc:	Pointer to physical encoder
572  */
573 static void dpu_encoder_phys_wb_destroy(struct dpu_encoder_phys *phys_enc)
574 {
575 	if (!phys_enc)
576 		return;
577 
578 	DPU_DEBUG("[wb:%d]\n", phys_enc->wb_idx - WB_0);
579 
580 	kfree(phys_enc);
581 }
582 
/*
 * dpu_encoder_phys_wb_prepare_wb_job - prepare a queued writeback job
 * @phys_enc:	Pointer to physical encoder
 * @job:	writeback job whose framebuffer will be written
 *
 * Pins the job framebuffer, resolves its DPU format and plane layout
 * into wb_enc->wb_cfg, and records the job/connector for completion
 * signalling. Errors are logged and leave wb_cfg partially filled;
 * invalid formats should already have been rejected in atomic_check.
 */
static void dpu_encoder_phys_wb_prepare_wb_job(struct dpu_encoder_phys *phys_enc,
		struct drm_writeback_job *job)
{
	const struct msm_format *format;
	struct msm_gem_address_space *aspace;
	struct dpu_hw_wb_cfg *wb_cfg;
	int ret;
	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);

	/* nothing to prepare for an empty job */
	if (!job->fb)
		return;

	wb_enc->wb_job = job;
	wb_enc->wb_conn = job->connector;
	aspace = phys_enc->dpu_kms->base.aspace;

	wb_cfg = &wb_enc->wb_cfg;

	memset(wb_cfg, 0, sizeof(struct dpu_hw_wb_cfg));

	/* pin the fb into the display address space (no MDP5 iommu quirk) */
	ret = msm_framebuffer_prepare(job->fb, aspace, false);
	if (ret) {
		DPU_ERROR("prep fb failed, %d\n", ret);
		return;
	}

	format = msm_framebuffer_format(job->fb);

	wb_cfg->dest.format = dpu_get_dpu_format_ext(
			format->pixel_format, job->fb->modifier);
	if (!wb_cfg->dest.format) {
		/* this error should be detected during atomic_check */
		DPU_ERROR("failed to get format %x\n", format->pixel_format);
		return;
	}

	/* fill plane addresses/pitches from the fb layout */
	ret = dpu_format_populate_layout(aspace, job->fb, &wb_cfg->dest);
	if (ret) {
		DPU_DEBUG("failed to populate layout %d\n", ret);
		return;
	}

	wb_cfg->dest.width = job->fb->width;
	wb_cfg->dest.height = job->fb->height;
	wb_cfg->dest.num_planes = wb_cfg->dest.format->num_planes;

	/* hw expects Cr before Cb for planar formats with Cb first */
	if ((wb_cfg->dest.format->fetch_planes == DPU_PLANE_PLANAR) &&
			(wb_cfg->dest.format->element[0] == C1_B_Cb))
		swap(wb_cfg->dest.plane_addr[1], wb_cfg->dest.plane_addr[2]);

	DPU_DEBUG("[fb_offset:%8.8x,%8.8x,%8.8x,%8.8x]\n",
			wb_cfg->dest.plane_addr[0], wb_cfg->dest.plane_addr[1],
			wb_cfg->dest.plane_addr[2], wb_cfg->dest.plane_addr[3]);

	DPU_DEBUG("[fb_stride:%8.8x,%8.8x,%8.8x,%8.8x]\n",
			wb_cfg->dest.plane_pitch[0], wb_cfg->dest.plane_pitch[1],
			wb_cfg->dest.plane_pitch[2], wb_cfg->dest.plane_pitch[3]);
}
641 
642 static void dpu_encoder_phys_wb_cleanup_wb_job(struct dpu_encoder_phys *phys_enc,
643 		struct drm_writeback_job *job)
644 {
645 	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
646 	struct msm_gem_address_space *aspace;
647 
648 	if (!job->fb)
649 		return;
650 
651 	aspace = phys_enc->dpu_kms->base.aspace;
652 
653 	msm_framebuffer_cleanup(job->fb, aspace, false);
654 	wb_enc->wb_job = NULL;
655 	wb_enc->wb_conn = NULL;
656 }
657 
658 static bool dpu_encoder_phys_wb_is_valid_for_commit(struct dpu_encoder_phys *phys_enc)
659 {
660 	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
661 
662 	if (wb_enc->wb_job)
663 		return true;
664 	else
665 		return false;
666 }
667 
668 /**
669  * dpu_encoder_phys_wb_init_ops - initialize writeback operations
670  * @ops:	Pointer to encoder operation table
671  */
672 static void dpu_encoder_phys_wb_init_ops(struct dpu_encoder_phys_ops *ops)
673 {
674 	ops->is_master = dpu_encoder_phys_wb_is_master;
675 	ops->atomic_mode_set = dpu_encoder_phys_wb_atomic_mode_set;
676 	ops->enable = dpu_encoder_phys_wb_enable;
677 	ops->disable = dpu_encoder_phys_wb_disable;
678 	ops->destroy = dpu_encoder_phys_wb_destroy;
679 	ops->atomic_check = dpu_encoder_phys_wb_atomic_check;
680 	ops->wait_for_commit_done = dpu_encoder_phys_wb_wait_for_commit_done;
681 	ops->prepare_for_kickoff = dpu_encoder_phys_wb_prepare_for_kickoff;
682 	ops->handle_post_kickoff = dpu_encoder_phys_wb_handle_post_kickoff;
683 	ops->needs_single_flush = dpu_encoder_phys_wb_needs_single_flush;
684 	ops->trigger_start = dpu_encoder_helper_trigger_start;
685 	ops->prepare_wb_job = dpu_encoder_phys_wb_prepare_wb_job;
686 	ops->cleanup_wb_job = dpu_encoder_phys_wb_cleanup_wb_job;
687 	ops->irq_control = dpu_encoder_phys_wb_irq_ctrl;
688 	ops->is_valid_for_commit = dpu_encoder_phys_wb_is_valid_for_commit;
689 
690 }
691 
692 /**
693  * dpu_encoder_phys_wb_init - initialize writeback encoder
694  * @p:	Pointer to init info structure with initialization params
695  */
696 struct dpu_encoder_phys *dpu_encoder_phys_wb_init(
697 		struct dpu_enc_phys_init_params *p)
698 {
699 	struct dpu_encoder_phys *phys_enc = NULL;
700 	struct dpu_encoder_phys_wb *wb_enc = NULL;
701 	int ret = 0;
702 	int i;
703 
704 	DPU_DEBUG("\n");
705 
706 	if (!p || !p->parent) {
707 		DPU_ERROR("invalid params\n");
708 		ret = -EINVAL;
709 		goto fail_alloc;
710 	}
711 
712 	wb_enc = kzalloc(sizeof(*wb_enc), GFP_KERNEL);
713 	if (!wb_enc) {
714 		DPU_ERROR("failed to allocate wb phys_enc enc\n");
715 		ret = -ENOMEM;
716 		goto fail_alloc;
717 	}
718 
719 	phys_enc = &wb_enc->base;
720 	phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
721 	phys_enc->wb_idx = p->wb_idx;
722 
723 	dpu_encoder_phys_wb_init_ops(&phys_enc->ops);
724 	phys_enc->parent = p->parent;
725 	phys_enc->parent_ops = p->parent_ops;
726 	phys_enc->dpu_kms = p->dpu_kms;
727 	phys_enc->split_role = p->split_role;
728 	phys_enc->intf_mode = INTF_MODE_WB_LINE;
729 	phys_enc->wb_idx = p->wb_idx;
730 	phys_enc->enc_spinlock = p->enc_spinlock;
731 
732 	atomic_set(&wb_enc->wbirq_refcount, 0);
733 
734 	for (i = 0; i < ARRAY_SIZE(phys_enc->irq); i++)
735 		phys_enc->irq[i] = -EINVAL;
736 
737 	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
738 	atomic_set(&phys_enc->vblank_refcount, 0);
739 	wb_enc->wb_done_timeout_cnt = 0;
740 
741 	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
742 	phys_enc->enable_state = DPU_ENC_DISABLED;
743 
744 	DPU_DEBUG("Created dpu_encoder_phys for wb %d\n",
745 			phys_enc->wb_idx);
746 
747 	return phys_enc;
748 
749 fail_alloc:
750 	return ERR_PTR(ret);
751 }
752