1 /*
2  * Copyright © 2018 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  *
23  * Authors:
24  *   Madhav Chauhan <madhav.chauhan@intel.com>
25  *   Jani Nikula <jani.nikula@intel.com>
26  */
27 
28 #include <drm/display/drm_dsc_helper.h>
29 #include <drm/drm_atomic_helper.h>
30 #include <drm/drm_mipi_dsi.h>
31 
32 #include "i915_reg.h"
33 #include "icl_dsi.h"
34 #include "icl_dsi_regs.h"
35 #include "intel_atomic.h"
36 #include "intel_backlight.h"
37 #include "intel_backlight_regs.h"
38 #include "intel_combo_phy.h"
39 #include "intel_combo_phy_regs.h"
40 #include "intel_connector.h"
41 #include "intel_crtc.h"
42 #include "intel_ddi.h"
43 #include "intel_de.h"
44 #include "intel_dsi.h"
45 #include "intel_dsi_vbt.h"
46 #include "intel_panel.h"
47 #include "intel_vdsc.h"
48 #include "skl_scaler.h"
49 #include "skl_universal_plane.h"
50 
51 static int header_credits_available(struct drm_i915_private *dev_priv,
52 				    enum transcoder dsi_trans)
53 {
54 	return (intel_de_read(dev_priv, DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK)
55 		>> FREE_HEADER_CREDIT_SHIFT;
56 }
57 
58 static int payload_credits_available(struct drm_i915_private *dev_priv,
59 				     enum transcoder dsi_trans)
60 {
61 	return (intel_de_read(dev_priv, DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK)
62 		>> FREE_PLOAD_CREDIT_SHIFT;
63 }
64 
65 static bool wait_for_header_credits(struct drm_i915_private *dev_priv,
66 				    enum transcoder dsi_trans, int hdr_credit)
67 {
68 	if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
69 			hdr_credit, 100)) {
70 		drm_err(&dev_priv->drm, "DSI header credits not released\n");
71 		return false;
72 	}
73 
74 	return true;
75 }
76 
77 static bool wait_for_payload_credits(struct drm_i915_private *dev_priv,
78 				     enum transcoder dsi_trans, int payld_credit)
79 {
80 	if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
81 			payld_credit, 100)) {
82 		drm_err(&dev_priv->drm, "DSI payload credits not released\n");
83 		return false;
84 	}
85 
86 	return true;
87 }
88 
89 static enum transcoder dsi_port_to_transcoder(enum port port)
90 {
91 	if (port == PORT_A)
92 		return TRANSCODER_DSI_0;
93 	else
94 		return TRANSCODER_DSI_1;
95 }
96 
/*
 * Make sure all previously queued DSI commands have actually been dispatched
 * to the panel: wait for all credits to be returned, flush with a DCS NOP on
 * each port, then wait again for the header credits and for the LP TX
 * in-progress bit to clear. Errors are logged but not propagated.
 */
static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	int ret;

	/* wait for header/payload credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans, MAX_HEADER_CREDIT);
		wait_for_payload_credits(dev_priv, dsi_trans, MAX_PLOAD_CREDIT);
	}

	/* send nop DCS command, in LP mode on virtual channel 0 */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;
		dsi->mode_flags |= MIPI_DSI_MODE_LPM;
		dsi->channel = 0;
		ret = mipi_dsi_dcs_nop(dsi);
		if (ret < 0)
			drm_err(&dev_priv->drm,
				"error sending DCS NOP command\n");
	}

	/* wait for header credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans, MAX_HEADER_CREDIT);
	}

	/* wait for LP TX in progress bit to be cleared */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us(!(intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans)) &
				  LPTX_IN_PROGRESS), 20))
			drm_err(&dev_priv->drm, "LPTX bit not cleared\n");
	}
}
138 
/*
 * Write a MIPI DSI packet payload into the transcoder's payload FIFO,
 * 32 bits at a time, waiting for a payload credit before each write.
 *
 * Returns 0 on success, -EINVAL if the payload exceeds the FIFO size,
 * or -EBUSY if a payload credit is not released in time.
 */
static int dsi_send_pkt_payld(struct intel_dsi_host *host,
			      const struct mipi_dsi_packet *packet)
{
	struct intel_dsi *intel_dsi = host->intel_dsi;
	struct drm_i915_private *i915 = to_i915(intel_dsi->base.base.dev);
	enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
	const u8 *data = packet->payload;
	u32 len = packet->payload_length;
	int i, j;

	/* payload queue can accept *256 bytes*, check limit */
	if (len > MAX_PLOAD_CREDIT * 4) {
		drm_err(&i915->drm, "payload size exceeds max queue limit\n");
		return -EINVAL;
	}

	for (i = 0; i < len; i += 4) {
		u32 tmp = 0;

		/* one credit per 32-bit FIFO entry */
		if (!wait_for_payload_credits(i915, dsi_trans, 1))
			return -EBUSY;

		/*
		 * Pack up to 4 payload bytes little-endian into one dword;
		 * a short tail leaves the upper bytes zero.
		 */
		for (j = 0; j < min_t(u32, len - i, 4); j++)
			tmp |= *data++ << 8 * j;

		intel_de_write(i915, DSI_CMD_TXPYLD(dsi_trans), tmp);
	}

	return 0;
}
169 
/*
 * Program the DSI command header register from a mipi_dsi_packet and kick
 * off transmission. @enable_lpdt selects low-power (LP) data transfer mode.
 *
 * The MIPI packet header bytes are split into the register's virtual
 * channel, data type and word count fields.
 *
 * Returns 0 on success, -EBUSY if no header credit becomes available.
 */
static int dsi_send_pkt_hdr(struct intel_dsi_host *host,
			    const struct mipi_dsi_packet *packet,
			    bool enable_lpdt)
{
	struct intel_dsi *intel_dsi = host->intel_dsi;
	struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
	enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
	u32 tmp;

	if (!wait_for_header_credits(dev_priv, dsi_trans, 1))
		return -EBUSY;

	tmp = intel_de_read(dev_priv, DSI_CMD_TXHDR(dsi_trans));

	/* tell the hardware whether a payload follows this header */
	if (packet->payload)
		tmp |= PAYLOAD_PRESENT;
	else
		tmp &= ~PAYLOAD_PRESENT;

	/* send immediately, do not fence the command to vblank */
	tmp &= ~VBLANK_FENCE;

	if (enable_lpdt)
		tmp |= LP_DATA_TRANSFER;
	else
		tmp &= ~LP_DATA_TRANSFER;

	/* header byte 0: VC + data type; bytes 1-2: word count (low/high) */
	tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK);
	tmp |= ((packet->header[0] & VC_MASK) << VC_SHIFT);
	tmp |= ((packet->header[0] & DT_MASK) << DT_SHIFT);
	tmp |= (packet->header[1] << PARAM_WC_LOWER_SHIFT);
	tmp |= (packet->header[2] << PARAM_WC_UPPER_SHIFT);
	intel_de_write(dev_priv, DSI_CMD_TXHDR(dsi_trans), tmp);

	return 0;
}
205 
/*
 * Request a command-mode frame update on the DSI port selected by the
 * crtc state's TE (tearing effect) mode flags. No-op if neither TE flag
 * is set.
 */
void icl_dsi_frame_update(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 tmp, mode_flags;
	enum port port;

	mode_flags = crtc_state->mode_flags;

	/*
	 * case 1 also covers dual link
	 * In case of dual link, frame update should be set on
	 * DSI_0
	 */
	if (mode_flags & I915_MODE_FLAG_DSI_USE_TE0)
		port = PORT_A;
	else if (mode_flags & I915_MODE_FLAG_DSI_USE_TE1)
		port = PORT_B;
	else
		return;

	/* set the frame update request bit; presumably self-clearing — HW managed */
	tmp = intel_de_read(dev_priv, DSI_CMD_FRMCTL(port));
	tmp |= DSI_FRAME_UPDATE_REQUEST;
	intel_de_write(dev_priv, DSI_CMD_FRMCTL(port), tmp);
}
231 
/*
 * Program fixed voltage swing and de-emphasis values into the combo PHY
 * TX registers for every DSI PHY, per the Bspec DDI buffer programming
 * table. Each value is written to both the grouped lane registers (or
 * per-lane where the spec forbids group writes) and the AUX lane.
 */
static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum phy phy;
	u32 tmp;
	int lane;

	for_each_dsi_phy(phy, intel_dsi->phys) {
		/*
		 * Program voltage swing and pre-emphasis level values as per
		 * table in BSPEC under DDI buffer programing
		 */

		/* TX_DW5: scaling mode, termination select, disable taps 2/3 */
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);

		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);

		/* TX_DW2: swing select and RCOMP scalar */
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN(0, phy));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), tmp);

		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_AUX(phy));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		intel_de_write(dev_priv, ICL_PORT_TX_DW2_AUX(phy), tmp);

		/* TX_DW4: cursor coefficients, post-cursor values zeroed */
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW4_AUX(phy));
		tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		tmp |= POST_CURSOR_1(0x0);
		tmp |= POST_CURSOR_2(0x0);
		tmp |= CURSOR_COEFF(0x3f);
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_AUX(phy), tmp);

		for (lane = 0; lane <= 3; lane++) {
			/* Bspec: must not use GRP register for write */
			tmp = intel_de_read(dev_priv,
					    ICL_PORT_TX_DW4_LN(lane, phy));
			tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
				 CURSOR_COEFF_MASK);
			tmp |= POST_CURSOR_1(0x0);
			tmp |= POST_CURSOR_2(0x0);
			tmp |= CURSOR_COEFF(0x3f);
			intel_de_write(dev_priv,
				       ICL_PORT_TX_DW4_LN(lane, phy), tmp);
		}
	}
}
297 
/*
 * Enable the DSS stream splitter for dual-link DSI and configure either
 * front/back mode (left/right buffer depths derived from hactive and
 * pixel overlap) or interleave mode. On display version 12+ the per-pipe
 * DSS registers are used instead of the global ones.
 */
static void configure_dual_link_mode(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	i915_reg_t dss_ctl1_reg, dss_ctl2_reg;
	u32 dss_ctl1;

	/* FIXME: Move all DSS handling to intel_vdsc.c */
	if (DISPLAY_VER(dev_priv) >= 12) {
		struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

		dss_ctl1_reg = ICL_PIPE_DSS_CTL1(crtc->pipe);
		dss_ctl2_reg = ICL_PIPE_DSS_CTL2(crtc->pipe);
	} else {
		dss_ctl1_reg = DSS_CTL1;
		dss_ctl2_reg = DSS_CTL2;
	}

	dss_ctl1 = intel_de_read(dev_priv, dss_ctl1_reg);
	dss_ctl1 |= SPLITTER_ENABLE;
	dss_ctl1 &= ~OVERLAP_PIXELS_MASK;
	dss_ctl1 |= OVERLAP_PIXELS(intel_dsi->pixel_overlap);

	if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) {
		const struct drm_display_mode *adjusted_mode =
					&pipe_config->hw.adjusted_mode;
		u32 dss_ctl2;
		u16 hactive = adjusted_mode->crtc_hdisplay;
		u16 dl_buffer_depth;

		dss_ctl1 &= ~DUAL_LINK_MODE_INTERLEAVE;
		/* each link carries half the line plus the overlap pixels */
		dl_buffer_depth = hactive / 2 + intel_dsi->pixel_overlap;

		if (dl_buffer_depth > MAX_DL_BUFFER_TARGET_DEPTH)
			drm_err(&dev_priv->drm,
				"DL buffer depth exceed max value\n");

		dss_ctl1 &= ~LEFT_DL_BUF_TARGET_DEPTH_MASK;
		dss_ctl1 |= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
		dss_ctl2 = intel_de_read(dev_priv, dss_ctl2_reg);
		dss_ctl2 &= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK;
		dss_ctl2 |= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
		intel_de_write(dev_priv, dss_ctl2_reg, dss_ctl2);
	} else {
		/* Interleave */
		dss_ctl1 |= DUAL_LINK_MODE_INTERLEAVE;
	}

	intel_de_write(dev_priv, dss_ctl1_reg, dss_ctl1);
}
349 
350 /* aka DSI 8X clock */
351 static int afe_clk(struct intel_encoder *encoder,
352 		   const struct intel_crtc_state *crtc_state)
353 {
354 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
355 	int bpp;
356 
357 	if (crtc_state->dsc.compression_enable)
358 		bpp = crtc_state->dsc.compressed_bpp;
359 	else
360 		bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
361 
362 	return DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp, intel_dsi->lane_count);
363 }
364 
/*
 * Program the escape clock dividers so the escape clock stays at or below
 * DSI_MAX_ESC_CLK. ADL-S/P additionally derive a word-clock based divider
 * (rounded up to an odd count, minimum 3) and program a PHY-side divider
 * in the MIPIO registers.
 */
static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder,
					  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	int afe_clk_khz;
	int theo_word_clk, act_word_clk;
	u32 esc_clk_div_m, esc_clk_div_m_phy;

	afe_clk_khz = afe_clk(encoder, crtc_state);

	if (IS_ALDERLAKE_S(dev_priv) || IS_ALDERLAKE_P(dev_priv)) {
		/* round the theoretical word clock up to the next odd value, min 3 */
		theo_word_clk = DIV_ROUND_UP(afe_clk_khz, 8 * DSI_MAX_ESC_CLK);
		act_word_clk = max(3, theo_word_clk + (theo_word_clk + 1) % 2);
		esc_clk_div_m = act_word_clk * 8;
		esc_clk_div_m_phy = (act_word_clk - 1) / 2;
	} else {
		esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);
	}

	/* display-side escape clock divider, posted to ensure the write lands */
	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, ICL_DSI_ESC_CLK_DIV(port),
			       esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		intel_de_posting_read(dev_priv, ICL_DSI_ESC_CLK_DIV(port));
	}

	/* DPHY-side escape clock divider */
	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, ICL_DPHY_ESC_CLK_DIV(port),
			       esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		intel_de_posting_read(dev_priv, ICL_DPHY_ESC_CLK_DIV(port));
	}

	/* ADL-only PHY divider; esc_clk_div_m_phy is set on this path above */
	if (IS_ALDERLAKE_S(dev_priv) || IS_ALDERLAKE_P(dev_priv)) {
		for_each_dsi_port(port, intel_dsi->ports) {
			intel_de_write(dev_priv, ADL_MIPIO_DW(port, 8),
				       esc_clk_div_m_phy & TX_ESC_CLK_DIV_PHY);
			intel_de_posting_read(dev_priv, ADL_MIPIO_DW(port, 8));
		}
	}
}
406 
/*
 * Grab the DDI IO power domain for each DSI port and stash the wakeref.
 * Warns if a wakeref is already held for a port (would leak the old one).
 */
static void get_dsi_io_power_domains(struct drm_i915_private *dev_priv,
				     struct intel_dsi *intel_dsi)
{
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		drm_WARN_ON(&dev_priv->drm, intel_dsi->io_wakeref[port]);
		intel_dsi->io_wakeref[port] =
			intel_display_power_get(dev_priv,
						port == PORT_A ?
						POWER_DOMAIN_PORT_DDI_IO_A :
						POWER_DOMAIN_PORT_DDI_IO_B);
	}
}
421 
/*
 * Put each DSI port's combo PHY into DSI mode, then acquire the IO power
 * domains for those ports.
 */
static void gen11_dsi_enable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, ICL_DSI_IO_MODECTL(port));
		tmp |= COMBO_PHY_MODE_DSI;
		intel_de_write(dev_priv, ICL_DSI_IO_MODECTL(port), tmp);
	}

	get_dsi_io_power_domains(dev_priv, intel_dsi);
}
437 
/* Power up the configured number of combo PHY lanes on every DSI PHY. */
static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum phy phy;

	for_each_dsi_phy(phy, intel_dsi->phys)
		intel_combo_phy_power_up_lanes(dev_priv, phy, true,
					       intel_dsi->lane_count, false);
}
448 
/*
 * Bspec lane PHY configuration sequence, step 4b: program loadgen select
 * (set on all TX lanes except lane 2, cleared on AUX) and the FRC latency
 * optimization, plus the PCS_DW1 latency optimization on EHL/JSL and
 * display version 12+.
 */
static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum phy phy;
	u32 tmp;
	int lane;

	/* Step 4b(i) set loadgen select for transmit and aux lanes */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW4_AUX(phy));
		tmp &= ~LOADGEN_SELECT;
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_AUX(phy), tmp);
		for (lane = 0; lane <= 3; lane++) {
			tmp = intel_de_read(dev_priv,
					    ICL_PORT_TX_DW4_LN(lane, phy));
			tmp &= ~LOADGEN_SELECT;
			/* loadgen is enabled on all TX lanes except lane 2 */
			if (lane != 2)
				tmp |= LOADGEN_SELECT;
			intel_de_write(dev_priv,
				       ICL_PORT_TX_DW4_LN(lane, phy), tmp);
		}
	}

	/* Step 4b(ii) set latency optimization for transmit and aux lanes */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_AUX(phy));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		intel_de_write(dev_priv, ICL_PORT_TX_DW2_AUX(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN(0, phy));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), tmp);

		/* For EHL, TGL, set latency optimization for PCS_DW1 lanes */
		if (IS_JSL_EHL(dev_priv) || (DISPLAY_VER(dev_priv) >= 12)) {
			tmp = intel_de_read(dev_priv,
					    ICL_PORT_PCS_DW1_AUX(phy));
			tmp &= ~LATENCY_OPTIM_MASK;
			tmp |= LATENCY_OPTIM_VAL(0);
			intel_de_write(dev_priv, ICL_PORT_PCS_DW1_AUX(phy),
				       tmp);

			tmp = intel_de_read(dev_priv,
					    ICL_PORT_PCS_DW1_LN(0, phy));
			tmp &= ~LATENCY_OPTIM_MASK;
			tmp |= LATENCY_OPTIM_VAL(0x1);
			intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy),
				       tmp);
		}
	}

}
503 
/*
 * Voltage swing programming sequence: disable the common keeper, set the
 * SUS clock config, then update swing/de-emphasis with training disabled
 * and re-enable training to latch the new values. The disable/program/
 * re-enable bracketing order is required for the values to take effect.
 */
static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum phy phy;

	/* clear common keeper enable bit */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN(0, phy));
		tmp &= ~COMMON_KEEPER_EN;
		intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_AUX(phy));
		tmp &= ~COMMON_KEEPER_EN;
		intel_de_write(dev_priv, ICL_PORT_PCS_DW1_AUX(phy), tmp);
	}

	/*
	 * Set SUS Clock Config bitfield to 11b
	 * Note: loadgen select program is done
	 * as part of lane phy sequence configuration
	 */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy));
		tmp |= SUS_CLOCK_CONFIG;
		intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), tmp);
	}

	/* Clear training enable to change swing values */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy));
		tmp &= ~TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
		tmp &= ~TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);
	}

	/* Program swing and de-emphasis */
	dsi_program_swing_and_deemphasis(encoder);

	/* Set training enable to trigger update */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN(0, phy));
		tmp |= TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), tmp);
		tmp = intel_de_read(dev_priv, ICL_PORT_TX_DW5_AUX(phy));
		tmp |= TX_TRAINING_EN;
		intel_de_write(dev_priv, ICL_PORT_TX_DW5_AUX(phy), tmp);
	}
}
555 
/*
 * Enable the DDI buffer on each DSI port and wait (up to 500 us) for it
 * to leave the idle state. Timeout is only logged, not fatal.
 */
static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
		tmp |= DDI_BUF_CTL_ENABLE;
		intel_de_write(dev_priv, DDI_BUF_CTL(port), tmp);

		if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
				  DDI_BUF_IS_IDLE),
				  500))
			drm_err(&dev_priv->drm, "DDI port:%c buffer idle\n",
				port_name(port));
	}
}
575 
/*
 * Program DPHY timing parameters for all DSI ports: T-INIT master count,
 * clock and data lane timings (each mirrored into the display-core shadow
 * register), the ICL-only TA_SURE override for link rates <= 800 MHz, and
 * the JSL/EHL AFE-over-PPI strap workaround.
 */
static void
gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum port port;
	enum phy phy;

	/* Program T-INIT master registers */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = intel_de_read(dev_priv, ICL_DSI_T_INIT_MASTER(port));
		tmp &= ~DSI_T_INIT_MASTER_MASK;
		tmp |= intel_dsi->init_count;
		intel_de_write(dev_priv, ICL_DSI_T_INIT_MASTER(port), tmp);
	}

	/* Program DPHY clock lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, DPHY_CLK_TIMING_PARAM(port),
			       intel_dsi->dphy_reg);

		/* shadow register inside display core */
		intel_de_write(dev_priv, DSI_CLK_TIMING_PARAM(port),
			       intel_dsi->dphy_reg);
	}

	/* Program DPHY data lanes timings */
	for_each_dsi_port(port, intel_dsi->ports) {
		intel_de_write(dev_priv, DPHY_DATA_TIMING_PARAM(port),
			       intel_dsi->dphy_data_lane_reg);

		/* shadow register inside display core */
		intel_de_write(dev_priv, DSI_DATA_TIMING_PARAM(port),
			       intel_dsi->dphy_data_lane_reg);
	}

	/*
	 * If DSI link operating at or below an 800 MHz,
	 * TA_SURE should be override and programmed to
	 * a value '0' inside TA_PARAM_REGISTERS otherwise
	 * leave all fields at HW default values.
	 */
	if (DISPLAY_VER(dev_priv) == 11) {
		if (afe_clk(encoder, crtc_state) <= 800000) {
			for_each_dsi_port(port, intel_dsi->ports) {
				tmp = intel_de_read(dev_priv,
						    DPHY_TA_TIMING_PARAM(port));
				tmp &= ~TA_SURE_MASK;
				tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
				intel_de_write(dev_priv,
					       DPHY_TA_TIMING_PARAM(port),
					       tmp);

				/* shadow register inside display core */
				tmp = intel_de_read(dev_priv,
						    DSI_TA_TIMING_PARAM(port));
				tmp &= ~TA_SURE_MASK;
				tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
				intel_de_write(dev_priv,
					       DSI_TA_TIMING_PARAM(port), tmp);
			}
		}
	}

	/* JSL/EHL: route AFE over the PPI strap */
	if (IS_JSL_EHL(dev_priv)) {
		for_each_dsi_phy(phy, intel_dsi->phys) {
			tmp = intel_de_read(dev_priv, ICL_DPHY_CHKN(phy));
			tmp |= ICL_DPHY_CHKN_AFE_OVER_PPI_STRAP;
			intel_de_write(dev_priv, ICL_DPHY_CHKN(phy), tmp);
		}
	}
}
650 
/*
 * Gate the DDI clocks for all DSI PHYs by setting their CLK_OFF bits in
 * DPCLKA_CFGCR0. The read-modify-write is done under the DPLL lock since
 * the register is shared with other DDIs.
 */
static void gen11_dsi_gate_clocks(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum phy phy;

	mutex_lock(&dev_priv->display.dpll.lock);
	tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_dsi_phy(phy, intel_dsi->phys)
		tmp |= ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);

	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, tmp);
	mutex_unlock(&dev_priv->display.dpll.lock);
}
666 
/*
 * Ungate the DDI clocks for all DSI PHYs by clearing their CLK_OFF bits
 * in DPCLKA_CFGCR0, under the shared DPLL lock.
 */
static void gen11_dsi_ungate_clocks(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	u32 tmp;
	enum phy phy;

	mutex_lock(&dev_priv->display.dpll.lock);
	tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_dsi_phy(phy, intel_dsi->phys)
		tmp &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);

	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, tmp);
	mutex_unlock(&dev_priv->display.dpll.lock);
}
682 
683 static bool gen11_dsi_is_clock_enabled(struct intel_encoder *encoder)
684 {
685 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
686 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
687 	bool clock_enabled = false;
688 	enum phy phy;
689 	u32 tmp;
690 
691 	tmp = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
692 
693 	for_each_dsi_phy(phy, intel_dsi->phys) {
694 		if (!(tmp & ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)))
695 			clock_enabled = true;
696 	}
697 
698 	return clock_enabled;
699 }
700 
/*
 * Map the shared DPLL to the DSI PHYs in DPCLKA_CFGCR0. The clock select
 * is written first, then the clock is ungated in a second write, with a
 * posting read to flush — done under the DPLL lock since the register is
 * shared with other DDIs.
 */
static void gen11_dsi_map_pll(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy;
	u32 val;

	mutex_lock(&dev_priv->display.dpll.lock);

	/* first pass: select the PLL for each PHY */
	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_dsi_phy(phy, intel_dsi->phys) {
		val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
		val |= ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
	}
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	/* second pass: ungate the clocks only after the select is committed */
	for_each_dsi_phy(phy, intel_dsi->phys) {
		val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	}
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0);

	mutex_unlock(&dev_priv->display.dpll.lock);
}
728 
/*
 * Configure the DSI transcoder(s) for the mode being set: EOTP, link
 * calibration, clocking behavior, pixel format, operation mode (video or
 * TE-gated command mode), optional dual-link port sync and stream
 * splitting, data lane width, input pipe selection, and finally enable
 * the DDI function and wait for the link to become ready.
 */
static void
gen11_dsi_configure_transcoder(struct intel_encoder *encoder,
			       const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	enum pipe pipe = crtc->pipe;
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans));

		/* End-of-transmission packet, per panel requirement */
		if (intel_dsi->eotp_pkt)
			tmp &= ~EOTP_DISABLED;
		else
			tmp |= EOTP_DISABLED;

		/* enable link calibration if freq > 1.5Gbps */
		if (afe_clk(encoder, pipe_config) >= 1500 * 1000) {
			tmp &= ~LINK_CALIBRATION_MASK;
			tmp |= CALIBRATION_ENABLED_INITIAL_ONLY;
		}

		/* configure continuous clock */
		tmp &= ~CONTINUOUS_CLK_MASK;
		if (intel_dsi->clock_stop)
			tmp |= CLK_ENTER_LP_AFTER_DATA;
		else
			tmp |= CLK_HS_CONTINUOUS;

		/* configure buffer threshold limit to minimum */
		tmp &= ~PIX_BUF_THRESHOLD_MASK;
		tmp |= PIX_BUF_THRESHOLD_1_4;

		/* set virtual channel to '0' */
		tmp &= ~PIX_VIRT_CHAN_MASK;
		tmp |= PIX_VIRT_CHAN(0);

		/* program BGR transmission */
		if (intel_dsi->bgr_enabled)
			tmp |= BGR_TRANSMISSION;

		/* select pixel format */
		tmp &= ~PIX_FMT_MASK;
		if (pipe_config->dsc.compression_enable) {
			tmp |= PIX_FMT_COMPRESSED;
		} else {
			switch (intel_dsi->pixel_format) {
			default:
				MISSING_CASE(intel_dsi->pixel_format);
				fallthrough;
			case MIPI_DSI_FMT_RGB565:
				tmp |= PIX_FMT_RGB565;
				break;
			case MIPI_DSI_FMT_RGB666_PACKED:
				tmp |= PIX_FMT_RGB666_PACKED;
				break;
			case MIPI_DSI_FMT_RGB666:
				tmp |= PIX_FMT_RGB666_LOOSE;
				break;
			case MIPI_DSI_FMT_RGB888:
				tmp |= PIX_FMT_RGB888;
				break;
			}
		}

		/* blanking packets only exist on display version 12+ */
		if (DISPLAY_VER(dev_priv) >= 12) {
			if (is_vid_mode(intel_dsi))
				tmp |= BLANKING_PACKET_ENABLE;
		}

		/* program DSI operation mode */
		if (is_vid_mode(intel_dsi)) {
			tmp &= ~OP_MODE_MASK;
			switch (intel_dsi->video_mode) {
			default:
				MISSING_CASE(intel_dsi->video_mode);
				fallthrough;
			case NON_BURST_SYNC_EVENTS:
				tmp |= VIDEO_MODE_SYNC_EVENT;
				break;
			case NON_BURST_SYNC_PULSE:
				tmp |= VIDEO_MODE_SYNC_PULSE;
				break;
			}
		} else {
			/*
			 * FIXME: Retrieve this info from VBT.
			 * As per the spec when dsi transcoder is operating
			 * in TE GATE mode, TE comes from GPIO
			 * which is UTIL PIN for DSI 0.
			 * Also this GPIO would not be used for other
			 * purposes is an assumption.
			 */
			tmp &= ~OP_MODE_MASK;
			tmp |= CMD_MODE_TE_GATE;
			tmp |= TE_SOURCE_GPIO;
		}

		intel_de_write(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans), tmp);
	}

	/* enable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = intel_de_read(dev_priv,
					    TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp |= PORT_SYNC_MODE_ENABLE;
			intel_de_write(dev_priv,
				       TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}

		/* configure stream splitting */
		configure_dual_link_mode(encoder, pipe_config);
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* select data lane width */
		tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~DDI_PORT_WIDTH_MASK;
		tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count);

		/* select input pipe */
		tmp &= ~TRANS_DDI_EDP_INPUT_MASK;
		switch (pipe) {
		default:
			MISSING_CASE(pipe);
			fallthrough;
		case PIPE_A:
			tmp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		case PIPE_D:
			tmp |= TRANS_DDI_EDP_INPUT_D_ONOFF;
			break;
		}

		/* enable DDI buffer */
		tmp |= TRANS_DDI_FUNC_ENABLE;
		intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* wait for link ready (up to 2500 us); timeout only logged */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us((intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans)) &
				 LINK_READY), 2500))
			drm_err(&dev_priv->drm, "DSI link not ready\n");
	}
}
891 
892 static void
893 gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder,
894 				 const struct intel_crtc_state *crtc_state)
895 {
896 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
897 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
898 	const struct drm_display_mode *adjusted_mode =
899 		&crtc_state->hw.adjusted_mode;
900 	enum port port;
901 	enum transcoder dsi_trans;
902 	/* horizontal timings */
903 	u16 htotal, hactive, hsync_start, hsync_end, hsync_size;
904 	u16 hback_porch;
905 	/* vertical timings */
906 	u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift;
907 	int mul = 1, div = 1;
908 
909 	/*
910 	 * Adjust horizontal timings (htotal, hsync_start, hsync_end) to account
911 	 * for slower link speed if DSC is enabled.
912 	 *
913 	 * The compression frequency ratio is the ratio between compressed and
914 	 * non-compressed link speeds, and simplifies down to the ratio between
915 	 * compressed and non-compressed bpp.
916 	 */
917 	if (crtc_state->dsc.compression_enable) {
918 		mul = crtc_state->dsc.compressed_bpp;
919 		div = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
920 	}
921 
922 	hactive = adjusted_mode->crtc_hdisplay;
923 
924 	if (is_vid_mode(intel_dsi))
925 		htotal = DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
926 	else
927 		htotal = DIV_ROUND_UP((hactive + 160) * mul, div);
928 
929 	hsync_start = DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
930 	hsync_end = DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
931 	hsync_size  = hsync_end - hsync_start;
932 	hback_porch = (adjusted_mode->crtc_htotal -
933 		       adjusted_mode->crtc_hsync_end);
934 	vactive = adjusted_mode->crtc_vdisplay;
935 
936 	if (is_vid_mode(intel_dsi)) {
937 		vtotal = adjusted_mode->crtc_vtotal;
938 	} else {
939 		int bpp, line_time_us, byte_clk_period_ns;
940 
941 		if (crtc_state->dsc.compression_enable)
942 			bpp = crtc_state->dsc.compressed_bpp;
943 		else
944 			bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
945 
946 		byte_clk_period_ns = 1000000 / afe_clk(encoder, crtc_state);
947 		line_time_us = (htotal * (bpp / 8) * byte_clk_period_ns) / (1000 * intel_dsi->lane_count);
948 		vtotal = vactive + DIV_ROUND_UP(400, line_time_us);
949 	}
950 	vsync_start = adjusted_mode->crtc_vsync_start;
951 	vsync_end = adjusted_mode->crtc_vsync_end;
952 	vsync_shift = hsync_start - htotal / 2;
953 
954 	if (intel_dsi->dual_link) {
955 		hactive /= 2;
956 		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
957 			hactive += intel_dsi->pixel_overlap;
958 		htotal /= 2;
959 	}
960 
961 	/* minimum hactive as per bspec: 256 pixels */
962 	if (adjusted_mode->crtc_hdisplay < 256)
963 		drm_err(&dev_priv->drm, "hactive is less then 256 pixels\n");
964 
965 	/* if RGB666 format, then hactive must be multiple of 4 pixels */
966 	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0)
967 		drm_err(&dev_priv->drm,
968 			"hactive pixels are not multiple of 4\n");
969 
970 	/* program TRANS_HTOTAL register */
971 	for_each_dsi_port(port, intel_dsi->ports) {
972 		dsi_trans = dsi_port_to_transcoder(port);
973 		intel_de_write(dev_priv, HTOTAL(dsi_trans),
974 			       (hactive - 1) | ((htotal - 1) << 16));
975 	}
976 
977 	/* TRANS_HSYNC register to be programmed only for video mode */
978 	if (is_vid_mode(intel_dsi)) {
979 		if (intel_dsi->video_mode == NON_BURST_SYNC_PULSE) {
980 			/* BSPEC: hsync size should be atleast 16 pixels */
981 			if (hsync_size < 16)
982 				drm_err(&dev_priv->drm,
983 					"hsync size < 16 pixels\n");
984 		}
985 
986 		if (hback_porch < 16)
987 			drm_err(&dev_priv->drm, "hback porch < 16 pixels\n");
988 
989 		if (intel_dsi->dual_link) {
990 			hsync_start /= 2;
991 			hsync_end /= 2;
992 		}
993 
994 		for_each_dsi_port(port, intel_dsi->ports) {
995 			dsi_trans = dsi_port_to_transcoder(port);
996 			intel_de_write(dev_priv, HSYNC(dsi_trans),
997 				       (hsync_start - 1) | ((hsync_end - 1) << 16));
998 		}
999 	}
1000 
1001 	/* program TRANS_VTOTAL register */
1002 	for_each_dsi_port(port, intel_dsi->ports) {
1003 		dsi_trans = dsi_port_to_transcoder(port);
1004 		/*
1005 		 * FIXME: Programing this by assuming progressive mode, since
1006 		 * non-interlaced info from VBT is not saved inside
1007 		 * struct drm_display_mode.
1008 		 * For interlace mode: program required pixel minus 2
1009 		 */
1010 		intel_de_write(dev_priv, VTOTAL(dsi_trans),
1011 			       (vactive - 1) | ((vtotal - 1) << 16));
1012 	}
1013 
1014 	if (vsync_end < vsync_start || vsync_end > vtotal)
1015 		drm_err(&dev_priv->drm, "Invalid vsync_end value\n");
1016 
1017 	if (vsync_start < vactive)
1018 		drm_err(&dev_priv->drm, "vsync_start less than vactive\n");
1019 
1020 	/* program TRANS_VSYNC register for video mode only */
1021 	if (is_vid_mode(intel_dsi)) {
1022 		for_each_dsi_port(port, intel_dsi->ports) {
1023 			dsi_trans = dsi_port_to_transcoder(port);
1024 			intel_de_write(dev_priv, VSYNC(dsi_trans),
1025 				       (vsync_start - 1) | ((vsync_end - 1) << 16));
1026 		}
1027 	}
1028 
1029 	/*
1030 	 * FIXME: It has to be programmed only for video modes and interlaced
1031 	 * modes. Put the check condition here once interlaced
1032 	 * info available as described above.
1033 	 * program TRANS_VSYNCSHIFT register
1034 	 */
1035 	if (is_vid_mode(intel_dsi)) {
1036 		for_each_dsi_port(port, intel_dsi->ports) {
1037 			dsi_trans = dsi_port_to_transcoder(port);
1038 			intel_de_write(dev_priv, VSYNCSHIFT(dsi_trans),
1039 				       vsync_shift);
1040 		}
1041 	}
1042 
1043 	/* program TRANS_VBLANK register, should be same as vtotal programmed */
1044 	if (DISPLAY_VER(dev_priv) >= 12) {
1045 		for_each_dsi_port(port, intel_dsi->ports) {
1046 			dsi_trans = dsi_port_to_transcoder(port);
1047 			intel_de_write(dev_priv, VBLANK(dsi_trans),
1048 				       (vactive - 1) | ((vtotal - 1) << 16));
1049 		}
1050 	}
1051 }
1052 
1053 static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder)
1054 {
1055 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1056 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1057 	enum port port;
1058 	enum transcoder dsi_trans;
1059 	u32 tmp;
1060 
1061 	for_each_dsi_port(port, intel_dsi->ports) {
1062 		dsi_trans = dsi_port_to_transcoder(port);
1063 		tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
1064 		tmp |= PIPECONF_ENABLE;
1065 		intel_de_write(dev_priv, PIPECONF(dsi_trans), tmp);
1066 
1067 		/* wait for transcoder to be enabled */
1068 		if (intel_de_wait_for_set(dev_priv, PIPECONF(dsi_trans),
1069 					  PIPECONF_STATE_ENABLE, 10))
1070 			drm_err(&dev_priv->drm,
1071 				"DSI transcoder not enabled\n");
1072 	}
1073 }
1074 
1075 static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder,
1076 				     const struct intel_crtc_state *crtc_state)
1077 {
1078 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1079 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1080 	enum port port;
1081 	enum transcoder dsi_trans;
1082 	u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;
1083 
1084 	/*
1085 	 * escape clock count calculation:
1086 	 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
1087 	 * UI (nsec) = (10^6)/Bitrate
1088 	 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
1089 	 * ESCAPE_CLK_COUNT  = TIME_NS/ESC_CLK_NS
1090 	 */
1091 	divisor = intel_dsi_tlpx_ns(intel_dsi) * afe_clk(encoder, crtc_state) * 1000;
1092 	mul = 8 * 1000000;
1093 	hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
1094 				     divisor);
1095 	lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
1096 	ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);
1097 
1098 	for_each_dsi_port(port, intel_dsi->ports) {
1099 		dsi_trans = dsi_port_to_transcoder(port);
1100 
1101 		/* program hst_tx_timeout */
1102 		tmp = intel_de_read(dev_priv, DSI_HSTX_TO(dsi_trans));
1103 		tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
1104 		tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
1105 		intel_de_write(dev_priv, DSI_HSTX_TO(dsi_trans), tmp);
1106 
1107 		/* FIXME: DSI_CALIB_TO */
1108 
1109 		/* program lp_rx_host timeout */
1110 		tmp = intel_de_read(dev_priv, DSI_LPRX_HOST_TO(dsi_trans));
1111 		tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
1112 		tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
1113 		intel_de_write(dev_priv, DSI_LPRX_HOST_TO(dsi_trans), tmp);
1114 
1115 		/* FIXME: DSI_PWAIT_TO */
1116 
1117 		/* program turn around timeout */
1118 		tmp = intel_de_read(dev_priv, DSI_TA_TO(dsi_trans));
1119 		tmp &= ~TA_TIMEOUT_VALUE_MASK;
1120 		tmp |= TA_TIMEOUT_VALUE(ta_timeout);
1121 		intel_de_write(dev_priv, DSI_TA_TO(dsi_trans), tmp);
1122 	}
1123 }
1124 
1125 static void gen11_dsi_config_util_pin(struct intel_encoder *encoder,
1126 				      bool enable)
1127 {
1128 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1129 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1130 	u32 tmp;
1131 
1132 	/*
1133 	 * used as TE i/p for DSI0,
1134 	 * for dual link/DSI1 TE is from slave DSI1
1135 	 * through GPIO.
1136 	 */
1137 	if (is_vid_mode(intel_dsi) || (intel_dsi->ports & BIT(PORT_B)))
1138 		return;
1139 
1140 	tmp = intel_de_read(dev_priv, UTIL_PIN_CTL);
1141 
1142 	if (enable) {
1143 		tmp |= UTIL_PIN_DIRECTION_INPUT;
1144 		tmp |= UTIL_PIN_ENABLE;
1145 	} else {
1146 		tmp &= ~UTIL_PIN_ENABLE;
1147 	}
1148 	intel_de_write(dev_priv, UTIL_PIN_CTL, tmp);
1149 }
1150 
/*
 * Bring up the DSI port and D-PHY following the hardware enable sequence:
 * power up lanes, program the combo PHY, enable the DDI buffer, set D-PHY
 * timings, configure the util pin and protocol timeouts, configure the
 * transcoder, then gate the DDI clocks. The call order is fixed by the
 * hardware sequence (steps 4a-4l) — do not reorder.
 */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	/* step 4a: power up all lanes of the DDI used by DSI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder, crtc_state);

	/* Since transcoder is configured to take events from GPIO */
	gen11_dsi_config_util_pin(encoder, true);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder, crtc_state);

	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
	gen11_dsi_configure_transcoder(encoder, crtc_state);

	/* Step 4l: Gate DDI clocks */
	gen11_dsi_gate_clocks(encoder);
}
1182 
1183 static void gen11_dsi_powerup_panel(struct intel_encoder *encoder)
1184 {
1185 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1186 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1187 	struct mipi_dsi_device *dsi;
1188 	enum port port;
1189 	enum transcoder dsi_trans;
1190 	u32 tmp;
1191 	int ret;
1192 
1193 	/* set maximum return packet size */
1194 	for_each_dsi_port(port, intel_dsi->ports) {
1195 		dsi_trans = dsi_port_to_transcoder(port);
1196 
1197 		/*
1198 		 * FIXME: This uses the number of DW's currently in the payload
1199 		 * receive queue. This is probably not what we want here.
1200 		 */
1201 		tmp = intel_de_read(dev_priv, DSI_CMD_RXCTL(dsi_trans));
1202 		tmp &= NUMBER_RX_PLOAD_DW_MASK;
1203 		/* multiply "Number Rx Payload DW" by 4 to get max value */
1204 		tmp = tmp * 4;
1205 		dsi = intel_dsi->dsi_hosts[port]->device;
1206 		ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp);
1207 		if (ret < 0)
1208 			drm_err(&dev_priv->drm,
1209 				"error setting max return pkt size%d\n", tmp);
1210 	}
1211 
1212 	/* panel power on related mipi dsi vbt sequences */
1213 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON);
1214 	intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay);
1215 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET);
1216 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP);
1217 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON);
1218 
1219 	/* ensure all panel commands dispatched before enabling transcoder */
1220 	wait_for_cmds_dispatched_to_panel(encoder);
1221 }
1222 
/*
 * Pre-PLL-enable hook: powers up the DSI IO and programs the escape clock
 * divider (steps 2 and 3 of the hardware enable sequence).
 */
static void gen11_dsi_pre_pll_enable(struct intel_atomic_state *state,
				     struct intel_encoder *encoder,
				     const struct intel_crtc_state *crtc_state,
				     const struct drm_connector_state *conn_state)
{
	/* step2: enable IO power */
	gen11_dsi_enable_io_power(encoder);

	/* step3: enable DSI PLL */
	gen11_dsi_program_esc_clk_div(encoder, crtc_state);
}
1234 
/*
 * Pre-enable hook: maps the PLL, enables port and D-PHY, powers up the
 * panel, writes the DSC picture parameter set, and programs the transcoder
 * timings. Call order follows the hardware enable sequence.
 */
static void gen11_dsi_pre_enable(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config,
				 const struct drm_connector_state *conn_state)
{
	/* step3b */
	gen11_dsi_map_pll(encoder, pipe_config);

	/* step4: enable DSI port and DPHY */
	gen11_dsi_enable_port_and_phy(encoder, pipe_config);

	/* step5: program and powerup panel */
	gen11_dsi_powerup_panel(encoder);

	intel_dsc_dsi_pps_write(encoder, pipe_config);

	/* step6c: configure transcoder timings */
	gen11_dsi_set_transcoder_timings(encoder, pipe_config);
}
1254 
1255 /*
1256  * Wa_1409054076:icl,jsl,ehl
1257  * When pipe A is disabled and MIPI DSI is enabled on pipe B,
1258  * the AMT KVMR feature will incorrectly see pipe A as enabled.
1259  * Set 0x42080 bit 23=1 before enabling DSI on pipe B and leave
1260  * it set while DSI is enabled on pipe B
1261  */
1262 static void icl_apply_kvmr_pipe_a_wa(struct intel_encoder *encoder,
1263 				     enum pipe pipe, bool enable)
1264 {
1265 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1266 
1267 	if (DISPLAY_VER(dev_priv) == 11 && pipe == PIPE_B)
1268 		intel_de_rmw(dev_priv, CHICKEN_PAR1_1,
1269 			     IGNORE_KVMR_PIPE_A,
1270 			     enable ? IGNORE_KVMR_PIPE_A : 0);
1271 }
1272 
1273 /*
1274  * Wa_16012360555:adl-p
1275  * SW will have to program the "LP to HS Wakeup Guardband"
1276  * to account for the repeaters on the HS Request/Ready
1277  * PPI signaling between the Display engine and the DPHY.
1278  */
1279 static void adlp_set_lp_hs_wakeup_gb(struct intel_encoder *encoder)
1280 {
1281 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1282 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1283 	enum port port;
1284 
1285 	if (DISPLAY_VER(i915) == 13) {
1286 		for_each_dsi_port(port, intel_dsi->ports)
1287 			intel_de_rmw(i915, TGL_DSI_CHKN_REG(port),
1288 				     TGL_DSI_CHKN_LSHS_GB_MASK,
1289 				     TGL_DSI_CHKN_LSHS_GB(4));
1290 	}
1291 }
1292 
/*
 * Enable hook: applies the KVMR pipe A and LP-to-HS guardband workarounds,
 * enables the DSI transcoder, turns on the backlight (both driver and VBT
 * sequence), and enables vblank on the crtc.
 */
static void gen11_dsi_enable(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc);

	/* DSI never has a PCH encoder. */
	drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder);

	/* Wa_1409054076:icl,jsl,ehl */
	icl_apply_kvmr_pipe_a_wa(encoder, crtc->pipe, true);

	/* Wa_16012360555:adl-p */
	adlp_set_lp_hs_wakeup_gb(encoder);

	/* step6d: enable dsi transcoder */
	gen11_dsi_enable_transcoder(encoder);

	/* step7: enable backlight */
	intel_backlight_enable(crtc_state, conn_state);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);

	intel_crtc_vblank_on(crtc_state);
}
1318 
1319 static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder)
1320 {
1321 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1322 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1323 	enum port port;
1324 	enum transcoder dsi_trans;
1325 	u32 tmp;
1326 
1327 	for_each_dsi_port(port, intel_dsi->ports) {
1328 		dsi_trans = dsi_port_to_transcoder(port);
1329 
1330 		/* disable transcoder */
1331 		tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
1332 		tmp &= ~PIPECONF_ENABLE;
1333 		intel_de_write(dev_priv, PIPECONF(dsi_trans), tmp);
1334 
1335 		/* wait for transcoder to be disabled */
1336 		if (intel_de_wait_for_clear(dev_priv, PIPECONF(dsi_trans),
1337 					    PIPECONF_STATE_ENABLE, 50))
1338 			drm_err(&dev_priv->drm,
1339 				"DSI trancoder not disabled\n");
1340 	}
1341 }
1342 
/*
 * Run the VBT panel power-down sequences (DISPLAY_OFF, ASSERT_RESET,
 * POWER_OFF) and wait for the commands to reach the panel.
 */
static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);

	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);

	/* ensure cmds dispatched to panel */
	wait_for_cmds_dispatched_to_panel(encoder);
}
1354 
/*
 * Tear down the DSI transcoder configuration: disable periodic frame updates
 * (command mode), enter ULPS on the link, disable the DDI function, and turn
 * off port sync mode for dual link. The order is part of the hardware
 * disable sequence — do not reorder.
 *
 * NOTE(review): "trancoder" in the name is a typo for "transcoder"; renaming
 * would need a coordinated update of all callers.
 */
static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	/* disable periodic update mode */
	if (is_cmd_mode(intel_dsi)) {
		for_each_dsi_port(port, intel_dsi->ports) {
			tmp = intel_de_read(dev_priv, DSI_CMD_FRMCTL(port));
			tmp &= ~DSI_PERIODIC_FRAME_UPDATE_ENABLE;
			intel_de_write(dev_priv, DSI_CMD_FRMCTL(port), tmp);
		}
	}

	/* put dsi link in ULPS */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans));
		tmp |= LINK_ENTER_ULPS;
		tmp &= ~LINK_ULPS_TYPE_LP11;
		intel_de_write(dev_priv, DSI_LP_MSG(dsi_trans), tmp);

		/* wait for the link to report the ULPS state */
		if (wait_for_us((intel_de_read(dev_priv, DSI_LP_MSG(dsi_trans)) &
				 LINK_IN_ULPS),
				10))
			drm_err(&dev_priv->drm, "DSI link not in ULPS\n");
	}

	/* disable ddi function */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~TRANS_DDI_FUNC_ENABLE;
		intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* disable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = intel_de_read(dev_priv,
					    TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp &= ~PORT_SYNC_MODE_ENABLE;
			intel_de_write(dev_priv,
				       TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}
	}
}
1406 
1407 static void gen11_dsi_disable_port(struct intel_encoder *encoder)
1408 {
1409 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1410 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1411 	u32 tmp;
1412 	enum port port;
1413 
1414 	gen11_dsi_ungate_clocks(encoder);
1415 	for_each_dsi_port(port, intel_dsi->ports) {
1416 		tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
1417 		tmp &= ~DDI_BUF_CTL_ENABLE;
1418 		intel_de_write(dev_priv, DDI_BUF_CTL(port), tmp);
1419 
1420 		if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
1421 				 DDI_BUF_IS_IDLE),
1422 				 8))
1423 			drm_err(&dev_priv->drm,
1424 				"DDI port:%c buffer not idle\n",
1425 				port_name(port));
1426 	}
1427 	gen11_dsi_gate_clocks(encoder);
1428 }
1429 
1430 static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
1431 {
1432 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1433 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1434 	enum port port;
1435 	u32 tmp;
1436 
1437 	for_each_dsi_port(port, intel_dsi->ports) {
1438 		intel_wakeref_t wakeref;
1439 
1440 		wakeref = fetch_and_zero(&intel_dsi->io_wakeref[port]);
1441 		intel_display_power_put(dev_priv,
1442 					port == PORT_A ?
1443 					POWER_DOMAIN_PORT_DDI_IO_A :
1444 					POWER_DOMAIN_PORT_DDI_IO_B,
1445 					wakeref);
1446 	}
1447 
1448 	/* set mode to DDI */
1449 	for_each_dsi_port(port, intel_dsi->ports) {
1450 		tmp = intel_de_read(dev_priv, ICL_DSI_IO_MODECTL(port));
1451 		tmp &= ~COMBO_PHY_MODE_DSI;
1452 		intel_de_write(dev_priv, ICL_DSI_IO_MODECTL(port), tmp);
1453 	}
1454 }
1455 
/*
 * Disable hook: runs the full DSI disable sequence — backlight off,
 * transcoder off, KVMR workaround cleared, panel powered down, transcoder
 * deconfigured, port disabled, util pin released, and IO power dropped.
 * The step order mirrors the hardware disable sequence.
 */
static void gen11_dsi_disable(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_conn_state->crtc);

	/* step1: turn off backlight */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
	intel_backlight_disable(old_conn_state);

	/* step2d,e: disable transcoder and wait */
	gen11_dsi_disable_transcoder(encoder);

	/* Wa_1409054076:icl,jsl,ehl */
	icl_apply_kvmr_pipe_a_wa(encoder, crtc->pipe, false);

	/* step2f,g: powerdown panel */
	gen11_dsi_powerdown_panel(encoder);

	/* step2h,i,j: deconfig trancoder */
	gen11_dsi_deconfigure_trancoder(encoder);

	/* step3: disable port */
	gen11_dsi_disable_port(encoder);

	gen11_dsi_config_util_pin(encoder, false);

	/* step4: disable IO power */
	gen11_dsi_disable_io_power(encoder);
}
1488 
/*
 * Post-disable hook: turns off vblank, then disables DSC and the pipe
 * scaler for the old crtc state.
 */
static void gen11_dsi_post_disable(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	intel_crtc_vblank_off(old_crtc_state);

	intel_dsc_disable(old_crtc_state);

	skl_scaler_disable(old_crtc_state);
}
1500 
/* Connector .mode_valid hook: defers entirely to the common DSI check. */
static enum drm_mode_status gen11_dsi_mode_valid(struct drm_connector *connector,
						 struct drm_display_mode *mode)
{
	/* FIXME: DSC? */
	return intel_dsi_mode_valid(connector, mode);
}
1507 
/*
 * Reconstruct the adjusted mode timings during state readout, reversing the
 * DSC and dual-link adjustments applied on the enable path
 * (gen11_dsi_set_transcoder_timings). Order of the adjustments matters:
 * DSC scaling first, then dual-link doubling.
 */
static void gen11_dsi_get_timings(struct intel_encoder *encoder,
				  struct intel_crtc_state *pipe_config)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	struct drm_display_mode *adjusted_mode =
					&pipe_config->hw.adjusted_mode;

	/* undo the DSC compression ratio applied to the horizontal timings */
	if (pipe_config->dsc.compressed_bpp) {
		int div = pipe_config->dsc.compressed_bpp;
		int mul = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);

		adjusted_mode->crtc_htotal =
			DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
		adjusted_mode->crtc_hsync_start =
			DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
		adjusted_mode->crtc_hsync_end =
			DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
	}

	/* dual link: each link carries half the pixels, minus any overlap */
	if (intel_dsi->dual_link) {
		adjusted_mode->crtc_hdisplay *= 2;
		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
			adjusted_mode->crtc_hdisplay -=
						intel_dsi->pixel_overlap;
		adjusted_mode->crtc_htotal *= 2;
	}
	adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hdisplay;
	adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_htotal;

	/* hsync is only programmed (and thus halved) in video mode */
	if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
		if (intel_dsi->dual_link) {
			adjusted_mode->crtc_hsync_start *= 2;
			adjusted_mode->crtc_hsync_end *= 2;
		}
	}
	adjusted_mode->crtc_vblank_start = adjusted_mode->crtc_vdisplay;
	adjusted_mode->crtc_vblank_end = adjusted_mode->crtc_vtotal;
}
1546 
1547 static bool gen11_dsi_is_periodic_cmd_mode(struct intel_dsi *intel_dsi)
1548 {
1549 	struct drm_device *dev = intel_dsi->base.base.dev;
1550 	struct drm_i915_private *dev_priv = to_i915(dev);
1551 	enum transcoder dsi_trans;
1552 	u32 val;
1553 
1554 	if (intel_dsi->ports == BIT(PORT_B))
1555 		dsi_trans = TRANSCODER_DSI_1;
1556 	else
1557 		dsi_trans = TRANSCODER_DSI_0;
1558 
1559 	val = intel_de_read(dev_priv, DSI_TRANS_FUNC_CONF(dsi_trans));
1560 	return (val & DSI_PERIODIC_FRAME_UPDATE_ENABLE);
1561 }
1562 
1563 static void gen11_dsi_get_cmd_mode_config(struct intel_dsi *intel_dsi,
1564 					  struct intel_crtc_state *pipe_config)
1565 {
1566 	if (intel_dsi->ports == (BIT(PORT_B) | BIT(PORT_A)))
1567 		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE1 |
1568 					    I915_MODE_FLAG_DSI_USE_TE0;
1569 	else if (intel_dsi->ports == BIT(PORT_B))
1570 		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE1;
1571 	else
1572 		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_USE_TE0;
1573 }
1574 
/*
 * State readout hook: fills the crtc state from hardware — clock (doubled
 * for dual link), timings, output type, pipe bpp, and command-mode TE /
 * periodic update flags.
 */
static void gen11_dsi_get_config(struct intel_encoder *encoder,
				 struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);

	intel_ddi_get_clock(encoder, pipe_config, icl_ddi_combo_get_pll(encoder));

	/* pclk is per link; dual link carries twice the pixel rate */
	pipe_config->hw.adjusted_mode.crtc_clock = intel_dsi->pclk;
	if (intel_dsi->dual_link)
		pipe_config->hw.adjusted_mode.crtc_clock *= 2;

	gen11_dsi_get_timings(encoder, pipe_config);
	pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
	pipe_config->pipe_bpp = bdw_get_pipemisc_bpp(crtc);

	/* Get the details on which TE should be enabled */
	if (is_cmd_mode(intel_dsi))
		gen11_dsi_get_cmd_mode_config(intel_dsi, pipe_config);

	if (gen11_dsi_is_periodic_cmd_mode(intel_dsi))
		pipe_config->mode_flags |= I915_MODE_FLAG_DSI_PERIODIC_CMD_MODE;
}
1598 
/*
 * Sync-state hook: verifies that BIOS left the Wa_1409054076 chicken bit
 * (IGNORE_KVMR_PIPE_A) set when it enabled DSI on pipe B; logs if not.
 */
static void gen11_dsi_sync_state(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_crtc *intel_crtc;
	enum pipe pipe;

	/* nothing to verify if the encoder has no active crtc state */
	if (!crtc_state)
		return;

	intel_crtc = to_intel_crtc(crtc_state->uapi.crtc);
	pipe = intel_crtc->pipe;

	/* wa verify 1409054076:icl,jsl,ehl */
	if (DISPLAY_VER(dev_priv) == 11 && pipe == PIPE_B &&
	    !(intel_de_read(dev_priv, CHICKEN_PAR1_1) & IGNORE_KVMR_PIPE_A))
		drm_dbg_kms(&dev_priv->drm,
			    "[ENCODER:%d:%s] BIOS left IGNORE_KVMR_PIPE_A cleared with pipe B enabled\n",
			    encoder->base.base.id,
			    encoder->base.name);
}
1620 
/*
 * Compute the DSC configuration for DSI from VBT parameters.
 *
 * Returns 0 (with dsc.compression_enable set) on success, 0 if the VBT does
 * not request DSC, or a negative error code on invalid parameters.
 */
static int gen11_dsi_dsc_compute_config(struct intel_encoder *encoder,
					struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	/* display ver 12+ supports up to 12 bpc for DSC, earlier 10 bpc */
	int dsc_max_bpc = DISPLAY_VER(dev_priv) >= 12 ? 12 : 10;
	bool use_dsc;
	int ret;

	use_dsc = intel_bios_get_dsc_params(encoder, crtc_state, dsc_max_bpc);
	if (!use_dsc)
		return 0;

	/* below 8 bpc (24 bpp) DSC is not supported */
	if (crtc_state->pipe_bpp < 8 * 3)
		return -EINVAL;

	/* FIXME: split only when necessary */
	if (crtc_state->dsc.slice_count > 1)
		crtc_state->dsc.dsc_split = true;

	vdsc_cfg->convert_rgb = true;

	/* FIXME: initialize from VBT */
	vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST;

	vdsc_cfg->pic_height = crtc_state->hw.adjusted_mode.crtc_vdisplay;

	ret = intel_dsc_compute_params(crtc_state);
	if (ret)
		return ret;

	/* DSI specific sanity checks on the common code */
	drm_WARN_ON(&dev_priv->drm, vdsc_cfg->vbr_enable);
	drm_WARN_ON(&dev_priv->drm, vdsc_cfg->simple_422);
	drm_WARN_ON(&dev_priv->drm,
		    vdsc_cfg->pic_width % vdsc_cfg->slice_width);
	drm_WARN_ON(&dev_priv->drm, vdsc_cfg->slice_height < 8);
	drm_WARN_ON(&dev_priv->drm,
		    vdsc_cfg->pic_height % vdsc_cfg->slice_height);

	ret = drm_dsc_compute_rc_parameters(vdsc_cfg);
	if (ret)
		return ret;

	crtc_state->dsc.compression_enable = true;

	return 0;
}
1669 
/*
 * Atomic compute-config hook: fixes up the adjusted mode from the panel,
 * applies panel fitting, selects the DSI transcoder, sets pipe bpp from the
 * pixel format, optionally enables DSC, and derives the port clock.
 *
 * Returns 0 on success or a negative error code.
 */
static int gen11_dsi_compute_config(struct intel_encoder *encoder,
				    struct intel_crtc_state *pipe_config,
				    struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi,
						   base);
	struct intel_connector *intel_connector = intel_dsi->attached_connector;
	struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	int ret;

	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;

	ret = intel_panel_compute_config(intel_connector, adjusted_mode);
	if (ret)
		return ret;

	ret = intel_panel_fitting(pipe_config, conn_state);
	if (ret)
		return ret;

	adjusted_mode->flags = 0;

	/* Dual link goes to transcoder DSI '0' */
	if (intel_dsi->ports == BIT(PORT_B))
		pipe_config->cpu_transcoder = TRANSCODER_DSI_1;
	else
		pipe_config->cpu_transcoder = TRANSCODER_DSI_0;

	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB888)
		pipe_config->pipe_bpp = 24;
	else
		pipe_config->pipe_bpp = 18;

	pipe_config->clock_set = true;

	/* DSC failure is not fatal; continue uncompressed */
	if (gen11_dsi_dsc_compute_config(encoder, pipe_config))
		drm_dbg_kms(&i915->drm, "Attempting to use DSC failed\n");

	pipe_config->port_clock = afe_clk(encoder, pipe_config) / 5;

	/*
	 * In case of TE GATE cmd mode, we
	 * receive TE from the slave if
	 * dual link is enabled
	 */
	if (is_cmd_mode(intel_dsi))
		gen11_dsi_get_cmd_mode_config(intel_dsi, pipe_config);

	return 0;
}
1722 
1723 static void gen11_dsi_get_power_domains(struct intel_encoder *encoder,
1724 					struct intel_crtc_state *crtc_state)
1725 {
1726 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1727 
1728 	get_dsi_io_power_domains(i915,
1729 				 enc_to_intel_dsi(encoder));
1730 }
1731 
/*
 * Hardware-state readout hook: determines whether the encoder is enabled
 * and, if so, on which pipe (decoded from the EDP input field of
 * TRANS_DDI_FUNC_CTL). Takes the encoder power domain for the duration of
 * the register reads.
 *
 * Returns true and sets *pipe when the DSI transcoder is enabled.
 */
static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
				   enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
	enum transcoder dsi_trans;
	intel_wakeref_t wakeref;
	enum port port;
	bool ret = false;
	u32 tmp;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(dsi_trans));
		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		case TRANS_DDI_EDP_INPUT_A_ON:
			*pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe = PIPE_C;
			break;
		case TRANS_DDI_EDP_INPUT_D_ONOFF:
			*pipe = PIPE_D;
			break;
		default:
			drm_err(&dev_priv->drm, "Invalid PIPE input\n");
			goto out;
		}

		tmp = intel_de_read(dev_priv, PIPECONF(dsi_trans));
		ret = tmp & PIPECONF_ENABLE;
	}
out:
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
	return ret;
}
1776 
1777 static bool gen11_dsi_initial_fastset_check(struct intel_encoder *encoder,
1778 					    struct intel_crtc_state *crtc_state)
1779 {
1780 	if (crtc_state->dsc.compression_enable) {
1781 		drm_dbg_kms(encoder->base.dev, "Forcing full modeset due to DSC being enabled\n");
1782 		crtc_state->uapi.mode_changed = true;
1783 
1784 		return false;
1785 	}
1786 
1787 	return true;
1788 }
1789 
/* drm_encoder .destroy callback: delegate to the common encoder teardown. */
static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	intel_encoder_destroy(encoder);
}
1794 
/* drm_encoder vfuncs for the gen11 DSI encoder. */
static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
	.destroy = gen11_dsi_encoder_destroy,
};
1798 
/* drm_connector vfuncs; all delegate to the common intel/drm helpers. */
static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
	.detect = intel_panel_detect,
	.late_register = intel_connector_register,
	.early_unregister = intel_connector_unregister,
	.destroy = intel_connector_destroy,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
1810 
/* Connector helper vfuncs: mode enumeration, validation and atomic check. */
static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
	.get_modes = intel_dsi_get_modes,
	.mode_valid = gen11_dsi_mode_valid,
	.atomic_check = intel_digital_connector_atomic_check,
};
1816 
/* mipi_dsi_host .attach callback: nothing to do, always succeeds. */
static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1822 
/* mipi_dsi_host .detach callback: nothing to do, always succeeds. */
static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1828 
1829 static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host,
1830 				       const struct mipi_dsi_msg *msg)
1831 {
1832 	struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host);
1833 	struct mipi_dsi_packet dsi_pkt;
1834 	ssize_t ret;
1835 	bool enable_lpdt = false;
1836 
1837 	ret = mipi_dsi_create_packet(&dsi_pkt, msg);
1838 	if (ret < 0)
1839 		return ret;
1840 
1841 	if (msg->flags & MIPI_DSI_MSG_USE_LPM)
1842 		enable_lpdt = true;
1843 
1844 	/* only long packet contains payload */
1845 	if (mipi_dsi_packet_format_is_long(msg->type)) {
1846 		ret = dsi_send_pkt_payld(intel_dsi_host, &dsi_pkt);
1847 		if (ret < 0)
1848 			return ret;
1849 	}
1850 
1851 	/* send packet header */
1852 	ret  = dsi_send_pkt_hdr(intel_dsi_host, &dsi_pkt, enable_lpdt);
1853 	if (ret < 0)
1854 		return ret;
1855 
1856 	//TODO: add payload receive code if needed
1857 
1858 	ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length;
1859 
1860 	return ret;
1861 }
1862 
/* DSI host ops handed to intel_dsi_host_init() for each enabled port. */
static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
	.attach = gen11_dsi_host_attach,
	.detach = gen11_dsi_host_detach,
	.transfer = gen11_dsi_host_transfer,
};
1868 
/*
 * Upper bounds for the D-PHY timing counts computed in
 * icl_dphy_param_init(); each matches the width of the corresponding
 * register field the count is programmed into.
 */
#define ICL_PREPARE_CNT_MAX	0x7
#define ICL_CLK_ZERO_CNT_MAX	0xf
#define ICL_TRAIL_CNT_MAX	0x7
#define ICL_TCLK_PRE_CNT_MAX	0x3
#define ICL_TCLK_POST_CNT_MAX	0x7
#define ICL_HS_ZERO_CNT_MAX	0xf
#define ICL_EXIT_ZERO_CNT_MAX	0x7
1876 
1877 static void icl_dphy_param_init(struct intel_dsi *intel_dsi)
1878 {
1879 	struct drm_device *dev = intel_dsi->base.base.dev;
1880 	struct drm_i915_private *dev_priv = to_i915(dev);
1881 	struct intel_connector *connector = intel_dsi->attached_connector;
1882 	struct mipi_config *mipi_config = connector->panel.vbt.dsi.config;
1883 	u32 tlpx_ns;
1884 	u32 prepare_cnt, exit_zero_cnt, clk_zero_cnt, trail_cnt;
1885 	u32 ths_prepare_ns, tclk_trail_ns;
1886 	u32 hs_zero_cnt;
1887 	u32 tclk_pre_cnt, tclk_post_cnt;
1888 
1889 	tlpx_ns = intel_dsi_tlpx_ns(intel_dsi);
1890 
1891 	tclk_trail_ns = max(mipi_config->tclk_trail, mipi_config->ths_trail);
1892 	ths_prepare_ns = max(mipi_config->ths_prepare,
1893 			     mipi_config->tclk_prepare);
1894 
1895 	/*
1896 	 * prepare cnt in escape clocks
1897 	 * this field represents a hexadecimal value with a precision
1898 	 * of 1.2 – i.e. the most significant bit is the integer
1899 	 * and the least significant 2 bits are fraction bits.
1900 	 * so, the field can represent a range of 0.25 to 1.75
1901 	 */
1902 	prepare_cnt = DIV_ROUND_UP(ths_prepare_ns * 4, tlpx_ns);
1903 	if (prepare_cnt > ICL_PREPARE_CNT_MAX) {
1904 		drm_dbg_kms(&dev_priv->drm, "prepare_cnt out of range (%d)\n",
1905 			    prepare_cnt);
1906 		prepare_cnt = ICL_PREPARE_CNT_MAX;
1907 	}
1908 
1909 	/* clk zero count in escape clocks */
1910 	clk_zero_cnt = DIV_ROUND_UP(mipi_config->tclk_prepare_clkzero -
1911 				    ths_prepare_ns, tlpx_ns);
1912 	if (clk_zero_cnt > ICL_CLK_ZERO_CNT_MAX) {
1913 		drm_dbg_kms(&dev_priv->drm,
1914 			    "clk_zero_cnt out of range (%d)\n", clk_zero_cnt);
1915 		clk_zero_cnt = ICL_CLK_ZERO_CNT_MAX;
1916 	}
1917 
1918 	/* trail cnt in escape clocks*/
1919 	trail_cnt = DIV_ROUND_UP(tclk_trail_ns, tlpx_ns);
1920 	if (trail_cnt > ICL_TRAIL_CNT_MAX) {
1921 		drm_dbg_kms(&dev_priv->drm, "trail_cnt out of range (%d)\n",
1922 			    trail_cnt);
1923 		trail_cnt = ICL_TRAIL_CNT_MAX;
1924 	}
1925 
1926 	/* tclk pre count in escape clocks */
1927 	tclk_pre_cnt = DIV_ROUND_UP(mipi_config->tclk_pre, tlpx_ns);
1928 	if (tclk_pre_cnt > ICL_TCLK_PRE_CNT_MAX) {
1929 		drm_dbg_kms(&dev_priv->drm,
1930 			    "tclk_pre_cnt out of range (%d)\n", tclk_pre_cnt);
1931 		tclk_pre_cnt = ICL_TCLK_PRE_CNT_MAX;
1932 	}
1933 
1934 	/* tclk post count in escape clocks */
1935 	tclk_post_cnt = DIV_ROUND_UP(mipi_config->tclk_post, tlpx_ns);
1936 	if (tclk_post_cnt > ICL_TCLK_POST_CNT_MAX) {
1937 		drm_dbg_kms(&dev_priv->drm,
1938 			    "tclk_post_cnt out of range (%d)\n",
1939 			    tclk_post_cnt);
1940 		tclk_post_cnt = ICL_TCLK_POST_CNT_MAX;
1941 	}
1942 
1943 	/* hs zero cnt in escape clocks */
1944 	hs_zero_cnt = DIV_ROUND_UP(mipi_config->ths_prepare_hszero -
1945 				   ths_prepare_ns, tlpx_ns);
1946 	if (hs_zero_cnt > ICL_HS_ZERO_CNT_MAX) {
1947 		drm_dbg_kms(&dev_priv->drm, "hs_zero_cnt out of range (%d)\n",
1948 			    hs_zero_cnt);
1949 		hs_zero_cnt = ICL_HS_ZERO_CNT_MAX;
1950 	}
1951 
1952 	/* hs exit zero cnt in escape clocks */
1953 	exit_zero_cnt = DIV_ROUND_UP(mipi_config->ths_exit, tlpx_ns);
1954 	if (exit_zero_cnt > ICL_EXIT_ZERO_CNT_MAX) {
1955 		drm_dbg_kms(&dev_priv->drm,
1956 			    "exit_zero_cnt out of range (%d)\n",
1957 			    exit_zero_cnt);
1958 		exit_zero_cnt = ICL_EXIT_ZERO_CNT_MAX;
1959 	}
1960 
1961 	/* clock lane dphy timings */
1962 	intel_dsi->dphy_reg = (CLK_PREPARE_OVERRIDE |
1963 			       CLK_PREPARE(prepare_cnt) |
1964 			       CLK_ZERO_OVERRIDE |
1965 			       CLK_ZERO(clk_zero_cnt) |
1966 			       CLK_PRE_OVERRIDE |
1967 			       CLK_PRE(tclk_pre_cnt) |
1968 			       CLK_POST_OVERRIDE |
1969 			       CLK_POST(tclk_post_cnt) |
1970 			       CLK_TRAIL_OVERRIDE |
1971 			       CLK_TRAIL(trail_cnt));
1972 
1973 	/* data lanes dphy timings */
1974 	intel_dsi->dphy_data_lane_reg = (HS_PREPARE_OVERRIDE |
1975 					 HS_PREPARE(prepare_cnt) |
1976 					 HS_ZERO_OVERRIDE |
1977 					 HS_ZERO(hs_zero_cnt) |
1978 					 HS_TRAIL_OVERRIDE |
1979 					 HS_TRAIL(trail_cnt) |
1980 					 HS_EXIT_OVERRIDE |
1981 					 HS_EXIT(exit_zero_cnt));
1982 
1983 	intel_dsi_log_params(intel_dsi);
1984 }
1985 
1986 static void icl_dsi_add_properties(struct intel_connector *connector)
1987 {
1988 	const struct drm_display_mode *fixed_mode =
1989 		intel_panel_preferred_fixed_mode(connector);
1990 
1991 	intel_attach_scaling_mode_property(&connector->base);
1992 
1993 	drm_connector_set_panel_orientation_with_quirk(&connector->base,
1994 						       intel_dsi_get_panel_orientation(connector),
1995 						       fixed_mode->hdisplay,
1996 						       fixed_mode->vdisplay);
1997 }
1998 
/*
 * Probe and register the gen11+ (ICL) MIPI DSI output: allocate the
 * encoder/connector pair, install the hooks, pull panel data from the
 * VBT, create per-port DSI hosts and precompute the D-PHY timings.
 * Silently returns if the VBT reports no DSI panel or allocation fails.
 */
void icl_dsi_init(struct drm_i915_private *dev_priv)
{
	struct intel_dsi *intel_dsi;
	struct intel_encoder *encoder;
	struct intel_connector *intel_connector;
	struct drm_connector *connector;
	enum port port;

	/* VBT tells us which port (if any) carries the DSI panel */
	if (!intel_bios_is_dsi_present(dev_priv, &port))
		return;

	intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
	if (!intel_dsi)
		return;

	intel_connector = intel_connector_alloc();
	if (!intel_connector) {
		kfree(intel_dsi);
		return;
	}

	encoder = &intel_dsi->base;
	intel_dsi->attached_connector = intel_connector;
	connector = &intel_connector->base;

	/* register DSI encoder with DRM subsystem */
	drm_encoder_init(&dev_priv->drm, &encoder->base, &gen11_dsi_encoder_funcs,
			 DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));

	encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
	encoder->pre_enable = gen11_dsi_pre_enable;
	encoder->enable = gen11_dsi_enable;
	encoder->disable = gen11_dsi_disable;
	encoder->post_disable = gen11_dsi_post_disable;
	encoder->port = port;
	encoder->get_config = gen11_dsi_get_config;
	encoder->sync_state = gen11_dsi_sync_state;
	encoder->update_pipe = intel_backlight_update;
	encoder->compute_config = gen11_dsi_compute_config;
	encoder->get_hw_state = gen11_dsi_get_hw_state;
	encoder->initial_fastset_check = gen11_dsi_initial_fastset_check;
	encoder->type = INTEL_OUTPUT_DSI;
	encoder->cloneable = 0;
	/* DSI is not restricted to a particular pipe */
	encoder->pipe_mask = ~0;
	encoder->power_domain = POWER_DOMAIN_PORT_DSI;
	encoder->get_power_domains = gen11_dsi_get_power_domains;
	encoder->disable_clock = gen11_dsi_gate_clocks;
	encoder->is_clock_enabled = gen11_dsi_is_clock_enabled;

	/* register DSI connector with DRM subsystem */
	drm_connector_init(&dev_priv->drm, connector, &gen11_dsi_connector_funcs,
			   DRM_MODE_CONNECTOR_DSI);
	drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* attach connector to encoder */
	intel_connector_attach_encoder(intel_connector, encoder);

	/* parse the panel's VBT data before anything below depends on it */
	encoder->devdata = intel_bios_encoder_data_lookup(dev_priv, port);
	intel_bios_init_panel_late(dev_priv, &intel_connector->panel, encoder->devdata, NULL);

	/* mode_config.mutex protects the connector's fixed-mode list */
	mutex_lock(&dev_priv->drm.mode_config.mutex);
	intel_panel_add_vbt_lfp_fixed_mode(intel_connector);
	mutex_unlock(&dev_priv->drm.mode_config.mutex);

	if (!intel_panel_preferred_fixed_mode(intel_connector)) {
		drm_err(&dev_priv->drm, "DSI fixed mode info missing\n");
		goto err;
	}

	intel_panel_init(intel_connector, NULL);

	intel_backlight_setup(intel_connector, INVALID_PIPE);

	/* dual-link panels always use ports A+B; otherwise just the VBT port */
	if (intel_connector->panel.vbt.dsi.config->dual_link)
		intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
	else
		intel_dsi->ports = BIT(port);

	/* backlight/CABC port masks from VBT must be a subset of the DSI ports */
	if (drm_WARN_ON(&dev_priv->drm, intel_connector->panel.vbt.dsi.bl_ports & ~intel_dsi->ports))
		intel_connector->panel.vbt.dsi.bl_ports &= intel_dsi->ports;

	if (drm_WARN_ON(&dev_priv->drm, intel_connector->panel.vbt.dsi.cabc_ports & ~intel_dsi->ports))
		intel_connector->panel.vbt.dsi.cabc_ports &= intel_dsi->ports;

	/* one DSI host per enabled port */
	for_each_dsi_port(port, intel_dsi->ports) {
		struct intel_dsi_host *host;

		host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
		if (!host)
			goto err;

		intel_dsi->dsi_hosts[port] = host;
	}

	if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
		drm_dbg_kms(&dev_priv->drm, "no device found\n");
		goto err;
	}

	icl_dphy_param_init(intel_dsi);

	icl_dsi_add_properties(intel_connector);
	return;

	/*
	 * NOTE(review): this unwind kfree()s intel_connector directly instead
	 * of going through intel_connector_destroy(), so panel state populated
	 * by intel_bios_init_panel_late()/intel_panel_init() on the later
	 * error paths looks like it may be leaked — confirm against
	 * intel_panel_fini() expectations.
	 */
err:
	drm_connector_cleanup(connector);
	drm_encoder_cleanup(&encoder->base);
	kfree(intel_dsi);
	kfree(intel_connector);
}
2111