xref: /openbmc/linux/drivers/gpu/drm/mediatek/mtk_dp.c (revision abb84c46)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019-2022 MediaTek Inc.
4  * Copyright (c) 2022 BayLibre
5  */
6 
7 #include <drm/display/drm_dp_aux_bus.h>
8 #include <drm/display/drm_dp.h>
9 #include <drm/display/drm_dp_helper.h>
10 #include <drm/drm_atomic_helper.h>
11 #include <drm/drm_bridge.h>
12 #include <drm/drm_crtc.h>
13 #include <drm/drm_edid.h>
14 #include <drm/drm_of.h>
15 #include <drm/drm_panel.h>
16 #include <drm/drm_print.h>
17 #include <drm/drm_probe_helper.h>
18 #include <linux/arm-smccc.h>
19 #include <linux/clk.h>
20 #include <linux/delay.h>
21 #include <linux/errno.h>
22 #include <linux/kernel.h>
23 #include <linux/media-bus-format.h>
24 #include <linux/nvmem-consumer.h>
25 #include <linux/of.h>
26 #include <linux/of_irq.h>
27 #include <linux/of_platform.h>
28 #include <linux/phy/phy.h>
29 #include <linux/platform_device.h>
30 #include <linux/pm_runtime.h>
31 #include <linux/regmap.h>
32 #include <linux/soc/mediatek/mtk_sip_svc.h>
33 #include <sound/hdmi-codec.h>
34 #include <video/videomode.h>
35 
36 #include "mtk_dp_reg.h"
37 
38 #define MTK_DP_SIP_CONTROL_AARCH32	MTK_SIP_SMC_CMD(0x523)
39 #define MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE	(BIT(0) | BIT(5))
40 #define MTK_DP_SIP_ATF_VIDEO_UNMUTE	BIT(5)
41 
42 #define MTK_DP_THREAD_CABLE_STATE_CHG	BIT(0)
43 #define MTK_DP_THREAD_HPD_EVENT		BIT(1)
44 
45 #define MTK_DP_4P1T 4
46 #define MTK_DP_HDE 2
47 #define MTK_DP_PIX_PER_ADDR 2
48 #define MTK_DP_AUX_WAIT_REPLY_COUNT 20
49 #define MTK_DP_TBC_BUF_READ_START_ADDR 0x8
50 #define MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY 5
51 #define MTK_DP_TRAIN_DOWNSCALE_RETRY 10
52 #define MTK_DP_VERSION 0x11
53 #define MTK_DP_SDP_AUI 0x4
54 
/*
 * Indices of the eFuse-derived PHY calibration values held in
 * mtk_dp->cal_data[]; the per-SoC extraction rules are described by the
 * matching mtk_dp_data->efuse_fmt table.
 */
enum {
	MTK_DP_CAL_GLB_BIAS_TRIM = 0,
	MTK_DP_CAL_CLKTX_IMPSE,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2,
	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2,
	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3,
	MTK_DP_CAL_MAX,
};
68 
/* State tracked while (re)training the DP link. */
struct mtk_dp_train_info {
	bool sink_ssc;		/* sink-side spread-spectrum clocking (SSC) flag */
	bool cable_plugged_in;	/* latest known cable/HPD state */
	/* link_rate is in multiple of 0.27Gbps */
	int link_rate;
	int lane_count;		/* number of lanes to train */
	unsigned int channel_eq_pattern; /* training pattern used for channel EQ */
};
77 
/* Audio stream parameters to program into the DP encoder. */
struct mtk_dp_audio_cfg {
	bool detect_monitor;	/* monitor audio capability was detected */
	int sad_count;		/* presumably EDID Short Audio Descriptor count — verify */
	int sample_rate;	/* sample rate in Hz (32000..192000 handled) */
	int word_length_bits;	/* PCM sample width: 16, 20 or 24 */
	int channels;		/* channel count; 2 and 8 are handled explicitly */
};
85 
/* Currently configured video/audio state of the encoder. */
struct mtk_dp_info {
	enum dp_pixelformat format;	/* output pixel format written to MISC0 */
	struct videomode vm;		/* active video timing */
	struct mtk_dp_audio_cfg audio_cur_cfg;	/* currently applied audio setup */
};
91 
/*
 * Describes how one calibration value is extracted from the raw nvmem
 * buffer: value = (buf[idx] >> shift) & mask.  Values outside
 * [min_val, max_val] are treated as invalid and trigger the
 * default-value fallback path in mtk_dp_get_calibration_data().
 */
struct mtk_dp_efuse_fmt {
	unsigned short idx;		/* 32-bit word index into the nvmem buffer */
	unsigned short shift;		/* bit offset of the field */
	unsigned short mask;		/* field width mask (applied after shift) */
	unsigned short min_val;		/* lowest acceptable value */
	unsigned short max_val;		/* highest acceptable value */
	unsigned short default_val;	/* used when the fused value is invalid */
};
100 
/* Driver-private state for one MediaTek DP/eDP TX controller instance. */
struct mtk_dp {
	bool enabled;			/* bridge output currently enabled */
	bool need_debounce;		/* HPD debounce in progress */
	int irq;			/* controller interrupt line */
	u8 max_lanes;			/* upper bound on trained lane count */
	u8 max_linkrate;		/* upper bound on trained link rate */
	u8 rx_cap[DP_RECEIVER_CAP_SIZE];	/* cached sink DPCD receiver caps */
	u32 cal_data[MTK_DP_CAL_MAX];	/* PHY calibration values (from eFuse) */
	u32 irq_thread_handle;		/* MTK_DP_THREAD_* events for threaded IRQ */
	/* irq_thread_lock is used to protect irq_thread_handle */
	spinlock_t irq_thread_lock;

	struct device *dev;
	struct drm_bridge bridge;	/* this driver's bridge */
	struct drm_bridge *next_bridge;	/* downstream bridge or panel bridge */
	struct drm_connector *conn;
	struct drm_device *drm_dev;
	struct drm_dp_aux aux;		/* DP AUX channel handle */

	const struct mtk_dp_data *data;	/* per-SoC/per-port static config */
	struct mtk_dp_info info;	/* current video/audio configuration */
	struct mtk_dp_train_info train_info;	/* link training state */

	struct platform_device *phy_dev;
	struct phy *phy;
	struct regmap *regs;		/* controller register map */
	struct timer_list debounce_timer;	/* HPD debounce timer */

	/* For audio */
	bool audio_enable;
	hdmi_codec_plugged_cb plugged_cb;
	struct platform_device *audio_pdev;	/* hdmi-codec child device */

	struct device *codec_dev;
	/* protect the plugged_cb as it's used in both bridge ops and audio */
	struct mutex update_plugged_status_lock;
};
138 
/* Static per-SoC / per-port (eDP vs DP) configuration. */
struct mtk_dp_data {
	int bridge_type;		/* connector type exposed by the bridge */
	unsigned int smc_cmd;		/* SiP SMC sub-command (see MTK_DP_SIP_* defines) */
	const struct mtk_dp_efuse_fmt *efuse_fmt;	/* calibration extraction table */
	bool audio_supported;		/* whether this port can carry audio */
};
145 
/*
 * eFuse field layout of the MT8195 *eDP* PHY calibration values.
 * See struct mtk_dp_efuse_fmt for the extraction/validation semantics;
 * note the eDP fuses live at different word/bit positions than the DP
 * ones below.
 */
static const struct mtk_dp_efuse_fmt mt8195_edp_efuse_fmt[MTK_DP_CAL_MAX] = {
	[MTK_DP_CAL_GLB_BIAS_TRIM] = {
		.idx = 3,
		.shift = 27,
		.mask = 0x1f,
		.min_val = 1,
		.max_val = 0x1e,
		.default_val = 0xf,
	},
	[MTK_DP_CAL_CLKTX_IMPSE] = {
		.idx = 0,
		.shift = 9,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = {
		.idx = 2,
		.shift = 28,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = {
		.idx = 2,
		.shift = 20,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = {
		.idx = 2,
		.shift = 12,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = {
		.idx = 2,
		.shift = 4,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = {
		.idx = 2,
		.shift = 24,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = {
		.idx = 2,
		.shift = 16,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = {
		.idx = 2,
		.shift = 8,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = {
		.idx = 2,
		.shift = 0,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
};
228 
/*
 * eFuse field layout of the MT8195 *DP* PHY calibration values.
 * Same fields as the eDP table above but fused at different word/bit
 * positions.
 */
static const struct mtk_dp_efuse_fmt mt8195_dp_efuse_fmt[MTK_DP_CAL_MAX] = {
	[MTK_DP_CAL_GLB_BIAS_TRIM] = {
		.idx = 0,
		.shift = 27,
		.mask = 0x1f,
		.min_val = 1,
		.max_val = 0x1e,
		.default_val = 0xf,
	},
	[MTK_DP_CAL_CLKTX_IMPSE] = {
		.idx = 0,
		.shift = 13,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = {
		.idx = 1,
		.shift = 28,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = {
		.idx = 1,
		.shift = 20,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = {
		.idx = 1,
		.shift = 12,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = {
		.idx = 1,
		.shift = 4,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = {
		.idx = 1,
		.shift = 24,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = {
		.idx = 1,
		.shift = 16,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = {
		.idx = 1,
		.shift = 8,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = {
		.idx = 1,
		.shift = 0,
		.mask = 0xf,
		.min_val = 1,
		.max_val = 0xe,
		.default_val = 0x8,
	},
};
311 
312 static struct regmap_config mtk_dp_regmap_config = {
313 	.reg_bits = 32,
314 	.val_bits = 32,
315 	.reg_stride = 4,
316 	.max_register = SEC_OFFSET + 0x90,
317 	.name = "mtk-dp-registers",
318 };
319 
/* Resolve the driver-private mtk_dp from its embedded drm_bridge. */
static struct mtk_dp *mtk_dp_from_bridge(struct drm_bridge *b)
{
	return container_of(b, struct mtk_dp, bridge);
}
324 
325 static u32 mtk_dp_read(struct mtk_dp *mtk_dp, u32 offset)
326 {
327 	u32 read_val;
328 	int ret;
329 
330 	ret = regmap_read(mtk_dp->regs, offset, &read_val);
331 	if (ret) {
332 		dev_err(mtk_dp->dev, "Failed to read register 0x%x: %d\n",
333 			offset, ret);
334 		return 0;
335 	}
336 
337 	return read_val;
338 }
339 
340 static int mtk_dp_write(struct mtk_dp *mtk_dp, u32 offset, u32 val)
341 {
342 	int ret = regmap_write(mtk_dp->regs, offset, val);
343 
344 	if (ret)
345 		dev_err(mtk_dp->dev,
346 			"Failed to write register 0x%x with value 0x%x\n",
347 			offset, val);
348 	return ret;
349 }
350 
351 static int mtk_dp_update_bits(struct mtk_dp *mtk_dp, u32 offset,
352 			      u32 val, u32 mask)
353 {
354 	int ret = regmap_update_bits(mtk_dp->regs, offset, mask, val);
355 
356 	if (ret)
357 		dev_err(mtk_dp->dev,
358 			"Failed to update register 0x%x with value 0x%x, mask 0x%x\n",
359 			offset, val, mask);
360 	return ret;
361 }
362 
363 static void mtk_dp_bulk_16bit_write(struct mtk_dp *mtk_dp, u32 offset, u8 *buf,
364 				    size_t length)
365 {
366 	int i;
367 
368 	/* 2 bytes per register */
369 	for (i = 0; i < length; i += 2) {
370 		u32 val = buf[i] | (i + 1 < length ? buf[i + 1] << 8 : 0);
371 
372 		if (mtk_dp_write(mtk_dp, offset + i * 2, val))
373 			return;
374 	}
375 }
376 
/*
 * Toggle MSA bypass: when @enable is true all the *_SEL_DP_ENC0_P0
 * select bits are cleared, when false they are all set.  The exact
 * hardware meaning of the SEL bits is not visible here — presumably
 * they choose between SW-programmed and HW-derived MSA timing values.
 */
static void mtk_dp_msa_bypass_enable(struct mtk_dp *mtk_dp, bool enable)
{
	u32 mask = HTOTAL_SEL_DP_ENC0_P0 | VTOTAL_SEL_DP_ENC0_P0 |
		   HSTART_SEL_DP_ENC0_P0 | VSTART_SEL_DP_ENC0_P0 |
		   HWIDTH_SEL_DP_ENC0_P0 | VHEIGHT_SEL_DP_ENC0_P0 |
		   HSP_SEL_DP_ENC0_P0 | HSW_SEL_DP_ENC0_P0 |
		   VSP_SEL_DP_ENC0_P0 | VSW_SEL_DP_ENC0_P0;

	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, enable ? 0 : mask, mask);
}
387 
/*
 * Program the Main Stream Attribute (MSA) registers and the internal
 * pattern-generator timing registers from the videomode stored in
 * mtk_dp->info.vm.  drm_display_mode_from_videomode() is used only to
 * obtain the derived htotal/vtotal sums.
 */
static void mtk_dp_set_msa(struct mtk_dp *mtk_dp)
{
	struct drm_display_mode mode;
	struct videomode *vm = &mtk_dp->info.vm;

	drm_display_mode_from_videomode(vm, &mode);

	/* horizontal */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3010,
			   mode.htotal, HTOTAL_SW_DP_ENC0_P0_MASK);
	/* active video starts after sync + back porch */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3018,
			   vm->hsync_len + vm->hback_porch,
			   HSTART_SW_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028,
			   vm->hsync_len, HSW_SW_DP_ENC0_P0_MASK);
	/* HSP cleared — polarity bit left at 0 */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028,
			   0, HSP_SW_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3020,
			   vm->hactive, HWIDTH_SW_DP_ENC0_P0_MASK);

	/* vertical */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3014,
			   mode.vtotal, VTOTAL_SW_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_301C,
			   vm->vsync_len + vm->vback_porch,
			   VSTART_SW_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C,
			   vm->vsync_len, VSW_SW_DP_ENC0_P0_MASK);
	/* VSP cleared — polarity bit left at 0 */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C,
			   0, VSP_SW_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3024,
			   vm->vactive, VHEIGHT_SW_DP_ENC0_P0_MASK);

	/* horizontal */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3064,
			   vm->hactive, HDE_NUM_LAST_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3154,
			   mode.htotal, PGEN_HTOTAL_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3158,
			   vm->hfront_porch,
			   PGEN_HSYNC_RISING_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_315C,
			   vm->hsync_len,
			   PGEN_HSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3160,
			   vm->hback_porch + vm->hsync_len,
			   PGEN_HFDE_START_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3164,
			   vm->hactive,
			   PGEN_HFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK);

	/* vertical */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3168,
			   mode.vtotal,
			   PGEN_VTOTAL_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_316C,
			   vm->vfront_porch,
			   PGEN_VSYNC_RISING_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3170,
			   vm->vsync_len,
			   PGEN_VSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3174,
			   vm->vback_porch + vm->vsync_len,
			   PGEN_VFDE_START_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3178,
			   vm->vactive,
			   PGEN_VFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK);
}
456 
457 static int mtk_dp_set_color_format(struct mtk_dp *mtk_dp,
458 				   enum dp_pixelformat color_format)
459 {
460 	u32 val;
461 
462 	/* update MISC0 */
463 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034,
464 			   color_format << DP_TEST_COLOR_FORMAT_SHIFT,
465 			   DP_TEST_COLOR_FORMAT_MASK);
466 
467 	switch (color_format) {
468 	case DP_PIXELFORMAT_YUV422:
469 		val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_YCBCR422;
470 		break;
471 	case DP_PIXELFORMAT_RGB:
472 		val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_RGB;
473 		break;
474 	default:
475 		drm_warn(mtk_dp->drm_dev, "Unsupported color format: %d\n",
476 			 color_format);
477 		return -EINVAL;
478 	}
479 
480 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
481 			   val, PIXEL_ENCODE_FORMAT_DP_ENC0_P0_MASK);
482 	return 0;
483 }
484 
/*
 * Program the color depth into MISC0 and the encoder.
 * Hard-coded to 8 bits per component.
 */
static void mtk_dp_set_color_depth(struct mtk_dp *mtk_dp)
{
	/* Only support 8 bits currently */
	/* Update MISC0 */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034,
			   DP_MSA_MISC_8_BPC, DP_TEST_BIT_DEPTH_MASK);

	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
			   VIDEO_COLOR_DEPTH_DP_ENC0_P0_8BIT,
			   VIDEO_COLOR_DEPTH_DP_ENC0_P0_MASK);
}
496 
/* Select hardware-generated video M/N code (clears the SW-select bit). */
static void mtk_dp_config_mn_mode(struct mtk_dp *mtk_dp)
{
	/* 0: hw mode, 1: sw mode */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004,
			   0, VIDEO_M_CODE_SEL_DP_ENC0_P0_MASK);
}
503 
/* Set the SRAM read-start threshold field of the encoder. */
static void mtk_dp_set_sram_read_start(struct mtk_dp *mtk_dp, u32 val)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
			   val, SRAM_START_READ_THRD_DP_ENC0_P0_MASK);
}
509 
/*
 * One-time encoder setup: enable the video M/N generator and program
 * the SDP down-counters and video FIFO thresholds with the
 * driver-defined *_VAL constants from mtk_dp_reg.h.
 */
static void mtk_dp_setup_encoder(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
			   VIDEO_MN_GEN_EN_DP_ENC0_P0,
			   VIDEO_MN_GEN_EN_DP_ENC0_P0);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040,
			   SDP_DOWN_CNT_DP_ENC0_P0_VAL,
			   SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364,
			   SDP_DOWN_CNT_IN_HBLANK_DP_ENC1_P0_VAL,
			   SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK);
	/* fields live above bit 8/12 of their registers, hence the shifts */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3300,
			   VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_VAL << 8,
			   VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364,
			   FIFO_READ_START_POINT_DP_ENC1_P0_VAL << 12,
			   FIFO_READ_START_POINT_DP_ENC1_P0_MASK);
	mtk_dp_write(mtk_dp, MTK_DP_ENC1_P0_3368, DP_ENC1_P0_3368_VAL);
}
529 
/*
 * Switch the video source to the internal pattern generator (@enable)
 * or back to the external input, and select the test pattern.
 */
static void mtk_dp_pg_enable(struct mtk_dp *mtk_dp, bool enable)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3038,
			   enable ? VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK : 0,
			   VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31B0,
			   PGEN_PATTERN_SEL_VAL << 4, PGEN_PATTERN_SEL_MASK);
}
538 
/*
 * Configure the audio path for cfg->channels channels (2 or 8; anything
 * else falls back to 8).  The sequence matters: the channel-count
 * change is bracketed by setting and clearing DP_ENC_DUMMY_RW_1, and
 * the audio reset enable bit is set last.
 */
static void mtk_dp_audio_setup_channels(struct mtk_dp *mtk_dp,
					struct mtk_dp_audio_cfg *cfg)
{
	u32 channel_enable_bits;

	/* take audio samples from the DP RX mux input */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3324,
			   AUDIO_SOURCE_MUX_DP_ENC1_P0_DPRX,
			   AUDIO_SOURCE_MUX_DP_ENC1_P0_MASK);

	/* audio channel count change reset */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4,
			   DP_ENC_DUMMY_RW_1, DP_ENC_DUMMY_RW_1);
	/* regenerate parity, channel status and sample-present flags */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3304,
			   AU_PRTY_REGEN_DP_ENC1_P0_MASK |
			   AU_CH_STS_REGEN_DP_ENC1_P0_MASK |
			   AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK,
			   AU_PRTY_REGEN_DP_ENC1_P0_MASK |
			   AU_CH_STS_REGEN_DP_ENC1_P0_MASK |
			   AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK);

	switch (cfg->channels) {
	case 2:
		channel_enable_bits = AUDIO_2CH_SEL_DP_ENC0_P0_MASK |
				      AUDIO_2CH_EN_DP_ENC0_P0_MASK;
		break;
	case 8:
	default:
		channel_enable_bits = AUDIO_8CH_SEL_DP_ENC0_P0_MASK |
				      AUDIO_8CH_EN_DP_ENC0_P0_MASK;
		break;
	}
	/* enable the chosen layout, clear the other, and enable audio */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088,
			   channel_enable_bits | AU_EN_DP_ENC0_P0,
			   AUDIO_2CH_SEL_DP_ENC0_P0_MASK |
			   AUDIO_2CH_EN_DP_ENC0_P0_MASK |
			   AUDIO_8CH_SEL_DP_ENC0_P0_MASK |
			   AUDIO_8CH_EN_DP_ENC0_P0_MASK |
			   AU_EN_DP_ENC0_P0);

	/* audio channel count change reset */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 0, DP_ENC_DUMMY_RW_1);

	/* enable audio reset */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4,
			   DP_ENC_DUMMY_RW_1_AUDIO_RST_EN,
			   DP_ENC_DUMMY_RW_1_AUDIO_RST_EN);
}
586 
587 static void mtk_dp_audio_channel_status_set(struct mtk_dp *mtk_dp,
588 					    struct mtk_dp_audio_cfg *cfg)
589 {
590 	struct snd_aes_iec958 iec = { 0 };
591 
592 	switch (cfg->sample_rate) {
593 	case 32000:
594 		iec.status[3] = IEC958_AES3_CON_FS_32000;
595 		break;
596 	case 44100:
597 		iec.status[3] = IEC958_AES3_CON_FS_44100;
598 		break;
599 	case 48000:
600 		iec.status[3] = IEC958_AES3_CON_FS_48000;
601 		break;
602 	case 88200:
603 		iec.status[3] = IEC958_AES3_CON_FS_88200;
604 		break;
605 	case 96000:
606 		iec.status[3] = IEC958_AES3_CON_FS_96000;
607 		break;
608 	case 192000:
609 		iec.status[3] = IEC958_AES3_CON_FS_192000;
610 		break;
611 	default:
612 		iec.status[3] = IEC958_AES3_CON_FS_NOTID;
613 		break;
614 	}
615 
616 	switch (cfg->word_length_bits) {
617 	case 16:
618 		iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16;
619 		break;
620 	case 20:
621 		iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16 |
622 				IEC958_AES4_CON_MAX_WORDLEN_24;
623 		break;
624 	case 24:
625 		iec.status[4] = IEC958_AES4_CON_WORDLEN_24_20 |
626 				IEC958_AES4_CON_MAX_WORDLEN_24;
627 		break;
628 	default:
629 		iec.status[4] = IEC958_AES4_CON_WORDLEN_NOTID;
630 	}
631 
632 	/* IEC 60958 consumer channel status bits */
633 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_308C,
634 			   0, CH_STATUS_0_DP_ENC0_P0_MASK);
635 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3090,
636 			   iec.status[3] << 8, CH_STATUS_1_DP_ENC0_P0_MASK);
637 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3094,
638 			   iec.status[4], CH_STATUS_2_DP_ENC0_P0_MASK);
639 }
640 
/*
 * Write the ASP header bytes HB2/HB3 with (channel count - 1),
 * capped at 8 channels.
 * NOTE(review): @channels == 0 would underflow to -1 here — visible
 * callers appear to pass 2 or 8; confirm before relying on other values.
 */
static void mtk_dp_audio_sdp_asp_set_channels(struct mtk_dp *mtk_dp,
					      int channels)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_312C,
			   (min(8, channels) - 1) << 8,
			   ASP_HB2_DP_ENC0_P0_MASK | ASP_HB3_DP_ENC0_P0_MASK);
}
648 
/* Select the divide-by-2 setting for the audio M-code generator. */
static void mtk_dp_audio_set_divider(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30BC,
			   AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2,
			   AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_MASK);
}
655 
/*
 * Trigger transmission of the staged Audio InfoFrame (AUI) SDP:
 * select the AUI packet type, then pulse the packet-write bit.
 */
static void mtk_dp_sdp_trigger_aui(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280,
			   MTK_DP_SDP_AUI, SDP_PACKET_TYPE_DP_ENC1_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280,
			   SDP_PACKET_W_DP_ENC1_P0, SDP_PACKET_W_DP_ENC1_P0);
}
663 
/* Load 16 bytes of SDP payload into the packet-data register window. */
static void mtk_dp_sdp_set_data(struct mtk_dp *mtk_dp, u8 *data_bytes)
{
	mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_ENC1_P0_3200,
				data_bytes, 0x10);
}
669 
/*
 * Write the 4-byte SDP header for the AUI packet.  Each packet type
 * owns an 8-byte header slot starting at MTK_DP_ENC0_P0_30D8, indexed
 * by (type - 1).
 */
static void mtk_dp_sdp_set_header_aui(struct mtk_dp *mtk_dp,
				      struct dp_sdp_header *header)
{
	u32 db_addr = MTK_DP_ENC0_P0_30D8 + (MTK_DP_SDP_AUI - 1) * 8;

	mtk_dp_bulk_16bit_write(mtk_dp, db_addr, (u8 *)header, 4);
}
677 
/*
 * Stop periodic AUI SDP transmission.  The enable field is a single
 * byte inside a 32-bit register: the address is aligned down to a
 * 4-byte boundary and the byte lane is selected by the shift.
 */
static void mtk_dp_disable_sdp_aui(struct mtk_dp *mtk_dp)
{
	/* Disable periodic send */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 0,
			   0xff << ((MTK_DP_ENC0_P0_30A8 & 3) * 8));
}
684 
/*
 * Stage and start the Audio InfoFrame SDP: load payload and header,
 * stop any ongoing periodic send, trigger a one-shot write, then
 * re-enable periodic sending (mode value 0x05 in the AUI byte lane).
 */
static void mtk_dp_setup_sdp_aui(struct mtk_dp *mtk_dp,
				 struct dp_sdp *sdp)
{
	u32 shift;

	mtk_dp_sdp_set_data(mtk_dp, sdp->db);
	mtk_dp_sdp_set_header_aui(mtk_dp, &sdp->sdp_header);
	mtk_dp_disable_sdp_aui(mtk_dp);

	/* byte lane of the AUI enable field within its 32-bit register */
	shift = (MTK_DP_ENC0_P0_30A8 & 3) * 8;

	mtk_dp_sdp_trigger_aui(mtk_dp);
	/* Enable periodic sending */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc,
			   0x05 << shift, 0xff << shift);
}
701 
/* Clear all latched AUX interrupt status bits. */
static void mtk_dp_aux_irq_clear(struct mtk_dp *mtk_dp)
{
	mtk_dp_write(mtk_dp, MTK_DP_AUX_P0_3640, DP_AUX_P0_3640_VAL);
}
706 
/*
 * Stage an AUX request: command code plus the 20-bit DPCD/I2C address
 * split across the LSB and MSB address registers.
 */
static void mtk_dp_aux_set_cmd(struct mtk_dp *mtk_dp, u8 cmd, u32 addr)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3644,
			   cmd, MCU_REQUEST_COMMAND_AUX_TX_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3648,
			   addr, MCU_REQUEST_ADDRESS_LSB_AUX_TX_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_364C,
			   addr >> 16, MCU_REQUEST_ADDRESS_MSB_AUX_TX_P0_MASK);
}
716 
/*
 * Ack any previous transaction and reset the AUX PHY FIFO and the
 * request byte count in one update.
 */
static void mtk_dp_aux_clear_fifo(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650,
			   MCU_ACK_TRAN_COMPLETE_AUX_TX_P0,
			   MCU_ACK_TRAN_COMPLETE_AUX_TX_P0 |
			   PHY_FIFO_RST_AUX_TX_P0_MASK |
			   MCU_REQ_DATA_NUM_AUX_TX_P0_MASK);
}
725 
/* Fire the staged AUX request (hardware starts the transaction). */
static void mtk_dp_aux_request_ready(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3630,
			   AUX_TX_REQUEST_READY_AUX_TX_P0,
			   AUX_TX_REQUEST_READY_AUX_TX_P0);
}
732 
/* Load the outgoing AUX payload bytes into the TX FIFO window. */
static void mtk_dp_aux_fill_write_fifo(struct mtk_dp *mtk_dp, u8 *buf,
				       size_t length)
{
	mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_AUX_P0_3708, buf, length);
}
738 
/*
 * Drain @length bytes from the AUX RX FIFO into @buf.  Each byte is
 * fetched by pulsing the read strobe and then waiting @read_delay us
 * for the hardware to latch the next value into the data field.
 */
static void mtk_dp_aux_read_rx_fifo(struct mtk_dp *mtk_dp, u8 *buf,
				    size_t length, int read_delay)
{
	int read_pos;

	/* leave pulse-driven read mode selected (clear the mode field) */
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620,
			   0, AUX_RD_MODE_AUX_TX_P0_MASK);

	for (read_pos = 0; read_pos < length; read_pos++) {
		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620,
				   AUX_RX_FIFO_READ_PULSE_TX_P0,
				   AUX_RX_FIFO_READ_PULSE_TX_P0);

		/* Hardware needs time to update the data */
		usleep_range(read_delay, read_delay * 2);
		buf[read_pos] = (u8)(mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3620) &
				     AUX_RX_FIFO_READ_DATA_AUX_TX_P0_MASK);
	}
}
758 
759 static void mtk_dp_aux_set_length(struct mtk_dp *mtk_dp, size_t length)
760 {
761 	if (length > 0) {
762 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650,
763 				   (length - 1) << 12,
764 				   MCU_REQ_DATA_NUM_AUX_TX_P0_MASK);
765 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C,
766 				   0,
767 				   AUX_NO_LENGTH_AUX_TX_P0 |
768 				   AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK |
769 				   AUX_RESERVED_RW_0_AUX_TX_P0_MASK);
770 	} else {
771 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C,
772 				   AUX_NO_LENGTH_AUX_TX_P0,
773 				   AUX_NO_LENGTH_AUX_TX_P0 |
774 				   AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK |
775 				   AUX_RESERVED_RW_0_AUX_TX_P0_MASK);
776 	}
777 }
778 
779 static int mtk_dp_aux_wait_for_completion(struct mtk_dp *mtk_dp, bool is_read)
780 {
781 	int wait_reply = MTK_DP_AUX_WAIT_REPLY_COUNT;
782 
783 	while (--wait_reply) {
784 		u32 aux_irq_status;
785 
786 		if (is_read) {
787 			u32 fifo_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3618);
788 
789 			if (fifo_status &
790 			    (AUX_RX_FIFO_WRITE_POINTER_AUX_TX_P0_MASK |
791 			     AUX_RX_FIFO_FULL_AUX_TX_P0_MASK)) {
792 				return 0;
793 			}
794 		}
795 
796 		aux_irq_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3640);
797 		if (aux_irq_status & AUX_RX_AUX_RECV_COMPLETE_IRQ_AUX_TX_P0)
798 			return 0;
799 
800 		if (aux_irq_status & AUX_400US_TIMEOUT_IRQ_AUX_TX_P0)
801 			return -ETIMEDOUT;
802 
803 		/* Give the hardware a chance to reach completion before retrying */
804 		usleep_range(100, 500);
805 	}
806 
807 	return -ETIMEDOUT;
808 }
809 
/*
 * Execute one AUX transaction (native or I2C-over-AUX, read or write).
 *
 * @is_read:   direction of the transfer
 * @cmd:       DP_AUX_* request code
 * @addr:      DPCD or I2C address
 * @buf:       payload in (write) or out (read), @length bytes
 * @reply_cmd: filled with the sink's AUX reply code, even on failure
 *
 * Returns 0 on success, -EINVAL on an invalid read request,
 * -ETIMEDOUT when no reply arrives, or -EIO when the AUX PHY is stuck
 * outside its idle state (caller needs to reset).
 *
 * The register sequence is order-sensitive: FIFO/IRQ are cleared before
 * the command is staged, and the request is only fired after length and
 * payload are in place.
 */
static int mtk_dp_aux_do_transfer(struct mtk_dp *mtk_dp, bool is_read, u8 cmd,
				  u32 addr, u8 *buf, size_t length, u8 *reply_cmd)
{
	int ret;

	/* reads are capped at one AUX payload; native reads need a length */
	if (is_read && (length > DP_AUX_MAX_PAYLOAD_BYTES ||
			(cmd == DP_AUX_NATIVE_READ && !length)))
		return -EINVAL;

	if (!is_read)
		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704,
				   AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0,
				   AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0);

	/* We need to clear fifo and irq before sending commands to the sink device. */
	mtk_dp_aux_clear_fifo(mtk_dp);
	mtk_dp_aux_irq_clear(mtk_dp);

	mtk_dp_aux_set_cmd(mtk_dp, cmd, addr);
	mtk_dp_aux_set_length(mtk_dp, length);

	if (!is_read) {
		if (length)
			mtk_dp_aux_fill_write_fifo(mtk_dp, buf, length);

		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704,
				   AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK,
				   AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK);
	}

	mtk_dp_aux_request_ready(mtk_dp);

	/* Wait for feedback from sink device. */
	ret = mtk_dp_aux_wait_for_completion(mtk_dp, is_read);

	/* capture the reply code regardless of the outcome */
	*reply_cmd = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3624) &
		     AUX_RX_REPLY_COMMAND_AUX_TX_P0_MASK;

	if (ret) {
		u32 phy_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3628) &
				 AUX_RX_PHY_STATE_AUX_TX_P0_MASK;
		if (phy_status != AUX_RX_PHY_STATE_AUX_TX_P0_RX_IDLE) {
			dev_err(mtk_dp->dev,
				"AUX Rx Aux hang, need SW reset\n");
			return -EIO;
		}

		return -ETIMEDOUT;
	}

	if (!length) {
		/* address-only transfer: drop the "no length" mode again */
		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C,
				   0,
				   AUX_NO_LENGTH_AUX_TX_P0 |
				   AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK |
				   AUX_RESERVED_RW_0_AUX_TX_P0_MASK);
	} else if (is_read) {
		int read_delay;

		/* I2C-over-AUX sinks need a longer per-byte FIFO delay */
		if (cmd == (DP_AUX_I2C_READ | DP_AUX_I2C_MOT) ||
		    cmd == DP_AUX_I2C_READ)
			read_delay = 500;
		else
			read_delay = 100;

		mtk_dp_aux_read_rx_fifo(mtk_dp, buf, length, read_delay);
	}

	return 0;
}
880 
/*
 * Program voltage swing and pre-emphasis for one lane.  The per-lane
 * fields are evenly spaced, so lane N's fields sit at the lane-0
 * positions shifted by N * DP_TX1_VOLT_SWING_SHIFT.
 */
static void mtk_dp_set_swing_pre_emphasis(struct mtk_dp *mtk_dp, int lane_num,
					  int swing_val, int preemphasis)
{
	u32 lane_shift = lane_num * DP_TX1_VOLT_SWING_SHIFT;

	dev_dbg(mtk_dp->dev,
		"link training: swing_val = 0x%x, pre-emphasis = 0x%x\n",
		swing_val, preemphasis);

	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP,
			   swing_val << (DP_TX0_VOLT_SWING_SHIFT + lane_shift),
			   DP_TX0_VOLT_SWING_MASK << lane_shift);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP,
			   preemphasis << (DP_TX0_PRE_EMPH_SHIFT + lane_shift),
			   DP_TX0_PRE_EMPH_MASK << lane_shift);
}
897 
/* Zero the swing and pre-emphasis fields of all four lanes. */
static void mtk_dp_reset_swing_pre_emphasis(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP,
			   0,
			   DP_TX0_VOLT_SWING_MASK |
			   DP_TX1_VOLT_SWING_MASK |
			   DP_TX2_VOLT_SWING_MASK |
			   DP_TX3_VOLT_SWING_MASK |
			   DP_TX0_PRE_EMPH_MASK |
			   DP_TX1_PRE_EMPH_MASK |
			   DP_TX2_PRE_EMPH_MASK |
			   DP_TX3_PRE_EMPH_MASK);
}
911 
/*
 * Read the latched software IRQ status and, if anything is pending,
 * acknowledge it by pulsing the corresponding clear bits (set, then
 * clear).  Returns the status bits that were pending.
 */
static u32 mtk_dp_swirq_get_clear(struct mtk_dp *mtk_dp)
{
	u32 irq_status = mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_35D0) &
			 SW_IRQ_FINAL_STATUS_DP_TRANS_P0_MASK;

	if (irq_status) {
		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8,
				   irq_status, SW_IRQ_CLR_DP_TRANS_P0_MASK);
		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8,
				   0, SW_IRQ_CLR_DP_TRANS_P0_MASK);
	}

	return irq_status;
}
926 
/*
 * Read the latched hardware IRQ status (status field sits at bits
 * [15:12], hence the shift) and acknowledge pending bits by pulsing
 * the clear field.  Returns the status bits that were pending.
 */
static u32 mtk_dp_hwirq_get_clear(struct mtk_dp *mtk_dp)
{
	u32 irq_status = (mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3418) &
			  IRQ_STATUS_DP_TRANS_P0_MASK) >> 12;

	if (irq_status) {
		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418,
				   irq_status, IRQ_CLR_DP_TRANS_P0_MASK);
		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418,
				   0, IRQ_CLR_DP_TRANS_P0_MASK);
	}

	return irq_status;
}
941 
/*
 * Mask or unmask the transport interrupts (disconnect, connect and
 * "int" IRQs).  Mask bits are active-high: enabling clears them,
 * disabling sets them.
 */
static void mtk_dp_hwirq_enable(struct mtk_dp *mtk_dp, bool enable)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418,
			   enable ? 0 :
			   IRQ_MASK_DP_TRANS_P0_DISC_IRQ |
			   IRQ_MASK_DP_TRANS_P0_CONN_IRQ |
			   IRQ_MASK_DP_TRANS_P0_INT_IRQ,
			   IRQ_MASK_DP_TRANS_P0_MASK);
}
951 
/*
 * One-time controller defaults: crystal frequency, FEC clock-enable
 * mode, audio channel source, SDP vsync masking and the top-level AUX
 * IRQ mask.
 */
static void mtk_dp_initialize_settings(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_342C,
			   XTAL_FREQ_DP_TRANS_P0_DEFAULT,
			   XTAL_FREQ_DP_TRANS_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3540,
			   FEC_CLOCK_EN_MODE_DP_TRANS_P0,
			   FEC_CLOCK_EN_MODE_DP_TRANS_P0);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31EC,
			   AUDIO_CH_SRC_SEL_DP_ENC0_P0,
			   AUDIO_CH_SRC_SEL_DP_ENC0_P0);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C,
			   0, SDP_VSYNC_RISING_MASK_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_IRQ_MASK,
			   IRQ_MASK_AUX_TOP_IRQ, IRQ_MASK_AUX_TOP_IRQ);
}
968 
/*
 * Configure HPD detection: debounce threshold, the HPD-IRQ pulse-width
 * window and the connect/disconnect thresholds.
 */
static void mtk_dp_initialize_hpd_detect_settings(struct mtk_dp *mtk_dp)
{
	u32 val;
	/* Debounce threshold */
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410,
			   8, HPD_DEB_THD_DP_TRANS_P0_MASK);

	/* HPD IRQ pulse window: 500us lower / 1100us upper bound */
	val = (HPD_INT_THD_DP_TRANS_P0_LOWER_500US |
	       HPD_INT_THD_DP_TRANS_P0_UPPER_1100US) << 4;
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410,
			   val, HPD_INT_THD_DP_TRANS_P0_MASK);

	/*
	 * Connect threshold 1.5ms + 5 x 0.1ms = 2ms
	 * Disconnect threshold 1.5ms + 5 x 0.1ms = 2ms
	 */
	val = (5 << 8) | (5 << 12);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410,
			   val,
			   HPD_DISC_THD_DP_TRANS_P0_MASK |
			   HPD_CONN_THD_DP_TRANS_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3430,
			   HPD_INT_THD_ECO_DP_TRANS_P0_HIGH_BOUND_EXT,
			   HPD_INT_THD_ECO_DP_TRANS_P0_MASK);
}
994 
/*
 * Configure the AUX engine for a 26MHz reference clock: timeout
 * threshold, oversampling rate, RX UI counter, the MediaTek analog
 * front-end enable and complete-reply mode.
 */
static void mtk_dp_initialize_aux_settings(struct mtk_dp *mtk_dp)
{
	/* modify timeout threshold = 0x1595 */
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_360C,
			   AUX_TIMEOUT_THR_AUX_TX_P0_VAL,
			   AUX_TIMEOUT_THR_AUX_TX_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3658,
			   0, AUX_TX_OV_EN_AUX_TX_P0_MASK);
	/* 25 for 26M */
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3634,
			   AUX_TX_OVER_SAMPLE_RATE_FOR_26M << 8,
			   AUX_TX_OVER_SAMPLE_RATE_AUX_TX_P0_MASK);
	/* 13 for 26M */
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3614,
			   AUX_RX_UI_CNT_THR_AUX_FOR_26M,
			   AUX_RX_UI_CNT_THR_AUX_TX_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_37C8,
			   MTK_ATOP_EN_AUX_TX_P0,
			   MTK_ATOP_EN_AUX_TX_P0);

	/* Set complete reply mode for AUX */
	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3690,
			   RX_REPLY_COMPLETE_MODE_AUX_TX_P0,
			   RX_REPLY_COMPLETE_MODE_AUX_TX_P0);
}
1020 
/*
 * Digital-block init: unmute video in VBID, set the BS2BS mode value,
 * then pulse the encoder software reset (set, wait, clear).
 */
static void mtk_dp_initialize_digital_settings(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C,
			   0, VBID_VIDEO_MUTE_DP_ENC0_P0_MASK);

	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3368,
			   BS2BS_MODE_DP_ENC1_P0_VAL << 12,
			   BS2BS_MODE_DP_ENC1_P0_MASK);

	/* dp tx encoder reset all sw */
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004,
			   DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0,
			   DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0);

	/* Wait for sw reset to complete */
	usleep_range(1000, 5000);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004,
			   0, DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0);
}
1040 
/* Pulse the transmitter software reset bit (set, wait, clear). */
static void mtk_dp_digital_sw_reset(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C,
			   DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0,
			   DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0);

	/* Wait for sw reset to complete */
	usleep_range(1000, 5000);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C,
			   0, DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0);
}
1052 
/*
 * mtk_dp_set_lanes() - program the active lane count into the hardware
 * @mtk_dp: DP driver private data
 * @lanes: lane count in the hardware's encoding; callers pass
 *	   lane_count / 2 (see mtk_dp_train_setting())
 *
 * Writes the lane count into the encoder and transmitter registers and
 * toggles a dummy bit that is asserted whenever at least one lane is
 * active.
 */
static void mtk_dp_set_lanes(struct mtk_dp *mtk_dp, int lanes)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35F0,
			   lanes == 0 ? 0 : DP_TRANS_DUMMY_RW_0,
			   DP_TRANS_DUMMY_RW_0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000,
			   lanes, LANE_NUM_DP_ENC0_P0_MASK);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_34A4,
			   lanes << 2, LANE_NUM_DP_TRANS_P0_MASK);
}
1063 
/*
 * mtk_dp_get_calibration_data() - fetch PHY calibration values from efuse
 * @mtk_dp: DP driver private data
 *
 * Reads the "dp_calibration_data" nvmem cell (expected to contain
 * exactly four 32-bit words) and extracts each calibration field using
 * the per-SoC efuse format table (word index, shift, mask). If the
 * cell is missing, unreadable, of unexpected size, or any extracted
 * value falls outside its valid [min_val, max_val] range, the whole
 * table falls back to the per-SoC default values.
 */
static void mtk_dp_get_calibration_data(struct mtk_dp *mtk_dp)
{
	const struct mtk_dp_efuse_fmt *fmt;
	struct device *dev = mtk_dp->dev;
	struct nvmem_cell *cell;
	u32 *cal_data = mtk_dp->cal_data;
	u32 *buf;
	int i;
	size_t len;

	cell = nvmem_cell_get(dev, "dp_calibration_data");
	if (IS_ERR(cell)) {
		dev_warn(dev, "Failed to get nvmem cell dp_calibration_data\n");
		goto use_default_val;
	}

	buf = (u32 *)nvmem_cell_read(cell, &len);
	nvmem_cell_put(cell);

	/* The cell must hold four u32 words; buf is ERR_PTR on read failure */
	if (IS_ERR(buf) || ((len / sizeof(u32)) != 4)) {
		dev_warn(dev, "Failed to read nvmem_cell_read\n");

		if (!IS_ERR(buf))
			kfree(buf);

		goto use_default_val;
	}

	for (i = 0; i < MTK_DP_CAL_MAX; i++) {
		fmt = &mtk_dp->data->efuse_fmt[i];
		cal_data[i] = (buf[fmt->idx] >> fmt->shift) & fmt->mask;

		/* One out-of-range field invalidates the entire efuse data */
		if (cal_data[i] < fmt->min_val || cal_data[i] > fmt->max_val) {
			dev_warn(mtk_dp->dev, "Invalid efuse data, idx = %d\n", i);
			kfree(buf);
			goto use_default_val;
		}
	}
	kfree(buf);

	return;

use_default_val:
	dev_warn(mtk_dp->dev, "Use default calibration data\n");
	for (i = 0; i < MTK_DP_CAL_MAX; i++)
		cal_data[i] = mtk_dp->data->efuse_fmt[i].default_val;
}
1111 
/*
 * mtk_dp_set_calibration_data() - write calibration values into the PHY
 * @mtk_dp: DP driver private data
 *
 * Programs the clock/bias trim values and the per-lane PMOS/NMOS
 * impedance selections previously obtained by
 * mtk_dp_get_calibration_data() into the PHY registers.
 */
static void mtk_dp_set_calibration_data(struct mtk_dp *mtk_dp)
{
	u32 *cal_data = mtk_dp->cal_data;

	mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_DPAUX_TX,
			   cal_data[MTK_DP_CAL_CLKTX_IMPSE] << 20,
			   RG_CKM_PT0_CKTX_IMPSEL);
	mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_BIAS_GEN_00,
			   cal_data[MTK_DP_CAL_GLB_BIAS_TRIM] << 16,
			   RG_XTP_GLB_BIAS_INTR_CTRL);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] << 12,
			   RG_XTP_LN0_TX_IMPSEL_PMOS);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] << 16,
			   RG_XTP_LN0_TX_IMPSEL_NMOS);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] << 12,
			   RG_XTP_LN1_TX_IMPSEL_PMOS);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] << 16,
			   RG_XTP_LN1_TX_IMPSEL_NMOS);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] << 12,
			   RG_XTP_LN2_TX_IMPSEL_PMOS);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] << 16,
			   RG_XTP_LN2_TX_IMPSEL_NMOS);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] << 12,
			   RG_XTP_LN3_TX_IMPSEL_PMOS);
	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3,
			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] << 16,
			   RG_XTP_LN3_TX_IMPSEL_NMOS);
}
1147 
/*
 * mtk_dp_phy_configure() - reconfigure the DP PHY rate and lane count
 * @mtk_dp: DP driver private data
 * @link_rate: DPCD link bandwidth code (e.g. DP_LINK_BW_2_7)
 * @lane_count: number of lanes to enable
 *
 * Drops the power state to bandgap-only while the PHY is reprogrammed,
 * applies rate/lanes/SSC through the generic PHY framework, rewrites
 * the calibration data and restores the bandgap + TPLL + lane power
 * state.
 *
 * Returns: 0 on success, or the error from phy_configure().
 */
static int mtk_dp_phy_configure(struct mtk_dp *mtk_dp,
				u32 link_rate, int lane_count)
{
	int ret;
	union phy_configure_opts phy_opts = {
		.dp = {
			/* phy_configure_opts_dp expects the rate in 100 kbps */
			.link_rate = drm_dp_bw_code_to_link_rate(link_rate) / 100,
			.set_rate = 1,
			.lanes = lane_count,
			.set_lanes = 1,
			.ssc = mtk_dp->train_info.sink_ssc,
		}
	};

	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, DP_PWR_STATE_BANDGAP,
			   DP_PWR_STATE_MASK);

	ret = phy_configure(mtk_dp->phy, &phy_opts);
	if (ret)
		return ret;

	mtk_dp_set_calibration_data(mtk_dp);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL_LANE, DP_PWR_STATE_MASK);

	return 0;
}
1175 
1176 static void mtk_dp_set_idle_pattern(struct mtk_dp *mtk_dp, bool enable)
1177 {
1178 	u32 val = POST_MISC_DATA_LANE0_OV_DP_TRANS_P0_MASK |
1179 		  POST_MISC_DATA_LANE1_OV_DP_TRANS_P0_MASK |
1180 		  POST_MISC_DATA_LANE2_OV_DP_TRANS_P0_MASK |
1181 		  POST_MISC_DATA_LANE3_OV_DP_TRANS_P0_MASK;
1182 
1183 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3580,
1184 			   enable ? val : 0, val);
1185 }
1186 
/*
 * mtk_dp_train_set_pattern() - select the hardware training pattern
 * @mtk_dp: DP driver private data
 * @pattern: TPS index (1-4), or 0 to disable the pattern generator
 *
 * Exactly one of the PATTERN1..4 enable bits is set (bit 12 + pattern
 * index); a pattern of 0 clears them all.
 */
static void mtk_dp_train_set_pattern(struct mtk_dp *mtk_dp, int pattern)
{
	/* TPS1 */
	if (pattern == 1)
		mtk_dp_set_idle_pattern(mtk_dp, false);

	mtk_dp_update_bits(mtk_dp,
			   MTK_DP_TRANS_P0_3400,
			   pattern ? BIT(pattern - 1) << 12 : 0,
			   PATTERN1_EN_DP_TRANS_P0_MASK |
			   PATTERN2_EN_DP_TRANS_P0_MASK |
			   PATTERN3_EN_DP_TRANS_P0_MASK |
			   PATTERN4_EN_DP_TRANS_P0_MASK);
}
1201 
/* Enable enhanced framing in the encoder (sink side is set in training). */
static void mtk_dp_set_enhanced_frame_mode(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000,
			   ENHANCED_FRAME_EN_DP_ENC0_P0,
			   ENHANCED_FRAME_EN_DP_ENC0_P0);
}
1208 
/* Enable/disable the main-link scrambler in the transmitter. */
static void mtk_dp_training_set_scramble(struct mtk_dp *mtk_dp, bool enable)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3404,
			   enable ? DP_SCR_EN_DP_TRANS_P0_MASK : 0,
			   DP_SCR_EN_DP_TRANS_P0_MASK);
}
1215 
/*
 * mtk_dp_video_mute() - mute or unmute the video stream
 * @mtk_dp: DP driver private data
 * @enable: true to mute, false to unmute
 *
 * Sets the software video-mute override in the encoder, then issues a
 * SiP SMC call so secure firmware applies the matching (un)mute on its
 * side.
 */
static void mtk_dp_video_mute(struct mtk_dp *mtk_dp, bool enable)
{
	struct arm_smccc_res res;
	u32 val = VIDEO_MUTE_SEL_DP_ENC0_P0 |
		  (enable ? VIDEO_MUTE_SW_DP_ENC0_P0 : 0);

	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000,
			   val,
			   VIDEO_MUTE_SEL_DP_ENC0_P0 |
			   VIDEO_MUTE_SW_DP_ENC0_P0);

	arm_smccc_smc(MTK_DP_SIP_CONTROL_AARCH32,
		      mtk_dp->data->smc_cmd, enable,
		      0, 0, 0, 0, 0, &res);

	dev_dbg(mtk_dp->dev, "smc cmd: 0x%x, p1: %s, ret: 0x%lx-0x%lx\n",
		mtk_dp->data->smc_cmd, enable ? "enable" : "disable", res.a0, res.a1);
}
1234 
1235 static void mtk_dp_audio_mute(struct mtk_dp *mtk_dp, bool mute)
1236 {
1237 	u32 val[3];
1238 
1239 	if (mute) {
1240 		val[0] = VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 |
1241 			 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0;
1242 		val[1] = 0;
1243 		val[2] = 0;
1244 	} else {
1245 		val[0] = 0;
1246 		val[1] = AU_EN_DP_ENC0_P0;
1247 		/* Send one every two frames */
1248 		val[2] = 0x0F;
1249 	}
1250 
1251 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030,
1252 			   val[0],
1253 			   VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 |
1254 			   VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0);
1255 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088,
1256 			   val[1], AU_EN_DP_ENC0_P0);
1257 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A4,
1258 			   val[2], AU_TS_CFG_DP_ENC0_P0_MASK);
1259 }
1260 
/*
 * mtk_dp_aux_panel_poweron() - power the AUX channel and panel up/down
 * @mtk_dp: DP driver private data
 * @pwron: true to power on, false to power off
 *
 * On power-on the lane power state is raised first so the subsequent
 * DPCD DP_SET_POWER write can go through; the sleeps give the sink
 * time to settle after the power-state change. Power-off does the
 * reverse, dropping back to the bandgap + TPLL state.
 */
static void mtk_dp_aux_panel_poweron(struct mtk_dp *mtk_dp, bool pwron)
{
	if (pwron) {
		/* power on aux */
		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
				   DP_PWR_STATE_BANDGAP_TPLL_LANE,
				   DP_PWR_STATE_MASK);

		/* power on panel */
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
		usleep_range(2000, 5000);
	} else {
		/* power off panel */
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
		usleep_range(2000, 3000);

		/* power off aux */
		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
				   DP_PWR_STATE_BANDGAP_TPLL,
				   DP_PWR_STATE_MASK);
	}
}
1283 
/*
 * mtk_dp_power_enable() - power up the DP macro
 * @mtk_dp: DP driver private data
 *
 * Pulses the PHYD software reset, raises the power state to
 * bandgap + TPLL, enables AUX RX with clock detection and valid-signal
 * deglitching, and clears the forced CKTX0 clock enable.
 */
static void mtk_dp_power_enable(struct mtk_dp *mtk_dp)
{
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE,
			   0, SW_RST_B_PHYD);

	/* Wait for power enable */
	usleep_range(10, 200);

	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE,
			   SW_RST_B_PHYD, SW_RST_B_PHYD);
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL, DP_PWR_STATE_MASK);
	mtk_dp_write(mtk_dp, MTK_DP_1040,
		     RG_DPAUX_RX_VALID_DEGLITCH_EN | RG_XTP_GLB_CKDET_EN |
		     RG_DPAUX_RX_EN);
	mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 0, DA_CKM_CKTX0_EN_FORCE_EN);
}
1301 
/*
 * mtk_dp_power_disable() - power down the DP macro
 * @mtk_dp: DP driver private data
 *
 * Reverses mtk_dp_power_enable(): clears the power state, forces the
 * CKTX0 clock enable, disables AUX RX and powers down/isolates the
 * macro memories.
 */
static void mtk_dp_power_disable(struct mtk_dp *mtk_dp)
{
	mtk_dp_write(mtk_dp, MTK_DP_TOP_PWR_STATE, 0);

	mtk_dp_update_bits(mtk_dp, MTK_DP_0034,
			   DA_CKM_CKTX0_EN_FORCE_EN, DA_CKM_CKTX0_EN_FORCE_EN);

	/* Disable RX */
	mtk_dp_write(mtk_dp, MTK_DP_1040, 0);
	mtk_dp_write(mtk_dp, MTK_DP_TOP_MEM_PD,
		     0x550 | FUSE_SEL | MEM_ISO_EN);
}
1314 
/*
 * mtk_dp_initialize_priv_data() - reset the driver's software state
 * @mtk_dp: DP driver private data
 *
 * Training state defaults to the HBR2 bandwidth code at the maximum
 * lane count read from DT. eDP panels are treated as always plugged in
 * since they are not hot-pluggable.
 */
static void mtk_dp_initialize_priv_data(struct mtk_dp *mtk_dp)
{
	bool plugged_in = (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP);

	mtk_dp->train_info.link_rate = DP_LINK_BW_5_4;
	mtk_dp->train_info.lane_count = mtk_dp->max_lanes;
	mtk_dp->train_info.cable_plugged_in = plugged_in;

	mtk_dp->info.format = DP_PIXELFORMAT_RGB;
	memset(&mtk_dp->info.vm, 0, sizeof(struct videomode));
	mtk_dp->audio_enable = false;
}
1327 
/*
 * mtk_dp_sdp_set_down_cnt_init() - set the SDP down counter start value
 * @mtk_dp: DP driver private data
 * @sram_read_start: SRAM read start address chosen in mtk_dp_setup_tu()
 *
 * Derives the initial SDP down count from the link-bandwidth to
 * pixel-clock ratio (link_rate is a DPCD bandwidth code, hence the
 * 2700 factor for 270 Mbps units), then clamps it to a per-lane-count
 * minimum.
 */
static void mtk_dp_sdp_set_down_cnt_init(struct mtk_dp *mtk_dp,
					 u32 sram_read_start)
{
	u32 sdp_down_cnt_init = 0;
	struct drm_display_mode mode;
	struct videomode *vm = &mtk_dp->info.vm;

	drm_display_mode_from_videomode(vm, &mode);

	if (mode.clock > 0)
		sdp_down_cnt_init = sram_read_start *
				    mtk_dp->train_info.link_rate * 2700 * 8 /
				    (mode.clock * 4);

	switch (mtk_dp->train_info.lane_count) {
	case 1:
		sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x1A);
		break;
	case 2:
		/* case for LowResolution && High Audio Sample Rate */
		sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x10);
		sdp_down_cnt_init += mode.vtotal <= 525 ? 4 : 0;
		break;
	case 4:
	default:
		sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 6);
		break;
	}

	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040,
			   sdp_down_cnt_init,
			   SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK);
}
1361 
/*
 * mtk_dp_sdp_set_down_cnt_init_in_hblank() - SDP down count for hblank
 * @mtk_dp: DP driver private data
 *
 * Picks the initial SDP down count used during horizontal blanking,
 * based on the lane count, the vertical total (lower-resolution modes
 * get an extra offset) and whether the pixel clock exceeds the link
 * bandwidth (link_rate * 27 MHz per bandwidth-code unit). YUV420 halves
 * the effective pixel clock.
 */
static void mtk_dp_sdp_set_down_cnt_init_in_hblank(struct mtk_dp *mtk_dp)
{
	int pix_clk_mhz;
	u32 dc_offset;
	u32 spd_down_cnt_init = 0;
	struct drm_display_mode mode;
	struct videomode *vm = &mtk_dp->info.vm;

	drm_display_mode_from_videomode(vm, &mode);

	pix_clk_mhz = mtk_dp->info.format == DP_PIXELFORMAT_YUV420 ?
		      mode.clock / 2000 : mode.clock / 1000;

	switch (mtk_dp->train_info.lane_count) {
	case 1:
		spd_down_cnt_init = 0x20;
		break;
	case 2:
		dc_offset = (mode.vtotal <= 525) ? 0x14 : 0x00;
		spd_down_cnt_init = 0x18 + dc_offset;
		break;
	case 4:
	default:
		dc_offset = (mode.vtotal <= 525) ? 0x08 : 0x00;
		if (pix_clk_mhz > mtk_dp->train_info.link_rate * 27)
			spd_down_cnt_init = 0x8;
		else
			spd_down_cnt_init = 0x10 + dc_offset;
		break;
	}

	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, spd_down_cnt_init,
			   SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK);
}
1396 
/*
 * mtk_dp_setup_tu() - configure the transfer unit for the current mode
 * @mtk_dp: DP driver private data
 *
 * Derives the SRAM read start address from the horizontal active width
 * divided across lanes/clock phases (capped at
 * MTK_DP_TBC_BUF_READ_START_ADDR), then programs the encoder and both
 * SDP down counters.
 */
static void mtk_dp_setup_tu(struct mtk_dp *mtk_dp)
{
	u32 sram_read_start = min_t(u32, MTK_DP_TBC_BUF_READ_START_ADDR,
				    mtk_dp->info.vm.hactive /
				    mtk_dp->train_info.lane_count /
				    MTK_DP_4P1T / MTK_DP_HDE /
				    MTK_DP_PIX_PER_ADDR);
	mtk_dp_set_sram_read_start(mtk_dp, sram_read_start);
	mtk_dp_setup_encoder(mtk_dp);
	mtk_dp_sdp_set_down_cnt_init_in_hblank(mtk_dp);
	mtk_dp_sdp_set_down_cnt_init(mtk_dp, sram_read_start);
}
1409 
/* Program the TX output path for the current mode (currently TU setup). */
static void mtk_dp_set_tx_out(struct mtk_dp *mtk_dp)
{
	mtk_dp_setup_tu(mtk_dp);
}
1414 
/*
 * mtk_dp_train_update_swing_pre() - apply sink-requested drive settings
 * @mtk_dp: DP driver private data
 * @lanes: number of active lanes
 * @dpcd_adjust_req: raw DP_ADJUST_REQUEST_LANE0_1/LANE2_3 bytes
 *
 * For every lane, extracts the requested voltage swing and
 * pre-emphasis (two lanes per adjust-request byte, upper nibble for
 * the odd lane), programs them into the transmitter and echoes them
 * back to the sink via DP_TRAINING_LANEx_SET, flagging MAX_*_REACHED
 * when level 3 is requested.
 */
static void mtk_dp_train_update_swing_pre(struct mtk_dp *mtk_dp, int lanes,
					  u8 dpcd_adjust_req[2])
{
	int lane;

	for (lane = 0; lane < lanes; ++lane) {
		u8 val;
		u8 swing;
		u8 preemphasis;
		int index = lane / 2;
		int shift = lane % 2 ? DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 0;

		swing = (dpcd_adjust_req[index] >> shift) &
			DP_ADJUST_VOLTAGE_SWING_LANE0_MASK;
		preemphasis = ((dpcd_adjust_req[index] >> shift) &
			       DP_ADJUST_PRE_EMPHASIS_LANE0_MASK) >>
			      DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT;
		val = swing << DP_TRAIN_VOLTAGE_SWING_SHIFT |
		      preemphasis << DP_TRAIN_PRE_EMPHASIS_SHIFT;

		if (swing == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
			val |= DP_TRAIN_MAX_SWING_REACHED;
		if (preemphasis == 3)
			val |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

		mtk_dp_set_swing_pre_emphasis(mtk_dp, lane, swing, preemphasis);
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_LANE0_SET + lane,
				   val);
	}
}
1445 
/*
 * mtk_dp_pattern() - start a link-training pattern on both ends
 * @mtk_dp: DP driver private data
 * @is_tps1: true for TPS1 (clock recovery), false for the channel-EQ
 *	     pattern selected during capability parsing
 *
 * Enables the matching hardware pattern generator and tells the sink
 * which pattern to expect via DP_TRAINING_PATTERN_SET. Scrambling is
 * disabled for TPS1-3; TPS4 stays scrambled.
 */
static void mtk_dp_pattern(struct mtk_dp *mtk_dp, bool is_tps1)
{
	int pattern;
	unsigned int aux_offset;

	if (is_tps1) {
		pattern = 1;
		aux_offset = DP_LINK_SCRAMBLING_DISABLE | DP_TRAINING_PATTERN_1;
	} else {
		aux_offset = mtk_dp->train_info.channel_eq_pattern;

		switch (mtk_dp->train_info.channel_eq_pattern) {
		case DP_TRAINING_PATTERN_4:
			pattern = 4;
			break;
		case DP_TRAINING_PATTERN_3:
			pattern = 3;
			aux_offset |= DP_LINK_SCRAMBLING_DISABLE;
			break;
		case DP_TRAINING_PATTERN_2:
		default:
			pattern = 2;
			aux_offset |= DP_LINK_SCRAMBLING_DISABLE;
			break;
		}
	}

	mtk_dp_train_set_pattern(mtk_dp, pattern);
	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, aux_offset);
}
1476 
/*
 * mtk_dp_train_setting() - apply one training attempt's link parameters
 * @mtk_dp: DP driver private data
 * @target_link_rate: DPCD link bandwidth code to try
 * @target_lane_count: number of lanes to try
 *
 * Writes the rate/lane-count (with enhanced framing) and optional SSC
 * downspread to the sink's DPCD, then programs the local lane count
 * and PHY to match.
 *
 * Returns: 0 on success or the error from mtk_dp_phy_configure().
 */
static int mtk_dp_train_setting(struct mtk_dp *mtk_dp, u8 target_link_rate,
				u8 target_lane_count)
{
	int ret;

	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LINK_BW_SET, target_link_rate);
	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LANE_COUNT_SET,
			   target_lane_count | DP_LANE_COUNT_ENHANCED_FRAME_EN);

	if (mtk_dp->train_info.sink_ssc)
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_DOWNSPREAD_CTRL,
				   DP_SPREAD_AMP_0_5);

	/* mtk_dp_set_lanes() takes the hardware encoding: lane count / 2 */
	mtk_dp_set_lanes(mtk_dp, target_lane_count / 2);
	ret = mtk_dp_phy_configure(mtk_dp, target_link_rate, target_lane_count);
	if (ret)
		return ret;

	dev_dbg(mtk_dp->dev,
		"Link train target_link_rate = 0x%x, target_lane_count = 0x%x\n",
		target_link_rate, target_lane_count);

	return 0;
}
1501 
/*
 * mtk_dp_train_cr() - clock recovery phase of link training
 * @mtk_dp: DP driver private data
 * @target_lane_count: number of lanes being trained
 *
 * Transmits TPS1 and iterates the swing/pre-emphasis adjustment loop
 * until the sink reports clock recovery on all lanes or a retry limit
 * is hit.
 *
 * Returns: 0 on success, -ENODEV if the cable was unplugged mid-loop,
 * -ETIMEDOUT on training failure.
 */
static int mtk_dp_train_cr(struct mtk_dp *mtk_dp, u8 target_lane_count)
{
	u8 lane_adjust[2] = {};
	u8 link_status[DP_LINK_STATUS_SIZE] = {};
	u8 prev_lane_adjust = 0xff;
	int train_retries = 0;
	int voltage_retries = 0;

	mtk_dp_pattern(mtk_dp, true);

	/* In DP spec 1.4, the retry count of CR is defined as 10. */
	do {
		train_retries++;
		if (!mtk_dp->train_info.cable_plugged_in) {
			mtk_dp_train_set_pattern(mtk_dp, 0);
			return -ENODEV;
		}

		drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1,
				 lane_adjust, sizeof(lane_adjust));
		mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count,
					      lane_adjust);

		drm_dp_link_train_clock_recovery_delay(&mtk_dp->aux,
						       mtk_dp->rx_cap);

		/* check link status from sink device */
		drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status);
		if (drm_dp_clock_recovery_ok(link_status,
					     target_lane_count)) {
			dev_dbg(mtk_dp->dev, "Link train CR pass\n");
			return 0;
		}

		/*
		 * In DP spec 1.4, if current voltage level is the same
		 * with previous voltage level, we need to retry 5 times.
		 */
		if (prev_lane_adjust == link_status[4]) {
			voltage_retries++;
			/*
			 * Condition of CR fail:
			 * 1. Failed to pass CR using the same voltage
			 *    level over five times.
			 * 2. Failed to pass CR when the current voltage
			 *    level is the same with previous voltage
			 *    level and reach max voltage level (3).
			 */
			if (voltage_retries > MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY ||
			    (prev_lane_adjust & DP_ADJUST_VOLTAGE_SWING_LANE0_MASK) == 3) {
				dev_dbg(mtk_dp->dev, "Link train CR fail\n");
				break;
			}
		} else {
			/*
			 * If the voltage level is changed, we need to
			 * re-calculate this retry count.
			 */
			voltage_retries = 0;
		}
		/* link_status[4] is the first adjust-request byte */
		prev_lane_adjust = link_status[4];
	} while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY);

	/* Failed to train CR, and disable pattern. */
	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);
	mtk_dp_train_set_pattern(mtk_dp, 0);

	return -ETIMEDOUT;
}
1572 
/*
 * mtk_dp_train_eq() - channel equalization phase of link training
 * @mtk_dp: DP driver private data
 * @target_lane_count: number of lanes being trained
 *
 * Transmits the channel-EQ pattern chosen during capability parsing
 * and iterates the drive-adjustment loop until equalization succeeds
 * or the retry budget is exhausted. The pattern is always disabled
 * before returning success or timeout.
 *
 * Returns: 0 on success, -ENODEV if the cable was unplugged mid-loop,
 * -ETIMEDOUT on training failure.
 */
static int mtk_dp_train_eq(struct mtk_dp *mtk_dp, u8 target_lane_count)
{
	u8 lane_adjust[2] = {};
	u8 link_status[DP_LINK_STATUS_SIZE] = {};
	int train_retries = 0;

	mtk_dp_pattern(mtk_dp, false);

	do {
		train_retries++;
		if (!mtk_dp->train_info.cable_plugged_in) {
			mtk_dp_train_set_pattern(mtk_dp, 0);
			return -ENODEV;
		}

		drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1,
				 lane_adjust, sizeof(lane_adjust));
		mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count,
					      lane_adjust);

		drm_dp_link_train_channel_eq_delay(&mtk_dp->aux,
						   mtk_dp->rx_cap);

		/* check link status from sink device */
		drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status);
		if (drm_dp_channel_eq_ok(link_status, target_lane_count)) {
			dev_dbg(mtk_dp->dev, "Link train EQ pass\n");

			/* Training done, and disable pattern. */
			drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET,
					   DP_TRAINING_PATTERN_DISABLE);
			mtk_dp_train_set_pattern(mtk_dp, 0);
			return 0;
		}
		dev_dbg(mtk_dp->dev, "Link train EQ fail\n");
	} while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY);

	/* Failed to train EQ, and disable pattern. */
	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET,
			   DP_TRAINING_PATTERN_DISABLE);
	mtk_dp_train_set_pattern(mtk_dp, 0);

	return -ETIMEDOUT;
}
1617 
/*
 * mtk_dp_parse_capabilities() - read and cache the sink's DPCD caps
 * @mtk_dp: DP driver private data
 *
 * Skipped for eDP once capabilities are cached, since eDP panels are
 * not hot-pluggable. Otherwise reads the DPCD receiver capability
 * block, selects the highest channel-EQ training pattern the sink
 * supports, records SSC support, and for MST-capable sinks acks any
 * pending device-service IRQs.
 *
 * Returns: 0 on success or a negative errno on AUX failure.
 */
static int mtk_dp_parse_capabilities(struct mtk_dp *mtk_dp)
{
	u8 val;
	ssize_t ret;

	/*
	 * If we're eDP and capabilities were already parsed we can skip
	 * reading again because eDP panels aren't hotpluggable hence the
	 * caps and training information won't ever change in a boot life
	 */
	if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP &&
	    mtk_dp->rx_cap[DP_MAX_LINK_RATE] &&
	    mtk_dp->train_info.sink_ssc)
		return 0;

	ret = drm_dp_read_dpcd_caps(&mtk_dp->aux, mtk_dp->rx_cap);
	if (ret < 0)
		return ret;

	if (drm_dp_tps4_supported(mtk_dp->rx_cap))
		mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_4;
	else if (drm_dp_tps3_supported(mtk_dp->rx_cap))
		mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_3;
	else
		mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_2;

	mtk_dp->train_info.sink_ssc = drm_dp_max_downspread(mtk_dp->rx_cap);

	ret = drm_dp_dpcd_readb(&mtk_dp->aux, DP_MSTM_CAP, &val);
	if (ret < 1) {
		drm_err(mtk_dp->drm_dev, "Read mstm cap failed\n");
		return ret == 0 ? -EIO : ret;
	}

	if (val & DP_MST_CAP) {
		/* Clear DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 */
		ret = drm_dp_dpcd_readb(&mtk_dp->aux,
					DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0,
					&val);
		if (ret < 1) {
			drm_err(mtk_dp->drm_dev, "Read irq vector failed\n");
			return ret == 0 ? -EIO : ret;
		}

		if (val) {
			/* Write back the pending bits to acknowledge them */
			ret = drm_dp_dpcd_writeb(&mtk_dp->aux,
						 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0,
						 val);
			if (ret < 0)
				return ret;
		}
	}

	return 0;
}
1673 
1674 static bool mtk_dp_edid_parse_audio_capabilities(struct mtk_dp *mtk_dp,
1675 						 struct mtk_dp_audio_cfg *cfg)
1676 {
1677 	if (!mtk_dp->data->audio_supported)
1678 		return false;
1679 
1680 	if (mtk_dp->info.audio_cur_cfg.sad_count <= 0) {
1681 		drm_info(mtk_dp->drm_dev, "The SADs is NULL\n");
1682 		return false;
1683 	}
1684 
1685 	return true;
1686 }
1687 
/* Reset the PHY and drive settings before (re)starting link training. */
static void mtk_dp_train_change_mode(struct mtk_dp *mtk_dp)
{
	phy_reset(mtk_dp->phy);
	mtk_dp_reset_swing_pre_emphasis(mtk_dp);
}
1693 
/*
 * mtk_dp_training() - full link-training fallback loop
 * @mtk_dp: DP driver private data
 *
 * Starts from the highest link rate and lane count both ends support
 * and walks the standard fallback sequence: a clock-recovery failure
 * steps the link rate down (restarting from the top rate with half
 * the lanes once 1.62 Gbps also fails), a channel-EQ failure halves
 * the lane count. At most six attempts are made.
 *
 * Returns: 0 on success, -ENODEV if the cable was removed, -EIO when
 * no rate/lane combination is left, -ETIMEDOUT when the attempt budget
 * is exhausted, or another negative errno from the link setup.
 */
static int mtk_dp_training(struct mtk_dp *mtk_dp)
{
	int ret;
	u8 lane_count, link_rate, train_limit, max_link_rate;

	link_rate = min_t(u8, mtk_dp->max_linkrate,
			  mtk_dp->rx_cap[DP_MAX_LINK_RATE]);
	max_link_rate = link_rate;
	lane_count = min_t(u8, mtk_dp->max_lanes,
			   drm_dp_max_lane_count(mtk_dp->rx_cap));

	/*
	 * TPS are generated by the hardware pattern generator. From the
	 * hardware setting we need to disable this scramble setting before
	 * use the TPS pattern generator.
	 */
	mtk_dp_training_set_scramble(mtk_dp, false);

	for (train_limit = 6; train_limit > 0; train_limit--) {
		mtk_dp_train_change_mode(mtk_dp);

		ret = mtk_dp_train_setting(mtk_dp, link_rate, lane_count);
		if (ret)
			return ret;

		ret = mtk_dp_train_cr(mtk_dp, lane_count);
		if (ret == -ENODEV) {
			return ret;
		} else if (ret) {
			/* reduce link rate */
			switch (link_rate) {
			case DP_LINK_BW_1_62:
				/* Lowest rate failed: retry from the top rate with fewer lanes */
				lane_count = lane_count / 2;
				link_rate = max_link_rate;
				if (lane_count == 0)
					return -EIO;
				break;
			case DP_LINK_BW_2_7:
				link_rate = DP_LINK_BW_1_62;
				break;
			case DP_LINK_BW_5_4:
				link_rate = DP_LINK_BW_2_7;
				break;
			case DP_LINK_BW_8_1:
				link_rate = DP_LINK_BW_5_4;
				break;
			default:
				return -EINVAL;
			}
			continue;
		}

		ret = mtk_dp_train_eq(mtk_dp, lane_count);
		if (ret == -ENODEV) {
			return ret;
		} else if (ret) {
			/* reduce lane count */
			if (lane_count == 0)
				return -EIO;
			lane_count /= 2;
			continue;
		}

		/* if we can run to this, training is done. */
		break;
	}

	if (train_limit == 0)
		return -ETIMEDOUT;

	mtk_dp->train_info.link_rate = link_rate;
	mtk_dp->train_info.lane_count = lane_count;

	/*
	 * After training done, we need to output normal stream instead of TPS,
	 * so we need to enable scramble.
	 */
	mtk_dp_training_set_scramble(mtk_dp, true);
	mtk_dp_set_enhanced_frame_mode(mtk_dp);

	return 0;
}
1776 
/*
 * mtk_dp_video_enable() - switch video output on or off
 * @mtk_dp: DP driver private data
 * @enable: true to start video output, false to stop it
 *
 * Enable configures the TX output path before unmuting; disable mutes
 * first, then enables the pattern generator and MSA bypass.
 */
static void mtk_dp_video_enable(struct mtk_dp *mtk_dp, bool enable)
{
	/* the mute sequence is different between enable and disable */
	if (enable) {
		mtk_dp_msa_bypass_enable(mtk_dp, false);
		mtk_dp_pg_enable(mtk_dp, false);
		mtk_dp_set_tx_out(mtk_dp);
		mtk_dp_video_mute(mtk_dp, false);
	} else {
		mtk_dp_video_mute(mtk_dp, true);
		mtk_dp_pg_enable(mtk_dp, true);
		mtk_dp_msa_bypass_enable(mtk_dp, true);
	}
}
1791 
/*
 * mtk_dp_audio_sdp_setup() - build and install the audio infoframe SDP
 * @mtk_dp: DP driver private data
 * @cfg: negotiated audio configuration (channels, rate, word length)
 *
 * Packs an HDMI audio infoframe (PCM coding) into a DP secondary data
 * packet and hands it to the encoder, together with the ASP channel
 * count.
 */
static void mtk_dp_audio_sdp_setup(struct mtk_dp *mtk_dp,
				   struct mtk_dp_audio_cfg *cfg)
{
	struct dp_sdp sdp;
	struct hdmi_audio_infoframe frame;

	hdmi_audio_infoframe_init(&frame);
	frame.coding_type = HDMI_AUDIO_CODING_TYPE_PCM;
	frame.channels = cfg->channels;
	frame.sample_frequency = cfg->sample_rate;

	switch (cfg->word_length_bits) {
	case 16:
		frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_16;
		break;
	case 20:
		frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_20;
		break;
	case 24:
	default:
		frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_24;
		break;
	}

	hdmi_audio_infoframe_pack_for_dp(&frame, &sdp, MTK_DP_VERSION);

	mtk_dp_audio_sdp_asp_set_channels(mtk_dp, cfg->channels);
	mtk_dp_setup_sdp_aui(mtk_dp, &sdp);
}
1821 
/* Apply the full audio configuration: SDP, channel status, channels, divider. */
static void mtk_dp_audio_setup(struct mtk_dp *mtk_dp,
			       struct mtk_dp_audio_cfg *cfg)
{
	mtk_dp_audio_sdp_setup(mtk_dp, cfg);
	mtk_dp_audio_channel_status_set(mtk_dp, cfg);

	mtk_dp_audio_setup_channels(mtk_dp, cfg);
	mtk_dp_audio_set_divider(mtk_dp);
}
1831 
/*
 * Configure the video path (M/N generation, MSA, color depth/format)
 * for the currently cached mode. Returns the result of the color
 * format setup.
 */
static int mtk_dp_video_config(struct mtk_dp *mtk_dp)
{
	mtk_dp_config_mn_mode(mtk_dp);
	mtk_dp_set_msa(mtk_dp);
	mtk_dp_set_color_depth(mtk_dp);
	return mtk_dp_set_color_format(mtk_dp, mtk_dp->info.format);
}
1839 
/*
 * mtk_dp_init_port() - one-shot hardware/software initialization
 * @mtk_dp: DP driver private data
 *
 * Forces the idle pattern, resets the driver's software state, runs
 * the general/AUX/digital/HPD register setup and finishes with a
 * transmitter software reset.
 */
static void mtk_dp_init_port(struct mtk_dp *mtk_dp)
{
	mtk_dp_set_idle_pattern(mtk_dp, true);
	mtk_dp_initialize_priv_data(mtk_dp);

	mtk_dp_initialize_settings(mtk_dp);
	mtk_dp_initialize_aux_settings(mtk_dp);
	mtk_dp_initialize_digital_settings(mtk_dp);
	mtk_dp_initialize_hpd_detect_settings(mtk_dp);

	mtk_dp_digital_sw_reset(mtk_dp);
}
1852 
/*
 * mtk_dp_hpd_event_thread() - threaded half of the HPD interrupt
 * @hpd: IRQ number (unused)
 * @dev: pointer to struct mtk_dp
 *
 * Sleeps 100ms to debounce while a cable is present, then consumes the
 * event bits latched by mtk_dp_hpd_event() under the irq_thread_lock.
 * A cable-state change is forwarded to DRM as a hotplug event; on
 * unplug, the audio SDP is disabled, cached audio config cleared and
 * the debounce timer armed.
 */
static irqreturn_t mtk_dp_hpd_event_thread(int hpd, void *dev)
{
	struct mtk_dp *mtk_dp = dev;
	unsigned long flags;
	u32 status;

	if (mtk_dp->need_debounce && mtk_dp->train_info.cable_plugged_in)
		msleep(100);

	/* Atomically take and clear the events latched by the hard IRQ */
	spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags);
	status = mtk_dp->irq_thread_handle;
	mtk_dp->irq_thread_handle = 0;
	spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags);

	if (status & MTK_DP_THREAD_CABLE_STATE_CHG) {
		if (mtk_dp->bridge.dev)
			drm_helper_hpd_irq_event(mtk_dp->bridge.dev);

		if (!mtk_dp->train_info.cable_plugged_in) {
			mtk_dp_disable_sdp_aui(mtk_dp);
			memset(&mtk_dp->info.audio_cur_cfg, 0,
			       sizeof(mtk_dp->info.audio_cur_cfg));

			mtk_dp->need_debounce = false;
			mod_timer(&mtk_dp->debounce_timer,
				  jiffies + msecs_to_jiffies(100) - 1);
		}
	}

	if (status & MTK_DP_THREAD_HPD_EVENT)
		dev_dbg(mtk_dp->dev, "Receive IRQ from sink devices\n");

	return IRQ_HANDLED;
}
1887 
1888 static irqreturn_t mtk_dp_hpd_event(int hpd, void *dev)
1889 {
1890 	struct mtk_dp *mtk_dp = dev;
1891 	bool cable_sta_chg = false;
1892 	unsigned long flags;
1893 	u32 irq_status = mtk_dp_swirq_get_clear(mtk_dp) |
1894 			 mtk_dp_hwirq_get_clear(mtk_dp);
1895 
1896 	if (!irq_status)
1897 		return IRQ_HANDLED;
1898 
1899 	spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags);
1900 
1901 	if (irq_status & MTK_DP_HPD_INTERRUPT)
1902 		mtk_dp->irq_thread_handle |= MTK_DP_THREAD_HPD_EVENT;
1903 
1904 	/* Cable state is changed. */
1905 	if (irq_status != MTK_DP_HPD_INTERRUPT) {
1906 		mtk_dp->irq_thread_handle |= MTK_DP_THREAD_CABLE_STATE_CHG;
1907 		cable_sta_chg = true;
1908 	}
1909 
1910 	spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags);
1911 
1912 	if (cable_sta_chg) {
1913 		if (!!(mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3414) &
1914 		       HPD_DB_DP_TRANS_P0_MASK))
1915 			mtk_dp->train_info.cable_plugged_in = true;
1916 		else
1917 			mtk_dp->train_info.cable_plugged_in = false;
1918 	}
1919 
1920 	return IRQ_WAKE_THREAD;
1921 }
1922 
/*
 * mtk_dp_wait_hpd_asserted() - AUX .wait_hpd_asserted callback
 * @mtk_aux: AUX adapter embedded in struct mtk_dp
 * @wait_us: maximum time to wait, in microseconds
 *
 * Polls the debounced HPD status bit. On assertion, the cable is
 * marked as plugged in and the sink capabilities are (re)read so AUX
 * users see a consistent state; on timeout the cable is marked as
 * unplugged.
 *
 * Returns: 0 on success, or a negative errno on timeout or DPCD
 * parsing failure.
 */
static int mtk_dp_wait_hpd_asserted(struct drm_dp_aux *mtk_aux, unsigned long wait_us)
{
	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
	u32 val;
	int ret;

	ret = regmap_read_poll_timeout(mtk_dp->regs, MTK_DP_TRANS_P0_3414,
				       val, !!(val & HPD_DB_DP_TRANS_P0_MASK),
				       wait_us / 100, wait_us);
	if (ret) {
		mtk_dp->train_info.cable_plugged_in = false;
		return ret;
	}

	mtk_dp->train_info.cable_plugged_in = true;

	ret = mtk_dp_parse_capabilities(mtk_dp);
	if (ret) {
		drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n");
		return ret;
	}

	return 0;
}
1947 
1948 static int mtk_dp_dt_parse(struct mtk_dp *mtk_dp,
1949 			   struct platform_device *pdev)
1950 {
1951 	struct device_node *endpoint;
1952 	struct device *dev = &pdev->dev;
1953 	int ret;
1954 	void __iomem *base;
1955 	u32 linkrate;
1956 	int len;
1957 
1958 	base = devm_platform_ioremap_resource(pdev, 0);
1959 	if (IS_ERR(base))
1960 		return PTR_ERR(base);
1961 
1962 	mtk_dp->regs = devm_regmap_init_mmio(dev, base, &mtk_dp_regmap_config);
1963 	if (IS_ERR(mtk_dp->regs))
1964 		return PTR_ERR(mtk_dp->regs);
1965 
1966 	endpoint = of_graph_get_endpoint_by_regs(pdev->dev.of_node, 1, -1);
1967 	len = of_property_count_elems_of_size(endpoint,
1968 					      "data-lanes", sizeof(u32));
1969 	if (len < 0 || len > 4 || len == 3) {
1970 		dev_err(dev, "invalid data lane size: %d\n", len);
1971 		return -EINVAL;
1972 	}
1973 
1974 	mtk_dp->max_lanes = len;
1975 
1976 	ret = device_property_read_u32(dev, "max-linkrate-mhz", &linkrate);
1977 	if (ret) {
1978 		dev_err(dev, "failed to read max linkrate: %d\n", ret);
1979 		return ret;
1980 	}
1981 
1982 	mtk_dp->max_linkrate = drm_dp_link_rate_to_bw_code(linkrate * 100);
1983 
1984 	return 0;
1985 }
1986 
1987 static void mtk_dp_update_plugged_status(struct mtk_dp *mtk_dp)
1988 {
1989 	if (!mtk_dp->data->audio_supported || !mtk_dp->audio_enable)
1990 		return;
1991 
1992 	mutex_lock(&mtk_dp->update_plugged_status_lock);
1993 	if (mtk_dp->plugged_cb && mtk_dp->codec_dev)
1994 		mtk_dp->plugged_cb(mtk_dp->codec_dev,
1995 				   mtk_dp->enabled &
1996 				   mtk_dp->info.audio_cur_cfg.detect_monitor);
1997 	mutex_unlock(&mtk_dp->update_plugged_status_lock);
1998 }
1999 
/*
 * mtk_dp_bdg_detect() - bridge .detect callback
 * @bridge: the DRM bridge embedded in struct mtk_dp
 *
 * Reports "connected" only when HPD is asserted AND the sink reports a
 * non-zero sink count. AUX (and the panel) is temporarily powered for
 * the DPCD read when the output is not already enabled.
 */
static enum drm_connector_status mtk_dp_bdg_detect(struct drm_bridge *bridge)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	enum drm_connector_status ret = connector_status_disconnected;
	bool enabled = mtk_dp->enabled;
	u8 sink_count = 0;

	if (!mtk_dp->train_info.cable_plugged_in)
		return ret;

	if (!enabled)
		mtk_dp_aux_panel_poweron(mtk_dp, true);

	/*
	 * Some dongles still source HPD when they do not connect to any
	 * sink device. To avoid this, we need to read the sink count
	 * to make sure we do connect to sink devices. After this detect
	 * function, we just need to check the HPD connection to check
	 * whether we connect to a sink device.
	 */
	drm_dp_dpcd_readb(&mtk_dp->aux, DP_SINK_COUNT, &sink_count);
	if (DP_GET_SINK_COUNT(sink_count))
		ret = connector_status_connected;

	if (!enabled)
		mtk_dp_aux_panel_poweron(mtk_dp, false);

	return ret;
}
2029 
/*
 * mtk_dp_get_edid() - bridge .get_edid callback
 * @bridge: the DRM bridge embedded in struct mtk_dp
 * @connector: connector whose DDC channel is read
 *
 * Temporarily powers up the bridge chain and AUX when the output is
 * disabled, fetches the EDID over DDC, refreshes the cached sink
 * capabilities (used later by mode_valid and the input-format
 * negotiation), and extracts the audio information (SAD count and
 * monitor audio support) for the audio path.
 *
 * Returns: the EDID (caller owns it), or NULL on failure.
 */
static struct edid *mtk_dp_get_edid(struct drm_bridge *bridge,
				    struct drm_connector *connector)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	bool enabled = mtk_dp->enabled;
	struct edid *new_edid = NULL;
	struct mtk_dp_audio_cfg *audio_caps = &mtk_dp->info.audio_cur_cfg;

	if (!enabled) {
		drm_atomic_bridge_chain_pre_enable(bridge, connector->state->state);
		mtk_dp_aux_panel_poweron(mtk_dp, true);
	}

	new_edid = drm_get_edid(connector, &mtk_dp->aux.ddc);

	/*
	 * Parse capability here to let atomic_get_input_bus_fmts and
	 * mode_valid use the capability to calculate sink bitrates.
	 */
	if (mtk_dp_parse_capabilities(mtk_dp)) {
		drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n");
		kfree(new_edid);
		new_edid = NULL;
	}

	if (new_edid) {
		struct cea_sad *sads;

		/* Only the SAD count matters here; free the array right away */
		audio_caps->sad_count = drm_edid_to_sad(new_edid, &sads);
		kfree(sads);

		audio_caps->detect_monitor = drm_detect_monitor_audio(new_edid);
	}

	if (!enabled) {
		mtk_dp_aux_panel_poweron(mtk_dp, false);
		drm_atomic_bridge_chain_post_disable(bridge, connector->state->state);
	}

	return new_edid;
}
2071 
/*
 * mtk_dp_aux_transfer - drm_dp_aux .transfer callback
 *
 * Validates the AUX request, then performs it in chunks of at most
 * DP_AUX_MAX_PAYLOAD_BYTES via the hardware helper. On any failure the
 * reply field is set to NACK and a negative errno is returned; on success
 * the full requested size is returned.
 */
static ssize_t mtk_dp_aux_transfer(struct drm_dp_aux *mtk_aux,
				   struct drm_dp_aux_msg *msg)
{
	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
	bool is_read;
	u8 request;
	size_t accessed_bytes = 0;
	int ret;

	/* For full DP, refuse AUX traffic while no cable is plugged;
	 * eDP panels are hardwired, so the check does not apply there.
	 */
	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP &&
	    !mtk_dp->train_info.cable_plugged_in) {
		ret = -EIO;
		goto err;
	}

	switch (msg->request) {
	case DP_AUX_I2C_MOT:
	case DP_AUX_I2C_WRITE:
	case DP_AUX_NATIVE_WRITE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
	case DP_AUX_I2C_WRITE_STATUS_UPDATE | DP_AUX_I2C_MOT:
		/* The hardware has no distinct status-update op: strip the
		 * bit and issue it as a plain write.
		 */
		request = msg->request & ~DP_AUX_I2C_WRITE_STATUS_UPDATE;
		is_read = false;
		break;
	case DP_AUX_I2C_READ:
	case DP_AUX_NATIVE_READ:
	case DP_AUX_I2C_READ | DP_AUX_I2C_MOT:
		request = msg->request;
		is_read = true;
		break;
	default:
		dev_err(mtk_dp->dev, "invalid aux cmd = %d\n",
			msg->request);
		ret = -EINVAL;
		goto err;
	}

	/* NOTE: a do-while is used, so a zero-length message still issues
	 * one transfer — presumably for address-only I2C transactions;
	 * confirm against mtk_dp_aux_do_transfer().
	 */
	do {
		size_t to_access = min_t(size_t, DP_AUX_MAX_PAYLOAD_BYTES,
					 msg->size - accessed_bytes);

		ret = mtk_dp_aux_do_transfer(mtk_dp, is_read, request,
					     msg->address + accessed_bytes,
					     msg->buffer + accessed_bytes,
					     to_access, &msg->reply);

		if (ret) {
			dev_info(mtk_dp->dev,
				 "Failed to do AUX transfer: %d\n", ret);
			goto err;
		}
		accessed_bytes += to_access;
	} while (accessed_bytes < msg->size);

	return msg->size;
err:
	msg->reply = DP_AUX_NATIVE_REPLY_NACK | DP_AUX_I2C_REPLY_NACK;
	return ret;
}
2131 
2132 static int mtk_dp_poweron(struct mtk_dp *mtk_dp)
2133 {
2134 	int ret;
2135 
2136 	ret = phy_init(mtk_dp->phy);
2137 	if (ret) {
2138 		dev_err(mtk_dp->dev, "Failed to initialize phy: %d\n", ret);
2139 		return ret;
2140 	}
2141 
2142 	mtk_dp_init_port(mtk_dp);
2143 	mtk_dp_power_enable(mtk_dp);
2144 
2145 	return 0;
2146 }
2147 
/*
 * mtk_dp_poweroff - counterpart of mtk_dp_poweron
 *
 * Powers down the controller before releasing the PHY (reverse of the
 * power-on order).
 */
static void mtk_dp_poweroff(struct mtk_dp *mtk_dp)
{
	mtk_dp_power_disable(mtk_dp);
	phy_exit(mtk_dp->phy);
}
2153 
/*
 * mtk_dp_bridge_attach - drm_bridge .attach callback
 *
 * Registers the AUX channel, powers the controller on, chains the next
 * bridge (if any) and, for full DP only, arms the HPD interrupt. The
 * driver requires DRM_BRIDGE_ATTACH_NO_CONNECTOR since it does not
 * create a connector itself. Errors unwind in reverse acquisition order
 * via the goto ladder.
 */
static int mtk_dp_bridge_attach(struct drm_bridge *bridge,
				enum drm_bridge_attach_flags flags)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	int ret;

	if (!(flags & DRM_BRIDGE_ATTACH_NO_CONNECTOR)) {
		dev_err(mtk_dp->dev, "Driver does not provide a connector!");
		return -EINVAL;
	}

	mtk_dp->aux.drm_dev = bridge->dev;
	ret = drm_dp_aux_register(&mtk_dp->aux);
	if (ret) {
		dev_err(mtk_dp->dev,
			"failed to register DP AUX channel: %d\n", ret);
		return ret;
	}

	ret = mtk_dp_poweron(mtk_dp);
	if (ret)
		goto err_aux_register;

	if (mtk_dp->next_bridge) {
		ret = drm_bridge_attach(bridge->encoder, mtk_dp->next_bridge,
					&mtk_dp->bridge, flags);
		if (ret) {
			drm_warn(mtk_dp->drm_dev,
				 "Failed to attach external bridge: %d\n", ret);
			goto err_bridge_attach;
		}
	}

	mtk_dp->drm_dev = bridge->dev;

	/* eDP uses HPD polling instead of the interrupt (see probe) */
	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) {
		/* IRQ was requested with IRQ_NOAUTOEN in probe; enable now */
		irq_clear_status_flags(mtk_dp->irq, IRQ_NOAUTOEN);
		enable_irq(mtk_dp->irq);
		mtk_dp_hwirq_enable(mtk_dp, true);
	}

	return 0;

err_bridge_attach:
	mtk_dp_poweroff(mtk_dp);
err_aux_register:
	drm_dp_aux_unregister(&mtk_dp->aux);
	return ret;
}
2203 
/*
 * mtk_dp_bridge_detach - drm_bridge .detach callback
 *
 * Reverses mtk_dp_bridge_attach: quiesces the interrupt (full DP only),
 * drops the drm_dev reference, powers off and unregisters the AUX channel.
 */
static void mtk_dp_bridge_detach(struct drm_bridge *bridge)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);

	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) {
		mtk_dp_hwirq_enable(mtk_dp, false);
		disable_irq(mtk_dp->irq);
	}
	mtk_dp->drm_dev = NULL;
	mtk_dp_poweroff(mtk_dp);
	drm_dp_aux_unregister(&mtk_dp->aux);
}
2216 
/*
 * mtk_dp_bridge_atomic_enable - drm_bridge .atomic_enable callback
 *
 * Powers the AUX/panel, runs link training, configures and unmutes video,
 * then sets up audio if the cached EDID capabilities allow it. On any
 * failure the AUX block is dropped back to the bandgap-only power state
 * and the bridge is left disabled.
 */
static void mtk_dp_bridge_atomic_enable(struct drm_bridge *bridge,
					struct drm_bridge_state *old_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	int ret;

	/* Cache the connector; needed later for ELD retrieval in audio ops */
	mtk_dp->conn = drm_atomic_get_new_connector_for_encoder(old_state->base.state,
								bridge->encoder);
	if (!mtk_dp->conn) {
		drm_err(mtk_dp->drm_dev,
			"Can't enable bridge as connector is missing\n");
		return;
	}

	mtk_dp_aux_panel_poweron(mtk_dp, true);

	/* Training */
	ret = mtk_dp_training(mtk_dp);
	if (ret) {
		drm_err(mtk_dp->drm_dev, "Training failed, %d\n", ret);
		goto power_off_aux;
	}

	ret = mtk_dp_video_config(mtk_dp);
	if (ret)
		goto power_off_aux;

	mtk_dp_video_enable(mtk_dp, true);

	mtk_dp->audio_enable =
		mtk_dp_edid_parse_audio_capabilities(mtk_dp,
						     &mtk_dp->info.audio_cur_cfg);
	if (mtk_dp->audio_enable) {
		mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);
		mtk_dp_audio_mute(mtk_dp, false);
	} else {
		/* Clear stale audio config so later reads see "no audio" */
		memset(&mtk_dp->info.audio_cur_cfg, 0,
		       sizeof(mtk_dp->info.audio_cur_cfg));
	}

	mtk_dp->enabled = true;
	mtk_dp_update_plugged_status(mtk_dp);

	return;
power_off_aux:
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL,
			   DP_PWR_STATE_MASK);
}
2266 
/*
 * mtk_dp_bridge_atomic_disable - drm_bridge .atomic_disable callback
 *
 * Mutes audio, stops video, puts the sink into D3 over DPCD (when a cable
 * is present) and drops the AUX block to the bandgap-only power state.
 */
static void mtk_dp_bridge_atomic_disable(struct drm_bridge *bridge,
					 struct drm_bridge_state *old_state)
{
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);

	mtk_dp->enabled = false;
	mtk_dp_update_plugged_status(mtk_dp);
	mtk_dp_video_enable(mtk_dp, false);
	mtk_dp_audio_mute(mtk_dp, true);

	if (mtk_dp->train_info.cable_plugged_in) {
		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
		/* Give the sink time to process the D3 request */
		usleep_range(2000, 3000);
	}

	/* power off aux */
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL,
			   DP_PWR_STATE_MASK);

	/* Ensure the sink is muted */
	msleep(20);
}
2290 
2291 static enum drm_mode_status
2292 mtk_dp_bridge_mode_valid(struct drm_bridge *bridge,
2293 			 const struct drm_display_info *info,
2294 			 const struct drm_display_mode *mode)
2295 {
2296 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2297 	u32 bpp = info->color_formats & DRM_COLOR_FORMAT_YCBCR422 ? 16 : 24;
2298 	u32 rate = min_t(u32, drm_dp_max_link_rate(mtk_dp->rx_cap) *
2299 			      drm_dp_max_lane_count(mtk_dp->rx_cap),
2300 			 drm_dp_bw_code_to_link_rate(mtk_dp->max_linkrate) *
2301 			 mtk_dp->max_lanes);
2302 
2303 	if (rate < mode->clock * bpp / 8)
2304 		return MODE_CLOCK_HIGH;
2305 
2306 	return MODE_OK;
2307 }
2308 
2309 static u32 *mtk_dp_bridge_atomic_get_output_bus_fmts(struct drm_bridge *bridge,
2310 						     struct drm_bridge_state *bridge_state,
2311 						     struct drm_crtc_state *crtc_state,
2312 						     struct drm_connector_state *conn_state,
2313 						     unsigned int *num_output_fmts)
2314 {
2315 	u32 *output_fmts;
2316 
2317 	*num_output_fmts = 0;
2318 	output_fmts = kmalloc(sizeof(*output_fmts), GFP_KERNEL);
2319 	if (!output_fmts)
2320 		return NULL;
2321 	*num_output_fmts = 1;
2322 	output_fmts[0] = MEDIA_BUS_FMT_FIXED;
2323 	return output_fmts;
2324 }
2325 
/* Input bus formats accepted by the MT8195 DP/eDP TX when not forced
 * to YUV422 by bandwidth constraints (see atomic_get_input_bus_fmts).
 */
static const u32 mt8195_input_fmts[] = {
	MEDIA_BUS_FMT_RGB888_1X24,
	MEDIA_BUS_FMT_YUV8_1X24,
	MEDIA_BUS_FMT_YUYV8_1X16,
};
2331 
/*
 * mtk_dp_bridge_atomic_get_input_bus_fmts - advertise input bus formats
 *
 * Normally offers the full mt8195_input_fmts table. When the usable link
 * bandwidth (min of sink and source capability) is insufficient for RGB888
 * at the adjusted mode but sufficient for YUV422, and the sink supports
 * YCbCr 4:2:2, only YUYV8_1X16 is offered so higher resolutions remain
 * reachable. Returns a kcalloc'd array (caller frees) or NULL on OOM.
 */
static u32 *mtk_dp_bridge_atomic_get_input_bus_fmts(struct drm_bridge *bridge,
						    struct drm_bridge_state *bridge_state,
						    struct drm_crtc_state *crtc_state,
						    struct drm_connector_state *conn_state,
						    u32 output_fmt,
						    unsigned int *num_input_fmts)
{
	u32 *input_fmts;
	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
	struct drm_display_mode *mode = &crtc_state->adjusted_mode;
	struct drm_display_info *display_info =
		&conn_state->connector->display_info;
	/* Usable bandwidth: capped by both sink and source capabilities */
	u32 rate = min_t(u32, drm_dp_max_link_rate(mtk_dp->rx_cap) *
			      drm_dp_max_lane_count(mtk_dp->rx_cap),
			 drm_dp_bw_code_to_link_rate(mtk_dp->max_linkrate) *
			 mtk_dp->max_lanes);

	*num_input_fmts = 0;

	/*
	 * If the linkrate is smaller than datarate of RGB888, larger than
	 * datarate of YUV422 and sink device supports YUV422, we output YUV422
	 * format. Use this condition, we can support more resolution.
	 */
	if ((rate < (mode->clock * 24 / 8)) &&
	    (rate > (mode->clock * 16 / 8)) &&
	    (display_info->color_formats & DRM_COLOR_FORMAT_YCBCR422)) {
		input_fmts = kcalloc(1, sizeof(*input_fmts), GFP_KERNEL);
		if (!input_fmts)
			return NULL;
		*num_input_fmts = 1;
		input_fmts[0] = MEDIA_BUS_FMT_YUYV8_1X16;
	} else {
		input_fmts = kcalloc(ARRAY_SIZE(mt8195_input_fmts),
				     sizeof(*input_fmts),
				     GFP_KERNEL);
		if (!input_fmts)
			return NULL;

		*num_input_fmts = ARRAY_SIZE(mt8195_input_fmts);
		memcpy(input_fmts, mt8195_input_fmts, sizeof(mt8195_input_fmts));
	}

	return input_fmts;
}
2377 
2378 static int mtk_dp_bridge_atomic_check(struct drm_bridge *bridge,
2379 				      struct drm_bridge_state *bridge_state,
2380 				      struct drm_crtc_state *crtc_state,
2381 				      struct drm_connector_state *conn_state)
2382 {
2383 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2384 	struct drm_crtc *crtc = conn_state->crtc;
2385 	unsigned int input_bus_format;
2386 
2387 	input_bus_format = bridge_state->input_bus_cfg.format;
2388 
2389 	dev_dbg(mtk_dp->dev, "input format 0x%04x, output format 0x%04x\n",
2390 		bridge_state->input_bus_cfg.format,
2391 		 bridge_state->output_bus_cfg.format);
2392 
2393 	if (input_bus_format == MEDIA_BUS_FMT_YUYV8_1X16)
2394 		mtk_dp->info.format = DP_PIXELFORMAT_YUV422;
2395 	else
2396 		mtk_dp->info.format = DP_PIXELFORMAT_RGB;
2397 
2398 	if (!crtc) {
2399 		drm_err(mtk_dp->drm_dev,
2400 			"Can't enable bridge as connector state doesn't have a crtc\n");
2401 		return -EINVAL;
2402 	}
2403 
2404 	drm_display_mode_to_videomode(&crtc_state->adjusted_mode, &mtk_dp->info.vm);
2405 
2406 	return 0;
2407 }
2408 
/* drm_bridge vtable shared by the DP and eDP variants; .detect/.get_edid
 * are only advertised (via bridge.ops) for full DP, see mtk_dp_probe.
 */
static const struct drm_bridge_funcs mtk_dp_bridge_funcs = {
	.atomic_check = mtk_dp_bridge_atomic_check,
	.atomic_duplicate_state = drm_atomic_helper_bridge_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_bridge_destroy_state,
	.atomic_get_output_bus_fmts = mtk_dp_bridge_atomic_get_output_bus_fmts,
	.atomic_get_input_bus_fmts = mtk_dp_bridge_atomic_get_input_bus_fmts,
	.atomic_reset = drm_atomic_helper_bridge_reset,
	.attach = mtk_dp_bridge_attach,
	.detach = mtk_dp_bridge_detach,
	.atomic_enable = mtk_dp_bridge_atomic_enable,
	.atomic_disable = mtk_dp_bridge_atomic_disable,
	.mode_valid = mtk_dp_bridge_mode_valid,
	.get_edid = mtk_dp_get_edid,
	.detect = mtk_dp_bdg_detect,
};
2424 
/*
 * mtk_dp_debounce_timer - HPD debounce timer expiry
 *
 * Re-arms the "needs debounce" flag once the debounce window has elapsed.
 */
static void mtk_dp_debounce_timer(struct timer_list *t)
{
	struct mtk_dp *mtk_dp = from_timer(mtk_dp, t, debounce_timer);

	mtk_dp->need_debounce = true;
}
2431 
2432 /*
2433  * HDMI audio codec callbacks
2434  */
2435 static int mtk_dp_audio_hw_params(struct device *dev, void *data,
2436 				  struct hdmi_codec_daifmt *daifmt,
2437 				  struct hdmi_codec_params *params)
2438 {
2439 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2440 
2441 	if (!mtk_dp->enabled) {
2442 		dev_err(mtk_dp->dev, "%s, DP is not ready!\n", __func__);
2443 		return -ENODEV;
2444 	}
2445 
2446 	mtk_dp->info.audio_cur_cfg.channels = params->cea.channels;
2447 	mtk_dp->info.audio_cur_cfg.sample_rate = params->sample_rate;
2448 
2449 	mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);
2450 
2451 	return 0;
2452 }
2453 
2454 static int mtk_dp_audio_startup(struct device *dev, void *data)
2455 {
2456 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2457 
2458 	mtk_dp_audio_mute(mtk_dp, false);
2459 
2460 	return 0;
2461 }
2462 
2463 static void mtk_dp_audio_shutdown(struct device *dev, void *data)
2464 {
2465 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2466 
2467 	mtk_dp_audio_mute(mtk_dp, true);
2468 }
2469 
/*
 * mtk_dp_audio_get_eld - hdmi-codec .get_eld callback
 *
 * Copies the connector's ELD into @buf while the bridge is enabled
 * (mtk_dp->conn is set in atomic_enable before ->enabled becomes true);
 * otherwise zeroes the buffer to signal "no ELD".
 */
static int mtk_dp_audio_get_eld(struct device *dev, void *data, uint8_t *buf,
				size_t len)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	if (mtk_dp->enabled)
		memcpy(buf, mtk_dp->conn->eld, len);
	else
		memset(buf, 0, len);

	return 0;
}
2482 
2483 static int mtk_dp_audio_hook_plugged_cb(struct device *dev, void *data,
2484 					hdmi_codec_plugged_cb fn,
2485 					struct device *codec_dev)
2486 {
2487 	struct mtk_dp *mtk_dp = data;
2488 
2489 	mutex_lock(&mtk_dp->update_plugged_status_lock);
2490 	mtk_dp->plugged_cb = fn;
2491 	mtk_dp->codec_dev = codec_dev;
2492 	mutex_unlock(&mtk_dp->update_plugged_status_lock);
2493 
2494 	mtk_dp_update_plugged_status(mtk_dp);
2495 
2496 	return 0;
2497 }
2498 
/* Ops handed to the hdmi-codec platform device (see
 * mtk_dp_register_audio_driver).
 */
static const struct hdmi_codec_ops mtk_dp_audio_codec_ops = {
	.hw_params = mtk_dp_audio_hw_params,
	.audio_startup = mtk_dp_audio_startup,
	.audio_shutdown = mtk_dp_audio_shutdown,
	.get_eld = mtk_dp_audio_get_eld,
	.hook_plugged_cb = mtk_dp_audio_hook_plugged_cb,
	.no_capture_mute = 1,
};
2507 
/*
 * mtk_dp_register_audio_driver - spawn the hdmi-codec child device
 *
 * Registers an hdmi-codec platform device (I2S, up to 8 channels) whose
 * pdata carries our audio ops and driver instance. Returns 0 on success
 * or the registration error.
 */
static int mtk_dp_register_audio_driver(struct device *dev)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
	struct hdmi_codec_pdata codec_data = {
		.ops = &mtk_dp_audio_codec_ops,
		.max_i2s_channels = 8,
		.i2s = 1,
		.data = mtk_dp,
	};

	mtk_dp->audio_pdev = platform_device_register_data(dev,
							   HDMI_CODEC_DRV_NAME,
							   PLATFORM_DEVID_AUTO,
							   &codec_data,
							   sizeof(codec_data));
	return PTR_ERR_OR_ZERO(mtk_dp->audio_pdev);
}
2525 
/*
 * mtk_dp_register_phy - create and bind the mediatek-dp-phy child
 *
 * Registers the PHY platform device, passing our regmap pointer as its
 * platform data, loads the eFuse calibration data, then acquires the
 * "dp" PHY handle. On PHY lookup failure the child device is
 * unregistered before returning the error.
 */
static int mtk_dp_register_phy(struct mtk_dp *mtk_dp)
{
	struct device *dev = mtk_dp->dev;

	mtk_dp->phy_dev = platform_device_register_data(dev, "mediatek-dp-phy",
							PLATFORM_DEVID_AUTO,
							&mtk_dp->regs,
							sizeof(struct regmap *));
	if (IS_ERR(mtk_dp->phy_dev))
		return dev_err_probe(dev, PTR_ERR(mtk_dp->phy_dev),
				     "Failed to create device mediatek-dp-phy\n");

	mtk_dp_get_calibration_data(mtk_dp);

	mtk_dp->phy = devm_phy_get(&mtk_dp->phy_dev->dev, "dp");
	if (IS_ERR(mtk_dp->phy)) {
		platform_device_unregister(mtk_dp->phy_dev);
		return dev_err_probe(dev, PTR_ERR(mtk_dp->phy), "Failed to get phy\n");
	}

	return 0;
}
2548 
/*
 * mtk_dp_edp_link_panel - aux-bus .done_probing callback for eDP
 *
 * Called once the aux-bus population (and thus panel EDID reading) has
 * finished; also invoked directly from probe when no panel sits on the
 * aux-bus. Looks up the downstream panel bridge, powers the DP/AUX block
 * back down (it was powered on in probe solely for EDID access), and adds
 * the bridge only when a panel was actually found.
 */
static int mtk_dp_edp_link_panel(struct drm_dp_aux *mtk_aux)
{
	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
	struct device *dev = mtk_aux->dev;
	int ret;

	mtk_dp->next_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);

	/* Power off the DP and AUX: either detection is done, or no panel present */
	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
			   DP_PWR_STATE_BANDGAP_TPLL,
			   DP_PWR_STATE_MASK);
	mtk_dp_power_disable(mtk_dp);

	if (IS_ERR(mtk_dp->next_bridge)) {
		ret = PTR_ERR(mtk_dp->next_bridge);
		/* Clear the ERR_PTR so later users see "no bridge" */
		mtk_dp->next_bridge = NULL;
		return ret;
	}

	/* For eDP, we add the bridge only if the panel was found */
	ret = devm_drm_bridge_add(dev, &mtk_dp->bridge);
	if (ret)
		return ret;

	return 0;
}
2576 
/*
 * mtk_dp_probe - platform driver probe
 *
 * Parses DT, sets up the HPD interrupt (full DP only), initializes the
 * AUX channel, registers the audio codec child (when supported) and the
 * PHY child, then registers the drm_bridge. For eDP the AUX block is
 * temporarily powered so the panel EDID can be read via the aux-bus;
 * mtk_dp_edp_link_panel powers it off again.
 */
static int mtk_dp_probe(struct platform_device *pdev)
{
	struct mtk_dp *mtk_dp;
	struct device *dev = &pdev->dev;
	int ret;

	mtk_dp = devm_kzalloc(dev, sizeof(*mtk_dp), GFP_KERNEL);
	if (!mtk_dp)
		return -ENOMEM;

	mtk_dp->dev = dev;
	mtk_dp->data = (struct mtk_dp_data *)of_device_get_match_data(dev);

	ret = mtk_dp_dt_parse(mtk_dp, pdev);
	if (ret)
		return dev_err_probe(dev, ret, "Failed to parse dt\n");

	/*
	 * Request the interrupt and install service routine only if we are
	 * on full DisplayPort.
	 * For eDP, polling the HPD instead is more convenient because we
	 * don't expect any (un)plug events during runtime, hence we can
	 * avoid some locking.
	 */
	if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP) {
		mtk_dp->irq = platform_get_irq(pdev, 0);
		if (mtk_dp->irq < 0)
			return dev_err_probe(dev, mtk_dp->irq,
					     "failed to request dp irq resource\n");

		spin_lock_init(&mtk_dp->irq_thread_lock);

		/* Keep the IRQ disarmed until bridge attach enables it */
		irq_set_status_flags(mtk_dp->irq, IRQ_NOAUTOEN);
		ret = devm_request_threaded_irq(dev, mtk_dp->irq, mtk_dp_hpd_event,
						mtk_dp_hpd_event_thread,
						IRQ_TYPE_LEVEL_HIGH, dev_name(dev),
						mtk_dp);
		if (ret)
			return dev_err_probe(dev, ret,
					     "failed to request mediatek dptx irq\n");

		mtk_dp->need_debounce = true;
		timer_setup(&mtk_dp->debounce_timer, mtk_dp_debounce_timer, 0);
	}

	mtk_dp->aux.name = "aux_mtk_dp";
	mtk_dp->aux.dev = dev;
	mtk_dp->aux.transfer = mtk_dp_aux_transfer;
	mtk_dp->aux.wait_hpd_asserted = mtk_dp_wait_hpd_asserted;
	drm_dp_aux_init(&mtk_dp->aux);

	platform_set_drvdata(pdev, mtk_dp);

	if (mtk_dp->data->audio_supported) {
		mutex_init(&mtk_dp->update_plugged_status_lock);

		ret = mtk_dp_register_audio_driver(dev);
		if (ret) {
			dev_err(dev, "Failed to register audio driver: %d\n",
				ret);
			return ret;
		}
	}

	ret = mtk_dp_register_phy(mtk_dp);
	if (ret)
		return ret;

	mtk_dp->bridge.funcs = &mtk_dp_bridge_funcs;
	mtk_dp->bridge.of_node = dev->of_node;
	mtk_dp->bridge.type = mtk_dp->data->bridge_type;

	if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP) {
		/*
		 * Set the data lanes to idle in case the bootloader didn't
		 * properly close the eDP port to avoid stalls and then
		 * reinitialize, reset and power on the AUX block.
		 */
		mtk_dp_set_idle_pattern(mtk_dp, true);
		mtk_dp_initialize_aux_settings(mtk_dp);
		mtk_dp_power_enable(mtk_dp);

		/* Disable HW interrupts: we don't need any for eDP */
		mtk_dp_hwirq_enable(mtk_dp, false);

		/*
		 * Power on the AUX to allow reading the EDID from aux-bus:
		 * please note that it is necessary to call power off in the
		 * .done_probing() callback (mtk_dp_edp_link_panel), as only
		 * there we can safely assume that we finished reading EDID.
		 */
		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
				   DP_PWR_STATE_BANDGAP_TPLL_LANE,
				   DP_PWR_STATE_MASK);

		ret = devm_of_dp_aux_populate_bus(&mtk_dp->aux, mtk_dp_edp_link_panel);
		if (ret) {
			/* -ENODEV this means that the panel is not on the aux-bus */
			if (ret == -ENODEV) {
				ret = mtk_dp_edp_link_panel(&mtk_dp->aux);
				if (ret)
					return ret;
			} else {
				/* Real failure: undo the AUX power-up */
				mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
						   DP_PWR_STATE_BANDGAP_TPLL,
						   DP_PWR_STATE_MASK);
				mtk_dp_power_disable(mtk_dp);
				return ret;
			}
		}
	} else {
		mtk_dp->bridge.ops = DRM_BRIDGE_OP_DETECT |
				     DRM_BRIDGE_OP_EDID | DRM_BRIDGE_OP_HPD;
		ret = devm_drm_bridge_add(dev, &mtk_dp->bridge);
		if (ret)
			return dev_err_probe(dev, ret, "Failed to add bridge\n");
	}

	pm_runtime_enable(dev);
	pm_runtime_get_sync(dev);

	return 0;
}
2700 
/*
 * mtk_dp_remove - platform driver remove
 *
 * Drops the runtime PM reference taken in probe, stops the HPD debounce
 * timer (full DP only) and unregisters the PHY and audio child devices.
 */
static void mtk_dp_remove(struct platform_device *pdev)
{
	struct mtk_dp *mtk_dp = platform_get_drvdata(pdev);

	pm_runtime_put(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP)
		del_timer_sync(&mtk_dp->debounce_timer);
	platform_device_unregister(mtk_dp->phy_dev);
	if (mtk_dp->audio_pdev)
		platform_device_unregister(mtk_dp->audio_pdev);
}
2713 
2714 #ifdef CONFIG_PM_SLEEP
2715 static int mtk_dp_suspend(struct device *dev)
2716 {
2717 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2718 
2719 	mtk_dp_power_disable(mtk_dp);
2720 	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP)
2721 		mtk_dp_hwirq_enable(mtk_dp, false);
2722 	pm_runtime_put_sync(dev);
2723 
2724 	return 0;
2725 }
2726 
/*
 * mtk_dp_resume - system sleep resume handler
 *
 * Mirror of mtk_dp_suspend: reacquires the runtime PM reference,
 * reinitializes the port registers, unmasks HPD interrupts (full DP
 * only) and powers the controller back up.
 */
static int mtk_dp_resume(struct device *dev)
{
	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);

	pm_runtime_get_sync(dev);
	mtk_dp_init_port(mtk_dp);
	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP)
		mtk_dp_hwirq_enable(mtk_dp, true);
	mtk_dp_power_enable(mtk_dp);

	return 0;
}
2739 #endif
2740 
2741 static SIMPLE_DEV_PM_OPS(mtk_dp_pm_ops, mtk_dp_suspend, mtk_dp_resume);
2742 
/* MT8195 eDP variant: no audio, eDP-specific SMC unmute command */
static const struct mtk_dp_data mt8195_edp_data = {
	.bridge_type = DRM_MODE_CONNECTOR_eDP,
	.smc_cmd = MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE,
	.efuse_fmt = mt8195_edp_efuse_fmt,
	.audio_supported = false,
};
2749 
/* MT8195 full DP variant: audio capable, DP SMC unmute command */
static const struct mtk_dp_data mt8195_dp_data = {
	.bridge_type = DRM_MODE_CONNECTOR_DisplayPort,
	.smc_cmd = MTK_DP_SIP_ATF_VIDEO_UNMUTE,
	.efuse_fmt = mt8195_dp_efuse_fmt,
	.audio_supported = true,
};
2756 
/* DT match table: selects the per-variant mtk_dp_data above */
static const struct of_device_id mtk_dp_of_match[] = {
	{
		.compatible = "mediatek,mt8195-edp-tx",
		.data = &mt8195_edp_data,
	},
	{
		.compatible = "mediatek,mt8195-dp-tx",
		.data = &mt8195_dp_data,
	},
	{},
};
MODULE_DEVICE_TABLE(of, mtk_dp_of_match);
2769 
/* Platform driver glue; PM ops cover system sleep only (see above) */
static struct platform_driver mtk_dp_driver = {
	.probe = mtk_dp_probe,
	.remove_new = mtk_dp_remove,
	.driver = {
		.name = "mediatek-drm-dp",
		.of_match_table = mtk_dp_of_match,
		.pm = &mtk_dp_pm_ops,
	},
};
2779 
2780 module_platform_driver(mtk_dp_driver);
2781 
2782 MODULE_AUTHOR("Jitao Shi <jitao.shi@mediatek.com>");
2783 MODULE_AUTHOR("Markus Schneider-Pargmann <msp@baylibre.com>");
2784 MODULE_AUTHOR("Bo-Chen Chen <rex-bc.chen@mediatek.com>");
2785 MODULE_DESCRIPTION("MediaTek DisplayPort Driver");
2786 MODULE_LICENSE("GPL");
2787 MODULE_SOFTDEP("pre: phy_mtk_dp");
2788