1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019-2022 MediaTek Inc.
4  * Copyright (c) 2022 BayLibre
5  */
6 
7 #include <drm/display/drm_dp_aux_bus.h>
8 #include <drm/display/drm_dp.h>
9 #include <drm/display/drm_dp_helper.h>
10 #include <drm/drm_atomic_helper.h>
11 #include <drm/drm_bridge.h>
12 #include <drm/drm_crtc.h>
13 #include <drm/drm_edid.h>
14 #include <drm/drm_of.h>
15 #include <drm/drm_panel.h>
16 #include <drm/drm_print.h>
17 #include <drm/drm_probe_helper.h>
18 #include <linux/arm-smccc.h>
19 #include <linux/clk.h>
20 #include <linux/delay.h>
21 #include <linux/errno.h>
22 #include <linux/kernel.h>
23 #include <linux/media-bus-format.h>
24 #include <linux/nvmem-consumer.h>
25 #include <linux/of.h>
26 #include <linux/of_irq.h>
27 #include <linux/of_platform.h>
28 #include <linux/phy/phy.h>
29 #include <linux/platform_device.h>
30 #include <linux/pm_runtime.h>
31 #include <linux/regmap.h>
32 #include <linux/soc/mediatek/mtk_sip_svc.h>
33 #include <sound/hdmi-codec.h>
34 #include <video/videomode.h>
35 
36 #include "mtk_dp_reg.h"
37 
38 #define MTK_DP_SIP_CONTROL_AARCH32	MTK_SIP_SMC_CMD(0x523)
39 #define MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE	(BIT(0) | BIT(5))
40 #define MTK_DP_SIP_ATF_VIDEO_UNMUTE	BIT(5)
41 
42 #define MTK_DP_THREAD_CABLE_STATE_CHG	BIT(0)
43 #define MTK_DP_THREAD_HPD_EVENT		BIT(1)
44 
45 #define MTK_DP_4P1T 4
46 #define MTK_DP_HDE 2
47 #define MTK_DP_PIX_PER_ADDR 2
48 #define MTK_DP_AUX_WAIT_REPLY_COUNT 20
49 #define MTK_DP_TBC_BUF_READ_START_ADDR 0x8
50 #define MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY 5
51 #define MTK_DP_TRAIN_DOWNSCALE_RETRY 10
52 #define MTK_DP_VERSION 0x11
53 #define MTK_DP_SDP_AUI 0x4
54 
55 enum {
56 	MTK_DP_CAL_GLB_BIAS_TRIM = 0,
57 	MTK_DP_CAL_CLKTX_IMPSE,
58 	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0,
59 	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1,
60 	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2,
61 	MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3,
62 	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0,
63 	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1,
64 	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2,
65 	MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3,
66 	MTK_DP_CAL_MAX,
67 };
68 
69 struct mtk_dp_train_info {
70 	bool sink_ssc;
71 	bool cable_plugged_in;
72 	/* link_rate is in multiples of 0.27 Gbps */
73 	int link_rate;
74 	int lane_count;
75 	unsigned int channel_eq_pattern;
76 };
77 
78 struct mtk_dp_audio_cfg {
79 	bool detect_monitor;
80 	int sad_count;
81 	int sample_rate;
82 	int word_length_bits;
83 	int channels;
84 };
85 
86 struct mtk_dp_info {
87 	enum dp_pixelformat format;
88 	struct videomode vm;
89 	struct mtk_dp_audio_cfg audio_cur_cfg;
90 };
91 
92 struct mtk_dp_efuse_fmt {
93 	unsigned short idx;
94 	unsigned short shift;
95 	unsigned short mask;
96 	unsigned short min_val;
97 	unsigned short max_val;
98 	unsigned short default_val;
99 };
100 
101 struct mtk_dp {
102 	bool enabled;
103 	bool need_debounce;
104 	int irq;
105 	u8 max_lanes;
106 	u8 max_linkrate;
107 	u8 rx_cap[DP_RECEIVER_CAP_SIZE];
108 	u32 cal_data[MTK_DP_CAL_MAX];
109 	u32 irq_thread_handle;
110 	/* irq_thread_lock is used to protect irq_thread_handle */
111 	spinlock_t irq_thread_lock;
112 
113 	struct device *dev;
114 	struct drm_bridge bridge;
115 	struct drm_bridge *next_bridge;
116 	struct drm_connector *conn;
117 	struct drm_device *drm_dev;
118 	struct drm_dp_aux aux;
119 
120 	const struct mtk_dp_data *data;
121 	struct mtk_dp_info info;
122 	struct mtk_dp_train_info train_info;
123 
124 	struct platform_device *phy_dev;
125 	struct phy *phy;
126 	struct regmap *regs;
127 	struct timer_list debounce_timer;
128 
129 	/* For audio */
130 	bool audio_enable;
131 	hdmi_codec_plugged_cb plugged_cb;
132 	struct platform_device *audio_pdev;
133 
134 	struct device *codec_dev;
135 	/* protect the plugged_cb as it's used in both bridge ops and audio */
136 	struct mutex update_plugged_status_lock;
137 };
138 
139 struct mtk_dp_data {
140 	int bridge_type;
141 	unsigned int smc_cmd;
142 	const struct mtk_dp_efuse_fmt *efuse_fmt;
143 	bool audio_supported;
144 };
145 
146 static const struct mtk_dp_efuse_fmt mt8195_edp_efuse_fmt[MTK_DP_CAL_MAX] = {
147 	[MTK_DP_CAL_GLB_BIAS_TRIM] = {
148 		.idx = 3,
149 		.shift = 27,
150 		.mask = 0x1f,
151 		.min_val = 1,
152 		.max_val = 0x1e,
153 		.default_val = 0xf,
154 	},
155 	[MTK_DP_CAL_CLKTX_IMPSE] = {
156 		.idx = 0,
157 		.shift = 9,
158 		.mask = 0xf,
159 		.min_val = 1,
160 		.max_val = 0xe,
161 		.default_val = 0x8,
162 	},
163 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = {
164 		.idx = 2,
165 		.shift = 28,
166 		.mask = 0xf,
167 		.min_val = 1,
168 		.max_val = 0xe,
169 		.default_val = 0x8,
170 	},
171 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = {
172 		.idx = 2,
173 		.shift = 20,
174 		.mask = 0xf,
175 		.min_val = 1,
176 		.max_val = 0xe,
177 		.default_val = 0x8,
178 	},
179 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = {
180 		.idx = 2,
181 		.shift = 12,
182 		.mask = 0xf,
183 		.min_val = 1,
184 		.max_val = 0xe,
185 		.default_val = 0x8,
186 	},
187 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = {
188 		.idx = 2,
189 		.shift = 4,
190 		.mask = 0xf,
191 		.min_val = 1,
192 		.max_val = 0xe,
193 		.default_val = 0x8,
194 	},
195 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = {
196 		.idx = 2,
197 		.shift = 24,
198 		.mask = 0xf,
199 		.min_val = 1,
200 		.max_val = 0xe,
201 		.default_val = 0x8,
202 	},
203 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = {
204 		.idx = 2,
205 		.shift = 16,
206 		.mask = 0xf,
207 		.min_val = 1,
208 		.max_val = 0xe,
209 		.default_val = 0x8,
210 	},
211 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = {
212 		.idx = 2,
213 		.shift = 8,
214 		.mask = 0xf,
215 		.min_val = 1,
216 		.max_val = 0xe,
217 		.default_val = 0x8,
218 	},
219 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = {
220 		.idx = 2,
221 		.shift = 0,
222 		.mask = 0xf,
223 		.min_val = 1,
224 		.max_val = 0xe,
225 		.default_val = 0x8,
226 	},
227 };
228 
229 static const struct mtk_dp_efuse_fmt mt8195_dp_efuse_fmt[MTK_DP_CAL_MAX] = {
230 	[MTK_DP_CAL_GLB_BIAS_TRIM] = {
231 		.idx = 0,
232 		.shift = 27,
233 		.mask = 0x1f,
234 		.min_val = 1,
235 		.max_val = 0x1e,
236 		.default_val = 0xf,
237 	},
238 	[MTK_DP_CAL_CLKTX_IMPSE] = {
239 		.idx = 0,
240 		.shift = 13,
241 		.mask = 0xf,
242 		.min_val = 1,
243 		.max_val = 0xe,
244 		.default_val = 0x8,
245 	},
246 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] = {
247 		.idx = 1,
248 		.shift = 28,
249 		.mask = 0xf,
250 		.min_val = 1,
251 		.max_val = 0xe,
252 		.default_val = 0x8,
253 	},
254 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] = {
255 		.idx = 1,
256 		.shift = 20,
257 		.mask = 0xf,
258 		.min_val = 1,
259 		.max_val = 0xe,
260 		.default_val = 0x8,
261 	},
262 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] = {
263 		.idx = 1,
264 		.shift = 12,
265 		.mask = 0xf,
266 		.min_val = 1,
267 		.max_val = 0xe,
268 		.default_val = 0x8,
269 	},
270 	[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] = {
271 		.idx = 1,
272 		.shift = 4,
273 		.mask = 0xf,
274 		.min_val = 1,
275 		.max_val = 0xe,
276 		.default_val = 0x8,
277 	},
278 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] = {
279 		.idx = 1,
280 		.shift = 24,
281 		.mask = 0xf,
282 		.min_val = 1,
283 		.max_val = 0xe,
284 		.default_val = 0x8,
285 	},
286 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] = {
287 		.idx = 1,
288 		.shift = 16,
289 		.mask = 0xf,
290 		.min_val = 1,
291 		.max_val = 0xe,
292 		.default_val = 0x8,
293 	},
294 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] = {
295 		.idx = 1,
296 		.shift = 8,
297 		.mask = 0xf,
298 		.min_val = 1,
299 		.max_val = 0xe,
300 		.default_val = 0x8,
301 	},
302 	[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] = {
303 		.idx = 1,
304 		.shift = 0,
305 		.mask = 0xf,
306 		.min_val = 1,
307 		.max_val = 0xe,
308 		.default_val = 0x8,
309 	},
310 };
311 
312 static struct regmap_config mtk_dp_regmap_config = {
313 	.reg_bits = 32,
314 	.val_bits = 32,
315 	.reg_stride = 4,
316 	.max_register = SEC_OFFSET + 0x90,
317 	.name = "mtk-dp-registers",
318 };
319 
320 static struct mtk_dp *mtk_dp_from_bridge(struct drm_bridge *b)
321 {
322 	return container_of(b, struct mtk_dp, bridge);
323 }
324 
325 static u32 mtk_dp_read(struct mtk_dp *mtk_dp, u32 offset)
326 {
327 	u32 read_val;
328 	int ret;
329 
330 	ret = regmap_read(mtk_dp->regs, offset, &read_val);
331 	if (ret) {
332 		dev_err(mtk_dp->dev, "Failed to read register 0x%x: %d\n",
333 			offset, ret);
334 		return 0;
335 	}
336 
337 	return read_val;
338 }
339 
340 static int mtk_dp_write(struct mtk_dp *mtk_dp, u32 offset, u32 val)
341 {
342 	int ret = regmap_write(mtk_dp->regs, offset, val);
343 
344 	if (ret)
345 		dev_err(mtk_dp->dev,
346 			"Failed to write register 0x%x with value 0x%x\n",
347 			offset, val);
348 	return ret;
349 }
350 
351 static int mtk_dp_update_bits(struct mtk_dp *mtk_dp, u32 offset,
352 			      u32 val, u32 mask)
353 {
354 	int ret = regmap_update_bits(mtk_dp->regs, offset, mask, val);
355 
356 	if (ret)
357 		dev_err(mtk_dp->dev,
358 			"Failed to update register 0x%x with value 0x%x, mask 0x%x\n",
359 			offset, val, mask);
360 	return ret;
361 }
362 
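/*
 * Write a byte buffer out two bytes at a time: each consecutive pair of
 * bytes is packed into the low 16 bits of one 32-bit register, a trailing
 * odd byte is zero-padded, and the loop stops on the first write error.
 */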
363 static void mtk_dp_bulk_16bit_write(struct mtk_dp *mtk_dp, u32 offset, u8 *buf,
364 				    size_t length)
365 {
366 	int i;
367 
368 	/* 2 bytes per register */
369 	for (i = 0; i < length; i += 2) {
370 		u32 val = buf[i] | (i + 1 < length ? buf[i + 1] << 8 : 0);
371 
372 		if (mtk_dp_write(mtk_dp, offset + i * 2, val))
373 			return;
374 	}
375 }
376 
377 static void mtk_dp_msa_bypass_enable(struct mtk_dp *mtk_dp, bool enable)
378 {
379 	u32 mask = HTOTAL_SEL_DP_ENC0_P0 | VTOTAL_SEL_DP_ENC0_P0 |
380 		   HSTART_SEL_DP_ENC0_P0 | VSTART_SEL_DP_ENC0_P0 |
381 		   HWIDTH_SEL_DP_ENC0_P0 | VHEIGHT_SEL_DP_ENC0_P0 |
382 		   HSP_SEL_DP_ENC0_P0 | HSW_SEL_DP_ENC0_P0 |
383 		   VSP_SEL_DP_ENC0_P0 | VSW_SEL_DP_ENC0_P0;
384 
385 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030, enable ? 0 : mask, mask);
386 }
387 
388 static void mtk_dp_set_msa(struct mtk_dp *mtk_dp)
389 {
390 	struct drm_display_mode mode;
391 	struct videomode *vm = &mtk_dp->info.vm;
392 
393 	drm_display_mode_from_videomode(vm, &mode);
394 
395 	/* horizontal */
396 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3010,
397 			   mode.htotal, HTOTAL_SW_DP_ENC0_P0_MASK);
398 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3018,
399 			   vm->hsync_len + vm->hback_porch,
400 			   HSTART_SW_DP_ENC0_P0_MASK);
401 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028,
402 			   vm->hsync_len, HSW_SW_DP_ENC0_P0_MASK);
403 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3028,
404 			   0, HSP_SW_DP_ENC0_P0_MASK);
405 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3020,
406 			   vm->hactive, HWIDTH_SW_DP_ENC0_P0_MASK);
407 
408 	/* vertical */
409 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3014,
410 			   mode.vtotal, VTOTAL_SW_DP_ENC0_P0_MASK);
411 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_301C,
412 			   vm->vsync_len + vm->vback_porch,
413 			   VSTART_SW_DP_ENC0_P0_MASK);
414 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C,
415 			   vm->vsync_len, VSW_SW_DP_ENC0_P0_MASK);
416 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_302C,
417 			   0, VSP_SW_DP_ENC0_P0_MASK);
418 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3024,
419 			   vm->vactive, VHEIGHT_SW_DP_ENC0_P0_MASK);
420 
421 	/* horizontal */
422 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3064,
423 			   vm->hactive, HDE_NUM_LAST_DP_ENC0_P0_MASK);
424 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3154,
425 			   mode.htotal, PGEN_HTOTAL_DP_ENC0_P0_MASK);
426 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3158,
427 			   vm->hfront_porch,
428 			   PGEN_HSYNC_RISING_DP_ENC0_P0_MASK);
429 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_315C,
430 			   vm->hsync_len,
431 			   PGEN_HSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK);
432 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3160,
433 			   vm->hback_porch + vm->hsync_len,
434 			   PGEN_HFDE_START_DP_ENC0_P0_MASK);
435 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3164,
436 			   vm->hactive,
437 			   PGEN_HFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK);
438 
439 	/* vertical */
440 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3168,
441 			   mode.vtotal,
442 			   PGEN_VTOTAL_DP_ENC0_P0_MASK);
443 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_316C,
444 			   vm->vfront_porch,
445 			   PGEN_VSYNC_RISING_DP_ENC0_P0_MASK);
446 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3170,
447 			   vm->vsync_len,
448 			   PGEN_VSYNC_PULSE_WIDTH_DP_ENC0_P0_MASK);
449 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3174,
450 			   vm->vback_porch + vm->vsync_len,
451 			   PGEN_VFDE_START_DP_ENC0_P0_MASK);
452 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3178,
453 			   vm->vactive,
454 			   PGEN_VFDE_ACTIVE_WIDTH_DP_ENC0_P0_MASK);
455 }
456 
457 static int mtk_dp_set_color_format(struct mtk_dp *mtk_dp,
458 				   enum dp_pixelformat color_format)
459 {
460 	u32 val;
461 	u32 misc0_color;
462 
463 	switch (color_format) {
464 	case DP_PIXELFORMAT_YUV422:
465 		val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_YCBCR422;
466 		misc0_color = DP_COLOR_FORMAT_YCbCr422;
467 		break;
468 	case DP_PIXELFORMAT_RGB:
469 		val = PIXEL_ENCODE_FORMAT_DP_ENC0_P0_RGB;
470 		misc0_color = DP_COLOR_FORMAT_RGB;
471 		break;
472 	default:
473 		drm_warn(mtk_dp->drm_dev, "Unsupported color format: %d\n",
474 			 color_format);
475 		return -EINVAL;
476 	}
477 
478 	/* update MISC0 */
479 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034,
480 			   misc0_color,
481 			   DP_TEST_COLOR_FORMAT_MASK);
482 
483 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
484 			   val, PIXEL_ENCODE_FORMAT_DP_ENC0_P0_MASK);
485 	return 0;
486 }
487 
488 static void mtk_dp_set_color_depth(struct mtk_dp *mtk_dp)
489 {
490 	/* Only support 8 bits currently */
491 	/* Update MISC0 */
492 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3034,
493 			   DP_MSA_MISC_8_BPC, DP_TEST_BIT_DEPTH_MASK);
494 
495 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
496 			   VIDEO_COLOR_DEPTH_DP_ENC0_P0_8BIT,
497 			   VIDEO_COLOR_DEPTH_DP_ENC0_P0_MASK);
498 }
499 
500 static void mtk_dp_config_mn_mode(struct mtk_dp *mtk_dp)
501 {
502 	/* 0: hw mode, 1: sw mode */
503 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004,
504 			   0, VIDEO_M_CODE_SEL_DP_ENC0_P0_MASK);
505 }
506 
507 static void mtk_dp_set_sram_read_start(struct mtk_dp *mtk_dp, u32 val)
508 {
509 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
510 			   val, SRAM_START_READ_THRD_DP_ENC0_P0_MASK);
511 }
512 
513 static void mtk_dp_setup_encoder(struct mtk_dp *mtk_dp)
514 {
515 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_303C,
516 			   VIDEO_MN_GEN_EN_DP_ENC0_P0,
517 			   VIDEO_MN_GEN_EN_DP_ENC0_P0);
518 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040,
519 			   SDP_DOWN_CNT_DP_ENC0_P0_VAL,
520 			   SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK);
521 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364,
522 			   SDP_DOWN_CNT_IN_HBLANK_DP_ENC1_P0_VAL,
523 			   SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK);
524 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3300,
525 			   VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_VAL << 8,
526 			   VIDEO_AFIFO_RDY_SEL_DP_ENC1_P0_MASK);
527 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364,
528 			   FIFO_READ_START_POINT_DP_ENC1_P0_VAL << 12,
529 			   FIFO_READ_START_POINT_DP_ENC1_P0_MASK);
530 	mtk_dp_write(mtk_dp, MTK_DP_ENC1_P0_3368, DP_ENC1_P0_3368_VAL);
531 }
532 
533 static void mtk_dp_pg_enable(struct mtk_dp *mtk_dp, bool enable)
534 {
535 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3038,
536 			   enable ? VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK : 0,
537 			   VIDEO_SOURCE_SEL_DP_ENC0_P0_MASK);
538 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31B0,
539 			   PGEN_PATTERN_SEL_VAL << 4, PGEN_PATTERN_SEL_MASK);
540 }
541 
542 static void mtk_dp_audio_setup_channels(struct mtk_dp *mtk_dp,
543 					struct mtk_dp_audio_cfg *cfg)
544 {
545 	u32 channel_enable_bits;
546 
547 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3324,
548 			   AUDIO_SOURCE_MUX_DP_ENC1_P0_DPRX,
549 			   AUDIO_SOURCE_MUX_DP_ENC1_P0_MASK);
550 
551 	/* audio channel count change reset */
552 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4,
553 			   DP_ENC_DUMMY_RW_1, DP_ENC_DUMMY_RW_1);
554 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3304,
555 			   AU_PRTY_REGEN_DP_ENC1_P0_MASK |
556 			   AU_CH_STS_REGEN_DP_ENC1_P0_MASK |
557 			   AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK,
558 			   AU_PRTY_REGEN_DP_ENC1_P0_MASK |
559 			   AU_CH_STS_REGEN_DP_ENC1_P0_MASK |
560 			   AUDIO_SAMPLE_PRSENT_REGEN_DP_ENC1_P0_MASK);
561 
562 	switch (cfg->channels) {
563 	case 2:
564 		channel_enable_bits = AUDIO_2CH_SEL_DP_ENC0_P0_MASK |
565 				      AUDIO_2CH_EN_DP_ENC0_P0_MASK;
566 		break;
567 	case 8:
568 	default:
569 		channel_enable_bits = AUDIO_8CH_SEL_DP_ENC0_P0_MASK |
570 				      AUDIO_8CH_EN_DP_ENC0_P0_MASK;
571 		break;
572 	}
573 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088,
574 			   channel_enable_bits | AU_EN_DP_ENC0_P0,
575 			   AUDIO_2CH_SEL_DP_ENC0_P0_MASK |
576 			   AUDIO_2CH_EN_DP_ENC0_P0_MASK |
577 			   AUDIO_8CH_SEL_DP_ENC0_P0_MASK |
578 			   AUDIO_8CH_EN_DP_ENC0_P0_MASK |
579 			   AU_EN_DP_ENC0_P0);
580 
581 	/* audio channel count change reset */
582 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4, 0, DP_ENC_DUMMY_RW_1);
583 
584 	/* enable audio reset */
585 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_33F4,
586 			   DP_ENC_DUMMY_RW_1_AUDIO_RST_EN,
587 			   DP_ENC_DUMMY_RW_1_AUDIO_RST_EN);
588 }
589 
590 static void mtk_dp_audio_channel_status_set(struct mtk_dp *mtk_dp,
591 					    struct mtk_dp_audio_cfg *cfg)
592 {
593 	struct snd_aes_iec958 iec = { 0 };
594 
595 	switch (cfg->sample_rate) {
596 	case 32000:
597 		iec.status[3] = IEC958_AES3_CON_FS_32000;
598 		break;
599 	case 44100:
600 		iec.status[3] = IEC958_AES3_CON_FS_44100;
601 		break;
602 	case 48000:
603 		iec.status[3] = IEC958_AES3_CON_FS_48000;
604 		break;
605 	case 88200:
606 		iec.status[3] = IEC958_AES3_CON_FS_88200;
607 		break;
608 	case 96000:
609 		iec.status[3] = IEC958_AES3_CON_FS_96000;
610 		break;
611 	case 192000:
612 		iec.status[3] = IEC958_AES3_CON_FS_192000;
613 		break;
614 	default:
615 		iec.status[3] = IEC958_AES3_CON_FS_NOTID;
616 		break;
617 	}
618 
619 	switch (cfg->word_length_bits) {
620 	case 16:
621 		iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16;
622 		break;
623 	case 20:
624 		iec.status[4] = IEC958_AES4_CON_WORDLEN_20_16 |
625 				IEC958_AES4_CON_MAX_WORDLEN_24;
626 		break;
627 	case 24:
628 		iec.status[4] = IEC958_AES4_CON_WORDLEN_24_20 |
629 				IEC958_AES4_CON_MAX_WORDLEN_24;
630 		break;
631 	default:
632 		iec.status[4] = IEC958_AES4_CON_WORDLEN_NOTID;
633 	}
634 
635 	/* IEC 60958 consumer channel status bits */
636 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_308C,
637 			   0, CH_STATUS_0_DP_ENC0_P0_MASK);
638 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3090,
639 			   iec.status[3] << 8, CH_STATUS_1_DP_ENC0_P0_MASK);
640 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3094,
641 			   iec.status[4], CH_STATUS_2_DP_ENC0_P0_MASK);
642 }
643 
644 static void mtk_dp_audio_sdp_asp_set_channels(struct mtk_dp *mtk_dp,
645 					      int channels)
646 {
647 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_312C,
648 			   (min(8, channels) - 1) << 8,
649 			   ASP_HB2_DP_ENC0_P0_MASK | ASP_HB3_DP_ENC0_P0_MASK);
650 }
651 
652 static void mtk_dp_audio_set_divider(struct mtk_dp *mtk_dp)
653 {
654 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30BC,
655 			   AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_DIV_2,
656 			   AUDIO_M_CODE_MULT_DIV_SEL_DP_ENC0_P0_MASK);
657 }
658 
659 static void mtk_dp_sdp_trigger_aui(struct mtk_dp *mtk_dp)
660 {
661 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280,
662 			   MTK_DP_SDP_AUI, SDP_PACKET_TYPE_DP_ENC1_P0_MASK);
663 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3280,
664 			   SDP_PACKET_W_DP_ENC1_P0, SDP_PACKET_W_DP_ENC1_P0);
665 }
666 
667 static void mtk_dp_sdp_set_data(struct mtk_dp *mtk_dp, u8 *data_bytes)
668 {
669 	mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_ENC1_P0_3200,
670 				data_bytes, 0x10);
671 }
672 
673 static void mtk_dp_sdp_set_header_aui(struct mtk_dp *mtk_dp,
674 				      struct dp_sdp_header *header)
675 {
676 	u32 db_addr = MTK_DP_ENC0_P0_30D8 + (MTK_DP_SDP_AUI - 1) * 8;
677 
678 	mtk_dp_bulk_16bit_write(mtk_dp, db_addr, (u8 *)header, 4);
679 }
680 
681 static void mtk_dp_disable_sdp_aui(struct mtk_dp *mtk_dp)
682 {
683 	/* Disable periodic send */
684 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc, 0,
685 			   0xff << ((MTK_DP_ENC0_P0_30A8 & 3) * 8));
686 }
687 
688 static void mtk_dp_setup_sdp_aui(struct mtk_dp *mtk_dp,
689 				 struct dp_sdp *sdp)
690 {
691 	u32 shift;
692 
693 	mtk_dp_sdp_set_data(mtk_dp, sdp->db);
694 	mtk_dp_sdp_set_header_aui(mtk_dp, &sdp->sdp_header);
695 	mtk_dp_disable_sdp_aui(mtk_dp);
696 
697 	shift = (MTK_DP_ENC0_P0_30A8 & 3) * 8;
698 
699 	mtk_dp_sdp_trigger_aui(mtk_dp);
700 	/* Enable periodic sending */
701 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A8 & 0xfffc,
702 			   0x05 << shift, 0xff << shift);
703 }
704 
705 static void mtk_dp_aux_irq_clear(struct mtk_dp *mtk_dp)
706 {
707 	mtk_dp_write(mtk_dp, MTK_DP_AUX_P0_3640, DP_AUX_P0_3640_VAL);
708 }
709 
710 static void mtk_dp_aux_set_cmd(struct mtk_dp *mtk_dp, u8 cmd, u32 addr)
711 {
712 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3644,
713 			   cmd, MCU_REQUEST_COMMAND_AUX_TX_P0_MASK);
714 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3648,
715 			   addr, MCU_REQUEST_ADDRESS_LSB_AUX_TX_P0_MASK);
716 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_364C,
717 			   addr >> 16, MCU_REQUEST_ADDRESS_MSB_AUX_TX_P0_MASK);
718 }
719 
720 static void mtk_dp_aux_clear_fifo(struct mtk_dp *mtk_dp)
721 {
722 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650,
723 			   MCU_ACK_TRAN_COMPLETE_AUX_TX_P0,
724 			   MCU_ACK_TRAN_COMPLETE_AUX_TX_P0 |
725 			   PHY_FIFO_RST_AUX_TX_P0_MASK |
726 			   MCU_REQ_DATA_NUM_AUX_TX_P0_MASK);
727 }
728 
729 static void mtk_dp_aux_request_ready(struct mtk_dp *mtk_dp)
730 {
731 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3630,
732 			   AUX_TX_REQUEST_READY_AUX_TX_P0,
733 			   AUX_TX_REQUEST_READY_AUX_TX_P0);
734 }
735 
736 static void mtk_dp_aux_fill_write_fifo(struct mtk_dp *mtk_dp, u8 *buf,
737 				       size_t length)
738 {
739 	mtk_dp_bulk_16bit_write(mtk_dp, MTK_DP_AUX_P0_3708, buf, length);
740 }
741 
742 static void mtk_dp_aux_read_rx_fifo(struct mtk_dp *mtk_dp, u8 *buf,
743 				    size_t length, int read_delay)
744 {
745 	int read_pos;
746 
747 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620,
748 			   0, AUX_RD_MODE_AUX_TX_P0_MASK);
749 
750 	for (read_pos = 0; read_pos < length; read_pos++) {
751 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3620,
752 				   AUX_RX_FIFO_READ_PULSE_TX_P0,
753 				   AUX_RX_FIFO_READ_PULSE_TX_P0);
754 
755 		/* Hardware needs time to update the data */
756 		usleep_range(read_delay, read_delay * 2);
757 		buf[read_pos] = (u8)(mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3620) &
758 				     AUX_RX_FIFO_READ_DATA_AUX_TX_P0_MASK);
759 	}
760 }
761 
762 static void mtk_dp_aux_set_length(struct mtk_dp *mtk_dp, size_t length)
763 {
764 	if (length > 0) {
765 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3650,
766 				   (length - 1) << 12,
767 				   MCU_REQ_DATA_NUM_AUX_TX_P0_MASK);
768 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C,
769 				   0,
770 				   AUX_NO_LENGTH_AUX_TX_P0 |
771 				   AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK |
772 				   AUX_RESERVED_RW_0_AUX_TX_P0_MASK);
773 	} else {
774 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C,
775 				   AUX_NO_LENGTH_AUX_TX_P0,
776 				   AUX_NO_LENGTH_AUX_TX_P0 |
777 				   AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK |
778 				   AUX_RESERVED_RW_0_AUX_TX_P0_MASK);
779 	}
780 }
781 
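/*
 * Poll for completion of an AUX request. For reads, data already present
 * in the RX FIFO also counts as completion. Gives up after a bounded
 * number of polls (MTK_DP_AUX_WAIT_REPLY_COUNT) or when the hardware
 * reports the 400us AUX timeout IRQ.
 */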
782 static int mtk_dp_aux_wait_for_completion(struct mtk_dp *mtk_dp, bool is_read)
783 {
784 	int wait_reply = MTK_DP_AUX_WAIT_REPLY_COUNT;
785 
786 	while (--wait_reply) {
787 		u32 aux_irq_status;
788 
789 		if (is_read) {
790 			u32 fifo_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3618);
791 
792 			if (fifo_status &
793 			    (AUX_RX_FIFO_WRITE_POINTER_AUX_TX_P0_MASK |
794 			     AUX_RX_FIFO_FULL_AUX_TX_P0_MASK)) {
795 				return 0;
796 			}
797 		}
798 
799 		aux_irq_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3640);
800 		if (aux_irq_status & AUX_RX_AUX_RECV_COMPLETE_IRQ_AUX_TX_P0)
801 			return 0;
802 
803 		if (aux_irq_status & AUX_400US_TIMEOUT_IRQ_AUX_TX_P0)
804 			return -ETIMEDOUT;
805 
806 		/* Give the hardware a chance to reach completion before retrying */
807 		usleep_range(100, 500);
808 	}
809 
810 	return -ETIMEDOUT;
811 }
812 
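/*
 * Perform one native/I2C AUX transaction: clear the FIFO and IRQ state,
 * program the command, address and length, fill the TX FIFO for writes,
 * trigger the request, wait for the sink's reply (reported through
 * *reply_cmd), and drain the RX FIFO for reads.
 */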
813 static int mtk_dp_aux_do_transfer(struct mtk_dp *mtk_dp, bool is_read, u8 cmd,
814 				  u32 addr, u8 *buf, size_t length, u8 *reply_cmd)
815 {
816 	int ret;
817 
818 	if (is_read && (length > DP_AUX_MAX_PAYLOAD_BYTES ||
819 			(cmd == DP_AUX_NATIVE_READ && !length)))
820 		return -EINVAL;
821 
822 	if (!is_read)
823 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704,
824 				   AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0,
825 				   AUX_TX_FIFO_NEW_MODE_EN_AUX_TX_P0);
826 
827 	/* We need to clear fifo and irq before sending commands to the sink device. */
828 	mtk_dp_aux_clear_fifo(mtk_dp);
829 	mtk_dp_aux_irq_clear(mtk_dp);
830 
831 	mtk_dp_aux_set_cmd(mtk_dp, cmd, addr);
832 	mtk_dp_aux_set_length(mtk_dp, length);
833 
834 	if (!is_read) {
835 		if (length)
836 			mtk_dp_aux_fill_write_fifo(mtk_dp, buf, length);
837 
838 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3704,
839 				   AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK,
840 				   AUX_TX_FIFO_WDATA_NEW_MODE_T_AUX_TX_P0_MASK);
841 	}
842 
843 	mtk_dp_aux_request_ready(mtk_dp);
844 
845 	/* Wait for feedback from sink device. */
846 	ret = mtk_dp_aux_wait_for_completion(mtk_dp, is_read);
847 
848 	*reply_cmd = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3624) &
849 		     AUX_RX_REPLY_COMMAND_AUX_TX_P0_MASK;
850 
851 	if (ret) {
852 		u32 phy_status = mtk_dp_read(mtk_dp, MTK_DP_AUX_P0_3628) &
853 				 AUX_RX_PHY_STATE_AUX_TX_P0_MASK;
854 		if (phy_status != AUX_RX_PHY_STATE_AUX_TX_P0_RX_IDLE) {
855 			dev_err(mtk_dp->dev,
856 				"AUX Rx Aux hang, need SW reset\n");
857 			return -EIO;
858 		}
859 
860 		return -ETIMEDOUT;
861 	}
862 
863 	if (!length) {
864 		mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_362C,
865 				   0,
866 				   AUX_NO_LENGTH_AUX_TX_P0 |
867 				   AUX_TX_AUXTX_OV_EN_AUX_TX_P0_MASK |
868 				   AUX_RESERVED_RW_0_AUX_TX_P0_MASK);
869 	} else if (is_read) {
870 		int read_delay;
871 
872 		if (cmd == (DP_AUX_I2C_READ | DP_AUX_I2C_MOT) ||
873 		    cmd == DP_AUX_I2C_READ)
874 			read_delay = 500;
875 		else
876 			read_delay = 100;
877 
878 		mtk_dp_aux_read_rx_fifo(mtk_dp, buf, length, read_delay);
879 	}
880 
881 	return 0;
882 }
883 
884 static void mtk_dp_set_swing_pre_emphasis(struct mtk_dp *mtk_dp, int lane_num,
885 					  int swing_val, int preemphasis)
886 {
887 	u32 lane_shift = lane_num * DP_TX1_VOLT_SWING_SHIFT;
888 
889 	dev_dbg(mtk_dp->dev,
890 		"link training: swing_val = 0x%x, pre-emphasis = 0x%x\n",
891 		swing_val, preemphasis);
892 
893 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP,
894 			   swing_val << (DP_TX0_VOLT_SWING_SHIFT + lane_shift),
895 			   DP_TX0_VOLT_SWING_MASK << lane_shift);
896 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP,
897 			   preemphasis << (DP_TX0_PRE_EMPH_SHIFT + lane_shift),
898 			   DP_TX0_PRE_EMPH_MASK << lane_shift);
899 }
900 
901 static void mtk_dp_reset_swing_pre_emphasis(struct mtk_dp *mtk_dp)
902 {
903 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_SWING_EMP,
904 			   0,
905 			   DP_TX0_VOLT_SWING_MASK |
906 			   DP_TX1_VOLT_SWING_MASK |
907 			   DP_TX2_VOLT_SWING_MASK |
908 			   DP_TX3_VOLT_SWING_MASK |
909 			   DP_TX0_PRE_EMPH_MASK |
910 			   DP_TX1_PRE_EMPH_MASK |
911 			   DP_TX2_PRE_EMPH_MASK |
912 			   DP_TX3_PRE_EMPH_MASK);
913 }
914 
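/* Read the pending software IRQ status bits and acknowledge (clear) them. */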
915 static u32 mtk_dp_swirq_get_clear(struct mtk_dp *mtk_dp)
916 {
917 	u32 irq_status = mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_35D0) &
918 			 SW_IRQ_FINAL_STATUS_DP_TRANS_P0_MASK;
919 
920 	if (irq_status) {
921 		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8,
922 				   irq_status, SW_IRQ_CLR_DP_TRANS_P0_MASK);
923 		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35C8,
924 				   0, SW_IRQ_CLR_DP_TRANS_P0_MASK);
925 	}
926 
927 	return irq_status;
928 }
929 
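/* Read the pending transmitter hardware IRQ status bits and acknowledge them. */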
930 static u32 mtk_dp_hwirq_get_clear(struct mtk_dp *mtk_dp)
931 {
932 	u32 irq_status = (mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3418) &
933 			  IRQ_STATUS_DP_TRANS_P0_MASK) >> 12;
934 
935 	if (irq_status) {
936 		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418,
937 				   irq_status, IRQ_CLR_DP_TRANS_P0_MASK);
938 		mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418,
939 				   0, IRQ_CLR_DP_TRANS_P0_MASK);
940 	}
941 
942 	return irq_status;
943 }
944 
945 static void mtk_dp_hwirq_enable(struct mtk_dp *mtk_dp, bool enable)
946 {
947 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3418,
948 			   enable ? 0 :
949 			   IRQ_MASK_DP_TRANS_P0_DISC_IRQ |
950 			   IRQ_MASK_DP_TRANS_P0_CONN_IRQ |
951 			   IRQ_MASK_DP_TRANS_P0_INT_IRQ,
952 			   IRQ_MASK_DP_TRANS_P0_MASK);
953 }
954 
955 static void mtk_dp_initialize_settings(struct mtk_dp *mtk_dp)
956 {
957 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_342C,
958 			   XTAL_FREQ_DP_TRANS_P0_DEFAULT,
959 			   XTAL_FREQ_DP_TRANS_P0_MASK);
960 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3540,
961 			   FEC_CLOCK_EN_MODE_DP_TRANS_P0,
962 			   FEC_CLOCK_EN_MODE_DP_TRANS_P0);
963 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_31EC,
964 			   AUDIO_CH_SRC_SEL_DP_ENC0_P0,
965 			   AUDIO_CH_SRC_SEL_DP_ENC0_P0);
966 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C,
967 			   0, SDP_VSYNC_RISING_MASK_DP_ENC0_P0_MASK);
968 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_IRQ_MASK,
969 			   IRQ_MASK_AUX_TOP_IRQ, IRQ_MASK_AUX_TOP_IRQ);
970 }
971 
972 static void mtk_dp_initialize_hpd_detect_settings(struct mtk_dp *mtk_dp)
973 {
974 	u32 val;
975 	/* Debounce threshold */
976 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410,
977 			   8, HPD_DEB_THD_DP_TRANS_P0_MASK);
978 
979 	val = (HPD_INT_THD_DP_TRANS_P0_LOWER_500US |
980 	       HPD_INT_THD_DP_TRANS_P0_UPPER_1100US) << 4;
981 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410,
982 			   val, HPD_INT_THD_DP_TRANS_P0_MASK);
983 
984 	/*
985 	 * Connect threshold 1.5ms + 5 x 0.1ms = 2ms
986 	 * Disconnect threshold 1.5ms + 5 x 0.1ms = 2ms
987 	 */
988 	val = (5 << 8) | (5 << 12);
989 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3410,
990 			   val,
991 			   HPD_DISC_THD_DP_TRANS_P0_MASK |
992 			   HPD_CONN_THD_DP_TRANS_P0_MASK);
993 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3430,
994 			   HPD_INT_THD_ECO_DP_TRANS_P0_HIGH_BOUND_EXT,
995 			   HPD_INT_THD_ECO_DP_TRANS_P0_MASK);
996 }
997 
998 static void mtk_dp_initialize_aux_settings(struct mtk_dp *mtk_dp)
999 {
1000 	/* modify timeout threshold = 0x1595 */
1001 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_360C,
1002 			   AUX_TIMEOUT_THR_AUX_TX_P0_VAL,
1003 			   AUX_TIMEOUT_THR_AUX_TX_P0_MASK);
1004 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3658,
1005 			   0, AUX_TX_OV_EN_AUX_TX_P0_MASK);
1006 	/* 25 for 26M */
1007 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3634,
1008 			   AUX_TX_OVER_SAMPLE_RATE_FOR_26M << 8,
1009 			   AUX_TX_OVER_SAMPLE_RATE_AUX_TX_P0_MASK);
1010 	/* 13 for 26M */
1011 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3614,
1012 			   AUX_RX_UI_CNT_THR_AUX_FOR_26M,
1013 			   AUX_RX_UI_CNT_THR_AUX_TX_P0_MASK);
1014 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_37C8,
1015 			   MTK_ATOP_EN_AUX_TX_P0,
1016 			   MTK_ATOP_EN_AUX_TX_P0);
1017 
1018 	/* Set complete reply mode for AUX */
1019 	mtk_dp_update_bits(mtk_dp, MTK_DP_AUX_P0_3690,
1020 			   RX_REPLY_COMPLETE_MODE_AUX_TX_P0,
1021 			   RX_REPLY_COMPLETE_MODE_AUX_TX_P0);
1022 }
1023 
1024 static void mtk_dp_initialize_digital_settings(struct mtk_dp *mtk_dp)
1025 {
1026 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_304C,
1027 			   0, VBID_VIDEO_MUTE_DP_ENC0_P0_MASK);
1028 
1029 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3368,
1030 			   BS2BS_MODE_DP_ENC1_P0_VAL << 12,
1031 			   BS2BS_MODE_DP_ENC1_P0_MASK);
1032 
1033 	/* dp tx encoder reset all sw */
1034 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004,
1035 			   DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0,
1036 			   DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0);
1037 
1038 	/* Wait for sw reset to complete */
1039 	usleep_range(1000, 5000);
1040 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3004,
1041 			   0, DP_TX_ENCODER_4P_RESET_SW_DP_ENC0_P0);
1042 }
1043 
1044 static void mtk_dp_digital_sw_reset(struct mtk_dp *mtk_dp)
1045 {
1046 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C,
1047 			   DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0,
1048 			   DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0);
1049 
1050 	/* Wait for sw reset to complete */
1051 	usleep_range(1000, 5000);
1052 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_340C,
1053 			   0, DP_TX_TRANSMITTER_4P_RESET_SW_DP_TRANS_P0);
1054 }
1055 
1056 static void mtk_dp_set_lanes(struct mtk_dp *mtk_dp, int lanes)
1057 {
1058 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_35F0,
1059 			   lanes == 0 ? 0 : DP_TRANS_DUMMY_RW_0,
1060 			   DP_TRANS_DUMMY_RW_0_MASK);
1061 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000,
1062 			   lanes, LANE_NUM_DP_ENC0_P0_MASK);
1063 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_34A4,
1064 			   lanes << 2, LANE_NUM_DP_TRANS_P0_MASK);
1065 }
1066 
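/*
 * Read the PHY calibration values from the "dp_calibration_data" nvmem
 * cell and validate each field against its efuse format; fall back to the
 * per-SoC default values if the cell is missing, unreadable, or any field
 * is out of range.
 */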
1067 static void mtk_dp_get_calibration_data(struct mtk_dp *mtk_dp)
1068 {
1069 	const struct mtk_dp_efuse_fmt *fmt;
1070 	struct device *dev = mtk_dp->dev;
1071 	struct nvmem_cell *cell;
1072 	u32 *cal_data = mtk_dp->cal_data;
1073 	u32 *buf;
1074 	int i;
1075 	size_t len;
1076 
1077 	cell = nvmem_cell_get(dev, "dp_calibration_data");
1078 	if (IS_ERR(cell)) {
1079 		dev_warn(dev, "Failed to get nvmem cell dp_calibration_data\n");
1080 		goto use_default_val;
1081 	}
1082 
1083 	buf = (u32 *)nvmem_cell_read(cell, &len);
1084 	nvmem_cell_put(cell);
1085 
1086 	if (IS_ERR(buf) || ((len / sizeof(u32)) != 4)) {
1087 		dev_warn(dev, "Failed to read dp_calibration_data nvmem cell\n");
1088 
1089 		if (!IS_ERR(buf))
1090 			kfree(buf);
1091 
1092 		goto use_default_val;
1093 	}
1094 
1095 	for (i = 0; i < MTK_DP_CAL_MAX; i++) {
1096 		fmt = &mtk_dp->data->efuse_fmt[i];
1097 		cal_data[i] = (buf[fmt->idx] >> fmt->shift) & fmt->mask;
1098 
1099 		if (cal_data[i] < fmt->min_val || cal_data[i] > fmt->max_val) {
1100 			dev_warn(mtk_dp->dev, "Invalid efuse data, idx = %d\n", i);
1101 			kfree(buf);
1102 			goto use_default_val;
1103 		}
1104 	}
1105 	kfree(buf);
1106 
1107 	return;
1108 
1109 use_default_val:
1110 	dev_warn(mtk_dp->dev, "Use default calibration data\n");
1111 	for (i = 0; i < MTK_DP_CAL_MAX; i++)
1112 		cal_data[i] = mtk_dp->data->efuse_fmt[i].default_val;
1113 }
1114 
1115 static void mtk_dp_set_calibration_data(struct mtk_dp *mtk_dp)
1116 {
1117 	u32 *cal_data = mtk_dp->cal_data;
1118 
1119 	mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_DPAUX_TX,
1120 			   cal_data[MTK_DP_CAL_CLKTX_IMPSE] << 20,
1121 			   RG_CKM_PT0_CKTX_IMPSEL);
1122 	mtk_dp_update_bits(mtk_dp, DP_PHY_GLB_BIAS_GEN_00,
1123 			   cal_data[MTK_DP_CAL_GLB_BIAS_TRIM] << 16,
1124 			   RG_XTP_GLB_BIAS_INTR_CTRL);
1125 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0,
1126 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_0] << 12,
1127 			   RG_XTP_LN0_TX_IMPSEL_PMOS);
1128 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_0,
1129 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_0] << 16,
1130 			   RG_XTP_LN0_TX_IMPSEL_NMOS);
1131 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1,
1132 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_1] << 12,
1133 			   RG_XTP_LN1_TX_IMPSEL_PMOS);
1134 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_1,
1135 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_1] << 16,
1136 			   RG_XTP_LN1_TX_IMPSEL_NMOS);
1137 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2,
1138 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_2] << 12,
1139 			   RG_XTP_LN2_TX_IMPSEL_PMOS);
1140 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_2,
1141 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_2] << 16,
1142 			   RG_XTP_LN2_TX_IMPSEL_NMOS);
1143 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3,
1144 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_PMOS_3] << 12,
1145 			   RG_XTP_LN3_TX_IMPSEL_PMOS);
1146 	mtk_dp_update_bits(mtk_dp, DP_PHY_LANE_TX_3,
1147 			   cal_data[MTK_DP_CAL_LN_TX_IMPSEL_NMOS_3] << 16,
1148 			   RG_XTP_LN3_TX_IMPSEL_NMOS);
1149 }
1150 
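/*
 * Reconfigure the DP PHY for a new link rate and lane count: drop the
 * power state to bandgap-only while the PHY is reprogrammed, re-apply the
 * efuse calibration data, then bring the lanes back up.
 */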
1151 static int mtk_dp_phy_configure(struct mtk_dp *mtk_dp,
1152 				u32 link_rate, int lane_count)
1153 {
1154 	int ret;
1155 	union phy_configure_opts phy_opts = {
1156 		.dp = {
1157 			.link_rate = drm_dp_bw_code_to_link_rate(link_rate) / 100,
1158 			.set_rate = 1,
1159 			.lanes = lane_count,
1160 			.set_lanes = 1,
1161 			.ssc = mtk_dp->train_info.sink_ssc,
1162 		}
1163 	};
1164 
1165 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE, DP_PWR_STATE_BANDGAP,
1166 			   DP_PWR_STATE_MASK);
1167 
1168 	ret = phy_configure(mtk_dp->phy, &phy_opts);
1169 	if (ret)
1170 		return ret;
1171 
1172 	mtk_dp_set_calibration_data(mtk_dp);
1173 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
1174 			   DP_PWR_STATE_BANDGAP_TPLL_LANE, DP_PWR_STATE_MASK);
1175 
1176 	return 0;
1177 }
1178 
1179 static void mtk_dp_set_idle_pattern(struct mtk_dp *mtk_dp, bool enable)
1180 {
1181 	u32 val = POST_MISC_DATA_LANE0_OV_DP_TRANS_P0_MASK |
1182 		  POST_MISC_DATA_LANE1_OV_DP_TRANS_P0_MASK |
1183 		  POST_MISC_DATA_LANE2_OV_DP_TRANS_P0_MASK |
1184 		  POST_MISC_DATA_LANE3_OV_DP_TRANS_P0_MASK;
1185 
1186 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3580,
1187 			   enable ? val : 0, val);
1188 }
1189 
1190 static void mtk_dp_train_set_pattern(struct mtk_dp *mtk_dp, int pattern)
1191 {
1192 	/* TPS1 */
1193 	if (pattern == 1)
1194 		mtk_dp_set_idle_pattern(mtk_dp, false);
1195 
1196 	mtk_dp_update_bits(mtk_dp,
1197 			   MTK_DP_TRANS_P0_3400,
1198 			   pattern ? BIT(pattern - 1) << 12 : 0,
1199 			   PATTERN1_EN_DP_TRANS_P0_MASK |
1200 			   PATTERN2_EN_DP_TRANS_P0_MASK |
1201 			   PATTERN3_EN_DP_TRANS_P0_MASK |
1202 			   PATTERN4_EN_DP_TRANS_P0_MASK);
1203 }
1204 
1205 static void mtk_dp_set_enhanced_frame_mode(struct mtk_dp *mtk_dp)
1206 {
1207 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000,
1208 			   ENHANCED_FRAME_EN_DP_ENC0_P0,
1209 			   ENHANCED_FRAME_EN_DP_ENC0_P0);
1210 }
1211 
1212 static void mtk_dp_training_set_scramble(struct mtk_dp *mtk_dp, bool enable)
1213 {
1214 	mtk_dp_update_bits(mtk_dp, MTK_DP_TRANS_P0_3404,
1215 			   enable ? DP_SCR_EN_DP_TRANS_P0_MASK : 0,
1216 			   DP_SCR_EN_DP_TRANS_P0_MASK);
1217 }
1218 
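/*
 * Mute or unmute the video stream: set the software mute override in the
 * encoder and forward the request to trusted firmware through a MediaTek
 * SiP SMC call.
 */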
1219 static void mtk_dp_video_mute(struct mtk_dp *mtk_dp, bool enable)
1220 {
1221 	struct arm_smccc_res res;
1222 	u32 val = VIDEO_MUTE_SEL_DP_ENC0_P0 |
1223 		  (enable ? VIDEO_MUTE_SW_DP_ENC0_P0 : 0);
1224 
1225 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3000,
1226 			   val,
1227 			   VIDEO_MUTE_SEL_DP_ENC0_P0 |
1228 			   VIDEO_MUTE_SW_DP_ENC0_P0);
1229 
1230 	arm_smccc_smc(MTK_DP_SIP_CONTROL_AARCH32,
1231 		      mtk_dp->data->smc_cmd, enable,
1232 		      0, 0, 0, 0, 0, &res);
1233 
1234 	dev_dbg(mtk_dp->dev, "smc cmd: 0x%x, p1: %s, ret: 0x%lx-0x%lx\n",
1235 		mtk_dp->data->smc_cmd, enable ? "enable" : "disable", res.a0, res.a1);
1236 }
1237 
1238 static void mtk_dp_audio_mute(struct mtk_dp *mtk_dp, bool mute)
1239 {
1240 	u32 val[3];
1241 
1242 	if (mute) {
1243 		val[0] = VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 |
1244 			 VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0;
1245 		val[1] = 0;
1246 		val[2] = 0;
1247 	} else {
1248 		val[0] = 0;
1249 		val[1] = AU_EN_DP_ENC0_P0;
1250 		/* Send one every two frames */
1251 		val[2] = 0x0F;
1252 	}
1253 
1254 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3030,
1255 			   val[0],
1256 			   VBID_AUDIO_MUTE_FLAG_SW_DP_ENC0_P0 |
1257 			   VBID_AUDIO_MUTE_FLAG_SEL_DP_ENC0_P0);
1258 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3088,
1259 			   val[1], AU_EN_DP_ENC0_P0);
1260 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_30A4,
1261 			   val[2], AU_TS_CFG_DP_ENC0_P0_MASK);
1262 }
1263 
1264 static void mtk_dp_aux_panel_poweron(struct mtk_dp *mtk_dp, bool pwron)
1265 {
1266 	if (pwron) {
1267 		/* power on aux */
1268 		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
1269 				   DP_PWR_STATE_BANDGAP_TPLL_LANE,
1270 				   DP_PWR_STATE_MASK);
1271 
1272 		/* power on panel */
1273 		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
1274 		usleep_range(2000, 5000);
1275 	} else {
1276 		/* power off panel */
1277 		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
1278 		usleep_range(2000, 3000);
1279 
1280 		/* power off aux */
1281 		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
1282 				   DP_PWR_STATE_BANDGAP_TPLL,
1283 				   DP_PWR_STATE_MASK);
1284 	}
1285 }
1286 
1287 static void mtk_dp_power_enable(struct mtk_dp *mtk_dp)
1288 {
1289 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE,
1290 			   0, SW_RST_B_PHYD);
1291 
1292 	/* Wait for power enable */
1293 	usleep_range(10, 200);
1294 
1295 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_RESET_AND_PROBE,
1296 			   SW_RST_B_PHYD, SW_RST_B_PHYD);
1297 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
1298 			   DP_PWR_STATE_BANDGAP_TPLL, DP_PWR_STATE_MASK);
1299 	mtk_dp_write(mtk_dp, MTK_DP_1040,
1300 		     RG_DPAUX_RX_VALID_DEGLITCH_EN | RG_XTP_GLB_CKDET_EN |
1301 		     RG_DPAUX_RX_EN);
1302 	mtk_dp_update_bits(mtk_dp, MTK_DP_0034, 0, DA_CKM_CKTX0_EN_FORCE_EN);
1303 }
1304 
1305 static void mtk_dp_power_disable(struct mtk_dp *mtk_dp)
1306 {
1307 	mtk_dp_write(mtk_dp, MTK_DP_TOP_PWR_STATE, 0);
1308 
1309 	mtk_dp_update_bits(mtk_dp, MTK_DP_0034,
1310 			   DA_CKM_CKTX0_EN_FORCE_EN, DA_CKM_CKTX0_EN_FORCE_EN);
1311 
1312 	/* Disable RX */
1313 	mtk_dp_write(mtk_dp, MTK_DP_1040, 0);
1314 	mtk_dp_write(mtk_dp, MTK_DP_TOP_MEM_PD,
1315 		     0x550 | FUSE_SEL | MEM_ISO_EN);
1316 }
1317 
1318 static void mtk_dp_initialize_priv_data(struct mtk_dp *mtk_dp)
1319 {
1320 	bool plugged_in = (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP);
1321 
1322 	mtk_dp->train_info.link_rate = DP_LINK_BW_5_4;
1323 	mtk_dp->train_info.lane_count = mtk_dp->max_lanes;
1324 	mtk_dp->train_info.cable_plugged_in = plugged_in;
1325 
1326 	mtk_dp->info.format = DP_PIXELFORMAT_RGB;
1327 	memset(&mtk_dp->info.vm, 0, sizeof(struct videomode));
1328 	mtk_dp->audio_enable = false;
1329 }
1330 
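/*
 * Compute the initial SDP (secondary data packet) down-count from the
 * trained link rate and the pixel clock, then raise it to a minimum value
 * that depends on the lane count.
 */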
1331 static void mtk_dp_sdp_set_down_cnt_init(struct mtk_dp *mtk_dp,
1332 					 u32 sram_read_start)
1333 {
1334 	u32 sdp_down_cnt_init = 0;
1335 	struct drm_display_mode mode;
1336 	struct videomode *vm = &mtk_dp->info.vm;
1337 
1338 	drm_display_mode_from_videomode(vm, &mode);
1339 
1340 	if (mode.clock > 0)
1341 		sdp_down_cnt_init = sram_read_start *
1342 				    mtk_dp->train_info.link_rate * 2700 * 8 /
1343 				    (mode.clock * 4);
1344 
1345 	switch (mtk_dp->train_info.lane_count) {
1346 	case 1:
1347 		sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x1A);
1348 		break;
1349 	case 2:
1350 		/* case for low resolution with a high audio sample rate */
1351 		sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 0x10);
1352 		sdp_down_cnt_init += mode.vtotal <= 525 ? 4 : 0;
1353 		break;
1354 	case 4:
1355 	default:
1356 		sdp_down_cnt_init = max_t(u32, sdp_down_cnt_init, 6);
1357 		break;
1358 	}
1359 
1360 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC0_P0_3040,
1361 			   sdp_down_cnt_init,
1362 			   SDP_DOWN_CNT_INIT_DP_ENC0_P0_MASK);
1363 }
1364 
1365 static void mtk_dp_sdp_set_down_cnt_init_in_hblank(struct mtk_dp *mtk_dp)
1366 {
1367 	int pix_clk_mhz;
1368 	u32 dc_offset;
1369 	u32 spd_down_cnt_init = 0;
1370 	struct drm_display_mode mode;
1371 	struct videomode *vm = &mtk_dp->info.vm;
1372 
1373 	drm_display_mode_from_videomode(vm, &mode);
1374 
1375 	pix_clk_mhz = mtk_dp->info.format == DP_PIXELFORMAT_YUV420 ?
1376 		      mode.clock / 2000 : mode.clock / 1000;
1377 
1378 	switch (mtk_dp->train_info.lane_count) {
1379 	case 1:
1380 		spd_down_cnt_init = 0x20;
1381 		break;
1382 	case 2:
1383 		dc_offset = (mode.vtotal <= 525) ? 0x14 : 0x00;
1384 		spd_down_cnt_init = 0x18 + dc_offset;
1385 		break;
1386 	case 4:
1387 	default:
1388 		dc_offset = (mode.vtotal <= 525) ? 0x08 : 0x00;
1389 		if (pix_clk_mhz > mtk_dp->train_info.link_rate * 27)
1390 			spd_down_cnt_init = 0x8;
1391 		else
1392 			spd_down_cnt_init = 0x10 + dc_offset;
1393 		break;
1394 	}
1395 
1396 	mtk_dp_update_bits(mtk_dp, MTK_DP_ENC1_P0_3364, spd_down_cnt_init,
1397 			   SDP_DOWN_CNT_INIT_IN_HBLANK_DP_ENC1_P0_MASK);
1398 }
1399 
1400 static void mtk_dp_setup_tu(struct mtk_dp *mtk_dp)
1401 {
1402 	u32 sram_read_start = min_t(u32, MTK_DP_TBC_BUF_READ_START_ADDR,
1403 				    mtk_dp->info.vm.hactive /
1404 				    mtk_dp->train_info.lane_count /
1405 				    MTK_DP_4P1T / MTK_DP_HDE /
1406 				    MTK_DP_PIX_PER_ADDR);
1407 	mtk_dp_set_sram_read_start(mtk_dp, sram_read_start);
1408 	mtk_dp_setup_encoder(mtk_dp);
1409 	mtk_dp_sdp_set_down_cnt_init_in_hblank(mtk_dp);
1410 	mtk_dp_sdp_set_down_cnt_init(mtk_dp, sram_read_start);
1411 }
1412 
1413 static void mtk_dp_set_tx_out(struct mtk_dp *mtk_dp)
1414 {
1415 	mtk_dp_setup_tu(mtk_dp);
1416 }
1417 
1418 static void mtk_dp_train_update_swing_pre(struct mtk_dp *mtk_dp, int lanes,
1419 					  u8 dpcd_adjust_req[2])
1420 {
1421 	int lane;
1422 
1423 	for (lane = 0; lane < lanes; ++lane) {
1424 		u8 val;
1425 		u8 swing;
1426 		u8 preemphasis;
1427 		int index = lane / 2;
1428 		int shift = lane % 2 ? DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 0;
1429 
1430 		swing = (dpcd_adjust_req[index] >> shift) &
1431 			DP_ADJUST_VOLTAGE_SWING_LANE0_MASK;
1432 		preemphasis = ((dpcd_adjust_req[index] >> shift) &
1433 			       DP_ADJUST_PRE_EMPHASIS_LANE0_MASK) >>
1434 			      DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT;
1435 		val = swing << DP_TRAIN_VOLTAGE_SWING_SHIFT |
1436 		      preemphasis << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1437 
1438 		if (swing == DP_TRAIN_VOLTAGE_SWING_LEVEL_3)
1439 			val |= DP_TRAIN_MAX_SWING_REACHED;
1440 		if (preemphasis == 3)
1441 			val |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1442 
1443 		mtk_dp_set_swing_pre_emphasis(mtk_dp, lane, swing, preemphasis);
1444 		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_LANE0_SET + lane,
1445 				   val);
1446 	}
1447 }
1448 
1449 static void mtk_dp_pattern(struct mtk_dp *mtk_dp, bool is_tps1)
1450 {
1451 	int pattern;
1452 	unsigned int aux_offset;
1453 
1454 	if (is_tps1) {
1455 		pattern = 1;
1456 		aux_offset = DP_LINK_SCRAMBLING_DISABLE | DP_TRAINING_PATTERN_1;
1457 	} else {
1458 		aux_offset = mtk_dp->train_info.channel_eq_pattern;
1459 
1460 		switch (mtk_dp->train_info.channel_eq_pattern) {
1461 		case DP_TRAINING_PATTERN_4:
1462 			pattern = 4;
1463 			break;
1464 		case DP_TRAINING_PATTERN_3:
1465 			pattern = 3;
1466 			aux_offset |= DP_LINK_SCRAMBLING_DISABLE;
1467 			break;
1468 		case DP_TRAINING_PATTERN_2:
1469 		default:
1470 			pattern = 2;
1471 			aux_offset |= DP_LINK_SCRAMBLING_DISABLE;
1472 			break;
1473 		}
1474 	}
1475 
1476 	mtk_dp_train_set_pattern(mtk_dp, pattern);
1477 	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET, aux_offset);
1478 }
1479 
1480 static int mtk_dp_train_setting(struct mtk_dp *mtk_dp, u8 target_link_rate,
1481 				u8 target_lane_count)
1482 {
1483 	int ret;
1484 
1485 	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LINK_BW_SET, target_link_rate);
1486 	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_LANE_COUNT_SET,
1487 			   target_lane_count | DP_LANE_COUNT_ENHANCED_FRAME_EN);
1488 
1489 	if (mtk_dp->train_info.sink_ssc)
1490 		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_DOWNSPREAD_CTRL,
1491 				   DP_SPREAD_AMP_0_5);
1492 
1493 	mtk_dp_set_lanes(mtk_dp, target_lane_count / 2);
1494 	ret = mtk_dp_phy_configure(mtk_dp, target_link_rate, target_lane_count);
1495 	if (ret)
1496 		return ret;
1497 
1498 	dev_dbg(mtk_dp->dev,
1499 		"Link train target_link_rate = 0x%x, target_lane_count = 0x%x\n",
1500 		target_link_rate, target_lane_count);
1501 
1502 	return 0;
1503 }
1504 
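/*
 * Clock recovery phase of link training: transmit TPS1, then repeatedly
 * apply the sink's requested swing/pre-emphasis until the sink reports CR
 * done. Bail out on cable unplug, after too many retries at the same
 * voltage level, or when the overall retry limit is hit.
 */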
1505 static int mtk_dp_train_cr(struct mtk_dp *mtk_dp, u8 target_lane_count)
1506 {
1507 	u8 lane_adjust[2] = {};
1508 	u8 link_status[DP_LINK_STATUS_SIZE] = {};
1509 	u8 prev_lane_adjust = 0xff;
1510 	int train_retries = 0;
1511 	int voltage_retries = 0;
1512 
1513 	mtk_dp_pattern(mtk_dp, true);
1514 
1515 	/* In DP spec 1.4, the retry count of CR is defined as 10. */
1516 	do {
1517 		train_retries++;
1518 		if (!mtk_dp->train_info.cable_plugged_in) {
1519 			mtk_dp_train_set_pattern(mtk_dp, 0);
1520 			return -ENODEV;
1521 		}
1522 
1523 		drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1,
1524 				 lane_adjust, sizeof(lane_adjust));
1525 		mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count,
1526 					      lane_adjust);
1527 
1528 		drm_dp_link_train_clock_recovery_delay(&mtk_dp->aux,
1529 						       mtk_dp->rx_cap);
1530 
1531 		/* check link status from sink device */
1532 		drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status);
1533 		if (drm_dp_clock_recovery_ok(link_status,
1534 					     target_lane_count)) {
1535 			dev_dbg(mtk_dp->dev, "Link train CR pass\n");
1536 			return 0;
1537 		}
1538 
1539 		/*
1540 		 * In DP spec 1.4, if the current voltage level is the same as
1541 		 * the previous voltage level, we need to retry 5 times.
1542 		 */
1543 		if (prev_lane_adjust == link_status[4]) {
1544 			voltage_retries++;
1545 			/*
1546 			 * Condition of CR fail:
1547 			 * 1. Failed to pass CR using the same voltage
1548 			 *    level over five times.
1549 			 * 2. Failed to pass CR when the current voltage
1550 			 *    level is the same as the previous voltage
1551 			 *    level and the max voltage level (3) is reached.
1552 			 */
1553 			if (voltage_retries > MTK_DP_TRAIN_VOLTAGE_LEVEL_RETRY ||
1554 			    (prev_lane_adjust & DP_ADJUST_VOLTAGE_SWING_LANE0_MASK) == 3) {
1555 				dev_dbg(mtk_dp->dev, "Link train CR fail\n");
1556 				break;
1557 			}
1558 		} else {
1559 			/*
1560 			 * If the voltage level is changed, we need to
1561 			 * re-calculate this retry count.
1562 			 */
1563 			voltage_retries = 0;
1564 		}
1565 		prev_lane_adjust = link_status[4];
1566 	} while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY);
1567 
1568 	/* Failed to train CR, and disable pattern. */
1569 	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET,
1570 			   DP_TRAINING_PATTERN_DISABLE);
1571 	mtk_dp_train_set_pattern(mtk_dp, 0);
1572 
1573 	return -ETIMEDOUT;
1574 }
1575 
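/*
 * Channel equalization phase of link training: transmit TPS2/3/4 as
 * advertised by the sink and keep applying the requested swing and
 * pre-emphasis until the sink reports EQ done, or give up after the retry
 * limit.
 */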
1576 static int mtk_dp_train_eq(struct mtk_dp *mtk_dp, u8 target_lane_count)
1577 {
1578 	u8 lane_adjust[2] = {};
1579 	u8 link_status[DP_LINK_STATUS_SIZE] = {};
1580 	int train_retries = 0;
1581 
1582 	mtk_dp_pattern(mtk_dp, false);
1583 
1584 	do {
1585 		train_retries++;
1586 		if (!mtk_dp->train_info.cable_plugged_in) {
1587 			mtk_dp_train_set_pattern(mtk_dp, 0);
1588 			return -ENODEV;
1589 		}
1590 
1591 		drm_dp_dpcd_read(&mtk_dp->aux, DP_ADJUST_REQUEST_LANE0_1,
1592 				 lane_adjust, sizeof(lane_adjust));
1593 		mtk_dp_train_update_swing_pre(mtk_dp, target_lane_count,
1594 					      lane_adjust);
1595 
1596 		drm_dp_link_train_channel_eq_delay(&mtk_dp->aux,
1597 						   mtk_dp->rx_cap);
1598 
1599 		/* check link status from sink device */
1600 		drm_dp_dpcd_read_link_status(&mtk_dp->aux, link_status);
1601 		if (drm_dp_channel_eq_ok(link_status, target_lane_count)) {
1602 			dev_dbg(mtk_dp->dev, "Link train EQ pass\n");
1603 
1604 			/* Training done, and disable pattern. */
1605 			drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET,
1606 					   DP_TRAINING_PATTERN_DISABLE);
1607 			mtk_dp_train_set_pattern(mtk_dp, 0);
1608 			return 0;
1609 		}
1610 		dev_dbg(mtk_dp->dev, "Link train EQ fail\n");
1611 	} while (train_retries < MTK_DP_TRAIN_DOWNSCALE_RETRY);
1612 
1613 	/* Failed to train EQ, and disable pattern. */
1614 	drm_dp_dpcd_writeb(&mtk_dp->aux, DP_TRAINING_PATTERN_SET,
1615 			   DP_TRAINING_PATTERN_DISABLE);
1616 	mtk_dp_train_set_pattern(mtk_dp, 0);
1617 
1618 	return -ETIMEDOUT;
1619 }
1620 
1621 static int mtk_dp_parse_capabilities(struct mtk_dp *mtk_dp)
1622 {
1623 	u8 val;
1624 	ssize_t ret;
1625 
1626 	/*
1627 	 * If this is eDP and the capabilities were already parsed, we can skip
1628 	 * reading them again: eDP panels aren't hotpluggable, so the caps and
1629 	 * training information never change within a boot.
1630 	 */
1631 	if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP &&
1632 	    mtk_dp->rx_cap[DP_MAX_LINK_RATE] &&
1633 	    mtk_dp->train_info.sink_ssc)
1634 		return 0;
1635 
1636 	ret = drm_dp_read_dpcd_caps(&mtk_dp->aux, mtk_dp->rx_cap);
1637 	if (ret < 0)
1638 		return ret;
1639 
1640 	if (drm_dp_tps4_supported(mtk_dp->rx_cap))
1641 		mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_4;
1642 	else if (drm_dp_tps3_supported(mtk_dp->rx_cap))
1643 		mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_3;
1644 	else
1645 		mtk_dp->train_info.channel_eq_pattern = DP_TRAINING_PATTERN_2;
1646 
1647 	mtk_dp->train_info.sink_ssc = drm_dp_max_downspread(mtk_dp->rx_cap);
1648 
1649 	ret = drm_dp_dpcd_readb(&mtk_dp->aux, DP_MSTM_CAP, &val);
1650 	if (ret < 1) {
1651 		drm_err(mtk_dp->drm_dev, "Read mstm cap failed\n");
1652 		return ret == 0 ? -EIO : ret;
1653 	}
1654 
1655 	if (val & DP_MST_CAP) {
1656 		/* Clear DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0 */
1657 		ret = drm_dp_dpcd_readb(&mtk_dp->aux,
1658 					DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0,
1659 					&val);
1660 		if (ret < 1) {
1661 			drm_err(mtk_dp->drm_dev, "Read irq vector failed\n");
1662 			return ret == 0 ? -EIO : ret;
1663 		}
1664 
1665 		if (val) {
1666 			ret = drm_dp_dpcd_writeb(&mtk_dp->aux,
1667 						 DP_DEVICE_SERVICE_IRQ_VECTOR_ESI0,
1668 						 val);
1669 			if (ret < 0)
1670 				return ret;
1671 		}
1672 	}
1673 
1674 	return 0;
1675 }
1676 
1677 static bool mtk_dp_edid_parse_audio_capabilities(struct mtk_dp *mtk_dp,
1678 						 struct mtk_dp_audio_cfg *cfg)
1679 {
1680 	if (!mtk_dp->data->audio_supported)
1681 		return false;
1682 
1683 	if (mtk_dp->info.audio_cur_cfg.sad_count <= 0) {
1684 		drm_info(mtk_dp->drm_dev, "No audio SADs found\n");
1685 		return false;
1686 	}
1687 
1688 	return true;
1689 }
1690 
1691 static void mtk_dp_train_change_mode(struct mtk_dp *mtk_dp)
1692 {
1693 	phy_reset(mtk_dp->phy);
1694 	mtk_dp_reset_swing_pre_emphasis(mtk_dp);
1695 }
1696 
1697 static int mtk_dp_training(struct mtk_dp *mtk_dp)
1698 {
1699 	int ret;
1700 	u8 lane_count, link_rate, train_limit, max_link_rate;
1701 
1702 	link_rate = min_t(u8, mtk_dp->max_linkrate,
1703 			  mtk_dp->rx_cap[DP_MAX_LINK_RATE]);
1704 	max_link_rate = link_rate;
1705 	lane_count = min_t(u8, mtk_dp->max_lanes,
1706 			   drm_dp_max_lane_count(mtk_dp->rx_cap));
1707 
1708 	/*
1709 	 * The TPS are generated by the hardware pattern generator, so
1710 	 * scrambling must be disabled before the TPS pattern generator
1711 	 * is used.
1712 	 */
1713 	mtk_dp_training_set_scramble(mtk_dp, false);
1714 
1715 	for (train_limit = 6; train_limit > 0; train_limit--) {
1716 		mtk_dp_train_change_mode(mtk_dp);
1717 
1718 		ret = mtk_dp_train_setting(mtk_dp, link_rate, lane_count);
1719 		if (ret)
1720 			return ret;
1721 
1722 		ret = mtk_dp_train_cr(mtk_dp, lane_count);
1723 		if (ret == -ENODEV) {
1724 			return ret;
1725 		} else if (ret) {
1726 			/* reduce link rate */
1727 			switch (link_rate) {
1728 			case DP_LINK_BW_1_62:
1729 				lane_count = lane_count / 2;
1730 				link_rate = max_link_rate;
1731 				if (lane_count == 0)
1732 					return -EIO;
1733 				break;
1734 			case DP_LINK_BW_2_7:
1735 				link_rate = DP_LINK_BW_1_62;
1736 				break;
1737 			case DP_LINK_BW_5_4:
1738 				link_rate = DP_LINK_BW_2_7;
1739 				break;
1740 			case DP_LINK_BW_8_1:
1741 				link_rate = DP_LINK_BW_5_4;
1742 				break;
1743 			default:
1744 				return -EINVAL;
1745 			}
1746 			continue;
1747 		}
1748 
1749 		ret = mtk_dp_train_eq(mtk_dp, lane_count);
1750 		if (ret == -ENODEV) {
1751 			return ret;
1752 		} else if (ret) {
1753 			/* reduce lane count */
1754 			if (lane_count == 0)
1755 				return -EIO;
1756 			lane_count /= 2;
1757 			continue;
1758 		}
1759 
1760 		/* If we reach this point, training is done. */
1761 		break;
1762 	}
1763 
1764 	if (train_limit == 0)
1765 		return -ETIMEDOUT;
1766 
1767 	mtk_dp->train_info.link_rate = link_rate;
1768 	mtk_dp->train_info.lane_count = lane_count;
1769 
1770 	/*
1771 	 * After training is done we output the normal stream instead of TPS,
1772 	 * so scrambling must be re-enabled.
1773 	 */
1774 	mtk_dp_training_set_scramble(mtk_dp, true);
1775 	mtk_dp_set_enhanced_frame_mode(mtk_dp);
1776 
1777 	return 0;
1778 }
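/*
 * For illustration (assuming a hypothetical sink that advertises HBR3 and
 * 4 lanes), the retry loop above falls back as follows:
 *  - a clock recovery failure lowers the link rate one step at a time,
 *    HBR3 -> HBR2 -> HBR -> RBR;
 *  - a clock recovery failure at RBR halves the lane count (4 -> 2 -> 1)
 *    and restarts from the maximum link rate;
 *  - a channel equalization failure halves the lane count at the current
 *    link rate;
 *  - after 6 attempts without success the function returns -ETIMEDOUT.
 */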
1779 
1780 static void mtk_dp_video_enable(struct mtk_dp *mtk_dp, bool enable)
1781 {
1782 	/* The mute sequence differs between enable and disable */
1783 	if (enable) {
1784 		mtk_dp_msa_bypass_enable(mtk_dp, false);
1785 		mtk_dp_pg_enable(mtk_dp, false);
1786 		mtk_dp_set_tx_out(mtk_dp);
1787 		mtk_dp_video_mute(mtk_dp, false);
1788 	} else {
1789 		mtk_dp_video_mute(mtk_dp, true);
1790 		mtk_dp_pg_enable(mtk_dp, true);
1791 		mtk_dp_msa_bypass_enable(mtk_dp, true);
1792 	}
1793 }
1794 
1795 static void mtk_dp_audio_sdp_setup(struct mtk_dp *mtk_dp,
1796 				   struct mtk_dp_audio_cfg *cfg)
1797 {
1798 	struct dp_sdp sdp;
1799 	struct hdmi_audio_infoframe frame;
1800 
1801 	hdmi_audio_infoframe_init(&frame);
1802 	frame.coding_type = HDMI_AUDIO_CODING_TYPE_PCM;
1803 	frame.channels = cfg->channels;
1804 	frame.sample_frequency = cfg->sample_rate;
1805 
1806 	switch (cfg->word_length_bits) {
1807 	case 16:
1808 		frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_16;
1809 		break;
1810 	case 20:
1811 		frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_20;
1812 		break;
1813 	case 24:
1814 	default:
1815 		frame.sample_size = HDMI_AUDIO_SAMPLE_SIZE_24;
1816 		break;
1817 	}
1818 
1819 	hdmi_audio_infoframe_pack_for_dp(&frame, &sdp, MTK_DP_VERSION);
1820 
1821 	mtk_dp_audio_sdp_asp_set_channels(mtk_dp, cfg->channels);
1822 	mtk_dp_setup_sdp_aui(mtk_dp, &sdp);
1823 }
1824 
1825 static void mtk_dp_audio_setup(struct mtk_dp *mtk_dp,
1826 			       struct mtk_dp_audio_cfg *cfg)
1827 {
1828 	mtk_dp_audio_sdp_setup(mtk_dp, cfg);
1829 	mtk_dp_audio_channel_status_set(mtk_dp, cfg);
1830 
1831 	mtk_dp_audio_setup_channels(mtk_dp, cfg);
1832 	mtk_dp_audio_set_divider(mtk_dp);
1833 }
1834 
1835 static int mtk_dp_video_config(struct mtk_dp *mtk_dp)
1836 {
1837 	mtk_dp_config_mn_mode(mtk_dp);
1838 	mtk_dp_set_msa(mtk_dp);
1839 	mtk_dp_set_color_depth(mtk_dp);
1840 	return mtk_dp_set_color_format(mtk_dp, mtk_dp->info.format);
1841 }
1842 
1843 static void mtk_dp_init_port(struct mtk_dp *mtk_dp)
1844 {
1845 	mtk_dp_set_idle_pattern(mtk_dp, true);
1846 	mtk_dp_initialize_priv_data(mtk_dp);
1847 
1848 	mtk_dp_initialize_settings(mtk_dp);
1849 	mtk_dp_initialize_aux_settings(mtk_dp);
1850 	mtk_dp_initialize_digital_settings(mtk_dp);
1851 	mtk_dp_initialize_hpd_detect_settings(mtk_dp);
1852 
1853 	mtk_dp_digital_sw_reset(mtk_dp);
1854 }
1855 
1856 static irqreturn_t mtk_dp_hpd_event_thread(int hpd, void *dev)
1857 {
1858 	struct mtk_dp *mtk_dp = dev;
1859 	unsigned long flags;
1860 	u32 status;
1861 
1862 	if (mtk_dp->need_debounce && mtk_dp->train_info.cable_plugged_in)
1863 		msleep(100);
1864 
1865 	spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags);
1866 	status = mtk_dp->irq_thread_handle;
1867 	mtk_dp->irq_thread_handle = 0;
1868 	spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags);
1869 
1870 	if (status & MTK_DP_THREAD_CABLE_STATE_CHG) {
1871 		if (mtk_dp->bridge.dev)
1872 			drm_helper_hpd_irq_event(mtk_dp->bridge.dev);
1873 
1874 		if (!mtk_dp->train_info.cable_plugged_in) {
1875 			mtk_dp_disable_sdp_aui(mtk_dp);
1876 			memset(&mtk_dp->info.audio_cur_cfg, 0,
1877 			       sizeof(mtk_dp->info.audio_cur_cfg));
1878 
1879 			mtk_dp->need_debounce = false;
1880 			mod_timer(&mtk_dp->debounce_timer,
1881 				  jiffies + msecs_to_jiffies(100) - 1);
1882 		}
1883 	}
1884 
1885 	if (status & MTK_DP_THREAD_HPD_EVENT)
1886 		dev_dbg(mtk_dp->dev, "Received IRQ from sink device\n");
1887 
1888 	return IRQ_HANDLED;
1889 }
1890 
1891 static irqreturn_t mtk_dp_hpd_event(int hpd, void *dev)
1892 {
1893 	struct mtk_dp *mtk_dp = dev;
1894 	bool cable_sta_chg = false;
1895 	unsigned long flags;
1896 	u32 irq_status = mtk_dp_swirq_get_clear(mtk_dp) |
1897 			 mtk_dp_hwirq_get_clear(mtk_dp);
1898 
1899 	if (!irq_status)
1900 		return IRQ_HANDLED;
1901 
1902 	spin_lock_irqsave(&mtk_dp->irq_thread_lock, flags);
1903 
1904 	if (irq_status & MTK_DP_HPD_INTERRUPT)
1905 		mtk_dp->irq_thread_handle |= MTK_DP_THREAD_HPD_EVENT;
1906 
1907 	/* The cable state has changed. */
1908 	if (irq_status != MTK_DP_HPD_INTERRUPT) {
1909 		mtk_dp->irq_thread_handle |= MTK_DP_THREAD_CABLE_STATE_CHG;
1910 		cable_sta_chg = true;
1911 	}
1912 
1913 	spin_unlock_irqrestore(&mtk_dp->irq_thread_lock, flags);
1914 
1915 	if (cable_sta_chg) {
1916 		if (!!(mtk_dp_read(mtk_dp, MTK_DP_TRANS_P0_3414) &
1917 		       HPD_DB_DP_TRANS_P0_MASK))
1918 			mtk_dp->train_info.cable_plugged_in = true;
1919 		else
1920 			mtk_dp->train_info.cable_plugged_in = false;
1921 	}
1922 
1923 	return IRQ_WAKE_THREAD;
1924 }
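/*
 * Summary of the split above: the hard IRQ handler only latches the event
 * bits in irq_thread_handle under irq_thread_lock and samples the HPD
 * debounce bit, while the threaded handler (mtk_dp_hpd_event_thread)
 * consumes those bits, optionally waits 100 ms for debouncing and notifies
 * DRM through drm_helper_hpd_irq_event().
 */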
1925 
1926 static int mtk_dp_wait_hpd_asserted(struct drm_dp_aux *mtk_aux, unsigned long wait_us)
1927 {
1928 	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
1929 	u32 val;
1930 	int ret;
1931 
1932 	ret = regmap_read_poll_timeout(mtk_dp->regs, MTK_DP_TRANS_P0_3414,
1933 				       val, !!(val & HPD_DB_DP_TRANS_P0_MASK),
1934 				       wait_us / 100, wait_us);
1935 	if (ret) {
1936 		mtk_dp->train_info.cable_plugged_in = false;
1937 		return ret;
1938 	}
1939 
1940 	mtk_dp->train_info.cable_plugged_in = true;
1941 
1942 	ret = mtk_dp_parse_capabilities(mtk_dp);
1943 	if (ret) {
1944 		drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n");
1945 		return ret;
1946 	}
1947 
1948 	return 0;
1949 }
1950 
1951 static int mtk_dp_dt_parse(struct mtk_dp *mtk_dp,
1952 			   struct platform_device *pdev)
1953 {
1954 	struct device_node *endpoint;
1955 	struct device *dev = &pdev->dev;
1956 	int ret;
1957 	void __iomem *base;
1958 	u32 linkrate;
1959 	int len;
1960 
1961 	base = devm_platform_ioremap_resource(pdev, 0);
1962 	if (IS_ERR(base))
1963 		return PTR_ERR(base);
1964 
1965 	mtk_dp->regs = devm_regmap_init_mmio(dev, base, &mtk_dp_regmap_config);
1966 	if (IS_ERR(mtk_dp->regs))
1967 		return PTR_ERR(mtk_dp->regs);
1968 
1969 	endpoint = of_graph_get_endpoint_by_regs(pdev->dev.of_node, 1, -1);
1970 	len = of_property_count_elems_of_size(endpoint,
1971 					      "data-lanes", sizeof(u32));
1972 	if (len < 0 || len > 4 || len == 3) {
1973 		dev_err(dev, "invalid data lane size: %d\n", len);
1974 		return -EINVAL;
1975 	}
1976 
1977 	mtk_dp->max_lanes = len;
1978 
1979 	ret = device_property_read_u32(dev, "max-linkrate-mhz", &linkrate);
1980 	if (ret) {
1981 		dev_err(dev, "failed to read max linkrate: %d\n", ret);
1982 		return ret;
1983 	}
1984 
1985 	mtk_dp->max_linkrate = drm_dp_link_rate_to_bw_code(linkrate * 100);
1986 
1987 	return 0;
1988 }
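/*
 * A minimal, hypothetical device tree fragment matching what the parser
 * above expects (node and label names are illustrative only):
 *
 *	&edp_tx {
 *		max-linkrate-mhz = <8100>;
 *		ports {
 *			port@1 {
 *				edp_out: endpoint {
 *					data-lanes = <0 1 2 3>;
 *				};
 *			};
 *		};
 *	};
 *
 * With max-linkrate-mhz = <8100>, drm_dp_link_rate_to_bw_code(810000)
 * yields DP_LINK_BW_8_1.
 */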
1989 
1990 static void mtk_dp_update_plugged_status(struct mtk_dp *mtk_dp)
1991 {
1992 	if (!mtk_dp->data->audio_supported || !mtk_dp->audio_enable)
1993 		return;
1994 
1995 	mutex_lock(&mtk_dp->update_plugged_status_lock);
1996 	if (mtk_dp->plugged_cb && mtk_dp->codec_dev)
1997 		mtk_dp->plugged_cb(mtk_dp->codec_dev,
1998 				   mtk_dp->enabled &
1999 				   mtk_dp->info.audio_cur_cfg.detect_monitor);
2000 	mutex_unlock(&mtk_dp->update_plugged_status_lock);
2001 }
2002 
2003 static enum drm_connector_status mtk_dp_bdg_detect(struct drm_bridge *bridge)
2004 {
2005 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2006 	enum drm_connector_status ret = connector_status_disconnected;
2007 	bool enabled = mtk_dp->enabled;
2008 
2009 	if (!mtk_dp->train_info.cable_plugged_in)
2010 		return ret;
2011 
2012 	if (!enabled)
2013 		mtk_dp_aux_panel_poweron(mtk_dp, true);
2014 
2015 	/*
2016 	 * Some dongles keep HPD asserted even when no sink device is
2017 	 * connected. To avoid false positives, read the sink count to
2018 	 * make sure a sink device really is connected. After this detect
2019 	 * callback, checking the HPD state is enough to tell whether a
2020 	 * sink device is connected.
2021 	 */
2022 
2023 	if (drm_dp_read_sink_count(&mtk_dp->aux) > 0)
2024 		ret = connector_status_connected;
2025 
2026 	if (!enabled)
2027 		mtk_dp_aux_panel_poweron(mtk_dp, false);
2028 
2029 	return ret;
2030 }
2031 
2032 static const struct drm_edid *mtk_dp_edid_read(struct drm_bridge *bridge,
2033 					       struct drm_connector *connector)
2034 {
2035 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2036 	bool enabled = mtk_dp->enabled;
2037 	const struct drm_edid *drm_edid;
2038 	struct mtk_dp_audio_cfg *audio_caps = &mtk_dp->info.audio_cur_cfg;
2039 
2040 	if (!enabled) {
2041 		drm_atomic_bridge_chain_pre_enable(bridge, connector->state->state);
2042 		mtk_dp_aux_panel_poweron(mtk_dp, true);
2043 	}
2044 
2045 	drm_edid = drm_edid_read_ddc(connector, &mtk_dp->aux.ddc);
2046 
2047 	/*
2048 	 * Parse the capabilities here so that atomic_get_input_bus_fmts and
2049 	 * mode_valid can use them to calculate the sink bitrates.
2050 	 */
2051 	if (mtk_dp_parse_capabilities(mtk_dp)) {
2052 		drm_err(mtk_dp->drm_dev, "Can't parse capabilities\n");
2053 		drm_edid_free(drm_edid);
2054 		drm_edid = NULL;
2055 	}
2056 
2057 	if (drm_edid) {
2058 		/*
2059 		 * FIXME: get rid of drm_edid_raw()
2060 		 */
2061 		const struct edid *edid = drm_edid_raw(drm_edid);
2062 		struct cea_sad *sads;
2063 		int ret;
2064 
2065 		ret = drm_edid_to_sad(edid, &sads);
2066 		/* Ignore any errors */
2067 		if (ret < 0)
2068 			ret = 0;
2069 		if (ret)
2070 			kfree(sads);
2071 		audio_caps->sad_count = ret;
2072 
2073 		/*
2074 		 * FIXME: This should use connector->display_info.has_audio from
2075 		 * a path that has read the EDID and called
2076 		 * drm_edid_connector_update().
2077 		 */
2078 		audio_caps->detect_monitor = drm_detect_monitor_audio(edid);
2079 	}
2080 
2081 	if (!enabled) {
2082 		mtk_dp_aux_panel_poweron(mtk_dp, false);
2083 		drm_atomic_bridge_chain_post_disable(bridge, connector->state->state);
2084 	}
2085 
2086 	return drm_edid;
2087 }
2088 
2089 static ssize_t mtk_dp_aux_transfer(struct drm_dp_aux *mtk_aux,
2090 				   struct drm_dp_aux_msg *msg)
2091 {
2092 	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
2093 	bool is_read;
2094 	u8 request;
2095 	size_t accessed_bytes = 0;
2096 	int ret;
2097 
2098 	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP &&
2099 	    !mtk_dp->train_info.cable_plugged_in) {
2100 		ret = -EIO;
2101 		goto err;
2102 	}
2103 
2104 	switch (msg->request) {
2105 	case DP_AUX_I2C_MOT:
2106 	case DP_AUX_I2C_WRITE:
2107 	case DP_AUX_NATIVE_WRITE:
2108 	case DP_AUX_I2C_WRITE_STATUS_UPDATE:
2109 	case DP_AUX_I2C_WRITE_STATUS_UPDATE | DP_AUX_I2C_MOT:
2110 		request = msg->request & ~DP_AUX_I2C_WRITE_STATUS_UPDATE;
2111 		is_read = false;
2112 		break;
2113 	case DP_AUX_I2C_READ:
2114 	case DP_AUX_NATIVE_READ:
2115 	case DP_AUX_I2C_READ | DP_AUX_I2C_MOT:
2116 		request = msg->request;
2117 		is_read = true;
2118 		break;
2119 	default:
2120 		dev_err(mtk_dp->dev, "invalid aux cmd = %d\n",
2121 			msg->request);
2122 		ret = -EINVAL;
2123 		goto err;
2124 	}
2125 
2126 	do {
2127 		size_t to_access = min_t(size_t, DP_AUX_MAX_PAYLOAD_BYTES,
2128 					 msg->size - accessed_bytes);
2129 
2130 		ret = mtk_dp_aux_do_transfer(mtk_dp, is_read, request,
2131 					     msg->address + accessed_bytes,
2132 					     msg->buffer + accessed_bytes,
2133 					     to_access, &msg->reply);
2134 
2135 		if (ret) {
2136 			dev_info(mtk_dp->dev,
2137 				 "Failed to do AUX transfer: %d\n", ret);
2138 			goto err;
2139 		}
2140 		accessed_bytes += to_access;
2141 	} while (accessed_bytes < msg->size);
2142 
2143 	return msg->size;
2144 err:
2145 	msg->reply = DP_AUX_NATIVE_REPLY_NACK | DP_AUX_I2C_REPLY_NACK;
2146 	return ret;
2147 }
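/*
 * For illustration: a 20-byte native DPCD read is split by the loop above
 * into two mtk_dp_aux_do_transfer() calls of 16 and 4 bytes, since
 * DP_AUX_MAX_PAYLOAD_BYTES is 16; on success the full msg->size is
 * returned to the DP AUX core.
 */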
2148 
2149 static int mtk_dp_poweron(struct mtk_dp *mtk_dp)
2150 {
2151 	int ret;
2152 
2153 	ret = phy_init(mtk_dp->phy);
2154 	if (ret) {
2155 		dev_err(mtk_dp->dev, "Failed to initialize phy: %d\n", ret);
2156 		return ret;
2157 	}
2158 
2159 	mtk_dp_init_port(mtk_dp);
2160 	mtk_dp_power_enable(mtk_dp);
2161 
2162 	return 0;
2163 }
2164 
2165 static void mtk_dp_poweroff(struct mtk_dp *mtk_dp)
2166 {
2167 	mtk_dp_power_disable(mtk_dp);
2168 	phy_exit(mtk_dp->phy);
2169 }
2170 
2171 static int mtk_dp_bridge_attach(struct drm_bridge *bridge,
2172 				enum drm_bridge_attach_flags flags)
2173 {
2174 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2175 	int ret;
2176 
2177 	if (!(flags & DRM_BRIDGE_ATTACH_NO_CONNECTOR)) {
2178 		dev_err(mtk_dp->dev, "Driver does not provide a connector!\n");
2179 		return -EINVAL;
2180 	}
2181 
2182 	mtk_dp->aux.drm_dev = bridge->dev;
2183 	ret = drm_dp_aux_register(&mtk_dp->aux);
2184 	if (ret) {
2185 		dev_err(mtk_dp->dev,
2186 			"failed to register DP AUX channel: %d\n", ret);
2187 		return ret;
2188 	}
2189 
2190 	ret = mtk_dp_poweron(mtk_dp);
2191 	if (ret)
2192 		goto err_aux_register;
2193 
2194 	if (mtk_dp->next_bridge) {
2195 		ret = drm_bridge_attach(bridge->encoder, mtk_dp->next_bridge,
2196 					&mtk_dp->bridge, flags);
2197 		if (ret) {
2198 			drm_warn(mtk_dp->drm_dev,
2199 				 "Failed to attach external bridge: %d\n", ret);
2200 			goto err_bridge_attach;
2201 		}
2202 	}
2203 
2204 	mtk_dp->drm_dev = bridge->dev;
2205 
2206 	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) {
2207 		irq_clear_status_flags(mtk_dp->irq, IRQ_NOAUTOEN);
2208 		enable_irq(mtk_dp->irq);
2209 		mtk_dp_hwirq_enable(mtk_dp, true);
2210 	}
2211 
2212 	return 0;
2213 
2214 err_bridge_attach:
2215 	mtk_dp_poweroff(mtk_dp);
2216 err_aux_register:
2217 	drm_dp_aux_unregister(&mtk_dp->aux);
2218 	return ret;
2219 }
2220 
2221 static void mtk_dp_bridge_detach(struct drm_bridge *bridge)
2222 {
2223 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2224 
2225 	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP) {
2226 		mtk_dp_hwirq_enable(mtk_dp, false);
2227 		disable_irq(mtk_dp->irq);
2228 	}
2229 	mtk_dp->drm_dev = NULL;
2230 	mtk_dp_poweroff(mtk_dp);
2231 	drm_dp_aux_unregister(&mtk_dp->aux);
2232 }
2233 
2234 static void mtk_dp_bridge_atomic_enable(struct drm_bridge *bridge,
2235 					struct drm_bridge_state *old_state)
2236 {
2237 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2238 	int ret;
2239 
2240 	mtk_dp->conn = drm_atomic_get_new_connector_for_encoder(old_state->base.state,
2241 								bridge->encoder);
2242 	if (!mtk_dp->conn) {
2243 		drm_err(mtk_dp->drm_dev,
2244 			"Can't enable bridge as connector is missing\n");
2245 		return;
2246 	}
2247 
2248 	mtk_dp_aux_panel_poweron(mtk_dp, true);
2249 
2250 	/* Training */
2251 	ret = mtk_dp_training(mtk_dp);
2252 	if (ret) {
2253 		drm_err(mtk_dp->drm_dev, "Training failed, %d\n", ret);
2254 		goto power_off_aux;
2255 	}
2256 
2257 	ret = mtk_dp_video_config(mtk_dp);
2258 	if (ret)
2259 		goto power_off_aux;
2260 
2261 	mtk_dp_video_enable(mtk_dp, true);
2262 
2263 	mtk_dp->audio_enable =
2264 		mtk_dp_edid_parse_audio_capabilities(mtk_dp,
2265 						     &mtk_dp->info.audio_cur_cfg);
2266 	if (mtk_dp->audio_enable) {
2267 		mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);
2268 		mtk_dp_audio_mute(mtk_dp, false);
2269 	} else {
2270 		memset(&mtk_dp->info.audio_cur_cfg, 0,
2271 		       sizeof(mtk_dp->info.audio_cur_cfg));
2272 	}
2273 
2274 	mtk_dp->enabled = true;
2275 	mtk_dp_update_plugged_status(mtk_dp);
2276 
2277 	return;
2278 power_off_aux:
2279 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
2280 			   DP_PWR_STATE_BANDGAP_TPLL,
2281 			   DP_PWR_STATE_MASK);
2282 }
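/*
 * For reference, the enable sequence above is: look up the connector,
 * power up the AUX, run link training, program the video timings and
 * colour format, unmute the video path, and finally configure and unmute
 * audio when the EDID advertised audio support.
 */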
2283 
2284 static void mtk_dp_bridge_atomic_disable(struct drm_bridge *bridge,
2285 					 struct drm_bridge_state *old_state)
2286 {
2287 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2288 
2289 	mtk_dp->enabled = false;
2290 	mtk_dp_update_plugged_status(mtk_dp);
2291 	mtk_dp_video_enable(mtk_dp, false);
2292 	mtk_dp_audio_mute(mtk_dp, true);
2293 
2294 	if (mtk_dp->train_info.cable_plugged_in) {
2295 		drm_dp_dpcd_writeb(&mtk_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);
2296 		usleep_range(2000, 3000);
2297 	}
2298 
2299 	/* power off aux */
2300 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
2301 			   DP_PWR_STATE_BANDGAP_TPLL,
2302 			   DP_PWR_STATE_MASK);
2303 
2304 	/* Ensure the sink is muted */
2305 	msleep(20);
2306 }
2307 
2308 static enum drm_mode_status
2309 mtk_dp_bridge_mode_valid(struct drm_bridge *bridge,
2310 			 const struct drm_display_info *info,
2311 			 const struct drm_display_mode *mode)
2312 {
2313 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2314 	u32 bpp = info->color_formats & DRM_COLOR_FORMAT_YCBCR422 ? 16 : 24;
2315 	u32 lane_count_min = mtk_dp->train_info.lane_count;
2316 	u32 rate = drm_dp_bw_code_to_link_rate(mtk_dp->train_info.link_rate) *
2317 		   lane_count_min;
2318 
2319 	/*
2320 	 * FEC overhead is approximately 2.4% from DP 1.4a spec 2.2.1.4.2.
2321 	 * The down-spread amplitude shall either be disabled (0.0%) or up
2322 	 * to 0.5% from 1.4a 3.5.2.6. Add up to approximately 3% total overhead.
2323 	 *
2324 	 * Because rate is already divided by 10,
2325 	 * mode->clock does not need to be multiplied by 10.
2326 	 */
2327 	if ((rate * 97 / 100) < (mode->clock * bpp / 8))
2328 		return MODE_CLOCK_HIGH;
2329 
2330 	return MODE_OK;
2331 }
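/*
 * Worked example (hypothetical link): 4 lanes at HBR2 give
 * rate = 540000 * 4 = 2160000, or 2095200 after the ~3% overhead margin.
 * A 594000 kHz (4K60) mode at 24 bpp needs 594000 * 24 / 8 = 1782000,
 * so it is accepted; anything needing more than 2095200 is rejected
 * with MODE_CLOCK_HIGH.
 */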
2332 
2333 static u32 *mtk_dp_bridge_atomic_get_output_bus_fmts(struct drm_bridge *bridge,
2334 						     struct drm_bridge_state *bridge_state,
2335 						     struct drm_crtc_state *crtc_state,
2336 						     struct drm_connector_state *conn_state,
2337 						     unsigned int *num_output_fmts)
2338 {
2339 	u32 *output_fmts;
2340 
2341 	*num_output_fmts = 0;
2342 	output_fmts = kmalloc(sizeof(*output_fmts), GFP_KERNEL);
2343 	if (!output_fmts)
2344 		return NULL;
2345 	*num_output_fmts = 1;
2346 	output_fmts[0] = MEDIA_BUS_FMT_FIXED;
2347 	return output_fmts;
2348 }
2349 
2350 static const u32 mt8195_input_fmts[] = {
2351 	MEDIA_BUS_FMT_RGB888_1X24,
2352 	MEDIA_BUS_FMT_YUV8_1X24,
2353 	MEDIA_BUS_FMT_YUYV8_1X16,
2354 };
2355 
2356 static u32 *mtk_dp_bridge_atomic_get_input_bus_fmts(struct drm_bridge *bridge,
2357 						    struct drm_bridge_state *bridge_state,
2358 						    struct drm_crtc_state *crtc_state,
2359 						    struct drm_connector_state *conn_state,
2360 						    u32 output_fmt,
2361 						    unsigned int *num_input_fmts)
2362 {
2363 	u32 *input_fmts;
2364 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2365 	struct drm_display_mode *mode = &crtc_state->adjusted_mode;
2366 	struct drm_display_info *display_info =
2367 		&conn_state->connector->display_info;
2368 	u32 lane_count_min = mtk_dp->train_info.lane_count;
2369 	u32 rate = drm_dp_bw_code_to_link_rate(mtk_dp->train_info.link_rate) *
2370 		   lane_count_min;
2371 
2372 	*num_input_fmts = 0;
2373 
2374 	/*
2375 	 * If the link rate is lower than the RGB888 data rate but higher than
2376 	 * the YUV422 data rate, and the sink device supports YUV422, output
2377 	 * YUV422. With this condition we can support more resolutions.
2378 	 */
2379 	if (((rate * 97 / 100) < (mode->clock * 24 / 8)) &&
2380 	    ((rate * 97 / 100) > (mode->clock * 16 / 8)) &&
2381 	    (display_info->color_formats & DRM_COLOR_FORMAT_YCBCR422)) {
2382 		input_fmts = kcalloc(1, sizeof(*input_fmts), GFP_KERNEL);
2383 		if (!input_fmts)
2384 			return NULL;
2385 		*num_input_fmts = 1;
2386 		input_fmts[0] = MEDIA_BUS_FMT_YUYV8_1X16;
2387 	} else {
2388 		input_fmts = kcalloc(ARRAY_SIZE(mt8195_input_fmts),
2389 				     sizeof(*input_fmts),
2390 				     GFP_KERNEL);
2391 		if (!input_fmts)
2392 			return NULL;
2393 
2394 		*num_input_fmts = ARRAY_SIZE(mt8195_input_fmts);
2395 		memcpy(input_fmts, mt8195_input_fmts, sizeof(mt8195_input_fmts));
2396 	}
2397 
2398 	return input_fmts;
2399 }
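/*
 * For illustration, assuming a hypothetical 2-lane HBR3 link
 * (rate = 810000 * 2 = 1620000, i.e. 1571400 after the ~3% margin) driving
 * a 594000 kHz (4K60) mode: RGB888 needs 1782000 and does not fit, while
 * YUV422 needs 1188000 and does, so only MEDIA_BUS_FMT_YUYV8_1X16 is
 * offered when the sink supports YCbCr 4:2:2.
 */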
2400 
2401 static int mtk_dp_bridge_atomic_check(struct drm_bridge *bridge,
2402 				      struct drm_bridge_state *bridge_state,
2403 				      struct drm_crtc_state *crtc_state,
2404 				      struct drm_connector_state *conn_state)
2405 {
2406 	struct mtk_dp *mtk_dp = mtk_dp_from_bridge(bridge);
2407 	struct drm_crtc *crtc = conn_state->crtc;
2408 	unsigned int input_bus_format;
2409 
2410 	input_bus_format = bridge_state->input_bus_cfg.format;
2411 
2412 	dev_dbg(mtk_dp->dev, "input format 0x%04x, output format 0x%04x\n",
2413 		bridge_state->input_bus_cfg.format,
2414 		bridge_state->output_bus_cfg.format);
2415 
2416 	if (input_bus_format == MEDIA_BUS_FMT_YUYV8_1X16)
2417 		mtk_dp->info.format = DP_PIXELFORMAT_YUV422;
2418 	else
2419 		mtk_dp->info.format = DP_PIXELFORMAT_RGB;
2420 
2421 	if (!crtc) {
2422 		drm_err(mtk_dp->drm_dev,
2423 			"Can't enable bridge as connector state doesn't have a crtc\n");
2424 		return -EINVAL;
2425 	}
2426 
2427 	drm_display_mode_to_videomode(&crtc_state->adjusted_mode, &mtk_dp->info.vm);
2428 
2429 	return 0;
2430 }
2431 
2432 static const struct drm_bridge_funcs mtk_dp_bridge_funcs = {
2433 	.atomic_check = mtk_dp_bridge_atomic_check,
2434 	.atomic_duplicate_state = drm_atomic_helper_bridge_duplicate_state,
2435 	.atomic_destroy_state = drm_atomic_helper_bridge_destroy_state,
2436 	.atomic_get_output_bus_fmts = mtk_dp_bridge_atomic_get_output_bus_fmts,
2437 	.atomic_get_input_bus_fmts = mtk_dp_bridge_atomic_get_input_bus_fmts,
2438 	.atomic_reset = drm_atomic_helper_bridge_reset,
2439 	.attach = mtk_dp_bridge_attach,
2440 	.detach = mtk_dp_bridge_detach,
2441 	.atomic_enable = mtk_dp_bridge_atomic_enable,
2442 	.atomic_disable = mtk_dp_bridge_atomic_disable,
2443 	.mode_valid = mtk_dp_bridge_mode_valid,
2444 	.edid_read = mtk_dp_edid_read,
2445 	.detect = mtk_dp_bdg_detect,
2446 };
2447 
2448 static void mtk_dp_debounce_timer(struct timer_list *t)
2449 {
2450 	struct mtk_dp *mtk_dp = from_timer(mtk_dp, t, debounce_timer);
2451 
2452 	mtk_dp->need_debounce = true;
2453 }
2454 
2455 /*
2456  * HDMI audio codec callbacks
2457  */
2458 static int mtk_dp_audio_hw_params(struct device *dev, void *data,
2459 				  struct hdmi_codec_daifmt *daifmt,
2460 				  struct hdmi_codec_params *params)
2461 {
2462 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2463 
2464 	if (!mtk_dp->enabled) {
2465 		dev_err(mtk_dp->dev, "%s, DP is not ready!\n", __func__);
2466 		return -ENODEV;
2467 	}
2468 
2469 	mtk_dp->info.audio_cur_cfg.channels = params->cea.channels;
2470 	mtk_dp->info.audio_cur_cfg.sample_rate = params->sample_rate;
2471 
2472 	mtk_dp_audio_setup(mtk_dp, &mtk_dp->info.audio_cur_cfg);
2473 
2474 	return 0;
2475 }
2476 
2477 static int mtk_dp_audio_startup(struct device *dev, void *data)
2478 {
2479 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2480 
2481 	mtk_dp_audio_mute(mtk_dp, false);
2482 
2483 	return 0;
2484 }
2485 
2486 static void mtk_dp_audio_shutdown(struct device *dev, void *data)
2487 {
2488 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2489 
2490 	mtk_dp_audio_mute(mtk_dp, true);
2491 }
2492 
2493 static int mtk_dp_audio_get_eld(struct device *dev, void *data, uint8_t *buf,
2494 				size_t len)
2495 {
2496 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2497 
2498 	if (mtk_dp->enabled)
2499 		memcpy(buf, mtk_dp->conn->eld, len);
2500 	else
2501 		memset(buf, 0, len);
2502 
2503 	return 0;
2504 }
2505 
2506 static int mtk_dp_audio_hook_plugged_cb(struct device *dev, void *data,
2507 					hdmi_codec_plugged_cb fn,
2508 					struct device *codec_dev)
2509 {
2510 	struct mtk_dp *mtk_dp = data;
2511 
2512 	mutex_lock(&mtk_dp->update_plugged_status_lock);
2513 	mtk_dp->plugged_cb = fn;
2514 	mtk_dp->codec_dev = codec_dev;
2515 	mutex_unlock(&mtk_dp->update_plugged_status_lock);
2516 
2517 	mtk_dp_update_plugged_status(mtk_dp);
2518 
2519 	return 0;
2520 }
2521 
2522 static const struct hdmi_codec_ops mtk_dp_audio_codec_ops = {
2523 	.hw_params = mtk_dp_audio_hw_params,
2524 	.audio_startup = mtk_dp_audio_startup,
2525 	.audio_shutdown = mtk_dp_audio_shutdown,
2526 	.get_eld = mtk_dp_audio_get_eld,
2527 	.hook_plugged_cb = mtk_dp_audio_hook_plugged_cb,
2528 	.no_capture_mute = 1,
2529 };
2530 
2531 static int mtk_dp_register_audio_driver(struct device *dev)
2532 {
2533 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2534 	struct hdmi_codec_pdata codec_data = {
2535 		.ops = &mtk_dp_audio_codec_ops,
2536 		.max_i2s_channels = 8,
2537 		.i2s = 1,
2538 		.data = mtk_dp,
2539 	};
2540 
2541 	mtk_dp->audio_pdev = platform_device_register_data(dev,
2542 							   HDMI_CODEC_DRV_NAME,
2543 							   PLATFORM_DEVID_AUTO,
2544 							   &codec_data,
2545 							   sizeof(codec_data));
2546 	return PTR_ERR_OR_ZERO(mtk_dp->audio_pdev);
2547 }
2548 
2549 static int mtk_dp_register_phy(struct mtk_dp *mtk_dp)
2550 {
2551 	struct device *dev = mtk_dp->dev;
2552 
2553 	mtk_dp->phy_dev = platform_device_register_data(dev, "mediatek-dp-phy",
2554 							PLATFORM_DEVID_AUTO,
2555 							&mtk_dp->regs,
2556 							sizeof(struct regmap *));
2557 	if (IS_ERR(mtk_dp->phy_dev))
2558 		return dev_err_probe(dev, PTR_ERR(mtk_dp->phy_dev),
2559 				     "Failed to create device mediatek-dp-phy\n");
2560 
2561 	mtk_dp_get_calibration_data(mtk_dp);
2562 
2563 	mtk_dp->phy = devm_phy_get(&mtk_dp->phy_dev->dev, "dp");
2564 	if (IS_ERR(mtk_dp->phy)) {
2565 		platform_device_unregister(mtk_dp->phy_dev);
2566 		return dev_err_probe(dev, PTR_ERR(mtk_dp->phy), "Failed to get phy\n");
2567 	}
2568 
2569 	return 0;
2570 }
2571 
2572 static int mtk_dp_edp_link_panel(struct drm_dp_aux *mtk_aux)
2573 {
2574 	struct mtk_dp *mtk_dp = container_of(mtk_aux, struct mtk_dp, aux);
2575 	struct device *dev = mtk_aux->dev;
2576 	int ret;
2577 
2578 	mtk_dp->next_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);
2579 
2580 	/* Power off the DP and AUX: either detection is done, or no panel present */
2581 	mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
2582 			   DP_PWR_STATE_BANDGAP_TPLL,
2583 			   DP_PWR_STATE_MASK);
2584 	mtk_dp_power_disable(mtk_dp);
2585 
2586 	if (IS_ERR(mtk_dp->next_bridge)) {
2587 		ret = PTR_ERR(mtk_dp->next_bridge);
2588 		mtk_dp->next_bridge = NULL;
2589 		return ret;
2590 	}
2591 
2592 	/* For eDP, we add the bridge only if the panel was found */
2593 	ret = devm_drm_bridge_add(dev, &mtk_dp->bridge);
2594 	if (ret)
2595 		return ret;
2596 
2597 	return 0;
2598 }
2599 
2600 static int mtk_dp_probe(struct platform_device *pdev)
2601 {
2602 	struct mtk_dp *mtk_dp;
2603 	struct device *dev = &pdev->dev;
2604 	int ret;
2605 
2606 	mtk_dp = devm_kzalloc(dev, sizeof(*mtk_dp), GFP_KERNEL);
2607 	if (!mtk_dp)
2608 		return -ENOMEM;
2609 
2610 	mtk_dp->dev = dev;
2611 	mtk_dp->data = (struct mtk_dp_data *)of_device_get_match_data(dev);
2612 
2613 	ret = mtk_dp_dt_parse(mtk_dp, pdev);
2614 	if (ret)
2615 		return dev_err_probe(dev, ret, "Failed to parse dt\n");
2616 
2617 	/*
2618 	 * Request the interrupt and install the service routine only if we
2619 	 * are on full DisplayPort.
2620 	 * For eDP, polling HPD is more convenient because we don't expect
2621 	 * any (un)plug events during runtime, hence we can avoid some
2622 	 * locking.
2623 	 */
2624 	if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP) {
2625 		mtk_dp->irq = platform_get_irq(pdev, 0);
2626 		if (mtk_dp->irq < 0)
2627 			return dev_err_probe(dev, mtk_dp->irq,
2628 					     "failed to request dp irq resource\n");
2629 
2630 		spin_lock_init(&mtk_dp->irq_thread_lock);
2631 
2632 		irq_set_status_flags(mtk_dp->irq, IRQ_NOAUTOEN);
2633 		ret = devm_request_threaded_irq(dev, mtk_dp->irq, mtk_dp_hpd_event,
2634 						mtk_dp_hpd_event_thread,
2635 						IRQ_TYPE_LEVEL_HIGH, dev_name(dev),
2636 						mtk_dp);
2637 		if (ret)
2638 			return dev_err_probe(dev, ret,
2639 					     "failed to request mediatek dptx irq\n");
2640 
2641 		mtk_dp->need_debounce = true;
2642 		timer_setup(&mtk_dp->debounce_timer, mtk_dp_debounce_timer, 0);
2643 	}
2644 
2645 	mtk_dp->aux.name = "aux_mtk_dp";
2646 	mtk_dp->aux.dev = dev;
2647 	mtk_dp->aux.transfer = mtk_dp_aux_transfer;
2648 	mtk_dp->aux.wait_hpd_asserted = mtk_dp_wait_hpd_asserted;
2649 	drm_dp_aux_init(&mtk_dp->aux);
2650 
2651 	platform_set_drvdata(pdev, mtk_dp);
2652 
2653 	if (mtk_dp->data->audio_supported) {
2654 		mutex_init(&mtk_dp->update_plugged_status_lock);
2655 
2656 		ret = mtk_dp_register_audio_driver(dev);
2657 		if (ret) {
2658 			dev_err(dev, "Failed to register audio driver: %d\n",
2659 				ret);
2660 			return ret;
2661 		}
2662 	}
2663 
2664 	ret = mtk_dp_register_phy(mtk_dp);
2665 	if (ret)
2666 		return ret;
2667 
2668 	mtk_dp->bridge.funcs = &mtk_dp_bridge_funcs;
2669 	mtk_dp->bridge.of_node = dev->of_node;
2670 	mtk_dp->bridge.type = mtk_dp->data->bridge_type;
2671 
2672 	if (mtk_dp->bridge.type == DRM_MODE_CONNECTOR_eDP) {
2673 		/*
2674 		 * Set the data lanes to idle in case the bootloader didn't
2675 		 * properly close the eDP port (to avoid stalls), then
2676 		 * reinitialize, reset and power on the AUX block.
2677 		 */
2678 		mtk_dp_set_idle_pattern(mtk_dp, true);
2679 		mtk_dp_initialize_aux_settings(mtk_dp);
2680 		mtk_dp_power_enable(mtk_dp);
2681 
2682 		/* Disable HW interrupts: we don't need any for eDP */
2683 		mtk_dp_hwirq_enable(mtk_dp, false);
2684 
2685 		/*
2686 		 * Power on the AUX to allow reading the EDID from the aux-bus:
2687 		 * note that power off must be called from the .done_probing()
2688 		 * callback (mtk_dp_edp_link_panel), as only there can we safely
2689 		 * assume that the EDID has been read.
2690 		 */
2691 		mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
2692 				   DP_PWR_STATE_BANDGAP_TPLL_LANE,
2693 				   DP_PWR_STATE_MASK);
2694 
2695 		ret = devm_of_dp_aux_populate_bus(&mtk_dp->aux, mtk_dp_edp_link_panel);
2696 		if (ret) {
2697 			/* -ENODEV means the panel is not on the aux-bus */
2698 			if (ret == -ENODEV) {
2699 				ret = mtk_dp_edp_link_panel(&mtk_dp->aux);
2700 				if (ret)
2701 					return ret;
2702 			} else {
2703 				mtk_dp_update_bits(mtk_dp, MTK_DP_TOP_PWR_STATE,
2704 						   DP_PWR_STATE_BANDGAP_TPLL,
2705 						   DP_PWR_STATE_MASK);
2706 				mtk_dp_power_disable(mtk_dp);
2707 				return ret;
2708 			}
2709 		}
2710 	} else {
2711 		mtk_dp->bridge.ops = DRM_BRIDGE_OP_DETECT |
2712 				     DRM_BRIDGE_OP_EDID | DRM_BRIDGE_OP_HPD;
2713 		ret = devm_drm_bridge_add(dev, &mtk_dp->bridge);
2714 		if (ret)
2715 			return dev_err_probe(dev, ret, "Failed to add bridge\n");
2716 	}
2717 
2718 	pm_runtime_enable(dev);
2719 	pm_runtime_get_sync(dev);
2720 
2721 	return 0;
2722 }
2723 
2724 static void mtk_dp_remove(struct platform_device *pdev)
2725 {
2726 	struct mtk_dp *mtk_dp = platform_get_drvdata(pdev);
2727 
2728 	pm_runtime_put(&pdev->dev);
2729 	pm_runtime_disable(&pdev->dev);
2730 	if (mtk_dp->data->bridge_type != DRM_MODE_CONNECTOR_eDP)
2731 		del_timer_sync(&mtk_dp->debounce_timer);
2732 	platform_device_unregister(mtk_dp->phy_dev);
2733 	if (mtk_dp->audio_pdev)
2734 		platform_device_unregister(mtk_dp->audio_pdev);
2735 }
2736 
2737 #ifdef CONFIG_PM_SLEEP
2738 static int mtk_dp_suspend(struct device *dev)
2739 {
2740 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2741 
2742 	mtk_dp_power_disable(mtk_dp);
2743 	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP)
2744 		mtk_dp_hwirq_enable(mtk_dp, false);
2745 	pm_runtime_put_sync(dev);
2746 
2747 	return 0;
2748 }
2749 
2750 static int mtk_dp_resume(struct device *dev)
2751 {
2752 	struct mtk_dp *mtk_dp = dev_get_drvdata(dev);
2753 
2754 	pm_runtime_get_sync(dev);
2755 	mtk_dp_init_port(mtk_dp);
2756 	if (mtk_dp->bridge.type != DRM_MODE_CONNECTOR_eDP)
2757 		mtk_dp_hwirq_enable(mtk_dp, true);
2758 	mtk_dp_power_enable(mtk_dp);
2759 
2760 	return 0;
2761 }
2762 #endif
2763 
2764 static SIMPLE_DEV_PM_OPS(mtk_dp_pm_ops, mtk_dp_suspend, mtk_dp_resume);
2765 
2766 static const struct mtk_dp_data mt8195_edp_data = {
2767 	.bridge_type = DRM_MODE_CONNECTOR_eDP,
2768 	.smc_cmd = MTK_DP_SIP_ATF_EDP_VIDEO_UNMUTE,
2769 	.efuse_fmt = mt8195_edp_efuse_fmt,
2770 	.audio_supported = false,
2771 };
2772 
2773 static const struct mtk_dp_data mt8195_dp_data = {
2774 	.bridge_type = DRM_MODE_CONNECTOR_DisplayPort,
2775 	.smc_cmd = MTK_DP_SIP_ATF_VIDEO_UNMUTE,
2776 	.efuse_fmt = mt8195_dp_efuse_fmt,
2777 	.audio_supported = true,
2778 };
2779 
2780 static const struct of_device_id mtk_dp_of_match[] = {
2781 	{
2782 		.compatible = "mediatek,mt8195-edp-tx",
2783 		.data = &mt8195_edp_data,
2784 	},
2785 	{
2786 		.compatible = "mediatek,mt8195-dp-tx",
2787 		.data = &mt8195_dp_data,
2788 	},
2789 	{},
2790 };
2791 MODULE_DEVICE_TABLE(of, mtk_dp_of_match);
2792 
2793 static struct platform_driver mtk_dp_driver = {
2794 	.probe = mtk_dp_probe,
2795 	.remove_new = mtk_dp_remove,
2796 	.driver = {
2797 		.name = "mediatek-drm-dp",
2798 		.of_match_table = mtk_dp_of_match,
2799 		.pm = &mtk_dp_pm_ops,
2800 	},
2801 };
2802 
2803 module_platform_driver(mtk_dp_driver);
2804 
2805 MODULE_AUTHOR("Jitao Shi <jitao.shi@mediatek.com>");
2806 MODULE_AUTHOR("Markus Schneider-Pargmann <msp@baylibre.com>");
2807 MODULE_AUTHOR("Bo-Chen Chen <rex-bc.chen@mediatek.com>");
2808 MODULE_DESCRIPTION("MediaTek DisplayPort Driver");
2809 MODULE_LICENSE("GPL");
2810 MODULE_SOFTDEP("pre: phy_mtk_dp");
2811