xref: /openbmc/linux/drivers/gpu/drm/msm/dp/dp_catalog.c (revision 36acd5e2)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5 
6 #define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
7 
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/phy/phy.h>
11 #include <linux/phy/phy-dp.h>
12 #include <linux/rational.h>
13 #include <drm/drm_dp_helper.h>
14 #include <drm/drm_print.h>
15 
16 #include "dp_catalog.h"
17 #include "dp_reg.h"
18 
19 #define POLLING_SLEEP_US			1000
20 #define POLLING_TIMEOUT_US			10000
21 
22 #define SCRAMBLER_RESET_COUNT_VALUE		0xFC
23 
24 #define DP_INTERRUPT_STATUS_ACK_SHIFT	1
25 #define DP_INTERRUPT_STATUS_MASK_SHIFT	2
26 
27 #define MSM_DP_CONTROLLER_AHB_OFFSET	0x0000
28 #define MSM_DP_CONTROLLER_AHB_SIZE	0x0200
29 #define MSM_DP_CONTROLLER_AUX_OFFSET	0x0200
30 #define MSM_DP_CONTROLLER_AUX_SIZE	0x0200
31 #define MSM_DP_CONTROLLER_LINK_OFFSET	0x0400
32 #define MSM_DP_CONTROLLER_LINK_SIZE	0x0C00
33 #define MSM_DP_CONTROLLER_P0_OFFSET	0x1000
34 #define MSM_DP_CONTROLLER_P0_SIZE	0x0400
35 
36 #define DP_INTERRUPT_STATUS1 \
37 	(DP_INTR_AUX_I2C_DONE| \
38 	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
39 	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
40 	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
41 	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
42 
43 #define DP_INTERRUPT_STATUS1_ACK \
44 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
45 #define DP_INTERRUPT_STATUS1_MASK \
46 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
47 
48 #define DP_INTERRUPT_STATUS2 \
49 	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
50 	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
51 
52 #define DP_INTERRUPT_STATUS2_ACK \
53 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
54 #define DP_INTERRUPT_STATUS2_MASK \
55 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
56 
57 struct dp_catalog_private {
58 	struct device *dev;
59 	struct dp_io *io;
60 	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
61 	struct dp_catalog dp_catalog;
62 	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
63 };
64 
65 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
66 {
67 	offset += MSM_DP_CONTROLLER_AUX_OFFSET;
68 	return readl_relaxed(catalog->io->dp_controller.base + offset);
69 }
70 
71 static inline void dp_write_aux(struct dp_catalog_private *catalog,
72 			       u32 offset, u32 data)
73 {
74 	offset += MSM_DP_CONTROLLER_AUX_OFFSET;
75 	/*
76 	 * To make sure aux reg writes happens before any other operation,
77 	 * this function uses writel() instread of writel_relaxed()
78 	 */
79 	writel(data, catalog->io->dp_controller.base + offset);
80 }
81 
82 static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
83 {
84 	offset += MSM_DP_CONTROLLER_AHB_OFFSET;
85 	return readl_relaxed(catalog->io->dp_controller.base + offset);
86 }
87 
88 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
89 			       u32 offset, u32 data)
90 {
91 	offset += MSM_DP_CONTROLLER_AHB_OFFSET;
92 	/*
93 	 * To make sure phy reg writes happens before any other operation,
94 	 * this function uses writel() instread of writel_relaxed()
95 	 */
96 	writel(data, catalog->io->dp_controller.base + offset);
97 }
98 
99 static inline void dp_write_p0(struct dp_catalog_private *catalog,
100 			       u32 offset, u32 data)
101 {
102 	offset += MSM_DP_CONTROLLER_P0_OFFSET;
103 	/*
104 	 * To make sure interface reg writes happens before any other operation,
105 	 * this function uses writel() instread of writel_relaxed()
106 	 */
107 	writel(data, catalog->io->dp_controller.base + offset);
108 }
109 
/* Read a register in the P0 (stream interface) block. */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	offset += MSM_DP_CONTROLLER_P0_OFFSET;
	/*
	 * Relaxed read; no ordering barrier is required here.  (The previous
	 * comment was copy-pasted from dp_write_p0() and described a write,
	 * which this is not.)
	 */
	return readl_relaxed(catalog->io->dp_controller.base + offset);
}
120 
121 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
122 {
123 	offset += MSM_DP_CONTROLLER_LINK_OFFSET;
124 	return readl_relaxed(catalog->io->dp_controller.base + offset);
125 }
126 
127 static inline void dp_write_link(struct dp_catalog_private *catalog,
128 			       u32 offset, u32 data)
129 {
130 	offset += MSM_DP_CONTROLLER_LINK_OFFSET;
131 	/*
132 	 * To make sure link reg writes happens before any other operation,
133 	 * this function uses writel() instread of writel_relaxed()
134 	 */
135 	writel(data, catalog->io->dp_controller.base + offset);
136 }
137 
138 /* aux related catalog functions */
139 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
140 {
141 	struct dp_catalog_private *catalog = container_of(dp_catalog,
142 				struct dp_catalog_private, dp_catalog);
143 
144 	return dp_read_aux(catalog, REG_DP_AUX_DATA);
145 }
146 
147 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
148 {
149 	struct dp_catalog_private *catalog = container_of(dp_catalog,
150 				struct dp_catalog_private, dp_catalog);
151 
152 	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
153 	return 0;
154 }
155 
156 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
157 {
158 	struct dp_catalog_private *catalog = container_of(dp_catalog,
159 				struct dp_catalog_private, dp_catalog);
160 
161 	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
162 	return 0;
163 }
164 
165 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
166 {
167 	u32 data;
168 	struct dp_catalog_private *catalog = container_of(dp_catalog,
169 				struct dp_catalog_private, dp_catalog);
170 
171 	if (read) {
172 		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
173 		data &= ~DP_AUX_TRANS_CTRL_GO;
174 		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
175 	} else {
176 		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
177 	}
178 	return 0;
179 }
180 
/*
 * Clear latched PHY AUX hardware interrupts.
 *
 * Latches the current status with a read, then runs the clear sequence
 * 0x1f -> 0x9f -> 0 on the clear register.  Always returns 0.
 * NOTE(review): the 0x1f/0x9f values follow the h/w programming sequence;
 * confirm against the PHY programming guide before changing them.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
192 
/*
 * Pulse the AUX controller reset: assert DP_AUX_CTRL_RESET, hold for the
 * h/w recommended ~1 ms, then deassert.  Returns with reset released.
 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	/* assert reset */
	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	/* release reset */
	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
208 
/*
 * Enable or disable the AUX controller.  On enable, the timeout count and
 * AUX limits are programmed to maximum (0xffff) before the enable bit is
 * set; on disable only the enable bit is cleared.
 */
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	if (enable) {
		/* program limits before turning the block on */
		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
		aux_ctrl |= DP_AUX_CTRL_ENABLE;
	} else {
		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
	}

	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
227 
228 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
229 {
230 	struct dp_catalog_private *catalog = container_of(dp_catalog,
231 				struct dp_catalog_private, dp_catalog);
232 	struct dp_io *dp_io = catalog->io;
233 	struct phy *phy = dp_io->phy;
234 
235 	phy_calibrate(phy);
236 }
237 
238 static void dump_regs(void __iomem *base, int len)
239 {
240 	int i;
241 	u32 x0, x4, x8, xc;
242 	u32 addr_off = 0;
243 
244 	len = DIV_ROUND_UP(len, 16);
245 	for (i = 0; i < len; i++) {
246 		x0 = readl_relaxed(base + addr_off);
247 		x4 = readl_relaxed(base + addr_off + 0x04);
248 		x8 = readl_relaxed(base + addr_off + 0x08);
249 		xc = readl_relaxed(base + addr_off + 0x0c);
250 
251 		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
252 		addr_off += 16;
253 	}
254 }
255 
256 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
257 {
258 	u32 offset, len;
259 	struct dp_catalog_private *catalog = container_of(dp_catalog,
260 		struct dp_catalog_private, dp_catalog);
261 
262 	pr_info("AHB regs\n");
263 	offset = MSM_DP_CONTROLLER_AHB_OFFSET;
264 	len = MSM_DP_CONTROLLER_AHB_SIZE;
265 	dump_regs(catalog->io->dp_controller.base + offset, len);
266 
267 	pr_info("AUXCLK regs\n");
268 	offset = MSM_DP_CONTROLLER_AUX_OFFSET;
269 	len = MSM_DP_CONTROLLER_AUX_SIZE;
270 	dump_regs(catalog->io->dp_controller.base + offset, len);
271 
272 	pr_info("LCLK regs\n");
273 	offset = MSM_DP_CONTROLLER_LINK_OFFSET;
274 	len = MSM_DP_CONTROLLER_LINK_SIZE;
275 	dump_regs(catalog->io->dp_controller.base + offset, len);
276 
277 	pr_info("P0CLK regs\n");
278 	offset = MSM_DP_CONTROLLER_P0_OFFSET;
279 	len = MSM_DP_CONTROLLER_P0_SIZE;
280 	dump_regs(catalog->io->dp_controller.base + offset, len);
281 }
282 
283 int dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
284 {
285 	struct dp_catalog_private *catalog = container_of(dp_catalog,
286 				struct dp_catalog_private, dp_catalog);
287 	u32 intr, intr_ack;
288 
289 	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
290 	intr &= ~DP_INTERRUPT_STATUS1_MASK;
291 	intr_ack = (intr & DP_INTERRUPT_STATUS1)
292 			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
293 	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
294 			DP_INTERRUPT_STATUS1_MASK);
295 
296 	return intr;
297 
298 }
299 
300 /* controller related catalog functions */
301 void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
302 				u32 dp_tu, u32 valid_boundary,
303 				u32 valid_boundary2)
304 {
305 	struct dp_catalog_private *catalog = container_of(dp_catalog,
306 				struct dp_catalog_private, dp_catalog);
307 
308 	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
309 	dp_write_link(catalog, REG_DP_TU, dp_tu);
310 	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
311 }
312 
313 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
314 {
315 	struct dp_catalog_private *catalog = container_of(dp_catalog,
316 				struct dp_catalog_private, dp_catalog);
317 
318 	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
319 }
320 
321 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
322 {
323 	struct dp_catalog_private *catalog = container_of(dp_catalog,
324 				struct dp_catalog_private, dp_catalog);
325 
326 	DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);
327 
328 	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
329 }
330 
331 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
332 {
333 	struct dp_catalog_private *catalog = container_of(dp_catalog,
334 				struct dp_catalog_private, dp_catalog);
335 	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
336 	u32 ln_mapping;
337 
338 	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
339 	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
340 	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
341 	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
342 
343 	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
344 			ln_mapping);
345 }
346 
/*
 * Enable or disable the mainlink.
 *
 * On enable, the h/w sequence is order-sensitive: clear reset+enable,
 * pulse reset high then low, and only then set enable (together with the
 * framing-boundary select).  On disable, only the enable bit is cleared.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		/* step 1: drop both reset and enable */
		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 2: pulse reset */
		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 3: enable the link out of reset */
		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
380 
/*
 * Program MISC0/MISC1: colorimetry, test bit depth, and synchronous clock.
 *
 * NOTE(review): only the bit-depth field is cleared before OR-ing in new
 * values; the colorimetry bits are OR-ed on top of whatever was there.
 * This presumably relies on the register being freshly reset before a
 * reconfiguration — confirm before reusing this on a live link.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
401 
402 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
403 					u32 rate, u32 stream_rate_khz,
404 					bool fixed_nvid)
405 {
406 	u32 pixel_m, pixel_n;
407 	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
408 	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
409 	u32 const link_rate_hbr2 = 540000;
410 	u32 const link_rate_hbr3 = 810000;
411 	unsigned long den, num;
412 
413 	struct dp_catalog_private *catalog = container_of(dp_catalog,
414 				struct dp_catalog_private, dp_catalog);
415 
416 	if (rate == link_rate_hbr3)
417 		pixel_div = 6;
418 	else if (rate == 1620000 || rate == 270000)
419 		pixel_div = 2;
420 	else if (rate == link_rate_hbr2)
421 		pixel_div = 4;
422 	else
423 		DRM_ERROR("Invalid pixel mux divider\n");
424 
425 	dispcc_input_rate = (rate * 10) / pixel_div;
426 
427 	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
428 			(unsigned long)(1 << 16) - 1,
429 			(unsigned long)(1 << 16) - 1, &den, &num);
430 
431 	den = ~(den - num);
432 	den = den & 0xFFFF;
433 	pixel_m = num;
434 	pixel_n = den;
435 
436 	mvid = (pixel_m & 0xFFFF) * 5;
437 	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
438 
439 	if (nvid < nvid_fixed) {
440 		u32 temp;
441 
442 		temp = (nvid_fixed / nvid) * nvid;
443 		mvid = (nvid_fixed / nvid) * mvid;
444 		nvid = temp;
445 	}
446 
447 	if (link_rate_hbr2 == rate)
448 		nvid *= 2;
449 
450 	if (link_rate_hbr3 == rate)
451 		nvid *= 3;
452 
453 	DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
454 	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
455 	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
456 	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
457 }
458 
/*
 * Kick off link-training pattern @pattern and wait for the mainlink to
 * report it active.  Returns 0 on success, negative errno on poll timeout.
 *
 * NOTE(review): @pattern is assumed to be >= 1 (BIT(pattern - 1) is
 * undefined for 0) — confirm callers never pass 0.
 */
int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
					u32 pattern)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(pattern - 1);
	DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* corresponding "pattern ready" bit in MAINLINK_READY */
	bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
					MSM_DP_CONTROLLER_LINK_OFFSET +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
		return ret;
	}
	return 0;
}
485 
/*
 * Pulse the controller software reset: assert DP_SW_RESET, hold for the
 * h/w recommended ~1 ms, then deassert.
 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	/* assert reset */
	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	/* release reset */
	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
501 
/*
 * Poll MAINLINK_READY until the ready-for-video bit is set.
 * Returns true on success, false if the poll times out.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.base +
				MSM_DP_CONTROLLER_LINK_OFFSET +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
522 
523 void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
524 						bool enable)
525 {
526 	struct dp_catalog_private *catalog = container_of(dp_catalog,
527 				struct dp_catalog_private, dp_catalog);
528 
529 	if (enable) {
530 		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
531 				DP_INTERRUPT_STATUS1_MASK);
532 		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
533 				DP_INTERRUPT_STATUS2_MASK);
534 	} else {
535 		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
536 		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
537 	}
538 }
539 
540 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
541 			u32 intr_mask, bool en)
542 {
543 	struct dp_catalog_private *catalog = container_of(dp_catalog,
544 				struct dp_catalog_private, dp_catalog);
545 
546 	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
547 
548 	config = (en ? config | intr_mask : config & ~intr_mask);
549 
550 	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
551 				config & DP_DP_HPD_INT_MASK);
552 }
553 
/*
 * Bring up HPD: unmask plug/unplug/replug/irq interrupts, enable the HPD
 * reference timer, then enable the HPD block itself.
 */
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* enable HPD interrupts */
	dp_catalog_hpd_config_intr(dp_catalog,
		DP_DP_HPD_PLUG_INT_MASK | DP_DP_IRQ_HPD_INT_MASK
		| DP_DP_HPD_UNPLUG_INT_MASK | DP_DP_HPD_REPLUG_INT_MASK, true);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
573 
574 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
575 {
576 	struct dp_catalog_private *catalog = container_of(dp_catalog,
577 				struct dp_catalog_private, dp_catalog);
578 	u32 status;
579 
580 	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
581 	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
582 	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
583 
584 	return status;
585 }
586 
587 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
588 {
589 	struct dp_catalog_private *catalog = container_of(dp_catalog,
590 				struct dp_catalog_private, dp_catalog);
591 	int isr = 0;
592 
593 	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
594 	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
595 				 (isr & DP_DP_HPD_INT_MASK));
596 
597 	return isr;
598 }
599 
600 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
601 {
602 	struct dp_catalog_private *catalog = container_of(dp_catalog,
603 				struct dp_catalog_private, dp_catalog);
604 	u32 intr, intr_ack;
605 
606 	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
607 	intr &= ~DP_INTERRUPT_STATUS2_MASK;
608 	intr_ack = (intr & DP_INTERRUPT_STATUS2)
609 			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
610 	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
611 			intr_ack | DP_INTERRUPT_STATUS2_MASK);
612 
613 	return intr;
614 }
615 
/*
 * Pulse the PHY and PLL software resets for the h/w recommended ~1 ms,
 * then release both.
 */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
626 
627 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
628 		u8 v_level, u8 p_level)
629 {
630 	struct dp_catalog_private *catalog = container_of(dp_catalog,
631 				struct dp_catalog_private, dp_catalog);
632 	struct dp_io *dp_io = catalog->io;
633 	struct phy *phy = dp_io->phy;
634 	struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;
635 
636 	/* TODO: Update for all lanes instead of just first one */
637 	opts_dp->voltage[0] = v_level;
638 	opts_dp->pre[0] = p_level;
639 	opts_dp->set_voltages = 1;
640 	phy_configure(phy, &dp_io->phy_opts);
641 	opts_dp->set_voltages = 0;
642 
643 	return 0;
644 }
645 
/*
 * Start transmitting the requested PHY compliance test pattern.  Each
 * case follows its own h/w programming sequence; statement order within a
 * case is significant.  Unknown patterns are logged and ignored.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		/* D10.2 is transmitted as training pattern 1 */
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* run the pattern with the scrambler bypass cleared */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		/* re-enable the mainlink last */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		DRM_DEBUG_DP("No valid test pattern requested:0x%x\n", pattern);
		break;
	}
}
720 
721 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
722 {
723 	struct dp_catalog_private *catalog = container_of(dp_catalog,
724 				struct dp_catalog_private, dp_catalog);
725 
726 	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
727 }
728 
729 /* panel related catalog functions */
730 int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
731 {
732 	struct dp_catalog_private *catalog = container_of(dp_catalog,
733 				struct dp_catalog_private, dp_catalog);
734 
735 	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
736 				dp_catalog->total);
737 	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
738 				dp_catalog->sync_start);
739 	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
740 				dp_catalog->width_blanking);
741 	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
742 	return 0;
743 }
744 
/*
 * Configure and start the test pattern generator (TPG) for @drm_mode.
 *
 * Derives the INTF timing parameters (in pixel-clock units, with vertical
 * positions expressed in lines * hsync_period) from the mode, programs the
 * stream interface, then enables a checkered-rectangle pattern, BIST, and
 * the timing engine.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* vertical active window, measured in pixels from frame start */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift by the horizontal back/front porch within the line */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* [31:16] = period, [15:0] = pulse width */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	/* F1 (second field / interlace) timings unused: programmed to 0 */
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	/* select pattern and start the generator */
	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
}
812 
813 void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
814 {
815 	struct dp_catalog_private *catalog = container_of(dp_catalog,
816 				struct dp_catalog_private, dp_catalog);
817 
818 	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
819 	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
820 	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
821 }
822 
823 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
824 {
825 	struct dp_catalog_private *catalog;
826 
827 	if (!io) {
828 		DRM_ERROR("invalid input\n");
829 		return ERR_PTR(-EINVAL);
830 	}
831 
832 	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
833 	if (!catalog)
834 		return ERR_PTR(-ENOMEM);
835 
836 	catalog->dev = dev;
837 	catalog->io = io;
838 
839 	return &catalog->dp_catalog;
840 }
841 
842 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
843 {
844 	struct dp_catalog_private *catalog;
845 	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
846 	enum dp_catalog_audio_sdp_type sdp;
847 	enum dp_catalog_audio_header_type header;
848 
849 	if (!dp_catalog)
850 		return;
851 
852 	catalog = container_of(dp_catalog,
853 		struct dp_catalog_private, dp_catalog);
854 
855 	sdp_map = catalog->audio_map;
856 	sdp     = dp_catalog->sdp_type;
857 	header  = dp_catalog->sdp_header;
858 
859 	dp_catalog->audio_data = dp_read_link(catalog,
860 			sdp_map[sdp][header]);
861 }
862 
863 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
864 {
865 	struct dp_catalog_private *catalog;
866 	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
867 	enum dp_catalog_audio_sdp_type sdp;
868 	enum dp_catalog_audio_header_type header;
869 	u32 data;
870 
871 	if (!dp_catalog)
872 		return;
873 
874 	catalog = container_of(dp_catalog,
875 		struct dp_catalog_private, dp_catalog);
876 
877 	sdp_map = catalog->audio_map;
878 	sdp     = dp_catalog->sdp_type;
879 	header  = dp_catalog->sdp_header;
880 	data    = dp_catalog->audio_data;
881 
882 	dp_write_link(catalog, sdp_map[sdp][header], data);
883 }
884 
885 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
886 {
887 	struct dp_catalog_private *catalog;
888 	u32 acr_ctrl, select;
889 
890 	if (!dp_catalog)
891 		return;
892 
893 	catalog = container_of(dp_catalog,
894 		struct dp_catalog_private, dp_catalog);
895 
896 	select = dp_catalog->audio_data;
897 	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
898 
899 	DRM_DEBUG_DP("select = 0x%x, acr_ctrl = 0x%x\n", select, acr_ctrl);
900 
901 	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
902 }
903 
904 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
905 {
906 	struct dp_catalog_private *catalog;
907 	bool enable;
908 	u32 audio_ctrl;
909 
910 	if (!dp_catalog)
911 		return;
912 
913 	catalog = container_of(dp_catalog,
914 		struct dp_catalog_private, dp_catalog);
915 
916 	enable = !!dp_catalog->audio_data;
917 	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
918 
919 	if (enable)
920 		audio_ctrl |= BIT(0);
921 	else
922 		audio_ctrl &= ~BIT(0);
923 
924 	DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);
925 
926 	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
927 	/* make sure audio engine is disabled */
928 	wmb();
929 }
930 
931 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
932 {
933 	struct dp_catalog_private *catalog;
934 	u32 sdp_cfg = 0;
935 	u32 sdp_cfg2 = 0;
936 
937 	if (!dp_catalog)
938 		return;
939 
940 	catalog = container_of(dp_catalog,
941 		struct dp_catalog_private, dp_catalog);
942 
943 	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
944 	/* AUDIO_TIMESTAMP_SDP_EN */
945 	sdp_cfg |= BIT(1);
946 	/* AUDIO_STREAM_SDP_EN */
947 	sdp_cfg |= BIT(2);
948 	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
949 	sdp_cfg |= BIT(5);
950 	/* AUDIO_ISRC_SDP_EN  */
951 	sdp_cfg |= BIT(6);
952 	/* AUDIO_INFOFRAME_SDP_EN  */
953 	sdp_cfg |= BIT(20);
954 
955 	DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);
956 
957 	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
958 
959 	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
960 	/* IFRM_REGSRC -> Do not use reg values */
961 	sdp_cfg2 &= ~BIT(0);
962 	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
963 	sdp_cfg2 &= ~BIT(1);
964 
965 	DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);
966 
967 	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
968 }
969 
/*
 * Install the SDP-type x header-index -> link-register lookup table used
 * by dp_catalog_audio_get_header()/dp_catalog_audio_set_header().
 *
 * NOTE(review): each row lists its *_1 register twice — header indices 1
 * and 2 presumably share one register; confirm against the register map
 * before changing.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1010 
1011 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1012 {
1013 	struct dp_catalog_private *catalog;
1014 	u32 mainlink_levels, safe_to_exit_level;
1015 
1016 	if (!dp_catalog)
1017 		return;
1018 
1019 	catalog = container_of(dp_catalog,
1020 		struct dp_catalog_private, dp_catalog);
1021 
1022 	safe_to_exit_level = dp_catalog->audio_data;
1023 	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1024 	mainlink_levels &= 0xFE0;
1025 	mainlink_levels |= safe_to_exit_level;
1026 
1027 	DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1028 			 mainlink_levels, safe_to_exit_level);
1029 
1030 	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1031 }
1032