xref: /openbmc/linux/drivers/gpu/drm/msm/dp/dp_catalog.c (revision 7ddb4cc2)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5 
6 #define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
7 
8 #include <linux/delay.h>
9 #include <linux/iopoll.h>
10 #include <linux/phy/phy.h>
11 #include <linux/phy/phy-dp.h>
12 #include <linux/rational.h>
13 #include <drm/drm_dp_helper.h>
14 #include <drm/drm_print.h>
15 
16 #include "dp_catalog.h"
17 #include "dp_reg.h"
18 
19 #define POLLING_SLEEP_US			1000
20 #define POLLING_TIMEOUT_US			10000
21 
22 #define SCRAMBLER_RESET_COUNT_VALUE		0xFC
23 
24 #define DP_INTERRUPT_STATUS_ACK_SHIFT	1
25 #define DP_INTERRUPT_STATUS_MASK_SHIFT	2
26 
27 #define MSM_DP_CONTROLLER_AHB_OFFSET	0x0000
28 #define MSM_DP_CONTROLLER_AHB_SIZE	0x0200
29 #define MSM_DP_CONTROLLER_AUX_OFFSET	0x0200
30 #define MSM_DP_CONTROLLER_AUX_SIZE	0x0200
31 #define MSM_DP_CONTROLLER_LINK_OFFSET	0x0400
32 #define MSM_DP_CONTROLLER_LINK_SIZE	0x0C00
33 #define MSM_DP_CONTROLLER_P0_OFFSET	0x1000
34 #define MSM_DP_CONTROLLER_P0_SIZE	0x0400
35 
36 #define DP_INTERRUPT_STATUS1 \
37 	(DP_INTR_AUX_I2C_DONE| \
38 	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
39 	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
40 	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
41 	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
42 
43 #define DP_INTERRUPT_STATUS1_ACK \
44 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
45 #define DP_INTERRUPT_STATUS1_MASK \
46 	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
47 
48 #define DP_INTERRUPT_STATUS2 \
49 	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
50 	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
51 
52 #define DP_INTERRUPT_STATUS2_ACK \
53 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
54 #define DP_INTERRUPT_STATUS2_MASK \
55 	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
56 
57 struct dp_catalog_private {
58 	struct device *dev;
59 	struct dp_io *io;
60 	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
61 	struct dp_catalog dp_catalog;
62 	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
63 };
64 
65 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
66 {
67 	offset += MSM_DP_CONTROLLER_AUX_OFFSET;
68 	return readl_relaxed(catalog->io->dp_controller.base + offset);
69 }
70 
71 static inline void dp_write_aux(struct dp_catalog_private *catalog,
72 			       u32 offset, u32 data)
73 {
74 	offset += MSM_DP_CONTROLLER_AUX_OFFSET;
75 	/*
76 	 * To make sure aux reg writes happens before any other operation,
77 	 * this function uses writel() instread of writel_relaxed()
78 	 */
79 	writel(data, catalog->io->dp_controller.base + offset);
80 }
81 
82 static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
83 {
84 	offset += MSM_DP_CONTROLLER_AHB_OFFSET;
85 	return readl_relaxed(catalog->io->dp_controller.base + offset);
86 }
87 
88 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
89 			       u32 offset, u32 data)
90 {
91 	offset += MSM_DP_CONTROLLER_AHB_OFFSET;
92 	/*
93 	 * To make sure phy reg writes happens before any other operation,
94 	 * this function uses writel() instread of writel_relaxed()
95 	 */
96 	writel(data, catalog->io->dp_controller.base + offset);
97 }
98 
99 static inline void dp_write_p0(struct dp_catalog_private *catalog,
100 			       u32 offset, u32 data)
101 {
102 	offset += MSM_DP_CONTROLLER_P0_OFFSET;
103 	/*
104 	 * To make sure interface reg writes happens before any other operation,
105 	 * this function uses writel() instread of writel_relaxed()
106 	 */
107 	writel(data, catalog->io->dp_controller.base + offset);
108 }
109 
/* Read a 32-bit register from the P0 (pixel/interface) sub-block. */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	offset += MSM_DP_CONTROLLER_P0_OFFSET;
	/*
	 * Plain relaxed read; no ordering barrier is needed on the read
	 * path. (The earlier comment about writel() was copy-pasted from
	 * the write helper and did not apply here.)
	 */
	return readl_relaxed(catalog->io->dp_controller.base + offset);
}
120 
121 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
122 {
123 	offset += MSM_DP_CONTROLLER_LINK_OFFSET;
124 	return readl_relaxed(catalog->io->dp_controller.base + offset);
125 }
126 
127 static inline void dp_write_link(struct dp_catalog_private *catalog,
128 			       u32 offset, u32 data)
129 {
130 	offset += MSM_DP_CONTROLLER_LINK_OFFSET;
131 	/*
132 	 * To make sure link reg writes happens before any other operation,
133 	 * this function uses writel() instread of writel_relaxed()
134 	 */
135 	writel(data, catalog->io->dp_controller.base + offset);
136 }
137 
138 /* aux related catalog functions */
139 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
140 {
141 	struct dp_catalog_private *catalog = container_of(dp_catalog,
142 				struct dp_catalog_private, dp_catalog);
143 
144 	return dp_read_aux(catalog, REG_DP_AUX_DATA);
145 }
146 
147 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
148 {
149 	struct dp_catalog_private *catalog = container_of(dp_catalog,
150 				struct dp_catalog_private, dp_catalog);
151 
152 	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
153 	return 0;
154 }
155 
156 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
157 {
158 	struct dp_catalog_private *catalog = container_of(dp_catalog,
159 				struct dp_catalog_private, dp_catalog);
160 
161 	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
162 	return 0;
163 }
164 
165 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
166 {
167 	u32 data;
168 	struct dp_catalog_private *catalog = container_of(dp_catalog,
169 				struct dp_catalog_private, dp_catalog);
170 
171 	if (read) {
172 		data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
173 		data &= ~DP_AUX_TRANS_CTRL_GO;
174 		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
175 	} else {
176 		dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
177 	}
178 	return 0;
179 }
180 
/*
 * Clear any latched PHY AUX hardware interrupts.
 *
 * The status register is read (and discarded) before writing the clear
 * sequence. The 0x1f / 0x9f / 0 write sequence looks like a h/w-mandated
 * clear handshake -- NOTE(review): exact bit meanings come from the PHY
 * programming guide, not visible here; confirm before changing.
 * Always returns 0.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
192 
/**
 * dp_catalog_aux_reset() - reset AUX controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the AUX controller by pulsing the RESET bit in
 * the AUX control register (set, wait ~1ms, clear).
 *
 * NOTE: resetting the AUX controller will also clear any pending HPD
 * related interrupts
 *
 */
void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
{
	u32 aux_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	/* assert reset */
	aux_ctrl |= DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	/* de-assert reset */
	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
220 
221 void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
222 {
223 	u32 aux_ctrl;
224 	struct dp_catalog_private *catalog = container_of(dp_catalog,
225 				struct dp_catalog_private, dp_catalog);
226 
227 	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
228 
229 	if (enable) {
230 		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
231 		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
232 		aux_ctrl |= DP_AUX_CTRL_ENABLE;
233 	} else {
234 		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
235 	}
236 
237 	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
238 }
239 
240 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
241 {
242 	struct dp_catalog_private *catalog = container_of(dp_catalog,
243 				struct dp_catalog_private, dp_catalog);
244 	struct dp_io *dp_io = catalog->io;
245 	struct phy *phy = dp_io->phy;
246 
247 	phy_calibrate(phy);
248 }
249 
250 static void dump_regs(void __iomem *base, int len)
251 {
252 	int i;
253 	u32 x0, x4, x8, xc;
254 	u32 addr_off = 0;
255 
256 	len = DIV_ROUND_UP(len, 16);
257 	for (i = 0; i < len; i++) {
258 		x0 = readl_relaxed(base + addr_off);
259 		x4 = readl_relaxed(base + addr_off + 0x04);
260 		x8 = readl_relaxed(base + addr_off + 0x08);
261 		xc = readl_relaxed(base + addr_off + 0x0c);
262 
263 		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
264 		addr_off += 16;
265 	}
266 }
267 
268 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
269 {
270 	u32 offset, len;
271 	struct dp_catalog_private *catalog = container_of(dp_catalog,
272 		struct dp_catalog_private, dp_catalog);
273 
274 	pr_info("AHB regs\n");
275 	offset = MSM_DP_CONTROLLER_AHB_OFFSET;
276 	len = MSM_DP_CONTROLLER_AHB_SIZE;
277 	dump_regs(catalog->io->dp_controller.base + offset, len);
278 
279 	pr_info("AUXCLK regs\n");
280 	offset = MSM_DP_CONTROLLER_AUX_OFFSET;
281 	len = MSM_DP_CONTROLLER_AUX_SIZE;
282 	dump_regs(catalog->io->dp_controller.base + offset, len);
283 
284 	pr_info("LCLK regs\n");
285 	offset = MSM_DP_CONTROLLER_LINK_OFFSET;
286 	len = MSM_DP_CONTROLLER_LINK_SIZE;
287 	dump_regs(catalog->io->dp_controller.base + offset, len);
288 
289 	pr_info("P0CLK regs\n");
290 	offset = MSM_DP_CONTROLLER_P0_OFFSET;
291 	len = MSM_DP_CONTROLLER_P0_SIZE;
292 	dump_regs(catalog->io->dp_controller.base + offset, len);
293 }
294 
/*
 * Read and acknowledge the pending AUX/STATUS1 interrupts.
 *
 * Returns the pending interrupt bits (with the mask bits stripped).
 * The write back to REG_DP_INTR_STATUS acks the pending bits (shifted
 * into the ack field) while keeping STATUS1 interrupts enabled via the
 * mask bits.
 */
int dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
	/* drop the enable/mask bits; keep only raw status */
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;

}
311 
312 /* controller related catalog functions */
/*
 * Program the link transfer-unit parameters: both valid-boundary words
 * and the TU size register.
 */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
324 
325 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
326 {
327 	struct dp_catalog_private *catalog = container_of(dp_catalog,
328 				struct dp_catalog_private, dp_catalog);
329 
330 	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
331 }
332 
333 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
334 {
335 	struct dp_catalog_private *catalog = container_of(dp_catalog,
336 				struct dp_catalog_private, dp_catalog);
337 
338 	DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);
339 
340 	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
341 }
342 
343 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
344 {
345 	struct dp_catalog_private *catalog = container_of(dp_catalog,
346 				struct dp_catalog_private, dp_catalog);
347 	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
348 	u32 ln_mapping;
349 
350 	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
351 	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
352 	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
353 	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
354 
355 	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
356 			ln_mapping);
357 }
358 
/*
 * Enable or disable the mainlink.
 *
 * Enabling performs a full reset handshake: disable + clear reset,
 * pulse reset high then low, then set ENABLE together with
 * FB_BOUNDARY_SEL. The write order is hardware-mandated; do not reorder.
 * Disabling simply clears the ENABLE bit.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		/* step 1: link disabled, reset de-asserted */
		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 2: assert reset */
		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 3: de-assert reset */
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 4: enable the link */
		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
392 
/*
 * Program the MISC0/MISC1 register: colorimetry, test bit depth, and
 * synchronous-clock mode.
 *
 * NOTE(review): only the bit-depth field is cleared before OR-ing; the
 * colorimetry field is OR-ed on top of whatever was there -- confirm the
 * register is otherwise clean when this is called.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
413 
414 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
415 					u32 rate, u32 stream_rate_khz,
416 					bool fixed_nvid)
417 {
418 	u32 pixel_m, pixel_n;
419 	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
420 	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
421 	u32 const link_rate_hbr2 = 540000;
422 	u32 const link_rate_hbr3 = 810000;
423 	unsigned long den, num;
424 
425 	struct dp_catalog_private *catalog = container_of(dp_catalog,
426 				struct dp_catalog_private, dp_catalog);
427 
428 	if (rate == link_rate_hbr3)
429 		pixel_div = 6;
430 	else if (rate == 1620000 || rate == 270000)
431 		pixel_div = 2;
432 	else if (rate == link_rate_hbr2)
433 		pixel_div = 4;
434 	else
435 		DRM_ERROR("Invalid pixel mux divider\n");
436 
437 	dispcc_input_rate = (rate * 10) / pixel_div;
438 
439 	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
440 			(unsigned long)(1 << 16) - 1,
441 			(unsigned long)(1 << 16) - 1, &den, &num);
442 
443 	den = ~(den - num);
444 	den = den & 0xFFFF;
445 	pixel_m = num;
446 	pixel_n = den;
447 
448 	mvid = (pixel_m & 0xFFFF) * 5;
449 	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
450 
451 	if (nvid < nvid_fixed) {
452 		u32 temp;
453 
454 		temp = (nvid_fixed / nvid) * nvid;
455 		mvid = (nvid_fixed / nvid) * mvid;
456 		nvid = temp;
457 	}
458 
459 	if (link_rate_hbr2 == rate)
460 		nvid *= 2;
461 
462 	if (link_rate_hbr3 == rate)
463 		nvid *= 3;
464 
465 	DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
466 	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
467 	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
468 	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
469 }
470 
/*
 * Start link-training pattern @pattern (1-based) and wait for the
 * mainlink-ready register to report that the pattern is active.
 *
 * Returns 0 on success, or the negative poll-timeout error if the ready
 * bit never asserts within POLLING_TIMEOUT_US.
 */
int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
					u32 pattern)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* pattern N is requested via bit N-1 of the state-control register */
	bit = BIT(pattern - 1);
	DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* the corresponding ready bit lives at a shifted position */
	bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
					MSM_DP_CONTROLLER_LINK_OFFSET +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
		return ret;
	}
	return 0;
}
497 
/**
 * dp_catalog_ctrl_reset() - reset DP controller
 *
 * @dp_catalog: DP catalog structure
 *
 * return: void
 *
 * This function resets the DP controller by pulsing the SW_RESET bit
 * (set, wait ~1ms, clear).
 *
 * NOTE: reset DP controller will also clear any pending HPD related interrupts
 *
 */
void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
{
	u32 sw_reset;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);

	/* assert reset */
	sw_reset |= DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	/* de-assert reset */
	sw_reset &= ~DP_SW_RESET;
	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
525 
/*
 * Poll the mainlink-ready register until READY_FOR_VIDEO asserts.
 *
 * Returns true if the link became ready within POLLING_TIMEOUT_US,
 * false otherwise.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.base +
				MSM_DP_CONTROLLER_LINK_OFFSET +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
546 
547 void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
548 						bool enable)
549 {
550 	struct dp_catalog_private *catalog = container_of(dp_catalog,
551 				struct dp_catalog_private, dp_catalog);
552 
553 	if (enable) {
554 		dp_write_ahb(catalog, REG_DP_INTR_STATUS,
555 				DP_INTERRUPT_STATUS1_MASK);
556 		dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
557 				DP_INTERRUPT_STATUS2_MASK);
558 	} else {
559 		dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
560 		dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
561 	}
562 }
563 
564 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
565 			u32 intr_mask, bool en)
566 {
567 	struct dp_catalog_private *catalog = container_of(dp_catalog,
568 				struct dp_catalog_private, dp_catalog);
569 
570 	u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
571 
572 	config = (en ? config | intr_mask : config & ~intr_mask);
573 
574 	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
575 				config & DP_DP_HPD_INT_MASK);
576 }
577 
/*
 * Bring up HPD detection: unmask all HPD interrupt sources, enable the
 * HPD reference timer, then enable the HPD block itself.
 */
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* enable HPD interrupts */
	dp_catalog_hpd_config_intr(dp_catalog,
		DP_DP_HPD_PLUG_INT_MASK | DP_DP_IRQ_HPD_INT_MASK
		| DP_DP_HPD_UNPLUG_INT_MASK | DP_DP_HPD_REPLUG_INT_MASK, true);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
597 
598 u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
599 {
600 	struct dp_catalog_private *catalog = container_of(dp_catalog,
601 				struct dp_catalog_private, dp_catalog);
602 	u32 status;
603 
604 	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
605 	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
606 	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
607 
608 	return status;
609 }
610 
611 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
612 {
613 	struct dp_catalog_private *catalog = container_of(dp_catalog,
614 				struct dp_catalog_private, dp_catalog);
615 	int isr = 0;
616 
617 	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
618 	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
619 				 (isr & DP_DP_HPD_INT_MASK));
620 
621 	return isr;
622 }
623 
/*
 * Read and acknowledge the pending STATUS2 (video/stream) interrupts.
 *
 * Returns the pending interrupt bits (with the mask bits stripped).
 * The write back acks the pending bits while keeping STATUS2 interrupts
 * enabled via the mask bits.
 */
int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 intr, intr_ack;

	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	/* drop the enable/mask bits; keep only raw status */
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
639 
/*
 * Pulse the PHY and PHY-PLL software resets: assert both, hold for the
 * hardware-recommended ~1ms, then release.
 */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
650 
/*
 * Push new voltage-swing (@v_level) and pre-emphasis (@p_level) settings
 * to the PHY via phy_configure(). set_voltages is toggled around the
 * call so the PHY driver applies the levels exactly once.
 * Always returns 0.
 */
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
		u8 v_level, u8 p_level)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	struct dp_io *dp_io = catalog->io;
	struct phy *phy = dp_io->phy;
	struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;

	/* TODO: Update for all lanes instead of just first one */
	opts_dp->voltage[0] = v_level;
	opts_dp->pre[0] = p_level;
	opts_dp->set_voltages = 1;
	phy_configure(phy, &dp_io->phy_opts);
	opts_dp->set_voltages = 0;

	return 0;
}
669 
/*
 * Start transmitting the PHY compliance test pattern @pattern
 * (DP_PHY_TEST_PATTERN_* from the DPCD test request).
 *
 * Each case programs a hardware-specific register sequence; the write
 * order within a case is h/w-mandated and must not be reordered.
 * Unknown patterns are logged and ignored.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		/* D10.2 uses the training-pattern-1 symbol stream */
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* scrambler must not be bypassed for CP2520 */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		/* re-enable the mainlink with the pattern active */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		DRM_DEBUG_DP("No valid test pattern requested:0x%x\n", pattern);
		break;
	}
}
744 
745 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
746 {
747 	struct dp_catalog_private *catalog = container_of(dp_catalog,
748 				struct dp_catalog_private, dp_catalog);
749 
750 	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
751 }
752 
/* panel related catalog functions */

/*
 * Program the main-stream timing registers from the values the caller
 * staged in dp_catalog (total, sync_start, width_blanking, dp_active).
 * Always returns 0.
 */
int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
				dp_catalog->total);
	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
				dp_catalog->sync_start);
	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
				dp_catalog->width_blanking);
	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
	return 0;
}
768 
/*
 * Enable the built-in test pattern generator (TPG) for @drm_mode.
 *
 * Derives the interface timing-engine parameters (vertical positions
 * expressed in hsync-period units, horizontal sync window, pulse
 * widths) from the mode, programs the P0 interface registers, then
 * turns on a checkered RGB 8-bpp BIST pattern and the timing engine.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* vertical window, in units of a full hsync period (pixels) */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift the window by the horizontal back/front porch */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* hsync: period in the high half-word, pulse width in the low */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	/* program the P0 interface timing engine (F1 fields unused) */
	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	/* select pattern and fire up the BIST + timing engine */
	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
}
836 
/*
 * Disable the test pattern generator: clear the pattern selection,
 * the BIST enable, and finally the timing engine.
 */
void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
}
846 
847 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
848 {
849 	struct dp_catalog_private *catalog;
850 
851 	if (!io) {
852 		DRM_ERROR("invalid input\n");
853 		return ERR_PTR(-EINVAL);
854 	}
855 
856 	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
857 	if (!catalog)
858 		return ERR_PTR(-ENOMEM);
859 
860 	catalog->dev = dev;
861 	catalog->io = io;
862 
863 	return &catalog->dp_catalog;
864 }
865 
866 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
867 {
868 	struct dp_catalog_private *catalog;
869 	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
870 	enum dp_catalog_audio_sdp_type sdp;
871 	enum dp_catalog_audio_header_type header;
872 
873 	if (!dp_catalog)
874 		return;
875 
876 	catalog = container_of(dp_catalog,
877 		struct dp_catalog_private, dp_catalog);
878 
879 	sdp_map = catalog->audio_map;
880 	sdp     = dp_catalog->sdp_type;
881 	header  = dp_catalog->sdp_header;
882 
883 	dp_catalog->audio_data = dp_read_link(catalog,
884 			sdp_map[sdp][header]);
885 }
886 
887 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
888 {
889 	struct dp_catalog_private *catalog;
890 	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
891 	enum dp_catalog_audio_sdp_type sdp;
892 	enum dp_catalog_audio_header_type header;
893 	u32 data;
894 
895 	if (!dp_catalog)
896 		return;
897 
898 	catalog = container_of(dp_catalog,
899 		struct dp_catalog_private, dp_catalog);
900 
901 	sdp_map = catalog->audio_map;
902 	sdp     = dp_catalog->sdp_type;
903 	header  = dp_catalog->sdp_header;
904 	data    = dp_catalog->audio_data;
905 
906 	dp_write_link(catalog, sdp_map[sdp][header], data);
907 }
908 
/*
 * Program the audio clock-recovery (ACR) control register using the
 * select value the caller staged in dp_catalog->audio_data.
 *
 * NOTE(review): BIT(31)|BIT(8)|BIT(14) are undocumented here --
 * presumably enable/config bits from the h/w programming guide; confirm
 * before changing.
 */
void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 acr_ctrl, select;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	select = dp_catalog->audio_data;
	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);

	DRM_DEBUG_DP("select = 0x%x, acr_ctrl = 0x%x\n", select, acr_ctrl);

	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
}
927 
/*
 * Enable or disable the audio engine. The desired state is passed via
 * dp_catalog->audio_data (non-zero = enable) and toggles bit 0 of the
 * audio config register.
 */
void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	bool enable;
	u32 audio_ctrl;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	enable = !!dp_catalog->audio_data;
	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);

	if (enable)
		audio_ctrl |= BIT(0);
	else
		audio_ctrl &= ~BIT(0);

	DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);

	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
	/* make sure audio engine is disabled */
	wmb();
}
954 
/*
 * Enable all audio secondary-data-packet (SDP) types in SDP_CFG and
 * configure SDP_CFG2 so header bytes come from the packet stream rather
 * than register values.
 */
void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 sdp_cfg = 0;
	u32 sdp_cfg2 = 0;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
	/* AUDIO_TIMESTAMP_SDP_EN */
	sdp_cfg |= BIT(1);
	/* AUDIO_STREAM_SDP_EN */
	sdp_cfg |= BIT(2);
	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
	sdp_cfg |= BIT(5);
	/* AUDIO_ISRC_SDP_EN  */
	sdp_cfg |= BIT(6);
	/* AUDIO_INFOFRAME_SDP_EN  */
	sdp_cfg |= BIT(20);

	DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);

	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);

	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
	/* IFRM_REGSRC -> Do not use reg values */
	sdp_cfg2 &= ~BIT(0);
	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
	sdp_cfg2 &= ~BIT(1);

	DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);

	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
}
993 
/*
 * Attach the static SDP-type -> header-register lookup table used by
 * dp_catalog_audio_get_header()/set_header().
 *
 * The table is indexed [sdp_type][sdp_header]; it is 'static' so the
 * pointer stored in the catalog stays valid for the driver's lifetime.
 * Note each row's HEADER_2 slot reuses the *_1 register.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1034 
/*
 * Program the safe-to-exit level (caller-staged in
 * dp_catalog->audio_data) into the low bits of the mainlink-levels
 * register, preserving the remaining (0xFE0) bits.
 */
void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;
	u32 mainlink_levels, safe_to_exit_level;

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	safe_to_exit_level = dp_catalog->audio_data;
	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
	/* keep the upper field bits, replace the level field */
	mainlink_levels &= 0xFE0;
	mainlink_levels |= safe_to_exit_level;

	DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
			 mainlink_levels, safe_to_exit_level);

	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
}
1056