/*
 * DesignWare MIPI DSI Host Controller v1.02 driver
 *
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2014-2016 Hisilicon Limited.
 *
 * Author:
 *	Xinliang Liu <z.liuxinliang@hisilicon.com>
 *	Xinliang Liu <xinliang.liu@linaro.org>
 *	Xinwei Kong <kong.kongxinwei@hisilicon.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/delay.h>
#include <linux/module.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_device.h>
#include <drm/drm_encoder_slave.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_of.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>

#include "dw_dsi_reg.h"

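/*
 * ROUND(x, y) divides x by y and rounds to the nearest integer using the
 * first decimal digit of the remainder.  PHY_REF_CLK_PERIOD_PS is the
 * reference clock period in picoseconds (1e9 / 19200 ~= 52083 ps for the
 * 19.2 MHz reference clock).
 */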
#define MAX_TX_ESC_CLK		10
#define ROUND(x, y)		((x) / (y) + \
				((x) % (y) * 10 / (y) >= 5 ? 1 : 0))
#define PHY_REF_CLK_RATE	19200000
#define PHY_REF_CLK_PERIOD_PS	(1000000000 / (PHY_REF_CLK_RATE / 1000))

#define encoder_to_dsi(encoder) \
	container_of(encoder, struct dw_dsi, encoder)
#define host_to_dsi(host) \
	container_of(host, struct dw_dsi, host)

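/*
 * D-PHY lane timing and PLL parameters, derived from the requested HS
 * bit rate by dsi_get_phy_params() and programmed by dsi_set_mipi_phy().
 */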
struct mipi_phy_params {
	u32 clk_t_lpx;
	u32 clk_t_hs_prepare;
	u32 clk_t_hs_zero;
	u32 clk_t_hs_trial;
	u32 clk_t_wakeup;
	u32 data_t_lpx;
	u32 data_t_hs_prepare;
	u32 data_t_hs_zero;
	u32 data_t_hs_trial;
	u32 data_t_ta_go;
	u32 data_t_ta_get;
	u32 data_t_wakeup;
	u32 hstx_ckg_sel;
	u32 pll_fbd_div5f;
	u32 pll_fbd_div1f;
	u32 pll_fbd_2p;
	u32 pll_enbwt;
	u32 pll_fbd_p;
	u32 pll_fbd_s;
	u32 pll_pre_div1p;
	u32 pll_pre_p;
	u32 pll_vco_750M;
	u32 pll_lpf_rs;
	u32 pll_lpf_cs;
	u32 clklp2hs_time;
	u32 clkhs2lp_time;
	u32 lp2hs_time;
	u32 hs2lp_time;
	u32 clk_to_data_delay;
	u32 data_to_clk_delay;
	u32 lane_byte_clk_kHz;
	u32 clk_division;
};

struct dsi_hw_ctx {
	void __iomem *base;
	struct clk *pclk;
};

struct dw_dsi {
	struct drm_encoder encoder;
	struct drm_bridge *bridge;
	struct mipi_dsi_host host;
	struct drm_display_mode cur_mode;
	struct dsi_hw_ctx *ctx;
	struct mipi_phy_params phy;

	u32 lanes;
	enum mipi_dsi_pixel_format format;
	unsigned long mode_flags;
	bool enable;
};

struct dsi_data {
	struct dw_dsi dsi;
	struct dsi_hw_ctx ctx;
};

struct dsi_phy_range {
	u32 min_range_kHz;
	u32 max_range_kHz;
	u32 pll_vco_750M;
	u32 hstx_ckg_sel;
};

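/*
 * D-PHY PLL operating ranges: each entry maps an HS clock rate range (in
 * kHz) to the VCO range setting and the hstx_ckg_sel value that determines
 * the output divider (q_pll) used in dsi_calc_phy_rate().
 */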
static const struct dsi_phy_range dphy_range_info[] = {
	{   46875,    62500,   1,    7 },
	{   62500,    93750,   0,    7 },
	{   93750,   125000,   1,    6 },
	{  125000,   187500,   0,    6 },
	{  187500,   250000,   1,    5 },
	{  250000,   375000,   0,    5 },
	{  375000,   500000,   1,    4 },
	{  500000,   750000,   0,    4 },
	{  750000,  1000000,   1,    0 },
	{ 1000000,  1500000,   0,    0 }
};

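/*
 * Search for the lowest PHY rate >= req_kHz that the PLL can generate,
 * stepping the request up in 10 kHz increments.  Fills in the PLL fields
 * of *phy and returns the achievable rate in kHz, or 0 if the request is
 * outside the supported ranges.
 */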
static u32 dsi_calc_phy_rate(u32 req_kHz, struct mipi_phy_params *phy)
{
	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
	u32 tmp_kHz = req_kHz;
	u32 i = 0;
	u32 q_pll = 1;
	u32 m_pll = 0;
	u32 n_pll = 0;
	u32 r_pll = 1;
	u32 m_n = 0;
	u32 m_n_int = 0;
	u32 f_kHz = 0;
	u64 temp;

	/*
	 * Find a rate >= req_kHz.
	 */
	do {
		f_kHz = tmp_kHz;

		for (i = 0; i < ARRAY_SIZE(dphy_range_info); i++)
			if (f_kHz >= dphy_range_info[i].min_range_kHz &&
			    f_kHz <= dphy_range_info[i].max_range_kHz)
				break;

		if (i == ARRAY_SIZE(dphy_range_info)) {
			DRM_ERROR("%dkHz out of range\n", f_kHz);
			return 0;
		}

		phy->pll_vco_750M = dphy_range_info[i].pll_vco_750M;
		phy->hstx_ckg_sel = dphy_range_info[i].hstx_ckg_sel;

		if (phy->hstx_ckg_sel <= 7 &&
		    phy->hstx_ckg_sel >= 4)
			q_pll = 0x10 >> (7 - phy->hstx_ckg_sel);

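		/*
		 * m_n_int is the integer part of the required PLL ratio
		 * f_kHz * q_pll / f_ref, m_n is its first fractional digit
		 * (f_ref = 1e9 / ref_clk_ps, in kHz).
		 */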
		temp = f_kHz * (u64)q_pll * (u64)ref_clk_ps;
		m_n_int = temp / (u64)1000000000;
		m_n = (temp % (u64)1000000000) / (u64)100000000;

		if (m_n_int % 2 == 0) {
			if (m_n * 6 >= 50) {
				n_pll = 2;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 30) {
				n_pll = 3;
				m_pll = m_n_int * n_pll + 2;
			} else {
				n_pll = 1;
				m_pll = m_n_int * n_pll;
			}
		} else {
			if (m_n * 6 >= 50) {
				n_pll = 1;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 30) {
				n_pll = 1;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 10) {
				n_pll = 3;
				m_pll = m_n_int * n_pll + 1;
			} else {
				n_pll = 2;
				m_pll = m_n_int * n_pll;
			}
		}

		if (n_pll == 1) {
			phy->pll_fbd_p = 0;
			phy->pll_pre_div1p = 1;
		} else {
			phy->pll_fbd_p = n_pll;
			phy->pll_pre_div1p = 0;
		}

		if (phy->pll_fbd_2p <= 7 && phy->pll_fbd_2p >= 4)
			r_pll = 0x10 >> (7 - phy->pll_fbd_2p);

		if (m_pll == 2) {
			phy->pll_pre_p = 0;
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 1;
		} else if (m_pll >= 2 * 2 * r_pll && m_pll <= 2 * 4 * r_pll) {
			phy->pll_pre_p = m_pll / (2 * r_pll);
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 1;
			phy->pll_fbd_div5f = 0;
		} else if (m_pll >= 2 * 5 * r_pll && m_pll <= 2 * 150 * r_pll) {
			if (((m_pll / (2 * r_pll)) % 2) == 0) {
				phy->pll_pre_p =
					(m_pll / (2 * r_pll)) / 2 - 1;
				phy->pll_fbd_s =
					(m_pll / (2 * r_pll)) % 2 + 2;
			} else {
				phy->pll_pre_p =
					(m_pll / (2 * r_pll)) / 2;
				phy->pll_fbd_s =
					(m_pll / (2 * r_pll)) % 2;
			}
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 0;
		} else {
			phy->pll_pre_p = 0;
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 1;
		}

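		/*
		 * Resulting PLL output: f = f_ref * m_pll / (n_pll * q_pll),
		 * with f_ref = 1e9 / ref_clk_ps (kHz).
		 */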
		f_kHz = (u64)1000000000 * (u64)m_pll /
			((u64)ref_clk_ps * (u64)n_pll * (u64)q_pll);

		if (f_kHz >= req_kHz)
			break;

		tmp_kHz += 10;

	} while (true);

	return f_kHz;
}

static void dsi_get_phy_params(u32 phy_req_kHz,
			       struct mipi_phy_params *phy)
{
	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
	u32 phy_rate_kHz;
	u32 ui;

	memset(phy, 0, sizeof(*phy));

	phy_rate_kHz = dsi_calc_phy_rate(phy_req_kHz, phy);
	if (!phy_rate_kHz)
		return;

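	/*
	 * One HS bit period (unit interval) in nanoseconds, used to turn
	 * the nanosecond timing targets below into PHY counter values.
	 */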
	ui = 1000000 / phy_rate_kHz;

	phy->clk_t_lpx = ROUND(50, 8 * ui);
	phy->clk_t_hs_prepare = ROUND(133, 16 * ui) - 1;

	phy->clk_t_hs_zero = ROUND(262, 8 * ui);
	phy->clk_t_hs_trial = 2 * (ROUND(60, 8 * ui) - 1);
	phy->clk_t_wakeup = ROUND(1000000, (ref_clk_ps / 1000) - 1);
	if (phy->clk_t_wakeup > 0xff)
		phy->clk_t_wakeup = 0xff;
	phy->data_t_wakeup = phy->clk_t_wakeup;
	phy->data_t_lpx = phy->clk_t_lpx;
	phy->data_t_hs_prepare = ROUND(125 + 10 * ui, 16 * ui) - 1;
	phy->data_t_hs_zero = ROUND(105 + 6 * ui, 8 * ui);
	phy->data_t_hs_trial = 2 * (ROUND(60 + 4 * ui, 8 * ui) - 1);
	phy->data_t_ta_go = 3;
	phy->data_t_ta_get = 4;

	phy->pll_enbwt = 1;
	phy->clklp2hs_time = ROUND(407, 8 * ui) + 12;
	phy->clkhs2lp_time = ROUND(105 + 12 * ui, 8 * ui);
	phy->lp2hs_time = ROUND(240 + 12 * ui, 8 * ui) + 1;
	phy->hs2lp_time = phy->clkhs2lp_time;
	phy->clk_to_data_delay = 1 + phy->clklp2hs_time;
	phy->data_to_clk_delay = ROUND(60 + 52 * ui, 8 * ui) +
				phy->clkhs2lp_time;

	phy->lane_byte_clk_kHz = phy_rate_kHz / 8;
	phy->clk_division =
		DIV_ROUND_UP(phy->lane_byte_clk_kHz, MAX_TX_ESC_CLK);
}

static u32 dsi_get_dpi_color_coding(enum mipi_dsi_pixel_format format)
{
	u32 val;

	/*
	 * TODO: Only RGB888 is supported for now; add support for other
	 * formats.
	 */
	switch (format) {
	case MIPI_DSI_FMT_RGB888:
		val = DSI_24BITS_1;
		break;
	default:
		val = DSI_24BITS_1;
		break;
	}

	return val;
}

/*
 * Write a D-PHY register through the PHY test interface.
 */
static void dsi_phy_tst_set(void __iomem *base, u32 reg, u32 val)
{
	u32 reg_write = 0x10000 + reg;

	/*
	 * latch reg first
	 */
	writel(reg_write, base + PHY_TST_CTRL1);
	writel(0x02, base + PHY_TST_CTRL0);
	writel(0x00, base + PHY_TST_CTRL0);

	/*
	 * then latch value
	 */
	writel(val, base + PHY_TST_CTRL1);
	writel(0x02, base + PHY_TST_CTRL0);
	writel(0x00, base + PHY_TST_CTRL0);
}

static void dsi_set_phy_timer(void __iomem *base,
			      struct mipi_phy_params *phy,
			      u32 lanes)
{
	u32 val;

	/*
	 * Set lane value and phy stop wait time.
	 */
	val = (lanes - 1) | (PHY_STOP_WAIT_TIME << 8);
	writel(val, base + PHY_IF_CFG);

	/*
	 * Set phy clk division.
	 */
	val = readl(base + CLKMGR_CFG) | phy->clk_division;
	writel(val, base + CLKMGR_CFG);

	/*
	 * Set lp and hs switching params.
	 */
	dw_update_bits(base + PHY_TMR_CFG, 24, MASK(8), phy->hs2lp_time);
	dw_update_bits(base + PHY_TMR_CFG, 16, MASK(8), phy->lp2hs_time);
	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 16, MASK(10),
		       phy->clkhs2lp_time);
	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 0, MASK(10),
		       phy->clklp2hs_time);
	dw_update_bits(base + CLK_DATA_TMR_CFG, 8, MASK(8),
		       phy->data_to_clk_delay);
	dw_update_bits(base + CLK_DATA_TMR_CFG, 0, MASK(8),
		       phy->clk_to_data_delay);
}

static void dsi_set_mipi_phy(void __iomem *base,
			     struct mipi_phy_params *phy,
			     u32 lanes)
{
	u32 delay_count;
	u32 val;
	u32 i;

	/* phy timer setting */
	dsi_set_phy_timer(base, phy, lanes);

	/*
	 * Reset to clean up phy tst params.
	 */
	writel(0, base + PHY_RSTZ);
	writel(0, base + PHY_TST_CTRL0);
	writel(1, base + PHY_TST_CTRL0);
	writel(0, base + PHY_TST_CTRL0);

	/*
	 * Clock lane timing control setting: TLPX, THS-PREPARE,
	 * THS-ZERO, THS-TRAIL, TWAKEUP.
	 */
	dsi_phy_tst_set(base, CLK_TLPX, phy->clk_t_lpx);
	dsi_phy_tst_set(base, CLK_THS_PREPARE, phy->clk_t_hs_prepare);
	dsi_phy_tst_set(base, CLK_THS_ZERO, phy->clk_t_hs_zero);
	dsi_phy_tst_set(base, CLK_THS_TRAIL, phy->clk_t_hs_trial);
	dsi_phy_tst_set(base, CLK_TWAKEUP, phy->clk_t_wakeup);

	/*
	 * Data lane timing control setting: TLPX, THS-PREPARE,
	 * THS-ZERO, THS-TRAIL, TTA-GO, TTA-GET, TWAKEUP.
	 */
	for (i = 0; i < lanes; i++) {
		dsi_phy_tst_set(base, DATA_TLPX(i), phy->data_t_lpx);
		dsi_phy_tst_set(base, DATA_THS_PREPARE(i),
				phy->data_t_hs_prepare);
		dsi_phy_tst_set(base, DATA_THS_ZERO(i), phy->data_t_hs_zero);
		dsi_phy_tst_set(base, DATA_THS_TRAIL(i), phy->data_t_hs_trial);
		dsi_phy_tst_set(base, DATA_TTA_GO(i), phy->data_t_ta_go);
		dsi_phy_tst_set(base, DATA_TTA_GET(i), phy->data_t_ta_get);
		dsi_phy_tst_set(base, DATA_TWAKEUP(i), phy->data_t_wakeup);
	}

	/*
	 * PHY configuration registers: CFG I and PLL I, PLL II, PLL III,
	 * PLL IV, PLL V.
	 */
	dsi_phy_tst_set(base, PHY_CFG_I, phy->hstx_ckg_sel);
	val = (phy->pll_fbd_div5f << 5) + (phy->pll_fbd_div1f << 4) +
				(phy->pll_fbd_2p << 1) + phy->pll_enbwt;
	dsi_phy_tst_set(base, PHY_CFG_PLL_I, val);
	dsi_phy_tst_set(base, PHY_CFG_PLL_II, phy->pll_fbd_p);
	dsi_phy_tst_set(base, PHY_CFG_PLL_III, phy->pll_fbd_s);
	val = (phy->pll_pre_div1p << 7) + phy->pll_pre_p;
	dsi_phy_tst_set(base, PHY_CFG_PLL_IV, val);
	val = (5 << 5) + (phy->pll_vco_750M << 4) + (phy->pll_lpf_rs << 2) +
		phy->pll_lpf_cs;
	dsi_phy_tst_set(base, PHY_CFG_PLL_V, val);

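	/*
	 * PHY power-up sequence: enable the clock, release shutdown, then
	 * release reset, with the required settle delays in between.
	 */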
	writel(PHY_ENABLECLK, base + PHY_RSTZ);
	udelay(1);
	writel(PHY_ENABLECLK | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
	udelay(1);
	writel(PHY_ENABLECLK | PHY_UNRSTZ | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
	usleep_range(1000, 1500);

	/*
	 * Wait for the PHY clock lane to become ready.
	 */
	delay_count = 100;
	while (delay_count) {
		val = readl(base + PHY_STATUS);
		if ((BIT(0) | BIT(2)) & val)
			break;

		udelay(1);
		delay_count--;
	}

	if (!delay_count)
		DRM_INFO("phylock and phystopstateclklane are not ready\n");
}

static void dsi_set_mode_timing(void __iomem *base,
				u32 lane_byte_clk_kHz,
				struct drm_display_mode *mode,
				enum mipi_dsi_pixel_format format)
{
	u32 hfp, hbp, hsw, vfp, vbp, vsw;
	u32 hline_time;
	u32 hsa_time;
	u32 hbp_time;
	u32 pixel_clk_kHz;
	int htot, vtot;
	u32 val;
	u64 tmp;

	val = dsi_get_dpi_color_coding(format);
	writel(val, base + DPI_COLOR_CODING);

	val = (mode->flags & DRM_MODE_FLAG_NHSYNC ? 1 : 0) << 2;
	val |= (mode->flags & DRM_MODE_FLAG_NVSYNC ? 1 : 0) << 1;
	writel(val, base + DPI_CFG_POL);

	/*
	 * The DSI IP accepts vertical timing using lines as normal,
	 * but horizontal timing is a mixture of pixel-clocks for the
	 * active region and byte-lane clocks for the blanking-related
	 * timings.  hfp is specified as the total hline_time in byte-
	 * lane clocks minus hsa, hbp and active.
	 */
	pixel_clk_kHz = mode->clock;
	htot = mode->htotal;
	vtot = mode->vtotal;
	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;
	if (vsw > 15) {
		DRM_DEBUG_DRIVER("vsw exceeded 15\n");
		vsw = 15;
	}

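	/*
	 * Convert the horizontal timings from pixel clocks to lane byte
	 * clocks: n_byteclk = n_pix * lane_byte_clk / pixel_clk.  The line
	 * time is rounded up so it is never shorter than the pixel timing.
	 */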
	hsa_time = (hsw * lane_byte_clk_kHz) / pixel_clk_kHz;
	hbp_time = (hbp * lane_byte_clk_kHz) / pixel_clk_kHz;
	tmp = (u64)htot * (u64)lane_byte_clk_kHz;
	hline_time = DIV_ROUND_UP(tmp, pixel_clk_kHz);

	/* all specified in byte-lane clocks */
	writel(hsa_time, base + VID_HSA_TIME);
	writel(hbp_time, base + VID_HBP_TIME);
	writel(hline_time, base + VID_HLINE_TIME);

	writel(vsw, base + VID_VSA_LINES);
	writel(vbp, base + VID_VBP_LINES);
	writel(vfp, base + VID_VFP_LINES);
	writel(mode->vdisplay, base + VID_VACTIVE_LINES);
	writel(mode->hdisplay, base + VID_PKT_SIZE);

	DRM_DEBUG_DRIVER("htot=%d, hfp=%d, hbp=%d, hsw=%d\n",
			 htot, hfp, hbp, hsw);
	DRM_DEBUG_DRIVER("vtot=%d, vfp=%d, vbp=%d, vsw=%d\n",
			 vtot, vfp, vbp, vsw);
	DRM_DEBUG_DRIVER("hsa_time=%d, hbp_time=%d, hline_time=%d\n",
			 hsa_time, hbp_time, hline_time);
}

static void dsi_set_video_mode(void __iomem *base, unsigned long flags)
{
	u32 val;
	u32 mode_mask = MIPI_DSI_MODE_VIDEO | MIPI_DSI_MODE_VIDEO_BURST |
		MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
	u32 non_burst_sync_pulse = MIPI_DSI_MODE_VIDEO |
		MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
	u32 non_burst_sync_event = MIPI_DSI_MODE_VIDEO;

	/*
	 * choose video mode type
	 */
	if ((flags & mode_mask) == non_burst_sync_pulse)
		val = DSI_NON_BURST_SYNC_PULSES;
	else if ((flags & mode_mask) == non_burst_sync_event)
		val = DSI_NON_BURST_SYNC_EVENTS;
	else
		val = DSI_BURST_SYNC_PULSES_1;
	writel(val, base + VID_MODE_CFG);

	writel(PHY_TXREQUESTCLKHS, base + LPCLK_CTRL);
	writel(DSI_VIDEO_MODE, base + MODE_CFG);
}

static void dsi_mipi_init(struct dw_dsi *dsi)
{
	struct dsi_hw_ctx *ctx = dsi->ctx;
	struct mipi_phy_params *phy = &dsi->phy;
	struct drm_display_mode *mode = &dsi->cur_mode;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(dsi->format);
	void __iomem *base = ctx->base;
	u32 dphy_req_kHz;

	/*
	 * Calculate the PHY parameters for the required per-lane HS bit
	 * rate: pixel clock * bits per pixel / number of lanes.
	 */
	dphy_req_kHz = mode->clock * bpp / dsi->lanes;
	dsi_get_phy_params(dphy_req_kHz, phy);

	/* reset Core */
	writel(RESET, base + PWR_UP);

	/* set dsi phy params */
	dsi_set_mipi_phy(base, phy, dsi->lanes);

	/* set dsi mode timing */
	dsi_set_mode_timing(base, phy->lane_byte_clk_kHz, mode, dsi->format);

	/* set dsi video mode */
	dsi_set_video_mode(base, dsi->mode_flags);

	/* dsi wake up */
	writel(POWERUP, base + PWR_UP);

	DRM_DEBUG_DRIVER("lanes=%d, pixel_clk=%d kHz, bytes_freq=%d kHz\n",
			 dsi->lanes, mode->clock, phy->lane_byte_clk_kHz);
}

static void dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct dsi_hw_ctx *ctx = dsi->ctx;
	void __iomem *base = ctx->base;

	if (!dsi->enable)
		return;

	writel(0, base + PWR_UP);
	writel(0, base + LPCLK_CTRL);
	writel(0, base + PHY_RSTZ);
	clk_disable_unprepare(ctx->pclk);

	dsi->enable = false;
}

static void dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct dsi_hw_ctx *ctx = dsi->ctx;
	int ret;

	if (dsi->enable)
		return;

	ret = clk_prepare_enable(ctx->pclk);
	if (ret) {
		DRM_ERROR("failed to enable pclk: %d\n", ret);
		return;
	}

	dsi_mipi_init(dsi);

	dsi->enable = true;
}

static enum drm_mode_status dsi_encoder_phy_mode_valid(
					struct drm_encoder *encoder,
					const struct drm_display_mode *mode)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct mipi_phy_params phy;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(dsi->format);
	u32 req_kHz, act_kHz, lane_byte_clk_kHz;

	/* Calculate the lane byte clk using the adjusted mode clk */
	memset(&phy, 0, sizeof(phy));
	req_kHz = mode->clock * bpp / dsi->lanes;
	act_kHz = dsi_calc_phy_rate(req_kHz, &phy);
	lane_byte_clk_kHz = act_kHz / 8;

	DRM_DEBUG_DRIVER("Checking mode %ix%i-%i@%i clock: %i...",
			mode->hdisplay, mode->vdisplay, bpp,
			drm_mode_vrefresh(mode), mode->clock);

	/*
	 * Make sure the adjusted mode clock and the lane byte clock have a
	 * common base frequency.  With 24 bpp (the only supported format),
	 * lane_byte_clk = clock * 24 / (8 * lanes) = 3 * clock / lanes, so
	 * the check below only passes when the PHY hits the requested rate
	 * exactly.
	 */
	if (mode->clock/dsi->lanes == lane_byte_clk_kHz/3) {
		DRM_DEBUG_DRIVER("OK!\n");
		return MODE_OK;
	}

	DRM_DEBUG_DRIVER("BAD!\n");
	return MODE_BAD;
}

static enum drm_mode_status dsi_encoder_mode_valid(struct drm_encoder *encoder,
					const struct drm_display_mode *mode)
{
	const struct drm_crtc_helper_funcs *crtc_funcs = NULL;
	struct drm_crtc *crtc = NULL;
	struct drm_display_mode adj_mode;
	enum drm_mode_status ret;

	/*
	 * The crtc might adjust the mode, so go through the
	 * possible crtcs (technically just one) and call
	 * mode_fixup to figure out the adjusted mode before we
	 * validate it.
	 */
	drm_for_each_crtc(crtc, encoder->dev) {
		/*
		 * reset adj_mode to the mode value each time,
		 * so we don't adjust the mode twice
		 */
		drm_mode_copy(&adj_mode, mode);

		crtc_funcs = crtc->helper_private;
		if (crtc_funcs && crtc_funcs->mode_fixup)
			if (!crtc_funcs->mode_fixup(crtc, mode, &adj_mode))
				return MODE_BAD;

		ret = dsi_encoder_phy_mode_valid(encoder, &adj_mode);
		if (ret != MODE_OK)
			return ret;
	}
	return MODE_OK;
}

static void dsi_encoder_mode_set(struct drm_encoder *encoder,
				 struct drm_display_mode *mode,
				 struct drm_display_mode *adj_mode)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);

	drm_mode_copy(&dsi->cur_mode, adj_mode);
}

static int dsi_encoder_atomic_check(struct drm_encoder *encoder,
				    struct drm_crtc_state *crtc_state,
				    struct drm_connector_state *conn_state)
{
	/* do nothing */
	return 0;
}

static const struct drm_encoder_helper_funcs dw_encoder_helper_funcs = {
	.atomic_check	= dsi_encoder_atomic_check,
	.mode_valid	= dsi_encoder_mode_valid,
	.mode_set	= dsi_encoder_mode_set,
	.enable		= dsi_encoder_enable,
	.disable	= dsi_encoder_disable
};

static const struct drm_encoder_funcs dw_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};

static int dw_drm_encoder_init(struct device *dev,
			       struct drm_device *drm_dev,
			       struct drm_encoder *encoder)
{
	int ret;
	u32 crtc_mask = drm_of_find_possible_crtcs(drm_dev, dev->of_node);

	if (!crtc_mask) {
		DRM_ERROR("failed to find crtc mask\n");
		return -EINVAL;
	}

	encoder->possible_crtcs = crtc_mask;
	ret = drm_encoder_init(drm_dev, encoder, &dw_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		DRM_ERROR("failed to init dsi encoder\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &dw_encoder_helper_funcs);

	return 0;
}

static int dsi_host_attach(struct mipi_dsi_host *host,
			   struct mipi_dsi_device *mdsi)
{
	struct dw_dsi *dsi = host_to_dsi(host);

	if (mdsi->lanes < 1 || mdsi->lanes > 4) {
		DRM_ERROR("dsi device params invalid\n");
		return -EINVAL;
	}

	dsi->lanes = mdsi->lanes;
	dsi->format = mdsi->format;
	dsi->mode_flags = mdsi->mode_flags;

	return 0;
}

static int dsi_host_detach(struct mipi_dsi_host *host,
			   struct mipi_dsi_device *mdsi)
{
	/* do nothing */
	return 0;
}

static const struct mipi_dsi_host_ops dsi_host_ops = {
	.attach = dsi_host_attach,
	.detach = dsi_host_detach,
};

static int dsi_host_init(struct device *dev, struct dw_dsi *dsi)
{
	struct mipi_dsi_host *host = &dsi->host;
	int ret;

	host->dev = dev;
	host->ops = &dsi_host_ops;
	ret = mipi_dsi_host_register(host);
	if (ret) {
		DRM_ERROR("failed to register dsi host\n");
		return ret;
	}

	return 0;
}

static int dsi_bridge_init(struct drm_device *dev, struct dw_dsi *dsi)
{
	struct drm_encoder *encoder = &dsi->encoder;
	struct drm_bridge *bridge = dsi->bridge;
	int ret;

	/* associate the bridge with the dsi encoder */
	ret = drm_bridge_attach(encoder, bridge, NULL);
	if (ret) {
		DRM_ERROR("failed to attach external bridge\n");
		return ret;
	}

	return 0;
}

static int dsi_bind(struct device *dev, struct device *master, void *data)
{
	struct dsi_data *ddata = dev_get_drvdata(dev);
	struct dw_dsi *dsi = &ddata->dsi;
	struct drm_device *drm_dev = data;
	int ret;

	ret = dw_drm_encoder_init(dev, drm_dev, &dsi->encoder);
	if (ret)
		return ret;

	ret = dsi_host_init(dev, dsi);
	if (ret)
		return ret;

	ret = dsi_bridge_init(drm_dev, dsi);
	if (ret)
		return ret;

	return 0;
}

static void dsi_unbind(struct device *dev, struct device *master, void *data)
{
	/* do nothing */
}

static const struct component_ops dsi_ops = {
	.bind	= dsi_bind,
	.unbind	= dsi_unbind,
};

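/*
 * Illustrative sketch of the device tree node this driver parses; the unit
 * address, reg/clock values and bridge label below are placeholders.  Only
 * the compatible string, the "pclk" clock name, the memory region and an
 * output endpoint on port 1 are actually required by the code:
 *
 *	dsi@xxxxxxxx {
 *		compatible = "hisilicon,hi6220-dsi";
 *		reg = <...>;
 *		clocks = <...>;
 *		clock-names = "pclk";
 *		ports {
 *			port@1 {
 *				reg = <1>;
 *				dsi_out: endpoint {
 *					remote-endpoint = <&bridge_in>;
 *				};
 *			};
 *		};
 *	};
 */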
static int dsi_parse_dt(struct platform_device *pdev, struct dw_dsi *dsi)
{
	struct dsi_hw_ctx *ctx = dsi->ctx;
	struct device_node *np = pdev->dev.of_node;
	struct resource *res;
	int ret;

	/*
	 * Get the endpoint node. In our case, the DSI has a single output
	 * port (port 1) to which the external HDMI bridge is connected.
	 */
	ret = drm_of_find_panel_or_bridge(np, 1, 0, NULL, &dsi->bridge);
	if (ret)
		return ret;

	ctx->pclk = devm_clk_get(&pdev->dev, "pclk");
	if (IS_ERR(ctx->pclk)) {
		DRM_ERROR("failed to get pclk clock\n");
		return PTR_ERR(ctx->pclk);
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ctx->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ctx->base)) {
		DRM_ERROR("failed to remap dsi io region\n");
		return PTR_ERR(ctx->base);
	}

	return 0;
}

static int dsi_probe(struct platform_device *pdev)
{
	struct dsi_data *data;
	struct dw_dsi *dsi;
	struct dsi_hw_ctx *ctx;
	int ret;

	data = devm_kzalloc(&pdev->dev, sizeof(*data), GFP_KERNEL);
	if (!data) {
		DRM_ERROR("failed to allocate dsi data.\n");
		return -ENOMEM;
	}
	dsi = &data->dsi;
	ctx = &data->ctx;
	dsi->ctx = ctx;

	ret = dsi_parse_dt(pdev, dsi);
	if (ret)
		return ret;

	platform_set_drvdata(pdev, data);

	return component_add(&pdev->dev, &dsi_ops);
}

static int dsi_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &dsi_ops);

	return 0;
}

static const struct of_device_id dsi_of_match[] = {
	{.compatible = "hisilicon,hi6220-dsi"},
	{ }
};
MODULE_DEVICE_TABLE(of, dsi_of_match);

static struct platform_driver dsi_driver = {
	.probe = dsi_probe,
	.remove = dsi_remove,
	.driver = {
		.name = "dw-dsi",
		.of_match_table = dsi_of_match,
	},
};

module_platform_driver(dsi_driver);

MODULE_AUTHOR("Xinliang Liu <xinliang.liu@linaro.org>");
MODULE_AUTHOR("Xinliang Liu <z.liuxinliang@hisilicon.com>");
MODULE_AUTHOR("Xinwei Kong <kong.kongxinwei@hisilicon.com>");
MODULE_DESCRIPTION("DesignWare MIPI DSI Host Controller v1.02 driver");
MODULE_LICENSE("GPL v2");