// SPDX-License-Identifier: GPL-2.0-only
/*
 * DesignWare MIPI DSI Host Controller v1.02 driver
 *
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2014-2016 HiSilicon Limited.
 *
 * Author:
 *	Xinliang Liu <z.liuxinliang@hisilicon.com>
 *	Xinliang Liu <xinliang.liu@linaro.org>
 *	Xinwei Kong <kong.kongxinwei@hisilicon.com>
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/delay.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/platform_device.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_device.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_of.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>

#include "dw_dsi_reg.h"

#define MAX_TX_ESC_CLK		10
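/*
 * Round x/y to the nearest integer, judged from the first decimal digit
 * of the quotient; e.g. ROUND(7, 2) == 4 while 7 / 2 == 3 in integer math.
 */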
#define ROUND(x, y)		((x) / (y) + \
				((x) % (y) * 10 / (y) >= 5 ? 1 : 0))
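/*
 * 19.2 MHz PHY reference clock; its period is 1000000000 / 19200,
 * i.e. roughly 52083 ps.
 */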
#define PHY_REF_CLK_RATE	19200000
#define PHY_REF_CLK_PERIOD_PS	(1000000000 / (PHY_REF_CLK_RATE / 1000))

#define encoder_to_dsi(encoder) \
	container_of(encoder, struct dw_dsi, encoder)
#define host_to_dsi(host) \
	container_of(host, struct dw_dsi, host)

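/*
 * D-PHY timing and PLL settings computed from the requested lane rate;
 * filled in by dsi_get_phy_params() and applied by dsi_set_mipi_phy().
 */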
struct mipi_phy_params {
	u32 clk_t_lpx;
	u32 clk_t_hs_prepare;
	u32 clk_t_hs_zero;
	u32 clk_t_hs_trial;
	u32 clk_t_wakeup;
	u32 data_t_lpx;
	u32 data_t_hs_prepare;
	u32 data_t_hs_zero;
	u32 data_t_hs_trial;
	u32 data_t_ta_go;
	u32 data_t_ta_get;
	u32 data_t_wakeup;
	u32 hstx_ckg_sel;
	u32 pll_fbd_div5f;
	u32 pll_fbd_div1f;
	u32 pll_fbd_2p;
	u32 pll_enbwt;
	u32 pll_fbd_p;
	u32 pll_fbd_s;
	u32 pll_pre_div1p;
	u32 pll_pre_p;
	u32 pll_vco_750M;
	u32 pll_lpf_rs;
	u32 pll_lpf_cs;
	u32 clklp2hs_time;
	u32 clkhs2lp_time;
	u32 lp2hs_time;
	u32 hs2lp_time;
	u32 clk_to_data_delay;
	u32 data_to_clk_delay;
	u32 lane_byte_clk_kHz;
	u32 clk_division;
};

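/* Mapped controller registers and the "pclk" clock. */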
struct dsi_hw_ctx {
	void __iomem *base;
	struct clk *pclk;
};

struct dw_dsi {
	struct drm_encoder encoder;
	struct device *dev;
	struct mipi_dsi_host host;
	struct drm_display_mode cur_mode;
	struct dsi_hw_ctx *ctx;
	struct mipi_phy_params phy;

	u32 lanes;
	enum mipi_dsi_pixel_format format;
	unsigned long mode_flags;
	bool enable;
};

struct dsi_data {
	struct dw_dsi dsi;
	struct dsi_hw_ctx ctx;
};

struct dsi_phy_range {
	u32 min_range_kHz;
	u32 max_range_kHz;
	u32 pll_vco_750M;
	u32 hstx_ckg_sel;
};

static const struct dsi_phy_range dphy_range_info[] = {
	{ 46875, 62500, 1, 7 },
	{ 62500, 93750, 0, 7 },
	{ 93750, 125000, 1, 6 },
	{ 125000, 187500, 0, 6 },
	{ 187500, 250000, 1, 5 },
	{ 250000, 375000, 0, 5 },
	{ 375000, 500000, 1, 4 },
	{ 500000, 750000, 0, 4 },
	{ 750000, 1000000, 1, 0 },
	{ 1000000, 1500000, 0, 0 }
};

static u32 dsi_calc_phy_rate(u32 req_kHz, struct mipi_phy_params *phy)
{
	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
	u32 tmp_kHz = req_kHz;
	u32 i = 0;
	u32 q_pll = 1;
	u32 m_pll = 0;
	u32 n_pll = 0;
	u32 r_pll = 1;
	u32 m_n = 0;
	u32 m_n_int = 0;
	u32 f_kHz = 0;
	u64 temp;

	/*
	 * Find a rate >= req_kHz.
	 */
	do {
		f_kHz = tmp_kHz;

		for (i = 0; i < ARRAY_SIZE(dphy_range_info); i++)
			if (f_kHz >= dphy_range_info[i].min_range_kHz &&
			    f_kHz <= dphy_range_info[i].max_range_kHz)
				break;

		if (i == ARRAY_SIZE(dphy_range_info)) {
			DRM_ERROR("%dkHz out of range\n", f_kHz);
			return 0;
		}

		phy->pll_vco_750M = dphy_range_info[i].pll_vco_750M;
		phy->hstx_ckg_sel = dphy_range_info[i].hstx_ckg_sel;

		if (phy->hstx_ckg_sel <= 7 &&
		    phy->hstx_ckg_sel >= 4)
			q_pll = 0x10 >> (7 - phy->hstx_ckg_sel);

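		/*
		 * temp is in units of 1e-9, so m_n_int is the integer part
		 * of f_kHz * q_pll / f_ref (the overall multiplication the
		 * PLL has to provide) and m_n is the first decimal digit of
		 * its fractional part.
		 */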
		temp = f_kHz * (u64)q_pll * (u64)ref_clk_ps;
		m_n_int = temp / (u64)1000000000;
		m_n = (temp % (u64)1000000000) / (u64)100000000;

		if (m_n_int % 2 == 0) {
			if (m_n * 6 >= 50) {
				n_pll = 2;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 30) {
				n_pll = 3;
				m_pll = m_n_int * n_pll + 2;
			} else {
				n_pll = 1;
				m_pll = m_n_int * n_pll;
			}
		} else {
			if (m_n * 6 >= 50) {
				n_pll = 1;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 30) {
				n_pll = 1;
				m_pll = (m_n_int + 1) * n_pll;
			} else if (m_n * 6 >= 10) {
				n_pll = 3;
				m_pll = m_n_int * n_pll + 1;
			} else {
				n_pll = 2;
				m_pll = m_n_int * n_pll;
			}
		}

		if (n_pll == 1) {
			phy->pll_fbd_p = 0;
			phy->pll_pre_div1p = 1;
		} else {
			phy->pll_fbd_p = n_pll;
			phy->pll_pre_div1p = 0;
		}

		if (phy->pll_fbd_2p <= 7 && phy->pll_fbd_2p >= 4)
			r_pll = 0x10 >> (7 - phy->pll_fbd_2p);

		if (m_pll == 2) {
			phy->pll_pre_p = 0;
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 1;
		} else if (m_pll >= 2 * 2 * r_pll && m_pll <= 2 * 4 * r_pll) {
			phy->pll_pre_p = m_pll / (2 * r_pll);
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 1;
			phy->pll_fbd_div5f = 0;
		} else if (m_pll >= 2 * 5 * r_pll && m_pll <= 2 * 150 * r_pll) {
			if (((m_pll / (2 * r_pll)) % 2) == 0) {
				phy->pll_pre_p =
					(m_pll / (2 * r_pll)) / 2 - 1;
				phy->pll_fbd_s =
					(m_pll / (2 * r_pll)) % 2 + 2;
			} else {
				phy->pll_pre_p =
					(m_pll / (2 * r_pll)) / 2;
				phy->pll_fbd_s =
					(m_pll / (2 * r_pll)) % 2;
			}
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 0;
		} else {
			phy->pll_pre_p = 0;
			phy->pll_fbd_s = 0;
			phy->pll_fbd_div1f = 0;
			phy->pll_fbd_div5f = 1;
		}

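		/*
		 * Resulting rate: f = f_ref * m_pll / (n_pll * q_pll),
		 * with f_ref = 1e9 / ref_clk_ps (both in kHz).
		 */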
		f_kHz = (u64)1000000000 * (u64)m_pll /
			((u64)ref_clk_ps * (u64)n_pll * (u64)q_pll);

		if (f_kHz >= req_kHz)
			break;

		tmp_kHz += 10;

	} while (true);

	return f_kHz;
}

static void dsi_get_phy_params(u32 phy_req_kHz,
			       struct mipi_phy_params *phy)
{
	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
	u32 phy_rate_kHz;
	u32 ui;

	memset(phy, 0, sizeof(*phy));

	phy_rate_kHz = dsi_calc_phy_rate(phy_req_kHz, phy);
	if (!phy_rate_kHz)
		return;

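	/* High-speed bit period (UI) in ns; integer math, so approximate. */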
	ui = 1000000 / phy_rate_kHz;

	phy->clk_t_lpx = ROUND(50, 8 * ui);
	phy->clk_t_hs_prepare = ROUND(133, 16 * ui) - 1;

	phy->clk_t_hs_zero = ROUND(262, 8 * ui);
	phy->clk_t_hs_trial = 2 * (ROUND(60, 8 * ui) - 1);
	phy->clk_t_wakeup = ROUND(1000000, (ref_clk_ps / 1000) - 1);
	if (phy->clk_t_wakeup > 0xff)
		phy->clk_t_wakeup = 0xff;
	phy->data_t_wakeup = phy->clk_t_wakeup;
	phy->data_t_lpx = phy->clk_t_lpx;
	phy->data_t_hs_prepare = ROUND(125 + 10 * ui, 16 * ui) - 1;
	phy->data_t_hs_zero = ROUND(105 + 6 * ui, 8 * ui);
	phy->data_t_hs_trial = 2 * (ROUND(60 + 4 * ui, 8 * ui) - 1);
	phy->data_t_ta_go = 3;
	phy->data_t_ta_get = 4;

	phy->pll_enbwt = 1;
	phy->clklp2hs_time = ROUND(407, 8 * ui) + 12;
	phy->clkhs2lp_time = ROUND(105 + 12 * ui, 8 * ui);
	phy->lp2hs_time = ROUND(240 + 12 * ui, 8 * ui) + 1;
	phy->hs2lp_time = phy->clkhs2lp_time;
	phy->clk_to_data_delay = 1 + phy->clklp2hs_time;
	phy->data_to_clk_delay = ROUND(60 + 52 * ui, 8 * ui) +
				phy->clkhs2lp_time;

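	/*
	 * The lane byte clock is the lane bit rate divided by 8.  The
	 * escape clock divider keeps the TX escape clock at or below
	 * MAX_TX_ESC_CLK, assumed to be in MHz (lane_byte_clk_kHz is in
	 * kHz, hence the * 1000).
	 */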
	phy->lane_byte_clk_kHz = phy_rate_kHz / 8;
	phy->clk_division =
		DIV_ROUND_UP(phy->lane_byte_clk_kHz, MAX_TX_ESC_CLK * 1000);
}

static u32 dsi_get_dpi_color_coding(enum mipi_dsi_pixel_format format)
{
	u32 val;

	/*
	 * TODO: only RGB888 is supported for now; add support for more
	 * pixel formats.
	 */
	switch (format) {
	case MIPI_DSI_FMT_RGB888:
		val = DSI_24BITS_1;
		break;
	default:
		val = DSI_24BITS_1;
		break;
	}

	return val;
}

/*
 * Write a D-PHY configuration register through the PHY test interface.
 */
static void dsi_phy_tst_set(void __iomem *base, u32 reg, u32 val)
{
	u32 reg_write = 0x10000 + reg;

	/*
	 * latch the register address first
	 */
	writel(reg_write, base + PHY_TST_CTRL1);
	writel(0x02, base + PHY_TST_CTRL0);
	writel(0x00, base + PHY_TST_CTRL0);

	/*
	 * then latch the value
	 */
	writel(val, base + PHY_TST_CTRL1);
	writel(0x02, base + PHY_TST_CTRL0);
	writel(0x00, base + PHY_TST_CTRL0);
}

static void dsi_set_phy_timer(void __iomem *base,
			      struct mipi_phy_params *phy,
			      u32 lanes)
{
	u32 val;

	/*
	 * Set lane value and phy stop wait time.
	 */
	val = (lanes - 1) | (PHY_STOP_WAIT_TIME << 8);
	writel(val, base + PHY_IF_CFG);

	/*
	 * Set phy clk division.
	 */
	val = readl(base + CLKMGR_CFG) | phy->clk_division;
	writel(val, base + CLKMGR_CFG);

	/*
	 * Set lp and hs switching params.
	 */
	dw_update_bits(base + PHY_TMR_CFG, 24, MASK(8), phy->hs2lp_time);
	dw_update_bits(base + PHY_TMR_CFG, 16, MASK(8), phy->lp2hs_time);
	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 16, MASK(10),
		       phy->clkhs2lp_time);
	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 0, MASK(10),
		       phy->clklp2hs_time);
	dw_update_bits(base + CLK_DATA_TMR_CFG, 8, MASK(8),
		       phy->data_to_clk_delay);
	dw_update_bits(base + CLK_DATA_TMR_CFG, 0, MASK(8),
		       phy->clk_to_data_delay);
}

static void dsi_set_mipi_phy(void __iomem *base,
			     struct mipi_phy_params *phy,
			     u32 lanes)
{
	u32 delay_count;
	u32 val;
	u32 i;

	/* phy timer setting */
	dsi_set_phy_timer(base, phy, lanes);

	/*
	 * Reset to clean up phy tst params.
	 */
	writel(0, base + PHY_RSTZ);
	writel(0, base + PHY_TST_CTRL0);
	writel(1, base + PHY_TST_CTRL0);
	writel(0, base + PHY_TST_CTRL0);

	/*
	 * Clock lane timing control setting: TLPX, THS-PREPARE,
	 * THS-ZERO, THS-TRAIL, TWAKEUP.
	 */
	dsi_phy_tst_set(base, CLK_TLPX, phy->clk_t_lpx);
	dsi_phy_tst_set(base, CLK_THS_PREPARE, phy->clk_t_hs_prepare);
	dsi_phy_tst_set(base, CLK_THS_ZERO, phy->clk_t_hs_zero);
	dsi_phy_tst_set(base, CLK_THS_TRAIL, phy->clk_t_hs_trial);
	dsi_phy_tst_set(base, CLK_TWAKEUP, phy->clk_t_wakeup);

	/*
	 * Data lane timing control setting: TLPX, THS-PREPARE,
	 * THS-ZERO, THS-TRAIL, TTA-GO, TTA-GET, TWAKEUP.
	 */
	for (i = 0; i < lanes; i++) {
		dsi_phy_tst_set(base, DATA_TLPX(i), phy->data_t_lpx);
		dsi_phy_tst_set(base, DATA_THS_PREPARE(i),
				phy->data_t_hs_prepare);
		dsi_phy_tst_set(base, DATA_THS_ZERO(i), phy->data_t_hs_zero);
		dsi_phy_tst_set(base, DATA_THS_TRAIL(i), phy->data_t_hs_trial);
		dsi_phy_tst_set(base, DATA_TTA_GO(i), phy->data_t_ta_go);
		dsi_phy_tst_set(base, DATA_TTA_GET(i), phy->data_t_ta_get);
		dsi_phy_tst_set(base, DATA_TWAKEUP(i), phy->data_t_wakeup);
	}

	/*
	 * PHY configuration: CFG_I and PLL CFG I, II, III, IV, V.
	 */
	dsi_phy_tst_set(base, PHY_CFG_I, phy->hstx_ckg_sel);
	val = (phy->pll_fbd_div5f << 5) + (phy->pll_fbd_div1f << 4) +
	      (phy->pll_fbd_2p << 1) + phy->pll_enbwt;
	dsi_phy_tst_set(base, PHY_CFG_PLL_I, val);
	dsi_phy_tst_set(base, PHY_CFG_PLL_II, phy->pll_fbd_p);
	dsi_phy_tst_set(base, PHY_CFG_PLL_III, phy->pll_fbd_s);
	val = (phy->pll_pre_div1p << 7) + phy->pll_pre_p;
	dsi_phy_tst_set(base, PHY_CFG_PLL_IV, val);
	val = (5 << 5) + (phy->pll_vco_750M << 4) + (phy->pll_lpf_rs << 2) +
	      phy->pll_lpf_cs;
	dsi_phy_tst_set(base, PHY_CFG_PLL_V, val);

	writel(PHY_ENABLECLK, base + PHY_RSTZ);
	udelay(1);
	writel(PHY_ENABLECLK | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
	udelay(1);
	writel(PHY_ENABLECLK | PHY_UNRSTZ | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
	usleep_range(1000, 1500);

	/*
	 * Wait for the phy clock to be ready.
	 */
	delay_count = 100;
	while (delay_count) {
		val = readl(base + PHY_STATUS);
		if ((BIT(0) | BIT(2)) & val)
			break;

		udelay(1);
		delay_count--;
	}

	if (!delay_count)
		DRM_INFO("phylock and phystopstateclklane are not ready.\n");
}

static void dsi_set_mode_timing(void __iomem *base,
				u32 lane_byte_clk_kHz,
				struct drm_display_mode *mode,
				enum mipi_dsi_pixel_format format)
{
	u32 hfp, hbp, hsw, vfp, vbp, vsw;
	u32 hline_time;
	u32 hsa_time;
	u32 hbp_time;
	u32 pixel_clk_kHz;
	int htot, vtot;
	u32 val;
	u64 tmp;

	val = dsi_get_dpi_color_coding(format);
	writel(val, base + DPI_COLOR_CODING);

	val = (mode->flags & DRM_MODE_FLAG_NHSYNC ? 1 : 0) << 2;
	val |= (mode->flags & DRM_MODE_FLAG_NVSYNC ? 1 : 0) << 1;
	writel(val, base + DPI_CFG_POL);

	/*
	 * The DSI IP accepts vertical timing using lines as normal,
	 * but horizontal timing is a mixture of pixel-clocks for the
	 * active region and byte-lane clocks for the blanking-related
	 * timings. hfp is specified as the total hline_time in byte-
	 * lane clocks minus hsa, hbp and active.
	 */
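	/*
	 * For example (purely illustrative numbers): a 1080p mode with
	 * htotal = 2200 and a 148500 kHz pixel clock, sent as RGB888 over
	 * four lanes, would use a lane byte clock of about
	 * 148500 * 24 / 4 / 8 = 111375 kHz, giving
	 * hline_time = DIV_ROUND_UP(2200 * 111375, 148500) = 1650
	 * byte-lane clocks.
	 */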
	pixel_clk_kHz = mode->clock;
	htot = mode->htotal;
	vtot = mode->vtotal;
	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;
	if (vsw > 15) {
		DRM_DEBUG_DRIVER("vsw exceeded 15\n");
		vsw = 15;
	}

	hsa_time = (hsw * lane_byte_clk_kHz) / pixel_clk_kHz;
	hbp_time = (hbp * lane_byte_clk_kHz) / pixel_clk_kHz;
	tmp = (u64)htot * (u64)lane_byte_clk_kHz;
	hline_time = DIV_ROUND_UP(tmp, pixel_clk_kHz);

	/* all specified in byte-lane clocks */
	writel(hsa_time, base + VID_HSA_TIME);
	writel(hbp_time, base + VID_HBP_TIME);
	writel(hline_time, base + VID_HLINE_TIME);

	writel(vsw, base + VID_VSA_LINES);
	writel(vbp, base + VID_VBP_LINES);
	writel(vfp, base + VID_VFP_LINES);
	writel(mode->vdisplay, base + VID_VACTIVE_LINES);
	writel(mode->hdisplay, base + VID_PKT_SIZE);

	DRM_DEBUG_DRIVER("htot=%d, hfp=%d, hbp=%d, hsw=%d\n",
			 htot, hfp, hbp, hsw);
	DRM_DEBUG_DRIVER("vtot=%d, vfp=%d, vbp=%d, vsw=%d\n",
			 vtot, vfp, vbp, vsw);
	DRM_DEBUG_DRIVER("hsa_time=%d, hbp_time=%d, hline_time=%d\n",
			 hsa_time, hbp_time, hline_time);
}

static void dsi_set_video_mode(void __iomem *base, unsigned long flags)
{
	u32 val;
	u32 mode_mask = MIPI_DSI_MODE_VIDEO | MIPI_DSI_MODE_VIDEO_BURST |
			MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
	u32 non_burst_sync_pulse = MIPI_DSI_MODE_VIDEO |
				   MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
	u32 non_burst_sync_event = MIPI_DSI_MODE_VIDEO;

	/*
	 * Choose the video mode type: non-burst with sync pulses,
	 * non-burst with sync events, or burst.
	 */
	if ((flags & mode_mask) == non_burst_sync_pulse)
		val = DSI_NON_BURST_SYNC_PULSES;
	else if ((flags & mode_mask) == non_burst_sync_event)
		val = DSI_NON_BURST_SYNC_EVENTS;
	else
		val = DSI_BURST_SYNC_PULSES_1;
	writel(val, base + VID_MODE_CFG);

	writel(PHY_TXREQUESTCLKHS, base + LPCLK_CTRL);
	writel(DSI_VIDEO_MODE, base + MODE_CFG);
}

static void dsi_mipi_init(struct dw_dsi *dsi)
{
	struct dsi_hw_ctx *ctx = dsi->ctx;
	struct mipi_phy_params *phy = &dsi->phy;
	struct drm_display_mode *mode = &dsi->cur_mode;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(dsi->format);
	void __iomem *base = ctx->base;
	u32 dphy_req_kHz;

	/*
	 * Calculate the phy params from the requested per-lane bit rate
	 * (pixel clock * bits-per-pixel / number of lanes).
	 */
	dphy_req_kHz = mode->clock * bpp / dsi->lanes;
	dsi_get_phy_params(dphy_req_kHz, phy);

	/* reset Core */
	writel(RESET, base + PWR_UP);

	/* set dsi phy params */
	dsi_set_mipi_phy(base, phy, dsi->lanes);

	/* set dsi mode timing */
	dsi_set_mode_timing(base, phy->lane_byte_clk_kHz, mode, dsi->format);

	/* set dsi video mode */
	dsi_set_video_mode(base, dsi->mode_flags);

	/* dsi wake up */
	writel(POWERUP, base + PWR_UP);

	DRM_DEBUG_DRIVER("lanes=%d, pixel_clk=%d kHz, bytes_freq=%d kHz\n",
			 dsi->lanes, mode->clock, phy->lane_byte_clk_kHz);
}

static void dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct dsi_hw_ctx *ctx = dsi->ctx;
	void __iomem *base = ctx->base;

	if (!dsi->enable)
		return;

	writel(0, base + PWR_UP);
	writel(0, base + LPCLK_CTRL);
	writel(0, base + PHY_RSTZ);
	clk_disable_unprepare(ctx->pclk);

	dsi->enable = false;
}

static void dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct dsi_hw_ctx *ctx = dsi->ctx;
	int ret;

	if (dsi->enable)
		return;

	ret = clk_prepare_enable(ctx->pclk);
	if (ret) {
		DRM_ERROR("failed to enable pclk: %d\n", ret);
		return;
	}

	dsi_mipi_init(dsi);

	dsi->enable = true;
}

static enum drm_mode_status dsi_encoder_phy_mode_valid(
					struct drm_encoder *encoder,
					const struct drm_display_mode *mode)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);
	struct mipi_phy_params phy;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(dsi->format);
	u32 req_kHz, act_kHz, lane_byte_clk_kHz;

	/* Calculate the lane byte clk using the adjusted mode clk */
	memset(&phy, 0, sizeof(phy));
	req_kHz = mode->clock * bpp / dsi->lanes;
	act_kHz = dsi_calc_phy_rate(req_kHz, &phy);
	lane_byte_clk_kHz = act_kHz / 8;

	DRM_DEBUG_DRIVER("Checking mode %ix%i-%i@%i clock: %i...",
			 mode->hdisplay, mode->vdisplay, bpp,
			 drm_mode_vrefresh(mode), mode->clock);

	/*
	 * Make sure the adjusted mode clock and the lane byte clk
	 * have a common denominator base frequency.
	 */
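	/*
	 * For the only supported format, RGB888 (3 bytes per pixel), an
	 * exact match means pixel_clk * 3 == lane_byte_clk * lanes, i.e.
	 * mode->clock / lanes == lane_byte_clk_kHz / 3.
	 */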
	if (mode->clock / dsi->lanes == lane_byte_clk_kHz / 3) {
		DRM_DEBUG_DRIVER("OK!\n");
		return MODE_OK;
	}

	DRM_DEBUG_DRIVER("BAD!\n");
	return MODE_BAD;
}

static enum drm_mode_status dsi_encoder_mode_valid(struct drm_encoder *encoder,
					const struct drm_display_mode *mode)
{
	const struct drm_crtc_helper_funcs *crtc_funcs = NULL;
	struct drm_crtc *crtc = NULL;
	struct drm_display_mode adj_mode;
	enum drm_mode_status ret;

	/*
	 * The crtc might adjust the mode, so go through the
	 * possible crtcs (technically just one) and call
	 * mode_fixup to figure out the adjusted mode before we
	 * validate it.
	 */
	drm_for_each_crtc(crtc, encoder->dev) {
		/*
		 * reset adj_mode to the mode value each time,
		 * so we don't adjust the mode twice
		 */
		drm_mode_init(&adj_mode, mode);

		crtc_funcs = crtc->helper_private;
		if (crtc_funcs && crtc_funcs->mode_fixup)
			if (!crtc_funcs->mode_fixup(crtc, mode, &adj_mode))
				return MODE_BAD;

		ret = dsi_encoder_phy_mode_valid(encoder, &adj_mode);
		if (ret != MODE_OK)
			return ret;
	}
	return MODE_OK;
}

static void dsi_encoder_mode_set(struct drm_encoder *encoder,
				 struct drm_display_mode *mode,
				 struct drm_display_mode *adj_mode)
{
	struct dw_dsi *dsi = encoder_to_dsi(encoder);

	drm_mode_copy(&dsi->cur_mode, adj_mode);
}

static int dsi_encoder_atomic_check(struct drm_encoder *encoder,
				    struct drm_crtc_state *crtc_state,
				    struct drm_connector_state *conn_state)
{
	/* do nothing */
	return 0;
}

static const struct drm_encoder_helper_funcs dw_encoder_helper_funcs = {
	.atomic_check	= dsi_encoder_atomic_check,
	.mode_valid	= dsi_encoder_mode_valid,
	.mode_set	= dsi_encoder_mode_set,
	.enable		= dsi_encoder_enable,
	.disable	= dsi_encoder_disable
};

static int dw_drm_encoder_init(struct device *dev,
			       struct drm_device *drm_dev,
			       struct drm_encoder *encoder)
{
	int ret;
	u32 crtc_mask = drm_of_find_possible_crtcs(drm_dev, dev->of_node);

	if (!crtc_mask) {
		DRM_ERROR("failed to find crtc mask\n");
		return -EINVAL;
	}

	encoder->possible_crtcs = crtc_mask;
	ret = drm_simple_encoder_init(drm_dev, encoder, DRM_MODE_ENCODER_DSI);
	if (ret) {
		DRM_ERROR("failed to init dsi encoder\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &dw_encoder_helper_funcs);

	return 0;
}

static const struct component_ops dsi_ops;
static int dsi_host_attach(struct mipi_dsi_host *host,
			   struct mipi_dsi_device *mdsi)
{
	struct dw_dsi *dsi = host_to_dsi(host);
	struct device *dev = host->dev;
	int ret;

	if (mdsi->lanes < 1 || mdsi->lanes > 4) {
		DRM_ERROR("dsi device params invalid\n");
		return -EINVAL;
	}

	dsi->lanes = mdsi->lanes;
	dsi->format = mdsi->format;
	dsi->mode_flags = mdsi->mode_flags;

	ret = component_add(dev, &dsi_ops);
	if (ret)
		return ret;

	return 0;
}

static int dsi_host_detach(struct mipi_dsi_host *host,
			   struct mipi_dsi_device *mdsi)
{
	struct device *dev = host->dev;

	component_del(dev, &dsi_ops);

	return 0;
}

static const struct mipi_dsi_host_ops dsi_host_ops = {
	.attach = dsi_host_attach,
	.detach = dsi_host_detach,
};

static int dsi_host_init(struct device *dev, struct dw_dsi *dsi)
{
	struct mipi_dsi_host *host = &dsi->host;
	int ret;

	host->dev = dev;
	host->ops = &dsi_host_ops;
	ret = mipi_dsi_host_register(host);
	if (ret) {
		DRM_ERROR("failed to register dsi host\n");
		return ret;
	}

	return 0;
}

static int dsi_bridge_init(struct drm_device *dev, struct dw_dsi *dsi)
{
	struct drm_encoder *encoder = &dsi->encoder;
	struct drm_bridge *bridge;
	struct device_node *np = dsi->dev->of_node;
	int ret;

	/*
	 * Get the endpoint node. In our case, the DSI has one output
	 * port (port 1) to which the external HDMI bridge is connected.
	 */
	ret = drm_of_find_panel_or_bridge(np, 1, 0, NULL, &bridge);
	if (ret)
		return ret;

	/* associate the bridge to the dsi encoder */
	return drm_bridge_attach(encoder, bridge, NULL, 0);
}

static int dsi_bind(struct device *dev, struct device *master, void *data)
{
	struct dsi_data *ddata = dev_get_drvdata(dev);
	struct dw_dsi *dsi = &ddata->dsi;
	struct drm_device *drm_dev = data;
	int ret;

	ret = dw_drm_encoder_init(dev, drm_dev, &dsi->encoder);
	if (ret)
		return ret;

	ret = dsi_bridge_init(drm_dev, dsi);
	if (ret)
		return ret;

	return 0;
}

static void dsi_unbind(struct device *dev, struct device *master, void *data)
{
	/* do nothing */
}

static const struct component_ops dsi_ops = {
	.bind	= dsi_bind,
	.unbind	= dsi_unbind,
};

static int dsi_parse_dt(struct platform_device *pdev, struct dw_dsi *dsi)
{
	struct dsi_hw_ctx *ctx = dsi->ctx;
	struct resource *res;

	ctx->pclk = devm_clk_get(&pdev->dev, "pclk");
	if (IS_ERR(ctx->pclk)) {
		DRM_ERROR("failed to get pclk clock\n");
		return PTR_ERR(ctx->pclk);
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ctx->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ctx->base)) {
		DRM_ERROR("failed to remap dsi io region\n");
		return PTR_ERR(ctx->base);
	}

	return 0;
}

static int dsi_probe(struct platform_device *pdev)
{
	struct dsi_data *data;
	struct dw_dsi *dsi;
	struct dsi_hw_ctx *ctx;
	int ret;

	data = devm_kzalloc(&pdev->dev, sizeof(*data), GFP_KERNEL);
	if (!data) {
		DRM_ERROR("failed to allocate dsi data.\n");
		return -ENOMEM;
	}
	dsi = &data->dsi;
	ctx = &data->ctx;
	dsi->ctx = ctx;
	dsi->dev = &pdev->dev;

	ret = dsi_parse_dt(pdev, dsi);
	if (ret)
		return ret;

	platform_set_drvdata(pdev, data);

	ret = dsi_host_init(&pdev->dev, dsi);
	if (ret)
		return ret;

	return 0;
}

static void dsi_remove(struct platform_device *pdev)
{
	struct dsi_data *data = platform_get_drvdata(pdev);
	struct dw_dsi *dsi = &data->dsi;

	mipi_dsi_host_unregister(&dsi->host);
}

static const struct of_device_id dsi_of_match[] = {
	{ .compatible = "hisilicon,hi6220-dsi" },
	{ }
};
MODULE_DEVICE_TABLE(of, dsi_of_match);

static struct platform_driver dsi_driver = {
	.probe = dsi_probe,
	.remove_new = dsi_remove,
	.driver = {
		.name = "dw-dsi",
		.of_match_table = dsi_of_match,
	},
};

module_platform_driver(dsi_driver);

MODULE_AUTHOR("Xinliang Liu <xinliang.liu@linaro.org>");
MODULE_AUTHOR("Xinliang Liu <z.liuxinliang@hisilicon.com>");
MODULE_AUTHOR("Xinwei Kong <kong.kongxinwei@hisilicon.com>");
MODULE_DESCRIPTION("DesignWare MIPI DSI Host Controller v1.02 driver");
MODULE_LICENSE("GPL v2");