// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2016, Fuzhou Rockchip Electronics Co., Ltd
 * Copyright (C) STMicroelectronics SA 2017
 *
 * Modified by Philippe Cornu <philippe.cornu@st.com>
 * This generic Synopsys DesignWare MIPI DSI host driver is based on the
 * Rockchip version from rockchip/dw-mipi-dsi.c with phy & bridge APIs.
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/debugfs.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <video/mipi_display.h>

#include <drm/bridge/dw_mipi_dsi.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_crtc.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_modes.h>
#include <drm/drm_of.h>
#include <drm/drm_print.h>

#define HWVER_131			0x31333100	/* IP version 1.31 */

#define DSI_VERSION			0x00
#define VERSION				GENMASK(31, 8)

#define DSI_PWR_UP			0x04
#define RESET				0
#define POWERUP				BIT(0)

#define DSI_CLKMGR_CFG			0x08
#define TO_CLK_DIVISION(div)		(((div) & 0xff) << 8)
#define TX_ESC_CLK_DIVISION(div)	((div) & 0xff)

#define DSI_DPI_VCID			0x0c
#define DPI_VCID(vcid)			((vcid) & 0x3)

#define DSI_DPI_COLOR_CODING		0x10
#define LOOSELY18_EN			BIT(8)
#define DPI_COLOR_CODING_16BIT_1	0x0
#define DPI_COLOR_CODING_16BIT_2	0x1
#define DPI_COLOR_CODING_16BIT_3	0x2
#define DPI_COLOR_CODING_18BIT_1	0x3
#define DPI_COLOR_CODING_18BIT_2	0x4
#define DPI_COLOR_CODING_24BIT		0x5

#define DSI_DPI_CFG_POL			0x14
#define COLORM_ACTIVE_LOW		BIT(4)
#define SHUTD_ACTIVE_LOW		BIT(3)
#define HSYNC_ACTIVE_LOW		BIT(2)
#define VSYNC_ACTIVE_LOW		BIT(1)
#define DATAEN_ACTIVE_LOW		BIT(0)

#define DSI_DPI_LP_CMD_TIM		0x18
#define OUTVACT_LPCMD_TIME(p)		(((p) & 0xff) << 16)
#define INVACT_LPCMD_TIME(p)		((p) & 0xff)

#define DSI_DBI_VCID			0x1c
#define DSI_DBI_CFG			0x20
#define DSI_DBI_PARTITIONING_EN		0x24
#define DSI_DBI_CMDSIZE			0x28

#define DSI_PCKHDL_CFG			0x2c
#define CRC_RX_EN			BIT(4)
#define ECC_RX_EN			BIT(3)
#define BTA_EN				BIT(2)
#define EOTP_RX_EN			BIT(1)
#define EOTP_TX_EN			BIT(0)

#define DSI_GEN_VCID			0x30

#define DSI_MODE_CFG			0x34
#define ENABLE_VIDEO_MODE		0
#define ENABLE_CMD_MODE			BIT(0)

#define DSI_VID_MODE_CFG		0x38
#define ENABLE_LOW_POWER		(0x3f << 8)
#define ENABLE_LOW_POWER_MASK		(0x3f << 8)
#define VID_MODE_TYPE_NON_BURST_SYNC_PULSES	0x0
#define VID_MODE_TYPE_NON_BURST_SYNC_EVENTS	0x1
#define VID_MODE_TYPE_BURST			0x2
#define VID_MODE_TYPE_MASK			0x3
#define ENABLE_LOW_POWER_CMD		BIT(15)
#define VID_MODE_VPG_ENABLE		BIT(16)
#define VID_MODE_VPG_MODE		BIT(20)
#define VID_MODE_VPG_HORIZONTAL		BIT(24)

#define DSI_VID_PKT_SIZE		0x3c
#define VID_PKT_SIZE(p)			((p) & 0x3fff)

#define DSI_VID_NUM_CHUNKS		0x40
#define VID_NUM_CHUNKS(c)		((c) & 0x1fff)

#define DSI_VID_NULL_SIZE		0x44
#define VID_NULL_SIZE(b)		((b) & 0x1fff)

#define DSI_VID_HSA_TIME		0x48
#define DSI_VID_HBP_TIME		0x4c
#define DSI_VID_HLINE_TIME		0x50
#define DSI_VID_VSA_LINES		0x54
#define DSI_VID_VBP_LINES		0x58
#define DSI_VID_VFP_LINES		0x5c
#define DSI_VID_VACTIVE_LINES		0x60
#define DSI_EDPI_CMD_SIZE		0x64

#define DSI_CMD_MODE_CFG		0x68
#define MAX_RD_PKT_SIZE_LP		BIT(24)
#define DCS_LW_TX_LP			BIT(19)
#define DCS_SR_0P_TX_LP			BIT(18)
#define DCS_SW_1P_TX_LP			BIT(17)
#define DCS_SW_0P_TX_LP			BIT(16)
#define GEN_LW_TX_LP			BIT(14)
#define GEN_SR_2P_TX_LP			BIT(13)
#define GEN_SR_1P_TX_LP			BIT(12)
#define GEN_SR_0P_TX_LP			BIT(11)
#define GEN_SW_2P_TX_LP			BIT(10)
#define GEN_SW_1P_TX_LP			BIT(9)
#define GEN_SW_0P_TX_LP			BIT(8)
#define ACK_RQST_EN			BIT(1)
#define TEAR_FX_EN			BIT(0)

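/*
 * Convenience mask: every generic/DCS read and write type selected for
 * low-power transmission in DSI_CMD_MODE_CFG (used when a message is sent
 * with MIPI_DSI_MSG_USE_LPM, see dw_mipi_message_config()).
 */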
#define CMD_MODE_ALL_LP			(MAX_RD_PKT_SIZE_LP | \
					 DCS_LW_TX_LP | \
					 DCS_SR_0P_TX_LP | \
					 DCS_SW_1P_TX_LP | \
					 DCS_SW_0P_TX_LP | \
					 GEN_LW_TX_LP | \
					 GEN_SR_2P_TX_LP | \
					 GEN_SR_1P_TX_LP | \
					 GEN_SR_0P_TX_LP | \
					 GEN_SW_2P_TX_LP | \
					 GEN_SW_1P_TX_LP | \
					 GEN_SW_0P_TX_LP)

#define DSI_GEN_HDR			0x6c
#define DSI_GEN_PLD_DATA		0x70

#define DSI_CMD_PKT_STATUS		0x74
#define GEN_RD_CMD_BUSY			BIT(6)
#define GEN_PLD_R_FULL			BIT(5)
#define GEN_PLD_R_EMPTY			BIT(4)
#define GEN_PLD_W_FULL			BIT(3)
#define GEN_PLD_W_EMPTY			BIT(2)
#define GEN_CMD_FULL			BIT(1)
#define GEN_CMD_EMPTY			BIT(0)

#define DSI_TO_CNT_CFG			0x78
#define HSTX_TO_CNT(p)			(((p) & 0xffff) << 16)
#define LPRX_TO_CNT(p)			((p) & 0xffff)

#define DSI_HS_RD_TO_CNT		0x7c
#define DSI_LP_RD_TO_CNT		0x80
#define DSI_HS_WR_TO_CNT		0x84
#define DSI_LP_WR_TO_CNT		0x88
#define DSI_BTA_TO_CNT			0x8c

#define DSI_LPCLK_CTRL			0x94
#define AUTO_CLKLANE_CTRL		BIT(1)
#define PHY_TXREQUESTCLKHS		BIT(0)

#define DSI_PHY_TMR_LPCLK_CFG		0x98
#define PHY_CLKHS2LP_TIME(lbcc)		(((lbcc) & 0x3ff) << 16)
#define PHY_CLKLP2HS_TIME(lbcc)		((lbcc) & 0x3ff)

#define DSI_PHY_TMR_CFG			0x9c
#define PHY_HS2LP_TIME(lbcc)		(((lbcc) & 0xff) << 24)
#define PHY_LP2HS_TIME(lbcc)		(((lbcc) & 0xff) << 16)
#define MAX_RD_TIME(lbcc)		((lbcc) & 0x7fff)
#define PHY_HS2LP_TIME_V131(lbcc)	(((lbcc) & 0x3ff) << 16)
#define PHY_LP2HS_TIME_V131(lbcc)	((lbcc) & 0x3ff)

#define DSI_PHY_RSTZ			0xa0
#define PHY_DISFORCEPLL			0
#define PHY_ENFORCEPLL			BIT(3)
#define PHY_DISABLECLK			0
#define PHY_ENABLECLK			BIT(2)
#define PHY_RSTZ			0
#define PHY_UNRSTZ			BIT(1)
#define PHY_SHUTDOWNZ			0
#define PHY_UNSHUTDOWNZ			BIT(0)

#define DSI_PHY_IF_CFG			0xa4
#define PHY_STOP_WAIT_TIME(cycle)	(((cycle) & 0xff) << 8)
#define N_LANES(n)			(((n) - 1) & 0x3)

#define DSI_PHY_ULPS_CTRL		0xa8
#define DSI_PHY_TX_TRIGGERS		0xac

#define DSI_PHY_STATUS			0xb0
#define PHY_STOP_STATE_CLK_LANE		BIT(2)
#define PHY_LOCK			BIT(0)

#define DSI_PHY_TST_CTRL0		0xb4
#define PHY_TESTCLK			BIT(1)
#define PHY_UNTESTCLK			0
#define PHY_TESTCLR			BIT(0)
#define PHY_UNTESTCLR			0

#define DSI_PHY_TST_CTRL1		0xb8
#define PHY_TESTEN			BIT(16)
#define PHY_UNTESTEN			0
#define PHY_TESTDOUT(n)			(((n) & 0xff) << 8)
#define PHY_TESTDIN(n)			((n) & 0xff)

#define DSI_INT_ST0			0xbc
#define DSI_INT_ST1			0xc0
#define DSI_INT_MSK0			0xc4
#define DSI_INT_MSK1			0xc8

#define DSI_PHY_TMR_RD_CFG		0xf4
#define MAX_RD_TIME_V131(lbcc)		((lbcc) & 0x7fff)

#define PHY_STATUS_TIMEOUT_US		10000
#define CMD_PKT_STATUS_TIMEOUT_US	20000

#ifdef CONFIG_DEBUG_FS
#define VPG_DEFS(name, dsi) \
	((void __force *)&((*dsi).vpg_defs.name))

#define REGISTER(name, mask, dsi) \
	{ #name, VPG_DEFS(name, dsi), mask, dsi }

struct debugfs_entries {
	const char				*name;
	bool					*reg;
	u32					mask;
	struct dw_mipi_dsi			*dsi;
};
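/*
 * For example, REGISTER(vpg, VID_MODE_VPG_ENABLE, dsi) effectively expands to
 * { "vpg", &dsi->vpg_defs.vpg, VID_MODE_VPG_ENABLE, dsi }, tying the debugfs
 * file name to its backing flag and register mask.
 */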
#endif /* CONFIG_DEBUG_FS */

struct dw_mipi_dsi {
	struct drm_bridge bridge;
	struct mipi_dsi_host dsi_host;
	struct drm_bridge *panel_bridge;
	struct device *dev;
	void __iomem *base;

	struct clk *pclk;

	unsigned int lane_mbps; /* per lane */
	u32 channel;
	u32 lanes;
	u32 format;
	unsigned long mode_flags;

#ifdef CONFIG_DEBUG_FS
	struct dentry *debugfs;
	struct debugfs_entries *debugfs_vpg;
	struct {
		bool vpg;
		bool vpg_horizontal;
		bool vpg_ber_pattern;
	} vpg_defs;
#endif /* CONFIG_DEBUG_FS */

	struct dw_mipi_dsi *master; /* dual-dsi master ptr */
	struct dw_mipi_dsi *slave; /* dual-dsi slave ptr */

	const struct dw_mipi_dsi_plat_data *plat_data;
};

/*
 * Check if either a link to a master or slave is present
 */
static inline bool dw_mipi_is_dual_mode(struct dw_mipi_dsi *dsi)
{
	return dsi->slave || dsi->master;
}

/*
 * The controller should generate 2 frames before
 * preparing the peripheral.
 */
static void dw_mipi_dsi_wait_for_two_frames(const struct drm_display_mode *mode)
{
	int refresh, two_frames;

	refresh = drm_mode_vrefresh(mode);
	two_frames = DIV_ROUND_UP(MSEC_PER_SEC, refresh) * 2;
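	/* e.g. a hypothetical 60 Hz mode gives DIV_ROUND_UP(1000, 60) * 2 = 34 ms */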
	msleep(two_frames);
}

static inline struct dw_mipi_dsi *host_to_dsi(struct mipi_dsi_host *host)
{
	return container_of(host, struct dw_mipi_dsi, dsi_host);
}

static inline struct dw_mipi_dsi *bridge_to_dsi(struct drm_bridge *bridge)
{
	return container_of(bridge, struct dw_mipi_dsi, bridge);
}

static inline void dsi_write(struct dw_mipi_dsi *dsi, u32 reg, u32 val)
{
	writel(val, dsi->base + reg);
}

static inline u32 dsi_read(struct dw_mipi_dsi *dsi, u32 reg)
{
	return readl(dsi->base + reg);
}

static int dw_mipi_dsi_host_attach(struct mipi_dsi_host *host,
				   struct mipi_dsi_device *device)
{
	struct dw_mipi_dsi *dsi = host_to_dsi(host);
	const struct dw_mipi_dsi_plat_data *pdata = dsi->plat_data;
	struct drm_bridge *bridge;
	struct drm_panel *panel;
	int ret;

	if (device->lanes > dsi->plat_data->max_data_lanes) {
		dev_err(dsi->dev, "too many data lanes (%u) requested\n",
			device->lanes);
		return -EINVAL;
	}

	dsi->lanes = device->lanes;
	dsi->channel = device->channel;
	dsi->format = device->format;
	dsi->mode_flags = device->mode_flags;

	ret = drm_of_find_panel_or_bridge(host->dev->of_node, 1, 0,
					  &panel, &bridge);
	if (ret)
		return ret;

	if (panel) {
		bridge = drm_panel_bridge_add_typed(panel,
						    DRM_MODE_CONNECTOR_DSI);
		if (IS_ERR(bridge))
			return PTR_ERR(bridge);
	}

	dsi->panel_bridge = bridge;

	drm_bridge_add(&dsi->bridge);

	if (pdata->host_ops && pdata->host_ops->attach) {
		ret = pdata->host_ops->attach(pdata->priv_data, device);
		if (ret < 0)
			return ret;
	}

	return 0;
}

static int dw_mipi_dsi_host_detach(struct mipi_dsi_host *host,
				   struct mipi_dsi_device *device)
{
	struct dw_mipi_dsi *dsi = host_to_dsi(host);
	const struct dw_mipi_dsi_plat_data *pdata = dsi->plat_data;
	int ret;

	if (pdata->host_ops && pdata->host_ops->detach) {
		ret = pdata->host_ops->detach(pdata->priv_data, device);
		if (ret < 0)
			return ret;
	}

	drm_of_panel_bridge_remove(host->dev->of_node, 1, 0);

	drm_bridge_remove(&dsi->bridge);

	return 0;
}

static void dw_mipi_message_config(struct dw_mipi_dsi *dsi,
				   const struct mipi_dsi_msg *msg)
{
	bool lpm = msg->flags & MIPI_DSI_MSG_USE_LPM;
	u32 val = 0;

	/*
	 * TODO dw drv improvements
	 * largest packet sizes during hfp or during vsa/vbp/vfp
	 * should be computed according to byte lane, lane number and only
	 * if sending lp cmds in high speed is enabled (PHY_TXREQUESTCLKHS)
	 */
	dsi_write(dsi, DSI_DPI_LP_CMD_TIM, OUTVACT_LPCMD_TIME(16)
		  | INVACT_LPCMD_TIME(4));

	if (msg->flags & MIPI_DSI_MSG_REQ_ACK)
		val |= ACK_RQST_EN;
	if (lpm)
		val |= CMD_MODE_ALL_LP;

	dsi_write(dsi, DSI_CMD_MODE_CFG, val);

	val = dsi_read(dsi, DSI_VID_MODE_CFG);
	if (lpm)
		val |= ENABLE_LOW_POWER_CMD;
	else
		val &= ~ENABLE_LOW_POWER_CMD;
	dsi_write(dsi, DSI_VID_MODE_CFG, val);
}

static int dw_mipi_dsi_gen_pkt_hdr_write(struct dw_mipi_dsi *dsi, u32 hdr_val)
{
	int ret;
	u32 val, mask;

	ret = readl_poll_timeout(dsi->base + DSI_CMD_PKT_STATUS,
				 val, !(val & GEN_CMD_FULL), 1000,
				 CMD_PKT_STATUS_TIMEOUT_US);
	if (ret) {
		dev_err(dsi->dev, "failed to get available command FIFO\n");
		return ret;
	}

	dsi_write(dsi, DSI_GEN_HDR, hdr_val);

	mask = GEN_CMD_EMPTY | GEN_PLD_W_EMPTY;
	ret = readl_poll_timeout(dsi->base + DSI_CMD_PKT_STATUS,
				 val, (val & mask) == mask,
				 1000, CMD_PKT_STATUS_TIMEOUT_US);
	if (ret) {
		dev_err(dsi->dev, "failed to write command FIFO\n");
		return ret;
	}

	return 0;
}

static int dw_mipi_dsi_write(struct dw_mipi_dsi *dsi,
			     const struct mipi_dsi_packet *packet)
{
	const u8 *tx_buf = packet->payload;
	int len = packet->payload_length, pld_data_bytes = sizeof(u32), ret;
	__le32 word;
	u32 val;

	while (len) {
		if (len < pld_data_bytes) {
			word = 0;
			memcpy(&word, tx_buf, len);
			dsi_write(dsi, DSI_GEN_PLD_DATA, le32_to_cpu(word));
			len = 0;
		} else {
			memcpy(&word, tx_buf, pld_data_bytes);
			dsi_write(dsi, DSI_GEN_PLD_DATA, le32_to_cpu(word));
			tx_buf += pld_data_bytes;
			len -= pld_data_bytes;
		}

		ret = readl_poll_timeout(dsi->base + DSI_CMD_PKT_STATUS,
					 val, !(val & GEN_PLD_W_FULL), 1000,
					 CMD_PKT_STATUS_TIMEOUT_US);
		if (ret) {
			dev_err(dsi->dev,
				"failed to get available write payload FIFO\n");
			return ret;
		}
	}

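	/*
	 * Send the packet header: byte 0 carries the data type and virtual
	 * channel, bytes 1-2 the word count (long packets) or parameters
	 * (short packets), as laid out by mipi_dsi_create_packet().
	 */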
	word = 0;
	memcpy(&word, packet->header, sizeof(packet->header));
	return dw_mipi_dsi_gen_pkt_hdr_write(dsi, le32_to_cpu(word));
}

static int dw_mipi_dsi_read(struct dw_mipi_dsi *dsi,
			    const struct mipi_dsi_msg *msg)
{
	int i, j, ret, len = msg->rx_len;
	u8 *buf = msg->rx_buf;
	u32 val;

	/* Wait for the end of the read operation */
	ret = readl_poll_timeout(dsi->base + DSI_CMD_PKT_STATUS,
				 val, !(val & GEN_RD_CMD_BUSY),
				 1000, CMD_PKT_STATUS_TIMEOUT_US);
	if (ret) {
		dev_err(dsi->dev, "Timeout during read operation\n");
		return ret;
	}

	for (i = 0; i < len; i += 4) {
		/* Read fifo must not be empty before all bytes are read */
		ret = readl_poll_timeout(dsi->base + DSI_CMD_PKT_STATUS,
					 val, !(val & GEN_PLD_R_EMPTY),
					 1000, CMD_PKT_STATUS_TIMEOUT_US);
		if (ret) {
			dev_err(dsi->dev, "Read payload FIFO is empty\n");
			return ret;
		}

		val = dsi_read(dsi, DSI_GEN_PLD_DATA);
		for (j = 0; j < 4 && j + i < len; j++)
			buf[i + j] = val >> (8 * j);
	}

	return ret;
}

static ssize_t dw_mipi_dsi_host_transfer(struct mipi_dsi_host *host,
					 const struct mipi_dsi_msg *msg)
{
	struct dw_mipi_dsi *dsi = host_to_dsi(host);
	struct mipi_dsi_packet packet;
	int ret, nb_bytes;

	ret = mipi_dsi_create_packet(&packet, msg);
	if (ret) {
		dev_err(dsi->dev, "failed to create packet: %d\n", ret);
		return ret;
	}

	dw_mipi_message_config(dsi, msg);
	if (dsi->slave)
		dw_mipi_message_config(dsi->slave, msg);

	ret = dw_mipi_dsi_write(dsi, &packet);
	if (ret)
		return ret;
	if (dsi->slave) {
		ret = dw_mipi_dsi_write(dsi->slave, &packet);
		if (ret)
			return ret;
	}

	if (msg->rx_buf && msg->rx_len) {
		ret = dw_mipi_dsi_read(dsi, msg);
		if (ret)
			return ret;
		nb_bytes = msg->rx_len;
	} else {
		nb_bytes = packet.size;
	}

	return nb_bytes;
}

static const struct mipi_dsi_host_ops dw_mipi_dsi_host_ops = {
	.attach = dw_mipi_dsi_host_attach,
	.detach = dw_mipi_dsi_host_detach,
	.transfer = dw_mipi_dsi_host_transfer,
};

static void dw_mipi_dsi_video_mode_config(struct dw_mipi_dsi *dsi)
{
	u32 val;

	/*
	 * TODO dw drv improvements
	 * enabling low power is panel-dependent, we should use the
	 * panel configuration here...
	 */
	val = ENABLE_LOW_POWER;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
		val |= VID_MODE_TYPE_BURST;
	else if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		val |= VID_MODE_TYPE_NON_BURST_SYNC_PULSES;
	else
		val |= VID_MODE_TYPE_NON_BURST_SYNC_EVENTS;

#ifdef CONFIG_DEBUG_FS
	if (dsi->vpg_defs.vpg) {
		val |= VID_MODE_VPG_ENABLE;
		val |= dsi->vpg_defs.vpg_horizontal ?
		       VID_MODE_VPG_HORIZONTAL : 0;
		val |= dsi->vpg_defs.vpg_ber_pattern ? VID_MODE_VPG_MODE : 0;
	}
#endif /* CONFIG_DEBUG_FS */

	dsi_write(dsi, DSI_VID_MODE_CFG, val);
}

static void dw_mipi_dsi_set_mode(struct dw_mipi_dsi *dsi,
				 unsigned long mode_flags)
{
	u32 val;

	dsi_write(dsi, DSI_PWR_UP, RESET);

	if (mode_flags & MIPI_DSI_MODE_VIDEO) {
		dsi_write(dsi, DSI_MODE_CFG, ENABLE_VIDEO_MODE);
		dw_mipi_dsi_video_mode_config(dsi);
	} else {
		dsi_write(dsi, DSI_MODE_CFG, ENABLE_CMD_MODE);
	}

	val = PHY_TXREQUESTCLKHS;
	if (dsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS)
		val |= AUTO_CLKLANE_CTRL;
	dsi_write(dsi, DSI_LPCLK_CTRL, val);

	dsi_write(dsi, DSI_PWR_UP, POWERUP);
}

static void dw_mipi_dsi_disable(struct dw_mipi_dsi *dsi)
{
	dsi_write(dsi, DSI_PWR_UP, RESET);
	dsi_write(dsi, DSI_PHY_RSTZ, PHY_RSTZ);
}

static void dw_mipi_dsi_init(struct dw_mipi_dsi *dsi)
{
	const struct dw_mipi_dsi_phy_ops *phy_ops = dsi->plat_data->phy_ops;
	unsigned int esc_rate; /* in MHz */
	u32 esc_clk_division;
	int ret;

	/*
	 * The maximum permitted escape clock is 20MHz and it is derived from
	 * lanebyteclk, which is running at "lane_mbps / 8".
	 */
	if (phy_ops->get_esc_clk_rate) {
		ret = phy_ops->get_esc_clk_rate(dsi->plat_data->priv_data,
						&esc_rate);
		if (ret)
			DRM_DEBUG_DRIVER("Phy get_esc_clk_rate() failed\n");
	} else {
		esc_rate = 20; /* Default to 20MHz */
	}

	/*
	 * We want:
	 *     (lane_mbps >> 3) / esc_clk_division < esc_rate
	 * which is:
	 *     (lane_mbps >> 3) / esc_rate < esc_clk_division
	 */
	esc_clk_division = (dsi->lane_mbps >> 3) / esc_rate + 1;
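	/*
	 * Worked example with hypothetical numbers: for lane_mbps = 500 and
	 * esc_rate = 20 MHz, lanebyteclk is 62.5 MHz and esc_clk_division
	 * becomes 62 / 20 + 1 = 4, giving an escape clock of ~15.6 MHz.
	 */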

	dsi_write(dsi, DSI_PWR_UP, RESET);

	/*
	 * TODO dw drv improvements
	 * timeout clock division should be computed with the
	 * high speed transmission counter timeout and byte lane...
	 */
	dsi_write(dsi, DSI_CLKMGR_CFG, TO_CLK_DIVISION(10) |
		  TX_ESC_CLK_DIVISION(esc_clk_division));
}

static void dw_mipi_dsi_dpi_config(struct dw_mipi_dsi *dsi,
				   const struct drm_display_mode *mode)
{
	u32 val = 0, color = 0;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		color = DPI_COLOR_CODING_24BIT;
		break;
	case MIPI_DSI_FMT_RGB666:
		color = DPI_COLOR_CODING_18BIT_2 | LOOSELY18_EN;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		color = DPI_COLOR_CODING_18BIT_1;
		break;
	case MIPI_DSI_FMT_RGB565:
		color = DPI_COLOR_CODING_16BIT_1;
		break;
	}

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		val |= VSYNC_ACTIVE_LOW;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		val |= HSYNC_ACTIVE_LOW;

	dsi_write(dsi, DSI_DPI_VCID, DPI_VCID(dsi->channel));
	dsi_write(dsi, DSI_DPI_COLOR_CODING, color);
	dsi_write(dsi, DSI_DPI_CFG_POL, val);
}

static void dw_mipi_dsi_packet_handler_config(struct dw_mipi_dsi *dsi)
{
	dsi_write(dsi, DSI_PCKHDL_CFG, CRC_RX_EN | ECC_RX_EN | BTA_EN);
}

static void dw_mipi_dsi_video_packet_config(struct dw_mipi_dsi *dsi,
					    const struct drm_display_mode *mode)
{
	/*
	 * TODO dw drv improvements
	 * only burst mode is supported here. For non-burst video modes,
	 * we should compute DSI_VID_PKT_SIZE, DSI_VCCR.NUMC &
	 * DSI_VNPCR.NPSIZE... especially because this driver supports
	 * non-burst video modes, see dw_mipi_dsi_video_mode_config()...
	 */

	dsi_write(dsi, DSI_VID_PKT_SIZE,
		       dw_mipi_is_dual_mode(dsi) ?
				VID_PKT_SIZE(mode->hdisplay / 2) :
				VID_PKT_SIZE(mode->hdisplay));
}

static void dw_mipi_dsi_command_mode_config(struct dw_mipi_dsi *dsi)
{
	/*
	 * TODO dw drv improvements
	 * compute high speed transmission counter timeout according
	 * to the timeout clock division (TO_CLK_DIVISION) and byte lane...
	 */
	dsi_write(dsi, DSI_TO_CNT_CFG, HSTX_TO_CNT(1000) | LPRX_TO_CNT(1000));
	/*
	 * TODO dw drv improvements
	 * the Bus-Turn-Around Timeout Counter should be computed
	 * according to byte lane...
	 */
	dsi_write(dsi, DSI_BTA_TO_CNT, 0xd00);
	dsi_write(dsi, DSI_MODE_CFG, ENABLE_CMD_MODE);
}

/* Get lane byte clock cycles. */
static u32 dw_mipi_dsi_get_hcomponent_lbcc(struct dw_mipi_dsi *dsi,
					   const struct drm_display_mode *mode,
					   u32 hcomponent)
{
	u32 frac, lbcc;

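	/*
	 * Convert a horizontal timing component from pixels to lane byte
	 * clock cycles: lane_mbps is in Mbit/s (so lane_mbps / 8 Mbyte/s per
	 * lane) while mode->clock is the pixel clock in kHz, hence the extra
	 * MSEC_PER_SEC factor; the division below rounds up.
	 */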
	lbcc = hcomponent * dsi->lane_mbps * MSEC_PER_SEC / 8;

	frac = lbcc % mode->clock;
	lbcc = lbcc / mode->clock;
	if (frac)
		lbcc++;

	return lbcc;
}

static void dw_mipi_dsi_line_timer_config(struct dw_mipi_dsi *dsi,
					  const struct drm_display_mode *mode)
{
	u32 htotal, hsa, hbp, lbcc;

	htotal = mode->htotal;
	hsa = mode->hsync_end - mode->hsync_start;
	hbp = mode->htotal - mode->hsync_end;

	/*
	 * TODO dw drv improvements
	 * computations below may be improved...
	 */
	lbcc = dw_mipi_dsi_get_hcomponent_lbcc(dsi, mode, htotal);
	dsi_write(dsi, DSI_VID_HLINE_TIME, lbcc);

	lbcc = dw_mipi_dsi_get_hcomponent_lbcc(dsi, mode, hsa);
	dsi_write(dsi, DSI_VID_HSA_TIME, lbcc);

	lbcc = dw_mipi_dsi_get_hcomponent_lbcc(dsi, mode, hbp);
	dsi_write(dsi, DSI_VID_HBP_TIME, lbcc);
}

static void dw_mipi_dsi_vertical_timing_config(struct dw_mipi_dsi *dsi,
					const struct drm_display_mode *mode)
{
	u32 vactive, vsa, vfp, vbp;

	vactive = mode->vdisplay;
	vsa = mode->vsync_end - mode->vsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;

	dsi_write(dsi, DSI_VID_VACTIVE_LINES, vactive);
	dsi_write(dsi, DSI_VID_VSA_LINES, vsa);
	dsi_write(dsi, DSI_VID_VFP_LINES, vfp);
	dsi_write(dsi, DSI_VID_VBP_LINES, vbp);
}

static void dw_mipi_dsi_dphy_timing_config(struct dw_mipi_dsi *dsi)
{
	const struct dw_mipi_dsi_phy_ops *phy_ops = dsi->plat_data->phy_ops;
	struct dw_mipi_dsi_dphy_timing timing;
	u32 hw_version;
	int ret;

	ret = phy_ops->get_timing(dsi->plat_data->priv_data,
				  dsi->lane_mbps, &timing);
	if (ret)
		DRM_DEV_ERROR(dsi->dev, "Retrieving phy timings failed\n");

	/*
	 * TODO dw drv improvements
	 * data & clock lane timers should be computed according to panel
	 * blankings and to the automatic clock lane control mode...
	 * note: DSI_PHY_TMR_CFG.MAX_RD_TIME should be in line with
	 * DSI_CMD_MODE_CFG.MAX_RD_PKT_SIZE_LP (see CMD_MODE_ALL_LP)
	 */

	hw_version = dsi_read(dsi, DSI_VERSION) & VERSION;

	if (hw_version >= HWVER_131) {
		dsi_write(dsi, DSI_PHY_TMR_CFG,
			  PHY_HS2LP_TIME_V131(timing.data_hs2lp) |
			  PHY_LP2HS_TIME_V131(timing.data_lp2hs));
		dsi_write(dsi, DSI_PHY_TMR_RD_CFG, MAX_RD_TIME_V131(10000));
	} else {
		dsi_write(dsi, DSI_PHY_TMR_CFG,
			  PHY_HS2LP_TIME(timing.data_hs2lp) |
			  PHY_LP2HS_TIME(timing.data_lp2hs) |
			  MAX_RD_TIME(10000));
	}

	dsi_write(dsi, DSI_PHY_TMR_LPCLK_CFG,
		  PHY_CLKHS2LP_TIME(timing.clk_hs2lp) |
		  PHY_CLKLP2HS_TIME(timing.clk_lp2hs));
}

static void dw_mipi_dsi_dphy_interface_config(struct dw_mipi_dsi *dsi)
{
	/*
	 * TODO dw drv improvements
	 * stop wait time should be the maximum between host dsi
	 * and panel stop wait times
	 */
	dsi_write(dsi, DSI_PHY_IF_CFG, PHY_STOP_WAIT_TIME(0x20) |
		  N_LANES(dsi->lanes));
}

static void dw_mipi_dsi_dphy_init(struct dw_mipi_dsi *dsi)
{
	/* Clear PHY state */
	dsi_write(dsi, DSI_PHY_RSTZ, PHY_DISFORCEPLL | PHY_DISABLECLK
		  | PHY_RSTZ | PHY_SHUTDOWNZ);
	dsi_write(dsi, DSI_PHY_TST_CTRL0, PHY_UNTESTCLR);
	dsi_write(dsi, DSI_PHY_TST_CTRL0, PHY_TESTCLR);
	dsi_write(dsi, DSI_PHY_TST_CTRL0, PHY_UNTESTCLR);
}

static void dw_mipi_dsi_dphy_enable(struct dw_mipi_dsi *dsi)
{
	u32 val;
	int ret;

	dsi_write(dsi, DSI_PHY_RSTZ, PHY_ENFORCEPLL | PHY_ENABLECLK |
		  PHY_UNRSTZ | PHY_UNSHUTDOWNZ);

	ret = readl_poll_timeout(dsi->base + DSI_PHY_STATUS, val,
				 val & PHY_LOCK, 1000, PHY_STATUS_TIMEOUT_US);
	if (ret)
		DRM_DEBUG_DRIVER("failed to wait phy lock state\n");

	ret = readl_poll_timeout(dsi->base + DSI_PHY_STATUS,
				 val, val & PHY_STOP_STATE_CLK_LANE, 1000,
				 PHY_STATUS_TIMEOUT_US);
	if (ret)
		DRM_DEBUG_DRIVER("failed to wait phy clk lane stop state\n");
}

static void dw_mipi_dsi_clear_err(struct dw_mipi_dsi *dsi)
{
	dsi_read(dsi, DSI_INT_ST0);
	dsi_read(dsi, DSI_INT_ST1);
	dsi_write(dsi, DSI_INT_MSK0, 0);
	dsi_write(dsi, DSI_INT_MSK1, 0);
}

static void dw_mipi_dsi_bridge_post_disable(struct drm_bridge *bridge)
{
	struct dw_mipi_dsi *dsi = bridge_to_dsi(bridge);
	const struct dw_mipi_dsi_phy_ops *phy_ops = dsi->plat_data->phy_ops;

	/*
	 * Switch to command mode before panel-bridge post_disable &
	 * panel unprepare.
	 * Note: panel-bridge disable & panel disable have already been
	 * called by the drm framework.
	 */
	dw_mipi_dsi_set_mode(dsi, 0);

	/*
	 * TODO This is the only way found to call panel-bridge post_disable &
	 * panel unprepare before the dsi "final" disable...
	 * This needs to be fixed in the drm_bridge framework and the API
	 * needs to be updated to manage our own call chains...
	 */
	if (dsi->panel_bridge->funcs->post_disable)
		dsi->panel_bridge->funcs->post_disable(dsi->panel_bridge);

	if (phy_ops->power_off)
		phy_ops->power_off(dsi->plat_data->priv_data);

	if (dsi->slave) {
		dw_mipi_dsi_disable(dsi->slave);
		clk_disable_unprepare(dsi->slave->pclk);
		pm_runtime_put(dsi->slave->dev);
	}
	dw_mipi_dsi_disable(dsi);

	clk_disable_unprepare(dsi->pclk);
	pm_runtime_put(dsi->dev);
}

static unsigned int dw_mipi_dsi_get_lanes(struct dw_mipi_dsi *dsi)
{
	/* this instance is the slave, so add the master's lanes */
	if (dsi->master)
		return dsi->master->lanes + dsi->lanes;

	/* this instance is the master, so add the slave's lanes */
	if (dsi->slave)
		return dsi->lanes + dsi->slave->lanes;

	/* single-dsi, so no other instance to consider */
	return dsi->lanes;
}

static void dw_mipi_dsi_mode_set(struct dw_mipi_dsi *dsi,
				 const struct drm_display_mode *adjusted_mode)
{
	const struct dw_mipi_dsi_phy_ops *phy_ops = dsi->plat_data->phy_ops;
	void *priv_data = dsi->plat_data->priv_data;
	int ret;
	u32 lanes = dw_mipi_dsi_get_lanes(dsi);

	clk_prepare_enable(dsi->pclk);

	ret = phy_ops->get_lane_mbps(priv_data, adjusted_mode, dsi->mode_flags,
				     lanes, dsi->format, &dsi->lane_mbps);
	if (ret)
		DRM_DEBUG_DRIVER("Phy get_lane_mbps() failed\n");

	pm_runtime_get_sync(dsi->dev);
	dw_mipi_dsi_init(dsi);
	dw_mipi_dsi_dpi_config(dsi, adjusted_mode);
	dw_mipi_dsi_packet_handler_config(dsi);
	dw_mipi_dsi_video_mode_config(dsi);
	dw_mipi_dsi_video_packet_config(dsi, adjusted_mode);
	dw_mipi_dsi_command_mode_config(dsi);
	dw_mipi_dsi_line_timer_config(dsi, adjusted_mode);
	dw_mipi_dsi_vertical_timing_config(dsi, adjusted_mode);

	dw_mipi_dsi_dphy_init(dsi);
	dw_mipi_dsi_dphy_timing_config(dsi);
	dw_mipi_dsi_dphy_interface_config(dsi);

	dw_mipi_dsi_clear_err(dsi);

	ret = phy_ops->init(priv_data);
	if (ret)
		DRM_DEBUG_DRIVER("Phy init() failed\n");

	dw_mipi_dsi_dphy_enable(dsi);

	dw_mipi_dsi_wait_for_two_frames(adjusted_mode);

	/* Switch to cmd mode for panel-bridge pre_enable & panel prepare */
	dw_mipi_dsi_set_mode(dsi, 0);

	if (phy_ops->power_on)
		phy_ops->power_on(dsi->plat_data->priv_data);
}

static void dw_mipi_dsi_bridge_mode_set(struct drm_bridge *bridge,
					const struct drm_display_mode *mode,
					const struct drm_display_mode *adjusted_mode)
{
	struct dw_mipi_dsi *dsi = bridge_to_dsi(bridge);

	dw_mipi_dsi_mode_set(dsi, adjusted_mode);
	if (dsi->slave)
		dw_mipi_dsi_mode_set(dsi->slave, adjusted_mode);
}

static void dw_mipi_dsi_bridge_enable(struct drm_bridge *bridge)
{
	struct dw_mipi_dsi *dsi = bridge_to_dsi(bridge);

	/* Switch to video mode for panel-bridge enable & panel enable */
	dw_mipi_dsi_set_mode(dsi, MIPI_DSI_MODE_VIDEO);
	if (dsi->slave)
		dw_mipi_dsi_set_mode(dsi->slave, MIPI_DSI_MODE_VIDEO);
}

static enum drm_mode_status
dw_mipi_dsi_bridge_mode_valid(struct drm_bridge *bridge,
			      const struct drm_display_info *info,
			      const struct drm_display_mode *mode)
{
	struct dw_mipi_dsi *dsi = bridge_to_dsi(bridge);
	const struct dw_mipi_dsi_plat_data *pdata = dsi->plat_data;
	enum drm_mode_status mode_status = MODE_OK;

	if (pdata->mode_valid)
		mode_status = pdata->mode_valid(pdata->priv_data, mode);

	return mode_status;
}

static int dw_mipi_dsi_bridge_attach(struct drm_bridge *bridge,
				     enum drm_bridge_attach_flags flags)
{
	struct dw_mipi_dsi *dsi = bridge_to_dsi(bridge);

	if (!bridge->encoder) {
		DRM_ERROR("Parent encoder object not found\n");
		return -ENODEV;
	}

	/* Set the encoder type as the caller does not know it */
	bridge->encoder->encoder_type = DRM_MODE_ENCODER_DSI;

	/* Attach the panel-bridge to the dsi bridge */
	return drm_bridge_attach(bridge->encoder, dsi->panel_bridge, bridge,
				 flags);
}

static const struct drm_bridge_funcs dw_mipi_dsi_bridge_funcs = {
	.mode_set     = dw_mipi_dsi_bridge_mode_set,
	.enable	      = dw_mipi_dsi_bridge_enable,
	.post_disable = dw_mipi_dsi_bridge_post_disable,
	.mode_valid   = dw_mipi_dsi_bridge_mode_valid,
	.attach	      = dw_mipi_dsi_bridge_attach,
};

#ifdef CONFIG_DEBUG_FS

static int dw_mipi_dsi_debugfs_write(void *data, u64 val)
{
	struct debugfs_entries *vpg = data;
	struct dw_mipi_dsi *dsi;
	u32 mode_cfg;

	if (!vpg)
		return -ENODEV;

	dsi = vpg->dsi;

	*vpg->reg = (bool)val;

	mode_cfg = dsi_read(dsi, DSI_VID_MODE_CFG);

	if (*vpg->reg)
		mode_cfg |= vpg->mask;
	else
		mode_cfg &= ~vpg->mask;

	dsi_write(dsi, DSI_VID_MODE_CFG, mode_cfg);

	return 0;
}

static int dw_mipi_dsi_debugfs_show(void *data, u64 *val)
{
	struct debugfs_entries *vpg = data;

	if (!vpg)
		return -ENODEV;

	*val = *vpg->reg;

	return 0;
}

DEFINE_DEBUGFS_ATTRIBUTE(fops_x32, dw_mipi_dsi_debugfs_show,
			 dw_mipi_dsi_debugfs_write, "%llu\n");

static void debugfs_create_files(void *data)
{
	struct dw_mipi_dsi *dsi = data;
	struct debugfs_entries debugfs[] = {
		REGISTER(vpg, VID_MODE_VPG_ENABLE, dsi),
		REGISTER(vpg_horizontal, VID_MODE_VPG_HORIZONTAL, dsi),
		REGISTER(vpg_ber_pattern, VID_MODE_VPG_MODE, dsi),
	};
	int i;

	dsi->debugfs_vpg = kmemdup(debugfs, sizeof(debugfs), GFP_KERNEL);
	if (!dsi->debugfs_vpg)
		return;

	for (i = 0; i < ARRAY_SIZE(debugfs); i++)
		debugfs_create_file(dsi->debugfs_vpg[i].name, 0644,
				    dsi->debugfs, &dsi->debugfs_vpg[i],
				    &fops_x32);
}

static void dw_mipi_dsi_debugfs_init(struct dw_mipi_dsi *dsi)
{
	dsi->debugfs = debugfs_create_dir(dev_name(dsi->dev), NULL);
	if (IS_ERR(dsi->debugfs)) {
		dev_err(dsi->dev, "failed to create debugfs root\n");
		return;
	}

	debugfs_create_files(dsi);
}

static void dw_mipi_dsi_debugfs_remove(struct dw_mipi_dsi *dsi)
{
	debugfs_remove_recursive(dsi->debugfs);
	kfree(dsi->debugfs_vpg);
}

#else

static void dw_mipi_dsi_debugfs_init(struct dw_mipi_dsi *dsi) { }
static void dw_mipi_dsi_debugfs_remove(struct dw_mipi_dsi *dsi) { }

#endif /* CONFIG_DEBUG_FS */

static struct dw_mipi_dsi *
__dw_mipi_dsi_probe(struct platform_device *pdev,
		    const struct dw_mipi_dsi_plat_data *plat_data)
{
	struct device *dev = &pdev->dev;
	struct reset_control *apb_rst;
	struct dw_mipi_dsi *dsi;
	int ret;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return ERR_PTR(-ENOMEM);

	dsi->dev = dev;
	dsi->plat_data = plat_data;

	if (!plat_data->phy_ops->init || !plat_data->phy_ops->get_lane_mbps ||
	    !plat_data->phy_ops->get_timing) {
		DRM_ERROR("Phy not properly configured\n");
		return ERR_PTR(-ENODEV);
	}

	if (!plat_data->base) {
		dsi->base = devm_platform_ioremap_resource(pdev, 0);
		if (IS_ERR(dsi->base))
			return ERR_PTR(-ENODEV);

	} else {
		dsi->base = plat_data->base;
	}

	dsi->pclk = devm_clk_get(dev, "pclk");
	if (IS_ERR(dsi->pclk)) {
		ret = PTR_ERR(dsi->pclk);
		dev_err(dev, "Unable to get pclk: %d\n", ret);
		return ERR_PTR(ret);
	}

	/*
	 * Note that the reset was not defined in the initial device tree, so
	 * we have to be prepared for it not being found.
	 */
	apb_rst = devm_reset_control_get_optional_exclusive(dev, "apb");
	if (IS_ERR(apb_rst)) {
		ret = PTR_ERR(apb_rst);

		if (ret != -EPROBE_DEFER)
			dev_err(dev, "Unable to get reset control: %d\n", ret);

		return ERR_PTR(ret);
	}

	if (apb_rst) {
		ret = clk_prepare_enable(dsi->pclk);
		if (ret) {
			dev_err(dev, "%s: Failed to enable pclk\n", __func__);
			return ERR_PTR(ret);
		}

		reset_control_assert(apb_rst);
		usleep_range(10, 20);
		reset_control_deassert(apb_rst);

		clk_disable_unprepare(dsi->pclk);
	}

	dw_mipi_dsi_debugfs_init(dsi);
	pm_runtime_enable(dev);

	dsi->dsi_host.ops = &dw_mipi_dsi_host_ops;
	dsi->dsi_host.dev = dev;
	ret = mipi_dsi_host_register(&dsi->dsi_host);
	if (ret) {
		dev_err(dev, "Failed to register MIPI host: %d\n", ret);
		dw_mipi_dsi_debugfs_remove(dsi);
		return ERR_PTR(ret);
	}

	dsi->bridge.driver_private = dsi;
	dsi->bridge.funcs = &dw_mipi_dsi_bridge_funcs;
#ifdef CONFIG_OF
	dsi->bridge.of_node = pdev->dev.of_node;
#endif

	return dsi;
}

static void __dw_mipi_dsi_remove(struct dw_mipi_dsi *dsi)
{
	mipi_dsi_host_unregister(&dsi->dsi_host);

	pm_runtime_disable(dsi->dev);
	dw_mipi_dsi_debugfs_remove(dsi);
}

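/*
 * Pair a master and a slave controller for dual-DSI operation. A glue driver
 * that supports dual-DSI is expected to call this on the master instance
 * (typically before binding), so that settings already negotiated through the
 * master are mirrored to the slave; see dw_mipi_dsi_get_lanes() and
 * dw_mipi_dsi_mode_set() for how both instances are then driven together.
 */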
void dw_mipi_dsi_set_slave(struct dw_mipi_dsi *dsi, struct dw_mipi_dsi *slave)
{
	/* introduce controllers to each other */
	dsi->slave = slave;
	dsi->slave->master = dsi;

	/* migrate settings for already attached displays */
	dsi->slave->lanes = dsi->lanes;
	dsi->slave->channel = dsi->channel;
	dsi->slave->format = dsi->format;
	dsi->slave->mode_flags = dsi->mode_flags;
}
EXPORT_SYMBOL_GPL(dw_mipi_dsi_set_slave);

/*
 * Probe/remove API, used from platforms based on the DRM bridge API.
 */
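/*
 * A minimal usage sketch from a hypothetical glue driver (the foo_* names are
 * illustrative; only the dw_mipi_dsi_* calls and dw_mipi_dsi_plat_data fields
 * are part of this API):
 *
 *	static int foo_dsi_probe(struct platform_device *pdev)
 *	{
 *		struct foo_dsi *ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx),
 *						   GFP_KERNEL);
 *
 *		if (!ctx)
 *			return -ENOMEM;
 *
 *		ctx->plat_data.max_data_lanes = 4;
 *		ctx->plat_data.phy_ops = &foo_phy_ops;
 *		ctx->plat_data.priv_data = ctx;
 *
 *		ctx->dmd = dw_mipi_dsi_probe(pdev, &ctx->plat_data);
 *		if (IS_ERR(ctx->dmd))
 *			return PTR_ERR(ctx->dmd);
 *
 *		platform_set_drvdata(pdev, ctx);
 *		return 0;
 *	}
 */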
struct dw_mipi_dsi *
dw_mipi_dsi_probe(struct platform_device *pdev,
		  const struct dw_mipi_dsi_plat_data *plat_data)
{
	return __dw_mipi_dsi_probe(pdev, plat_data);
}
EXPORT_SYMBOL_GPL(dw_mipi_dsi_probe);

void dw_mipi_dsi_remove(struct dw_mipi_dsi *dsi)
{
	__dw_mipi_dsi_remove(dsi);
}
EXPORT_SYMBOL_GPL(dw_mipi_dsi_remove);

/*
 * Bind/unbind API, used from platforms based on the component framework.
 */
int dw_mipi_dsi_bind(struct dw_mipi_dsi *dsi, struct drm_encoder *encoder)
{
	int ret;

	ret = drm_bridge_attach(encoder, &dsi->bridge, NULL, 0);
	if (ret) {
		DRM_ERROR("Failed to initialize bridge with drm\n");
		return ret;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(dw_mipi_dsi_bind);

void dw_mipi_dsi_unbind(struct dw_mipi_dsi *dsi)
{
}
EXPORT_SYMBOL_GPL(dw_mipi_dsi_unbind);

MODULE_AUTHOR("Chris Zhong <zyw@rock-chips.com>");
MODULE_AUTHOR("Philippe Cornu <philippe.cornu@st.com>");
MODULE_DESCRIPTION("DW MIPI DSI host controller driver");
MODULE_LICENSE("GPL");
MODULE_ALIAS("platform:dw-mipi-dsi");