xref: /openbmc/linux/drivers/gpu/drm/sprd/sprd_dsi.c (revision b7b3c35e)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2020 Unisoc Inc.
4  */
5 
6 #include <linux/component.h>
7 #include <linux/module.h>
8 #include <linux/of_address.h>
9 #include <linux/of_device.h>
10 #include <linux/of_irq.h>
11 #include <linux/of_graph.h>
12 #include <video/mipi_display.h>
13 
14 #include <drm/drm_atomic_helper.h>
15 #include <drm/drm_bridge.h>
16 #include <drm/drm_crtc_helper.h>
17 #include <drm/drm_of.h>
18 #include <drm/drm_probe_helper.h>
19 
20 #include "sprd_drm.h"
21 #include "sprd_dpu.h"
22 #include "sprd_dsi.h"
23 
24 #define SOFT_RESET 0x04
25 #define MASK_PROTOCOL_INT 0x0C
26 #define MASK_INTERNAL_INT 0x14
27 #define DSI_MODE_CFG 0x18
28 
29 #define VIRTUAL_CHANNEL_ID 0x1C
30 #define GEN_RX_VCID GENMASK(1, 0)
31 #define VIDEO_PKT_VCID GENMASK(3, 2)
32 
33 #define DPI_VIDEO_FORMAT 0x20
34 #define DPI_VIDEO_MODE_FORMAT GENMASK(5, 0)
35 #define LOOSELY18_EN BIT(6)
36 
37 #define VIDEO_PKT_CONFIG 0x24
38 #define VIDEO_PKT_SIZE GENMASK(15, 0)
39 #define VIDEO_LINE_CHUNK_NUM GENMASK(31, 16)
40 
41 #define VIDEO_LINE_HBLK_TIME 0x28
42 #define VIDEO_LINE_HBP_TIME GENMASK(15, 0)
43 #define VIDEO_LINE_HSA_TIME GENMASK(31, 16)
44 
45 #define VIDEO_LINE_TIME 0x2C
46 
47 #define VIDEO_VBLK_LINES 0x30
48 #define VFP_LINES GENMASK(9, 0)
49 #define VBP_LINES GENMASK(19, 10)
50 #define VSA_LINES GENMASK(29, 20)
51 
52 #define VIDEO_VACTIVE_LINES 0x34
53 
54 #define VID_MODE_CFG 0x38
55 #define VID_MODE_TYPE GENMASK(1, 0)
56 #define LP_VSA_EN BIT(8)
57 #define LP_VBP_EN BIT(9)
58 #define LP_VFP_EN BIT(10)
59 #define LP_VACT_EN BIT(11)
60 #define LP_HBP_EN BIT(12)
61 #define LP_HFP_EN BIT(13)
62 #define FRAME_BTA_ACK_EN BIT(14)
63 
64 #define TIMEOUT_CNT_CLK_CONFIG 0x40
65 #define HTX_TO_CONFIG 0x44
66 #define LRX_H_TO_CONFIG 0x48
67 
68 #define TX_ESC_CLK_CONFIG 0x5C
69 
70 #define CMD_MODE_CFG 0x68
71 #define TEAR_FX_EN BIT(0)
72 
73 #define GEN_HDR 0x6C
74 #define GEN_DT GENMASK(5, 0)
75 #define GEN_VC GENMASK(7, 6)
76 
77 #define GEN_PLD_DATA 0x70
78 
79 #define PHY_CLK_LANE_LP_CTRL 0x74
80 #define PHY_CLKLANE_TX_REQ_HS BIT(0)
81 #define AUTO_CLKLANE_CTRL_EN BIT(1)
82 
83 #define PHY_INTERFACE_CTRL 0x78
84 #define RF_PHY_SHUTDOWN BIT(0)
85 #define RF_PHY_RESET_N BIT(1)
86 #define RF_PHY_CLK_EN BIT(2)
87 
88 #define CMD_MODE_STATUS 0x98
89 #define GEN_CMD_RDATA_FIFO_EMPTY BIT(1)
90 #define GEN_CMD_WDATA_FIFO_EMPTY BIT(3)
91 #define GEN_CMD_CMD_FIFO_EMPTY BIT(5)
92 #define GEN_CMD_RDCMD_DONE BIT(7)
93 
94 #define PHY_STATUS 0x9C
95 #define PHY_LOCK BIT(1)
96 
97 #define PHY_MIN_STOP_TIME 0xA0
98 #define PHY_LANE_NUM_CONFIG 0xA4
99 
100 #define PHY_CLKLANE_TIME_CONFIG 0xA8
101 #define PHY_CLKLANE_LP_TO_HS_TIME GENMASK(15, 0)
102 #define PHY_CLKLANE_HS_TO_LP_TIME GENMASK(31, 16)
103 
104 #define PHY_DATALANE_TIME_CONFIG 0xAC
105 #define PHY_DATALANE_LP_TO_HS_TIME GENMASK(15, 0)
106 #define PHY_DATALANE_HS_TO_LP_TIME GENMASK(31, 16)
107 
108 #define MAX_READ_TIME 0xB0
109 
110 #define RX_PKT_CHECK_CONFIG 0xB4
111 #define RX_PKT_ECC_EN BIT(0)
112 #define RX_PKT_CRC_EN BIT(1)
113 
114 #define TA_EN 0xB8
115 
116 #define EOTP_EN 0xBC
117 #define TX_EOTP_EN BIT(0)
118 #define RX_EOTP_EN BIT(1)
119 
120 #define VIDEO_NULLPKT_SIZE 0xC0
121 #define DCS_WM_PKT_SIZE 0xC4
122 
123 #define VIDEO_SIG_DELAY_CONFIG 0xD0
124 #define VIDEO_SIG_DELAY GENMASK(23, 0)
125 
126 #define PHY_TST_CTRL0 0xF0
127 #define PHY_TESTCLR BIT(0)
128 #define PHY_TESTCLK BIT(1)
129 
130 #define PHY_TST_CTRL1 0xF4
131 #define PHY_TESTDIN GENMASK(7, 0)
132 #define PHY_TESTDOUT GENMASK(15, 8)
133 #define PHY_TESTEN BIT(16)
134 
135 #define host_to_dsi(host) \
136 	container_of(host, struct sprd_dsi, host)
137 
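/*
 * Register access helpers: dsi_reg_rd() returns a masked field shifted down
 * by @shift, dsi_reg_wr() does a read-modify-write with @val shifted up by
 * @shift and masked, and dsi_reg_up() updates only the bits in @mask and
 * expects @val to be pre-shifted. For example,
 *
 *	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VBP_LINES, 10, vm->vback_porch);
 *
 * writes the back porch line count into bits 19:10 of VIDEO_VBLK_LINES.
 */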
138 static inline u32
139 dsi_reg_rd(struct dsi_context *ctx, u32 offset, u32 mask,
140 	   u32 shift)
141 {
142 	return (readl(ctx->base + offset) & mask) >> shift;
143 }
144 
145 static inline void
146 dsi_reg_wr(struct dsi_context *ctx, u32 offset, u32 mask,
147 	   u32 shift, u32 val)
148 {
149 	u32 ret;
150 
151 	ret = readl(ctx->base + offset);
152 	ret &= ~mask;
153 	ret |= (val << shift) & mask;
154 	writel(ret, ctx->base + offset);
155 }
156 
157 static inline void
158 dsi_reg_up(struct dsi_context *ctx, u32 offset, u32 mask,
159 	   u32 val)
160 {
161 	u32 ret = readl(ctx->base + offset);
162 
163 	writel((ret & ~mask) | (val & mask), ctx->base + offset);
164 }
165 
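/*
 * PHY test interface accessors, exposed to the D-PHY code as a regmap.
 * A write first latches the register address (TESTEN high, address on
 * TESTDIN, TESTCLK pulsed), then clocks the value in with TESTEN low.
 */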
166 static int regmap_tst_io_write(void *context, u32 reg, u32 val)
167 {
168 	struct sprd_dsi *dsi = context;
169 	struct dsi_context *ctx = &dsi->ctx;
170 
171 	if (val > 0xff || reg > 0xff)
172 		return -EINVAL;
173 
174 	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, val);
175 
176 	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
177 	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
178 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
179 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
180 	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);
181 	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, val);
182 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
183 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
184 
185 	return 0;
186 }
187 
188 static int regmap_tst_io_read(void *context, u32 reg, u32 *val)
189 {
190 	struct sprd_dsi *dsi = context;
191 	struct dsi_context *ctx = &dsi->ctx;
192 	int ret;
193 
194 	if (reg > 0xff)
195 		return -EINVAL;
196 
197 	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
198 	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
199 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
200 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
201 	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);
202 
203 	udelay(1);
204 
205 	ret = dsi_reg_rd(ctx, PHY_TST_CTRL1, PHY_TESTDOUT, 8);
206 	if (ret < 0)
207 		return ret;
208 
209 	*val = ret;
210 
211 	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, *val);
212 	return 0;
213 }
214 
215 static struct regmap_bus regmap_tst_io = {
216 	.reg_write = regmap_tst_io_write,
217 	.reg_read = regmap_tst_io_read,
218 };
219 
220 static const struct regmap_config byte_config = {
221 	.reg_bits = 8,
222 	.val_bits = 8,
223 };
224 
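/* Poll the PHY lock flag; 50000 iterations of 3 us gives a ~150 ms timeout. */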
225 static int dphy_wait_pll_locked(struct dsi_context *ctx)
226 {
227 	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
228 	int i;
229 
230 	for (i = 0; i < 50000; i++) {
231 		if (dsi_reg_rd(ctx, PHY_STATUS, PHY_LOCK, 1))
232 			return 0;
233 		udelay(3);
234 	}
235 
236 	drm_err(dsi->drm, "dphy pll can not be locked\n");
237 	return -ETIMEDOUT;
238 }
239 
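/*
 * The helpers below poll CMD_MODE_STATUS until the relevant FIFO-empty or
 * read-done flag is set, or time out with -ETIMEDOUT.
 */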
240 static int dsi_wait_tx_payload_fifo_empty(struct dsi_context *ctx)
241 {
242 	int i;
243 
244 	for (i = 0; i < 5000; i++) {
245 		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_WDATA_FIFO_EMPTY, 3))
246 			return 0;
247 		udelay(1);
248 	}
249 
250 	return -ETIMEDOUT;
251 }
252 
253 static int dsi_wait_tx_cmd_fifo_empty(struct dsi_context *ctx)
254 {
255 	int i;
256 
257 	for (i = 0; i < 5000; i++) {
258 		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5))
259 			return 0;
260 		udelay(1);
261 	}
262 
263 	return -ETIMEDOUT;
264 }
265 
266 static int dsi_wait_rd_resp_completed(struct dsi_context *ctx)
267 {
268 	int i;
269 
270 	for (i = 0; i < 10000; i++) {
271 		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDCMD_DONE, 7))
272 			return 0;
273 		udelay(10);
274 	}
275 
276 	return -ETIMEDOUT;
277 }
278 
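/*
 * Bytes per pixel scaled by 100, so fractional values (e.g. 2.25 for the
 * 18-bit formats) survive the integer math.
 */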
279 static u16 calc_bytes_per_pixel_x100(int coding)
280 {
281 	u16 bpp_x100;
282 
283 	switch (coding) {
284 	case COLOR_CODE_16BIT_CONFIG1:
285 	case COLOR_CODE_16BIT_CONFIG2:
286 	case COLOR_CODE_16BIT_CONFIG3:
287 		bpp_x100 = 200;
288 		break;
289 	case COLOR_CODE_18BIT_CONFIG1:
290 	case COLOR_CODE_18BIT_CONFIG2:
291 		bpp_x100 = 225;
292 		break;
293 	case COLOR_CODE_24BIT:
294 		bpp_x100 = 300;
295 		break;
296 	case COLOR_CODE_COMPRESSTION:
297 		bpp_x100 = 100;
298 		break;
299 	case COLOR_CODE_20BIT_YCC422_LOOSELY:
300 		bpp_x100 = 250;
301 		break;
302 	case COLOR_CODE_24BIT_YCC422:
303 		bpp_x100 = 300;
304 		break;
305 	case COLOR_CODE_16BIT_YCC422:
306 		bpp_x100 = 200;
307 		break;
308 	case COLOR_CODE_30BIT:
309 		bpp_x100 = 375;
310 		break;
311 	case COLOR_CODE_36BIT:
312 		bpp_x100 = 450;
313 		break;
314 	case COLOR_CODE_12BIT_YCC420:
315 		bpp_x100 = 150;
316 		break;
317 	default:
318 		DRM_ERROR("invalid color coding");
319 		bpp_x100 = 0;
320 		break;
321 	}
322 
323 	return bpp_x100;
324 }
325 
326 static u8 calc_video_size_step(int coding)
327 {
328 	u8 video_size_step;
329 
330 	switch (coding) {
331 	case COLOR_CODE_16BIT_CONFIG1:
332 	case COLOR_CODE_16BIT_CONFIG2:
333 	case COLOR_CODE_16BIT_CONFIG3:
334 	case COLOR_CODE_18BIT_CONFIG1:
335 	case COLOR_CODE_18BIT_CONFIG2:
336 	case COLOR_CODE_24BIT:
337 	case COLOR_CODE_COMPRESSTION:
338 		return video_size_step = 1;
339 	case COLOR_CODE_20BIT_YCC422_LOOSELY:
340 	case COLOR_CODE_24BIT_YCC422:
341 	case COLOR_CODE_16BIT_YCC422:
342 	case COLOR_CODE_30BIT:
343 	case COLOR_CODE_36BIT:
344 	case COLOR_CODE_12BIT_YCC420:
345 		return video_size_step = 2;
346 	default:
347 		DRM_ERROR("invalid color coding");
348 		return 0;
349 	}
350 }
351 
352 static u16 round_video_size(int coding, u16 video_size)
353 {
354 	switch (coding) {
355 	case COLOR_CODE_16BIT_YCC422:
356 	case COLOR_CODE_24BIT_YCC422:
357 	case COLOR_CODE_20BIT_YCC422_LOOSELY:
358 	case COLOR_CODE_12BIT_YCC420:
359 		/* round up active H pixels to a multiple of 2 */
360 		if ((video_size % 2) != 0)
361 			video_size += 1;
362 		break;
363 	default:
364 		break;
365 	}
366 
367 	return video_size;
368 }
369 
370 #define SPRD_MIPI_DSI_FMT_DSC 0xff
371 static u32 fmt_to_coding(u32 fmt)
372 {
373 	switch (fmt) {
374 	case MIPI_DSI_FMT_RGB565:
375 		return COLOR_CODE_16BIT_CONFIG1;
376 	case MIPI_DSI_FMT_RGB666:
377 	case MIPI_DSI_FMT_RGB666_PACKED:
378 		return COLOR_CODE_18BIT_CONFIG1;
379 	case MIPI_DSI_FMT_RGB888:
380 		return COLOR_CODE_24BIT;
381 	case SPRD_MIPI_DSI_FMT_DSC:
382 		return COLOR_CODE_COMPRESSTION;
383 	default:
384 		DRM_ERROR("Unsupported format (%d)\n", fmt);
385 		return COLOR_CODE_24BIT;
386 	}
387 }
388 
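/*
 * Convert a time in nanoseconds into byte-clock cycles, rounding up; the
 * 1000000 divisor implies byte_clk is expressed in kHz here.
 */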
389 #define ns_to_cycle(ns, byte_clk) \
390 	DIV_ROUND_UP((ns) * (byte_clk), 1000000)
391 
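/*
 * Basic controller setup: mask all interrupts, enable RX ECC/CRC checking
 * and bus turnaround, program the TX escape clock divider, the maximum
 * read time and the D-PHY LP<->HS transition times, then release the soft
 * reset.
 */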
392 static void sprd_dsi_init(struct dsi_context *ctx)
393 {
394 	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
395 	u32 byte_clk = dsi->slave->hs_rate / 8;
396 	u16 data_hs2lp, data_lp2hs, clk_hs2lp, clk_lp2hs;
397 	u16 max_rd_time;
398 	int div;
399 
400 	writel(0, ctx->base + SOFT_RESET);
401 	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
402 	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
403 	writel(1, ctx->base + DSI_MODE_CFG);
404 	dsi_reg_up(ctx, EOTP_EN, RX_EOTP_EN, 0);
405 	dsi_reg_up(ctx, EOTP_EN, TX_EOTP_EN, 0);
406 	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_ECC_EN, RX_PKT_ECC_EN);
407 	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_CRC_EN, RX_PKT_CRC_EN);
408 	writel(1, ctx->base + TA_EN);
409 	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, VIDEO_PKT_VCID, 0);
410 	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, GEN_RX_VCID, 0);
411 
412 	div = DIV_ROUND_UP(byte_clk, dsi->slave->lp_rate);
413 	writel(div, ctx->base + TX_ESC_CLK_CONFIG);
414 
415 	max_rd_time = ns_to_cycle(ctx->max_rd_time, byte_clk);
416 	writel(max_rd_time, ctx->base + MAX_READ_TIME);
417 
418 	data_hs2lp = ns_to_cycle(ctx->data_hs2lp, byte_clk);
419 	data_lp2hs = ns_to_cycle(ctx->data_lp2hs, byte_clk);
420 	clk_hs2lp = ns_to_cycle(ctx->clk_hs2lp, byte_clk);
421 	clk_lp2hs = ns_to_cycle(ctx->clk_lp2hs, byte_clk);
422 	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
423 		   PHY_DATALANE_HS_TO_LP_TIME, 16, data_hs2lp);
424 	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
425 		   PHY_DATALANE_LP_TO_HS_TIME, 0, data_lp2hs);
426 	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
427 		   PHY_CLKLANE_HS_TO_LP_TIME, 16, clk_hs2lp);
428 	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
429 		   PHY_CLKLANE_LP_TO_HS_TIME, 0, clk_lp2hs);
430 
431 	writel(1, ctx->base + SOFT_RESET);
432 }
433 
434 /*
435  * Free up resources and shut down the host controller and PHY.
436  */
437 static void sprd_dsi_fini(struct dsi_context *ctx)
438 {
439 	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
440 	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
441 	writel(0, ctx->base + SOFT_RESET);
442 }
443 
444 /*
445  * If not in burst mode, compute the video and null packet sizes as
446  * needed.
447  * Configure timers for the data lanes and/or clock lane to return to LP
448  * when the bandwidth is not filled by data.
449  */
450 static int sprd_dsi_dpi_video(struct dsi_context *ctx)
451 {
452 	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
453 	struct videomode *vm = &ctx->vm;
454 	u32 byte_clk = dsi->slave->hs_rate / 8;
455 	u16 bpp_x100;
456 	u16 video_size;
457 	u32 ratio_x1000;
458 	u16 null_pkt_size = 0;
459 	u8 video_size_step;
460 	u32 hs_to;
461 	u32 total_bytes;
462 	u32 bytes_per_chunk;
463 	u32 chunks = 0;
464 	u32 bytes_left = 0;
465 	u32 chunk_overhead;
466 	const u8 pkt_header = 6;
467 	u8 coding;
468 	int div;
469 	u16 hline;
470 	u16 byte_cycle;
471 
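	/*
	 * All timing math below uses scaled integers: bpp_x100 is bytes per
	 * pixel x 100 and ratio_x1000 is the byte-clock to pixel-clock ratio
	 * x 1000, so horizontal timings can be converted to byte-clock
	 * cycles without floating point.
	 */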
472 	coding = fmt_to_coding(dsi->slave->format);
473 	video_size = round_video_size(coding, vm->hactive);
474 	bpp_x100 = calc_bytes_per_pixel_x100(coding);
475 	video_size_step = calc_video_size_step(coding);
476 	ratio_x1000 = byte_clk * 1000 / (vm->pixelclock / 1000);
477 	hline = vm->hactive + vm->hsync_len + vm->hfront_porch +
478 		vm->hback_porch;
479 
480 	writel(0, ctx->base + SOFT_RESET);
481 	dsi_reg_wr(ctx, VID_MODE_CFG, FRAME_BTA_ACK_EN, 14, ctx->frame_ack_en);
482 	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
483 	dsi_reg_wr(ctx, VID_MODE_CFG, VID_MODE_TYPE, 0, ctx->burst_mode);
484 	byte_cycle = 95 * hline * ratio_x1000 / 100000;
485 	dsi_reg_wr(ctx, VIDEO_SIG_DELAY_CONFIG, VIDEO_SIG_DELAY, 0, byte_cycle);
486 	byte_cycle = hline * ratio_x1000 / 1000;
487 	writel(byte_cycle, ctx->base + VIDEO_LINE_TIME);
488 	byte_cycle = vm->hsync_len * ratio_x1000 / 1000;
489 	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HSA_TIME, 16, byte_cycle);
490 	byte_cycle = vm->hback_porch * ratio_x1000 / 1000;
491 	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HBP_TIME, 0, byte_cycle);
492 	writel(vm->vactive, ctx->base + VIDEO_VACTIVE_LINES);
493 	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VFP_LINES, 0, vm->vfront_porch);
494 	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VBP_LINES, 10, vm->vback_porch);
495 	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VSA_LINES, 20, vm->vsync_len);
496 	dsi_reg_up(ctx, VID_MODE_CFG, LP_HBP_EN | LP_HFP_EN | LP_VACT_EN |
497 			LP_VFP_EN | LP_VBP_EN | LP_VSA_EN, LP_HBP_EN | LP_HFP_EN |
498 			LP_VACT_EN | LP_VFP_EN | LP_VBP_EN | LP_VSA_EN);
499 
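	/*
	 * Program the HS TX and LP RX timeouts: find the largest timeout
	 * counter clock divider (counting down from 0x80) that divides
	 * hs_to evenly, so both the divider and the resulting counts fit
	 * their registers.
	 */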
500 	hs_to = (hline * vm->vactive) + (2 * bpp_x100) / 100;
501 	for (div = 0x80; (div < hs_to) && (div > 2); div--) {
502 		if ((hs_to % div) == 0) {
503 			writel(div, ctx->base + TIMEOUT_CNT_CLK_CONFIG);
504 			writel(hs_to / div, ctx->base + LRX_H_TO_CONFIG);
505 			writel(hs_to / div, ctx->base + HTX_TO_CONFIG);
506 			break;
507 		}
508 	}
509 
510 	if (ctx->burst_mode == VIDEO_BURST_WITH_SYNC_PULSES) {
511 		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
512 		writel(0, ctx->base + VIDEO_NULLPKT_SIZE);
513 		dsi_reg_up(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 0);
514 	} else {
515 		/* non burst transmission */
516 		null_pkt_size = 0;
517 
518 		/* bytes to be sent - first as one chunk */
519 		bytes_per_chunk = vm->hactive * bpp_x100 / 100 + pkt_header;
520 
521 		/* hline total bytes from the DPI interface */
522 		total_bytes = (vm->hactive + vm->hfront_porch) *
523 				ratio_x1000 / dsi->slave->lanes / 1000;
524 
525 		/* check if the pixels actually fit on the DSI link */
526 		if (total_bytes < bytes_per_chunk) {
527 			drm_err(dsi->drm, "current resolution can not be set\n");
528 			return -EINVAL;
529 		}
530 
531 		chunk_overhead = total_bytes - bytes_per_chunk;
532 
533 		/* overhead higher than 1 -> enable multi packets */
534 		if (chunk_overhead > 1) {
535 			/* multi packets */
536 			for (video_size = video_size_step;
537 			     video_size < vm->hactive;
538 			     video_size += video_size_step) {
539 				if (vm->hactive * 1000 / video_size % 1000)
540 					continue;
541 
542 				chunks = vm->hactive / video_size;
543 				bytes_per_chunk = bpp_x100 * video_size / 100
544 						  + pkt_header;
545 				if (total_bytes >= (bytes_per_chunk * chunks)) {
546 					bytes_left = total_bytes -
547 						     bytes_per_chunk * chunks;
548 					break;
549 				}
550 			}
551 
552 			/* prevent overflow (unsigned - unsigned) */
553 			if (bytes_left > (pkt_header * chunks)) {
554 				null_pkt_size = (bytes_left -
555 						pkt_header * chunks) / chunks;
556 				/* avoid register overflow */
557 				if (null_pkt_size > 1023)
558 					null_pkt_size = 1023;
559 			}
560 
561 		} else {
562 			/* single packet */
563 			chunks = 1;
564 
565 			/* must be a multiple of 4 except 18 loosely */
566 			for (video_size = vm->hactive;
567 			    (video_size % video_size_step) != 0;
568 			     video_size++)
569 				;
570 		}
571 
572 		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
573 		writel(null_pkt_size, ctx->base + VIDEO_NULLPKT_SIZE);
574 		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 16, chunks);
575 	}
576 
577 	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
578 	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
579 	writel(1, ctx->base + SOFT_RESET);
580 
581 	return 0;
582 }
583 
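/*
 * Command-mode (eDPI) configuration: cap the DCS write-memory packet size
 * at the number of pixels that fit in the controller FIFO (fifo_depth
 * words of word_length bytes each).
 */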
584 static void sprd_dsi_edpi_video(struct dsi_context *ctx)
585 {
586 	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
587 	const u32 fifo_depth = 1096;
588 	const u32 word_length = 4;
589 	u32 hactive = ctx->vm.hactive;
590 	u32 bpp_x100;
591 	u32 max_fifo_len;
592 	u8 coding;
593 
594 	coding = fmt_to_coding(dsi->slave->format);
595 	bpp_x100 = calc_bytes_per_pixel_x100(coding);
596 	max_fifo_len = word_length * fifo_depth * 100 / bpp_x100;
597 
598 	writel(0, ctx->base + SOFT_RESET);
599 	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
600 	dsi_reg_wr(ctx, CMD_MODE_CFG, TEAR_FX_EN, 0, ctx->te_ack_en);
601 
602 	if (max_fifo_len > hactive)
603 		writel(hactive, ctx->base + DCS_WM_PKT_SIZE);
604 	else
605 		writel(max_fifo_len, ctx->base + DCS_WM_PKT_SIZE);
606 
607 	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
608 	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
609 	writel(1, ctx->base + SOFT_RESET);
610 }
611 
612 /*
613  * Send a packet on the generic interface.
614  * This function has an active delay that waits for the buffer to clear;
615  * the delay is limited to:
616  * (param_length / 4) x DSIH_FIFO_ACTIVE_WAIT x register access time,
617  * after which the controller restricts the sending.
618  *
619  * This function will not be able to send Null and Blanking packets due to
620  * a controller restriction.
621  */
622 static int sprd_dsi_wr_pkt(struct dsi_context *ctx, u8 vc, u8 type,
623 			   const u8 *param, u16 len)
624 {
625 	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
626 	u8 wc_lsbyte, wc_msbyte;
627 	u32 payload;
628 	int i, j, ret;
629 
630 	if (vc > 3)
631 		return -EINVAL;
632 
633 	/* 1st: for long packet, must config payload first */
634 	ret = dsi_wait_tx_payload_fifo_empty(ctx);
635 	if (ret) {
636 		drm_err(dsi->drm, "tx payload fifo is not empty\n");
637 		return ret;
638 	}
639 
640 	if (len > 2) {
641 		for (i = 0, j = 0; i < len; i += j) {
642 			payload = 0;
643 			for (j = 0; (j < 4) && ((j + i) < (len)); j++)
644 				payload |= param[i + j] << (j * 8);
645 
646 			writel(payload, ctx->base + GEN_PLD_DATA);
647 		}
648 		wc_lsbyte = len & 0xff;
649 		wc_msbyte = len >> 8;
650 	} else {
651 		wc_lsbyte = (len > 0) ? param[0] : 0;
652 		wc_msbyte = (len > 1) ? param[1] : 0;
653 	}
654 
655 	/* 2nd: then set packet header */
656 	ret = dsi_wait_tx_cmd_fifo_empty(ctx);
657 	if (ret) {
658 		drm_err(dsi->drm, "tx cmd fifo is not empty\n");
659 		return ret;
660 	}
661 
662 	writel(type | (vc << 6) | (wc_lsbyte << 8) | (wc_msbyte << 16),
663 	       ctx->base + GEN_HDR);
664 
665 	return 0;
666 }
667 
668 /*
669  * Send a READ packet to the peripheral using the generic interface;
670  * this forces command mode and stops video mode (because of BTA).
671  *
672  * This function has an active delay to wait for the buffer to clear;
673  * the delay is limited to 2 x DSIH_FIFO_ACTIVE_WAIT
674  * (waiting for the command buffer, then waiting for the response).
675  * @note this function will enable BTA
676  */
677 static int sprd_dsi_rd_pkt(struct dsi_context *ctx, u8 vc, u8 type,
678 			   u8 msb_byte, u8 lsb_byte,
679 			   u8 *buffer, u8 bytes_to_read)
680 {
681 	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
682 	int i, ret;
683 	int count = 0;
684 	u32 temp;
685 
686 	if (vc > 3)
687 		return -EINVAL;
688 
689 	/* 1st: send read command to peripheral */
690 	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5);
691 	if (!ret)
692 		return -EIO;
693 
694 	writel(type | (vc << 6) | (lsb_byte << 8) | (msb_byte << 16),
695 	       ctx->base + GEN_HDR);
696 
697 	/* 2nd: wait peripheral response completed */
698 	ret = dsi_wait_rd_resp_completed(ctx);
699 	if (ret) {
700 		drm_err(dsi->drm, "wait read response time out\n");
701 		return ret;
702 	}
703 
704 	/* 3rd: get data from rx payload fifo */
705 	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
706 	if (ret) {
707 		drm_err(dsi->drm, "rx payload fifo empty\n");
708 		return -EIO;
709 	}
710 
711 	for (i = 0; i < 100; i++) {
712 		temp = readl(ctx->base + GEN_PLD_DATA);
713 
714 		if (count < bytes_to_read)
715 			buffer[count++] = temp & 0xff;
716 		if (count < bytes_to_read)
717 			buffer[count++] = (temp >> 8) & 0xff;
718 		if (count < bytes_to_read)
719 			buffer[count++] = (temp >> 16) & 0xff;
720 		if (count < bytes_to_read)
721 			buffer[count++] = (temp >> 24) & 0xff;
722 
723 		ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
724 		if (ret)
725 			return count;
726 	}
727 
728 	return 0;
729 }
730 
731 static void sprd_dsi_set_work_mode(struct dsi_context *ctx, u8 mode)
732 {
733 	if (mode == DSI_MODE_CMD)
734 		writel(1, ctx->base + DSI_MODE_CFG);
735 	else
736 		writel(0, ctx->base + DSI_MODE_CFG);
737 }
738 
739 static void sprd_dsi_state_reset(struct dsi_context *ctx)
740 {
741 	writel(0, ctx->base + SOFT_RESET);
742 	udelay(100);
743 	writel(1, ctx->base + SOFT_RESET);
744 }
745 
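/*
 * Bring up the D-PHY: hold it in reset and shutdown, pulse TESTCLR to
 * clear the test interface, program the PLL and timing parameters, then
 * release the PHY and wait for the PLL to lock.
 */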
746 static int sprd_dphy_init(struct dsi_context *ctx)
747 {
748 	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
749 	int ret;
750 
751 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
752 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
753 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, 0);
754 
755 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);
756 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, PHY_TESTCLR);
757 	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);
758 
759 	dphy_pll_config(ctx);
760 	dphy_timing_config(ctx);
761 
762 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, RF_PHY_SHUTDOWN);
763 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
764 	writel(0x1C, ctx->base + PHY_MIN_STOP_TIME);
765 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
766 	writel(dsi->slave->lanes - 1, ctx->base + PHY_LANE_NUM_CONFIG);
767 
768 	ret = dphy_wait_pll_locked(ctx);
769 	if (ret) {
770 		drm_err(dsi->drm, "dphy initial failed\n");
771 		return ret;
772 	}
773 
774 	return 0;
775 }
776 
777 static void sprd_dphy_fini(struct dsi_context *ctx)
778 {
779 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
780 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
781 	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
782 }
783 
784 static void sprd_dsi_encoder_mode_set(struct drm_encoder *encoder,
785 				      struct drm_display_mode *mode,
786 				 struct drm_display_mode *adj_mode)
787 {
788 	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
789 
790 	drm_display_mode_to_videomode(adj_mode, &dsi->ctx.vm);
791 }
792 
793 static void sprd_dsi_encoder_enable(struct drm_encoder *encoder)
794 {
795 	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
796 	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
797 	struct dsi_context *ctx = &dsi->ctx;
798 
799 	if (ctx->enabled) {
800 		drm_warn(dsi->drm, "dsi is initialized\n");
801 		return;
802 	}
803 
804 	sprd_dsi_init(ctx);
805 	if (ctx->work_mode == DSI_MODE_VIDEO)
806 		sprd_dsi_dpi_video(ctx);
807 	else
808 		sprd_dsi_edpi_video(ctx);
809 
810 	sprd_dphy_init(ctx);
811 
812 	sprd_dsi_set_work_mode(ctx, ctx->work_mode);
813 	sprd_dsi_state_reset(ctx);
814 
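	/*
	 * Panels that support a non-continuous clock get automatic clock
	 * lane control; otherwise force the clock lane into HS mode and
	 * wait for the PLL to lock.
	 */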
815 	if (dsi->slave->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) {
816 		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, AUTO_CLKLANE_CTRL_EN,
817 			   AUTO_CLKLANE_CTRL_EN);
818 	} else {
819 		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
820 		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, PHY_CLKLANE_TX_REQ_HS,
821 			   PHY_CLKLANE_TX_REQ_HS);
822 		dphy_wait_pll_locked(ctx);
823 	}
824 
825 	sprd_dpu_run(dpu);
826 
827 	ctx->enabled = true;
828 }
829 
830 static void sprd_dsi_encoder_disable(struct drm_encoder *encoder)
831 {
832 	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
833 	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
834 	struct dsi_context *ctx = &dsi->ctx;
835 
836 	if (!ctx->enabled) {
837 		drm_warn(dsi->drm, "dsi isn't initialized\n");
838 		return;
839 	}
840 
841 	sprd_dpu_stop(dpu);
842 	sprd_dphy_fini(ctx);
843 	sprd_dsi_fini(ctx);
844 
845 	ctx->enabled = false;
846 }
847 
848 static const struct drm_encoder_helper_funcs sprd_encoder_helper_funcs = {
849 	.mode_set	= sprd_dsi_encoder_mode_set,
850 	.enable		= sprd_dsi_encoder_enable,
851 	.disable	= sprd_dsi_encoder_disable
852 };
853 
854 static const struct drm_encoder_funcs sprd_encoder_funcs = {
855 	.destroy = drm_encoder_cleanup,
856 };
857 
858 static int sprd_dsi_encoder_init(struct sprd_dsi *dsi,
859 				 struct device *dev)
860 {
861 	struct drm_encoder *encoder = &dsi->encoder;
862 	u32 crtc_mask;
863 	int ret;
864 
865 	crtc_mask = drm_of_find_possible_crtcs(dsi->drm, dev->of_node);
866 	if (!crtc_mask) {
867 		drm_err(dsi->drm, "failed to find crtc mask\n");
868 		return -EINVAL;
869 	}
870 
871 	drm_dbg(dsi->drm, "find possible crtcs: 0x%08x\n", crtc_mask);
872 
873 	encoder->possible_crtcs = crtc_mask;
874 	ret = drm_encoder_init(dsi->drm, encoder, &sprd_encoder_funcs,
875 			       DRM_MODE_ENCODER_DSI, NULL);
876 	if (ret) {
877 		drm_err(dsi->drm, "failed to init dsi encoder\n");
878 		return ret;
879 	}
880 
881 	drm_encoder_helper_add(encoder, &sprd_encoder_helper_funcs);
882 
883 	return 0;
884 }
885 
886 static int sprd_dsi_bridge_init(struct sprd_dsi *dsi,
887 				struct device *dev)
888 {
889 	int ret;
890 
891 	dsi->panel_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);
892 	if (IS_ERR(dsi->panel_bridge))
893 		return PTR_ERR(dsi->panel_bridge);
894 
895 	ret = drm_bridge_attach(&dsi->encoder, dsi->panel_bridge, NULL, 0);
896 	if (ret)
897 		return ret;
898 
899 	return 0;
900 }
901 
902 static int sprd_dsi_context_init(struct sprd_dsi *dsi,
903 				 struct device *dev)
904 {
905 	struct platform_device *pdev = to_platform_device(dev);
906 	struct dsi_context *ctx = &dsi->ctx;
907 	struct resource *res;
908 
909 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
910 	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
911 	if (!ctx->base) {
912 		drm_err(dsi->drm, "failed to map dsi host registers\n");
913 		return -ENXIO;
914 	}
915 
916 	ctx->regmap = devm_regmap_init(dev, &regmap_tst_io, dsi, &byte_config);
917 	if (IS_ERR(ctx->regmap)) {
918 		drm_err(dsi->drm, "dphy regmap init failed\n");
919 		return PTR_ERR(ctx->regmap);
920 	}
921 
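	/* Default D-PHY LP<->HS transition times and maximum read time, in ns */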
922 	ctx->data_hs2lp = 120;
923 	ctx->data_lp2hs = 500;
924 	ctx->clk_hs2lp = 4;
925 	ctx->clk_lp2hs = 15;
926 	ctx->max_rd_time = 6000;
927 	ctx->int0_mask = 0xffffffff;
928 	ctx->int1_mask = 0xffffffff;
929 	ctx->enabled = true;
930 
931 	return 0;
932 }
933 
934 static int sprd_dsi_bind(struct device *dev, struct device *master, void *data)
935 {
936 	struct drm_device *drm = data;
937 	struct sprd_dsi *dsi = dev_get_drvdata(dev);
938 	int ret;
939 
940 	dsi->drm = drm;
941 
942 	ret = sprd_dsi_encoder_init(dsi, dev);
943 	if (ret)
944 		return ret;
945 
946 	ret = sprd_dsi_bridge_init(dsi, dev);
947 	if (ret)
948 		return ret;
949 
950 	ret = sprd_dsi_context_init(dsi, dev);
951 	if (ret)
952 		return ret;
953 
954 	return 0;
955 }
956 
957 static void sprd_dsi_unbind(struct device *dev,
958 			    struct device *master, void *data)
959 {
960 	struct sprd_dsi *dsi = dev_get_drvdata(dev);
961 
962 	drm_of_panel_bridge_remove(dev->of_node, 1, 0);
963 
964 	drm_encoder_cleanup(&dsi->encoder);
965 }
966 
967 static const struct component_ops dsi_component_ops = {
968 	.bind	= sprd_dsi_bind,
969 	.unbind	= sprd_dsi_unbind,
970 };
971 
972 static int sprd_dsi_host_attach(struct mipi_dsi_host *host,
973 				struct mipi_dsi_device *slave)
974 {
975 	struct sprd_dsi *dsi = host_to_dsi(host);
976 	struct dsi_context *ctx = &dsi->ctx;
977 
978 	dsi->slave = slave;
979 
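	/* Derive the work mode and video burst mode from the peripheral's mode flags */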
980 	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO)
981 		ctx->work_mode = DSI_MODE_VIDEO;
982 	else
983 		ctx->work_mode = DSI_MODE_CMD;
984 
985 	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
986 		ctx->burst_mode = VIDEO_BURST_WITH_SYNC_PULSES;
987 	else if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
988 		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_PULSES;
989 	else
990 		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_EVENTS;
991 
992 	return component_add(host->dev, &dsi_component_ops);
993 }
994 
995 static int sprd_dsi_host_detach(struct mipi_dsi_host *host,
996 				struct mipi_dsi_device *slave)
997 {
998 	component_del(host->dev, &dsi_component_ops);
999 
1000 	return 0;
1001 }
1002 
1003 static ssize_t sprd_dsi_host_transfer(struct mipi_dsi_host *host,
1004 				      const struct mipi_dsi_msg *msg)
1005 {
1006 	struct sprd_dsi *dsi = host_to_dsi(host);
1007 	const u8 *tx_buf = msg->tx_buf;
1008 
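	/*
	 * A message with an rx buffer is a read: the first one or two tx
	 * bytes become the read command parameters; otherwise the tx buffer
	 * is sent as a generic write packet.
	 */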
1009 	if (msg->rx_buf && msg->rx_len) {
1010 		u8 lsb = (msg->tx_len > 0) ? tx_buf[0] : 0;
1011 		u8 msb = (msg->tx_len > 1) ? tx_buf[1] : 0;
1012 
1013 		return sprd_dsi_rd_pkt(&dsi->ctx, msg->channel, msg->type,
1014 				msb, lsb, msg->rx_buf, msg->rx_len);
1015 	}
1016 
1017 	if (msg->tx_buf && msg->tx_len)
1018 		return sprd_dsi_wr_pkt(&dsi->ctx, msg->channel, msg->type,
1019 					tx_buf, msg->tx_len);
1020 
1021 	return 0;
1022 }
1023 
1024 static const struct mipi_dsi_host_ops sprd_dsi_host_ops = {
1025 	.attach = sprd_dsi_host_attach,
1026 	.detach = sprd_dsi_host_detach,
1027 	.transfer = sprd_dsi_host_transfer,
1028 };
1029 
1030 static const struct of_device_id dsi_match_table[] = {
1031 	{ .compatible = "sprd,sharkl3-dsi-host" },
1032 	{ /* sentinel */ },
1033 };
1034 
1035 static int sprd_dsi_probe(struct platform_device *pdev)
1036 {
1037 	struct device *dev = &pdev->dev;
1038 	struct sprd_dsi *dsi;
1039 
1040 	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
1041 	if (!dsi)
1042 		return -ENOMEM;
1043 
1044 	dev_set_drvdata(dev, dsi);
1045 
1046 	dsi->host.ops = &sprd_dsi_host_ops;
1047 	dsi->host.dev = dev;
1048 
1049 	return mipi_dsi_host_register(&dsi->host);
1050 }
1051 
1052 static int sprd_dsi_remove(struct platform_device *pdev)
1053 {
1054 	struct sprd_dsi *dsi = dev_get_drvdata(&pdev->dev);
1055 
1056 	mipi_dsi_host_unregister(&dsi->host);
1057 
1058 	return 0;
1059 }
1060 
1061 struct platform_driver sprd_dsi_driver = {
1062 	.probe = sprd_dsi_probe,
1063 	.remove = sprd_dsi_remove,
1064 	.driver = {
1065 		.name = "sprd-dsi-drv",
1066 		.of_match_table = dsi_match_table,
1067 	},
1068 };
1069 
1070 MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
1071 MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
1072 MODULE_DESCRIPTION("Unisoc MIPI DSI HOST Controller Driver");
1073 MODULE_LICENSE("GPL v2");
1074