xref: /openbmc/linux/drivers/gpu/drm/sprd/sprd_dpu.c (revision 85250a24)
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 Unisoc Inc.
 */

#include <linux/component.h>
#include <linux/delay.h>
#include <linux/dma-buf.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_device.h>
#include <linux/of_graph.h>
#include <linux/of_irq.h>
#include <linux/wait.h>
#include <linux/workqueue.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_dma_helper.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem_dma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>

#include "sprd_drm.h"
#include "sprd_dpu.h"
#include "sprd_dsi.h"

/* Global control registers */
#define REG_DPU_CTRL	0x04
#define REG_DPU_CFG0	0x08
#define REG_PANEL_SIZE	0x20
#define REG_BLEND_SIZE	0x24
#define REG_BG_COLOR	0x2C

/* Layer0 control registers */
#define REG_LAY_BASE_ADDR0	0x30
#define REG_LAY_BASE_ADDR1	0x34
#define REG_LAY_BASE_ADDR2	0x38
#define REG_LAY_CTRL		0x40
#define REG_LAY_SIZE		0x44
#define REG_LAY_PITCH		0x48
#define REG_LAY_POS		0x4C
#define REG_LAY_ALPHA		0x50
#define REG_LAY_CROP_START	0x5C

/* Interrupt control registers */
#define REG_DPU_INT_EN		0x1E0
#define REG_DPU_INT_CLR		0x1E4
#define REG_DPU_INT_STS		0x1E8

/* DPI control registers */
#define REG_DPI_CTRL		0x1F0
#define REG_DPI_H_TIMING	0x1F4
#define REG_DPI_V_TIMING	0x1F8

/* MMU control registers */
#define REG_MMU_EN			0x800
#define REG_MMU_VPN_RANGE		0x80C
#define REG_MMU_PPN1			0x83C
#define REG_MMU_RANGE1			0x840
#define REG_MMU_PPN2			0x844
#define REG_MMU_RANGE2			0x848

/* Global control bits */
#define BIT_DPU_RUN			BIT(0)
#define BIT_DPU_STOP			BIT(1)
#define BIT_DPU_REG_UPDATE		BIT(2)
#define BIT_DPU_IF_EDPI			BIT(0)

/* Layer control bits */
#define BIT_DPU_LAY_EN				BIT(0)
#define BIT_DPU_LAY_LAYER_ALPHA			(0x01 << 2)
#define BIT_DPU_LAY_COMBO_ALPHA			(0x02 << 2)
#define BIT_DPU_LAY_FORMAT_YUV422_2PLANE		(0x00 << 4)
#define BIT_DPU_LAY_FORMAT_YUV420_2PLANE		(0x01 << 4)
#define BIT_DPU_LAY_FORMAT_YUV420_3PLANE		(0x02 << 4)
#define BIT_DPU_LAY_FORMAT_ARGB8888			(0x03 << 4)
#define BIT_DPU_LAY_FORMAT_RGB565			(0x04 << 4)
#define BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3		(0x00 << 8)
#define BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0		(0x01 << 8)
#define BIT_DPU_LAY_NO_SWITCH			(0x00 << 10)
#define BIT_DPU_LAY_RB_OR_UV_SWITCH		(0x01 << 10)
#define BIT_DPU_LAY_MODE_BLEND_NORMAL		(0x00 << 16)
#define BIT_DPU_LAY_MODE_BLEND_PREMULT		(0x01 << 16)
#define BIT_DPU_LAY_ROTATION_0		(0x00 << 20)
#define BIT_DPU_LAY_ROTATION_90		(0x01 << 20)
#define BIT_DPU_LAY_ROTATION_180	(0x02 << 20)
#define BIT_DPU_LAY_ROTATION_270	(0x03 << 20)
#define BIT_DPU_LAY_ROTATION_0_M	(0x04 << 20)
#define BIT_DPU_LAY_ROTATION_90_M	(0x05 << 20)
#define BIT_DPU_LAY_ROTATION_180_M	(0x06 << 20)
#define BIT_DPU_LAY_ROTATION_270_M	(0x07 << 20)

/* Interrupt control & status bits */
#define BIT_DPU_INT_DONE		BIT(0)
#define BIT_DPU_INT_TE			BIT(1)
#define BIT_DPU_INT_ERR			BIT(2)
#define BIT_DPU_INT_UPDATE_DONE		BIT(4)
#define BIT_DPU_INT_VSYNC		BIT(5)

/* DPI control bits */
#define BIT_DPU_EDPI_TE_EN		BIT(8)
#define BIT_DPU_EDPI_FROM_EXTERNAL_PAD	BIT(10)
#define BIT_DPU_DPI_HALT_EN		BIT(16)

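/*
 * Pixel formats advertised for every DPU layer; each plane created in
 * sprd_planes_init() exposes this same list.
 */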
static const u32 layer_fmts[] = {
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_RGBX8888,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
	DRM_FORMAT_NV12,
	DRM_FORMAT_NV21,
	DRM_FORMAT_NV16,
	DRM_FORMAT_NV61,
	DRM_FORMAT_YUV420,
	DRM_FORMAT_YVU420,
};

struct sprd_plane {
	struct drm_plane base;
};

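/*
 * Wait for the "stop done" event raised by the ISR (BIT_DPU_INT_DONE).
 * Returns 0 once the DPU is known to be stopped, or -ETIMEDOUT after
 * 500 ms. A DPU that is already stopped returns immediately.
 */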
static int dpu_wait_stop_done(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	int rc;

	if (ctx->stopped)
		return 0;

	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_stop,
					      msecs_to_jiffies(500));
	ctx->evt_stop = false;

	ctx->stopped = true;

	if (!rc) {
		drm_err(dpu->drm, "dpu wait for stop done time out!\n");
		return -ETIMEDOUT;
	}

	return 0;
}

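/*
 * Wait for the "register update done" event (BIT_DPU_INT_UPDATE_DONE)
 * after a shadow-register update has been triggered, with the same
 * 500 ms timeout as dpu_wait_stop_done().
 */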
static int dpu_wait_update_done(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	int rc;

	ctx->evt_update = false;

	rc = wait_event_interruptible_timeout(ctx->wait_queue, ctx->evt_update,
					      msecs_to_jiffies(500));

	if (!rc) {
		drm_err(dpu->drm, "dpu wait for reg update done time out!\n");
		return -ETIMEDOUT;
	}

	return 0;
}

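/*
 * Translate a DRM fourcc into the layer control bits (pixel format,
 * byte order and R/B or U/V swap). Unsupported formats fall through to
 * the default case and return 0, which sprd_plane_atomic_check() treats
 * as an error.
 */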
static u32 drm_format_to_dpu(struct drm_framebuffer *fb)
{
	u32 format = 0;

	switch (fb->format->format) {
	case DRM_FORMAT_BGRA8888:
		/* BGRA8888 -> ARGB8888 */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_RGBX8888:
	case DRM_FORMAT_RGBA8888:
		/* RGBA8888 -> ABGR8888 */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		fallthrough;
	case DRM_FORMAT_ABGR8888:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_ARGB8888:
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_XBGR8888:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_XRGB8888:
		format |= BIT_DPU_LAY_FORMAT_ARGB8888;
		break;
	case DRM_FORMAT_BGR565:
		/* RB switch */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		fallthrough;
	case DRM_FORMAT_RGB565:
		format |= BIT_DPU_LAY_FORMAT_RGB565;
		break;
	case DRM_FORMAT_NV12:
		/* 2-plane: YUV420 */
		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_NV21:
		/* 2-plane: YUV420 */
		format |= BIT_DPU_LAY_FORMAT_YUV420_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	case DRM_FORMAT_NV16:
		/* 2-plane: YUV422 */
		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B3B2B1B0;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	case DRM_FORMAT_NV61:
		/* 2-plane: YUV422 */
		format |= BIT_DPU_LAY_FORMAT_YUV422_2PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_YUV420:
		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_NO_SWITCH;
		break;
	case DRM_FORMAT_YVU420:
		format |= BIT_DPU_LAY_FORMAT_YUV420_3PLANE;
		/* Y endian */
		format |= BIT_DPU_LAY_DATA_ENDIAN_B0B1B2B3;
		/* UV endian */
		format |= BIT_DPU_LAY_RB_OR_UV_SWITCH;
		break;
	default:
		break;
	}

	return format;
}

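/*
 * Map the DRM rotation/reflection property onto the hardware rotation
 * field; anything not listed falls back to a plain 0-degree rotation.
 */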
static u32 drm_rotation_to_dpu(struct drm_plane_state *state)
{
	u32 rotation = 0;

	switch (state->rotation) {
	default:
	case DRM_MODE_ROTATE_0:
		rotation = BIT_DPU_LAY_ROTATION_0;
		break;
	case DRM_MODE_ROTATE_90:
		rotation = BIT_DPU_LAY_ROTATION_90;
		break;
	case DRM_MODE_ROTATE_180:
		rotation = BIT_DPU_LAY_ROTATION_180;
		break;
	case DRM_MODE_ROTATE_270:
		rotation = BIT_DPU_LAY_ROTATION_270;
		break;
	case DRM_MODE_REFLECT_Y:
		rotation = BIT_DPU_LAY_ROTATION_180_M;
		break;
	case (DRM_MODE_REFLECT_Y | DRM_MODE_ROTATE_90):
		rotation = BIT_DPU_LAY_ROTATION_90_M;
		break;
	case DRM_MODE_REFLECT_X:
		rotation = BIT_DPU_LAY_ROTATION_0_M;
		break;
	case (DRM_MODE_REFLECT_X | DRM_MODE_ROTATE_90):
		rotation = BIT_DPU_LAY_ROTATION_270_M;
		break;
	}

	return rotation;
}

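/*
 * Map the DRM pixel blend mode onto the layer alpha/blend control bits:
 * coverage and pre-multiplied modes use per-pixel ("combo") alpha,
 * everything else uses the plane-wide layer alpha.
 */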
static u32 drm_blend_to_dpu(struct drm_plane_state *state)
{
	u32 blend = 0;

	switch (state->pixel_blend_mode) {
	case DRM_MODE_BLEND_COVERAGE:
		/* alpha mode select - combo alpha */
		blend |= BIT_DPU_LAY_COMBO_ALPHA;
		/* Normal mode */
		blend |= BIT_DPU_LAY_MODE_BLEND_NORMAL;
		break;
	case DRM_MODE_BLEND_PREMULTI:
		/* alpha mode select - combo alpha */
		blend |= BIT_DPU_LAY_COMBO_ALPHA;
		/* Pre-mult mode */
		blend |= BIT_DPU_LAY_MODE_BLEND_PREMULT;
		break;
	case DRM_MODE_BLEND_PIXEL_NONE:
	default:
		/* don't do blending, maybe RGBX */
		/* alpha mode select - layer alpha */
		blend |= BIT_DPU_LAY_LAYER_ALPHA;
		break;
	}

	return blend;
}

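/*
 * Program one hardware layer from a plane state: plane base addresses,
 * position, size, crop offset, alpha, pitch, and finally the layer
 * control word that enables the layer with the requested format,
 * blending and rotation. The layer index is taken from the immutable
 * zpos of the plane.
 */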
static void sprd_dpu_layer(struct sprd_dpu *dpu, struct drm_plane_state *state)
{
	struct dpu_context *ctx = &dpu->ctx;
	struct drm_gem_dma_object *dma_obj;
	struct drm_framebuffer *fb = state->fb;
	u32 addr, size, offset, pitch, blend, format, rotation;
	u32 src_x = state->src_x >> 16;
	u32 src_y = state->src_y >> 16;
	u32 src_w = state->src_w >> 16;
	u32 src_h = state->src_h >> 16;
	u32 dst_x = state->crtc_x;
	u32 dst_y = state->crtc_y;
	u32 alpha = state->alpha;
	u32 index = state->zpos;
	int i;

	offset = (dst_x & 0xffff) | (dst_y << 16);
	size = (src_w & 0xffff) | (src_h << 16);

	for (i = 0; i < fb->format->num_planes; i++) {
		dma_obj = drm_fb_dma_get_gem_obj(fb, i);
		addr = dma_obj->dma_addr + fb->offsets[i];

		if (i == 0)
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR0, addr, index);
		else if (i == 1)
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR1, addr, index);
		else
			layer_reg_wr(ctx, REG_LAY_BASE_ADDR2, addr, index);
	}

	if (fb->format->num_planes == 3) {
		/* UV pitch is 1/2 of Y pitch */
		pitch = (fb->pitches[0] / fb->format->cpp[0]) |
				(fb->pitches[0] / fb->format->cpp[0] << 15);
	} else {
		pitch = fb->pitches[0] / fb->format->cpp[0];
	}

	layer_reg_wr(ctx, REG_LAY_POS, offset, index);
	layer_reg_wr(ctx, REG_LAY_SIZE, size, index);
	layer_reg_wr(ctx, REG_LAY_CROP_START,
		     src_y << 16 | src_x, index);
	layer_reg_wr(ctx, REG_LAY_ALPHA, alpha, index);
	layer_reg_wr(ctx, REG_LAY_PITCH, pitch, index);

	format = drm_format_to_dpu(fb);
	blend = drm_blend_to_dpu(state);
	rotation = drm_rotation_to_dpu(state);

	layer_reg_wr(ctx, REG_LAY_CTRL, BIT_DPU_LAY_EN |
				format |
				blend |
				rotation,
				index);
}

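/*
 * Commit the layer configuration written by sprd_dpu_layer(). In DPI
 * mode this triggers a shadow-register update and re-arms the underflow
 * error interrupt; in EDPI mode it starts the (stopped) DPU so the new
 * configuration is latched on the rising edge of DPU_RUN.
 */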
static void sprd_dpu_flip(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	/*
	 * Make sure the dpu is in stop status. DPU has no shadow
	 * registers in EDPI mode. So the config registers can only be
	 * updated in the rising edge of DPU_RUN bit.
	 */
	if (ctx->if_type == SPRD_DPU_IF_EDPI)
		dpu_wait_stop_done(dpu);

	/* update trigger and wait */
	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		if (!ctx->stopped) {
			dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_REG_UPDATE);
			dpu_wait_update_done(dpu);
		}

		dpu_reg_set(ctx, REG_DPU_INT_EN, BIT_DPU_INT_ERR);
	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);

		ctx->stopped = false;
	}
}

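/*
 * Controller setup when the CRTC is enabled: reset the background
 * color and MMU range registers, select the DPI or EDPI interface, and
 * unmask the interrupts that interface needs.
 */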
static void sprd_dpu_init(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	u32 int_mask = 0;

	writel(0x00, ctx->base + REG_BG_COLOR);
	writel(0x00, ctx->base + REG_MMU_EN);
	writel(0x00, ctx->base + REG_MMU_PPN1);
	writel(0xffff, ctx->base + REG_MMU_RANGE1);
	writel(0x00, ctx->base + REG_MMU_PPN2);
	writel(0xffff, ctx->base + REG_MMU_RANGE2);
	writel(0x1ffff, ctx->base + REG_MMU_VPN_RANGE);

	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		/* use dpi as interface */
		dpu_reg_clr(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
		/* disable Halt function for SPRD DSI */
		dpu_reg_clr(ctx, REG_DPI_CTRL, BIT_DPU_DPI_HALT_EN);
		/* select te from external pad */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);

		/* enable dpu update done INT */
		int_mask |= BIT_DPU_INT_UPDATE_DONE;
		/* enable dpu done INT */
		int_mask |= BIT_DPU_INT_DONE;
		/* enable dpu dpi vsync */
		int_mask |= BIT_DPU_INT_VSYNC;
		/* enable dpu TE INT */
		int_mask |= BIT_DPU_INT_TE;
		/* enable underflow err INT */
		int_mask |= BIT_DPU_INT_ERR;
	} else if (ctx->if_type == SPRD_DPU_IF_EDPI) {
		/* use edpi as interface */
		dpu_reg_set(ctx, REG_DPU_CFG0, BIT_DPU_IF_EDPI);
		/* use external te */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_FROM_EXTERNAL_PAD);
		/* enable te */
		dpu_reg_set(ctx, REG_DPI_CTRL, BIT_DPU_EDPI_TE_EN);

		/* enable stop done INT */
		int_mask |= BIT_DPU_INT_DONE;
		/* enable TE INT */
		int_mask |= BIT_DPU_INT_TE;
	}

	writel(int_mask, ctx->base + REG_DPU_INT_EN);
}

static void sprd_dpu_fini(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	writel(0x00, ctx->base + REG_DPU_INT_EN);
	writel(0xff, ctx->base + REG_DPU_INT_CLR);
}

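/*
 * Program the panel/blend size and, for the DPI interface, the
 * horizontal and vertical timing registers from the videomode captured
 * in sprd_crtc_mode_set_nofb().
 */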
static void sprd_dpi_init(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;
	u32 reg_val;
	u32 size;

	size = (ctx->vm.vactive << 16) | ctx->vm.hactive;
	writel(size, ctx->base + REG_PANEL_SIZE);
	writel(size, ctx->base + REG_BLEND_SIZE);

	if (ctx->if_type == SPRD_DPU_IF_DPI) {
		/* set dpi timing */
		reg_val = ctx->vm.hsync_len << 0 |
			  ctx->vm.hback_porch << 8 |
			  ctx->vm.hfront_porch << 20;
		writel(reg_val, ctx->base + REG_DPI_H_TIMING);

		reg_val = ctx->vm.vsync_len << 0 |
			  ctx->vm.vback_porch << 8 |
			  ctx->vm.vfront_porch << 20;
		writel(reg_val, ctx->base + REG_DPI_V_TIMING);
	}
}

void sprd_dpu_run(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_RUN);

	ctx->stopped = false;
}

void sprd_dpu_stop(struct sprd_dpu *dpu)
{
	struct dpu_context *ctx = &dpu->ctx;

	if (ctx->if_type == SPRD_DPU_IF_DPI)
		dpu_reg_set(ctx, REG_DPU_CTRL, BIT_DPU_STOP);

	dpu_wait_stop_done(dpu);
}

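/*
 * Atomic check: accept only framebuffer formats the DPU layers can scan
 * out (drm_format_to_dpu() returns 0 for anything else) and reject any
 * scaling by passing DRM_PLANE_NO_SCALING for both scale factors.
 */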
static int sprd_plane_atomic_check(struct drm_plane *plane,
				   struct drm_atomic_state *state)
{
	struct drm_plane_state *plane_state = drm_atomic_get_new_plane_state(state,
									     plane);
	struct drm_crtc_state *crtc_state;
	u32 fmt;

	if (!plane_state->fb || !plane_state->crtc)
		return 0;

	fmt = drm_format_to_dpu(plane_state->fb);
	if (!fmt)
		return -EINVAL;

	crtc_state = drm_atomic_get_crtc_state(plane_state->state, plane_state->crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	return drm_atomic_helper_check_plane_state(plane_state, crtc_state,
						  DRM_PLANE_NO_SCALING,
						  DRM_PLANE_NO_SCALING,
						  true, true);
}

static void sprd_plane_atomic_update(struct drm_plane *drm_plane,
				     struct drm_atomic_state *state)
{
	struct drm_plane_state *new_state = drm_atomic_get_new_plane_state(state,
									   drm_plane);
	struct sprd_dpu *dpu = to_sprd_crtc(new_state->crtc);

	/* start configure dpu layers */
	sprd_dpu_layer(dpu, new_state);
}

static void sprd_plane_atomic_disable(struct drm_plane *drm_plane,
				      struct drm_atomic_state *state)
{
	struct drm_plane_state *old_state = drm_atomic_get_old_plane_state(state,
									   drm_plane);
	struct sprd_dpu *dpu = to_sprd_crtc(old_state->crtc);

	layer_reg_wr(&dpu->ctx, REG_LAY_CTRL, 0x00, old_state->zpos);
}

static void sprd_plane_create_properties(struct sprd_plane *plane, int index)
{
	unsigned int supported_modes = BIT(DRM_MODE_BLEND_PIXEL_NONE) |
				       BIT(DRM_MODE_BLEND_PREMULTI) |
				       BIT(DRM_MODE_BLEND_COVERAGE);

	/* create rotation property */
	drm_plane_create_rotation_property(&plane->base,
					   DRM_MODE_ROTATE_0,
					   DRM_MODE_ROTATE_MASK |
					   DRM_MODE_REFLECT_MASK);

	/* create alpha property */
	drm_plane_create_alpha_property(&plane->base);

	/* create blend mode property */
	drm_plane_create_blend_mode_property(&plane->base, supported_modes);

	/* create zpos property */
	drm_plane_create_zpos_immutable_property(&plane->base, index);
}

static const struct drm_plane_helper_funcs sprd_plane_helper_funcs = {
	.atomic_check = sprd_plane_atomic_check,
	.atomic_update = sprd_plane_atomic_update,
	.atomic_disable = sprd_plane_atomic_disable,
};

static const struct drm_plane_funcs sprd_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane	= drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

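/*
 * Create the six DPU layers as DRM planes: layer 0 is the primary
 * plane, layers 1-5 are overlays. The returned pointer is the primary
 * plane, which is later handed to the CRTC.
 */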
static struct sprd_plane *sprd_planes_init(struct drm_device *drm)
{
	struct sprd_plane *plane, *primary;
	enum drm_plane_type plane_type;
	int i;

	for (i = 0; i < 6; i++) {
		plane_type = (i == 0) ? DRM_PLANE_TYPE_PRIMARY :
					DRM_PLANE_TYPE_OVERLAY;

		plane = drmm_universal_plane_alloc(drm, struct sprd_plane, base,
						   1, &sprd_plane_funcs,
						   layer_fmts, ARRAY_SIZE(layer_fmts),
						   NULL, plane_type, NULL);
		if (IS_ERR(plane)) {
			drm_err(drm, "failed to init drm plane: %d\n", i);
			return plane;
		}

		drm_plane_helper_add(&plane->base, &sprd_plane_helper_funcs);

		sprd_plane_create_properties(plane, i);

		if (i == 0)
			primary = plane;
	}

	return primary;
}

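/*
 * Capture the adjusted mode as a videomode and pick the DPU interface
 * type: DPI when the attached DSI peripheral runs in video mode, EDPI
 * (typically command-mode panels using the TE signal) otherwise. The
 * DPI timing itself is programmed in sprd_dpi_init().
 */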
static void sprd_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
	struct drm_encoder *encoder;
	struct sprd_dsi *dsi;

	drm_display_mode_to_videomode(mode, &dpu->ctx.vm);

	drm_for_each_encoder_mask(encoder, crtc->dev,
				  crtc->state->encoder_mask) {
		dsi = encoder_to_dsi(encoder);

		if (dsi->slave->mode_flags & MIPI_DSI_MODE_VIDEO)
			dpu->ctx.if_type = SPRD_DPU_IF_DPI;
		else
			dpu->ctx.if_type = SPRD_DPU_IF_EDPI;
	}

	sprd_dpi_init(dpu);
}

static void sprd_crtc_atomic_enable(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);

	sprd_dpu_init(dpu);

	drm_crtc_vblank_on(&dpu->base);
}

static void sprd_crtc_atomic_disable(struct drm_crtc *crtc,
				     struct drm_atomic_state *state)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
	struct drm_device *drm = dpu->base.dev;

	drm_crtc_vblank_off(&dpu->base);

	sprd_dpu_fini(dpu);

	spin_lock_irq(&drm->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&drm->event_lock);
}

static void sprd_crtc_atomic_flush(struct drm_crtc *crtc,
				   struct drm_atomic_state *state)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);
	struct drm_device *drm = dpu->base.dev;

	sprd_dpu_flip(dpu);

	spin_lock_irq(&drm->event_lock);
	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}
	spin_unlock_irq(&drm->event_lock);
}

static int sprd_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);

	dpu_reg_set(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);

	return 0;
}

static void sprd_crtc_disable_vblank(struct drm_crtc *crtc)
{
	struct sprd_dpu *dpu = to_sprd_crtc(crtc);

	dpu_reg_clr(&dpu->ctx, REG_DPU_INT_EN, BIT_DPU_INT_VSYNC);
}

static const struct drm_crtc_helper_funcs sprd_crtc_helper_funcs = {
	.mode_set_nofb	= sprd_crtc_mode_set_nofb,
	.atomic_flush	= sprd_crtc_atomic_flush,
	.atomic_enable	= sprd_crtc_atomic_enable,
	.atomic_disable	= sprd_crtc_atomic_disable,
};

static const struct drm_crtc_funcs sprd_crtc_funcs = {
	.destroy	= drm_crtc_cleanup,
	.set_config	= drm_atomic_helper_set_config,
	.page_flip	= drm_atomic_helper_page_flip,
	.reset		= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= sprd_crtc_enable_vblank,
	.disable_vblank	= sprd_crtc_disable_vblank,
};

static struct sprd_dpu *sprd_crtc_init(struct drm_device *drm,
				       struct drm_plane *primary, struct device *dev)
{
	struct device_node *port;
	struct sprd_dpu *dpu;

	dpu = drmm_crtc_alloc_with_planes(drm, struct sprd_dpu, base,
					  primary, NULL,
					  &sprd_crtc_funcs, NULL);
	if (IS_ERR(dpu)) {
		drm_err(drm, "failed to init crtc\n");
		return dpu;
	}
	drm_crtc_helper_add(&dpu->base, &sprd_crtc_helper_funcs);

	/*
	 * set crtc port so that drm_of_find_possible_crtcs call works
	 */
	port = of_graph_get_port_by_id(dev->of_node, 0);
	if (!port) {
		drm_err(drm, "failed to find crtc output port for %s\n",
			dev->of_node->full_name);
		return ERR_PTR(-EINVAL);
	}
	dpu->base.port = port;
	of_node_put(port);

	return dpu;
}

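/*
 * DPU interrupt handler: wakes the stop/update waiters, forwards vsync
 * to the DRM vblank machinery, and on an underflow error logs a warning
 * and masks the error interrupt (it is re-armed on the next flip).
 */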
static irqreturn_t sprd_dpu_isr(int irq, void *data)
{
	struct sprd_dpu *dpu = data;
	struct dpu_context *ctx = &dpu->ctx;
	u32 reg_val, int_mask = 0;

	reg_val = readl(ctx->base + REG_DPU_INT_STS);

	/* disable err interrupt */
	if (reg_val & BIT_DPU_INT_ERR) {
		int_mask |= BIT_DPU_INT_ERR;
		drm_warn(dpu->drm, "Warning: dpu underflow!\n");
	}

	/* dpu update done isr */
	if (reg_val & BIT_DPU_INT_UPDATE_DONE) {
		ctx->evt_update = true;
		wake_up_interruptible_all(&ctx->wait_queue);
	}

	/* dpu stop done isr */
	if (reg_val & BIT_DPU_INT_DONE) {
		ctx->evt_stop = true;
		wake_up_interruptible_all(&ctx->wait_queue);
	}

	if (reg_val & BIT_DPU_INT_VSYNC)
		drm_crtc_handle_vblank(&dpu->base);

	writel(reg_val, ctx->base + REG_DPU_INT_CLR);
	dpu_reg_clr(ctx, REG_DPU_INT_EN, int_mask);

	return IRQ_HANDLED;
}

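/*
 * Map the DPU register window, make sure all interrupts are masked and
 * cleared before the handler is installed, request the IRQ and set up
 * the wait queue used by the stop/update helpers.
 */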
static int sprd_dpu_context_init(struct sprd_dpu *dpu,
				 struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct dpu_context *ctx = &dpu->ctx;
	struct resource *res;
	int ret;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res) {
		dev_err(dev, "failed to get I/O resource\n");
		return -EINVAL;
	}

	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
	if (!ctx->base) {
		dev_err(dev, "failed to map dpu registers\n");
		return -EFAULT;
	}

	ctx->irq = platform_get_irq(pdev, 0);
	if (ctx->irq < 0) {
		dev_err(dev, "failed to get dpu irq\n");
		return ctx->irq;
	}

	/* disable and clear interrupts before registering the dpu IRQ. */
	writel(0x00, ctx->base + REG_DPU_INT_EN);
	writel(0xff, ctx->base + REG_DPU_INT_CLR);

	ret = devm_request_irq(dev, ctx->irq, sprd_dpu_isr,
			       IRQF_TRIGGER_NONE, "DPU", dpu);
	if (ret) {
		dev_err(dev, "failed to register dpu irq handler\n");
		return ret;
	}

	init_waitqueue_head(&ctx->wait_queue);

	return 0;
}

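/*
 * Component bind callback: create the planes and the CRTC, then map the
 * hardware resources for this DPU instance.
 */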
static int sprd_dpu_bind(struct device *dev, struct device *master, void *data)
{
	struct drm_device *drm = data;
	struct sprd_dpu *dpu;
	struct sprd_plane *plane;
	int ret;

	plane = sprd_planes_init(drm);
	if (IS_ERR(plane))
		return PTR_ERR(plane);

	dpu = sprd_crtc_init(drm, &plane->base, dev);
	if (IS_ERR(dpu))
		return PTR_ERR(dpu);

	dpu->drm = drm;
	dev_set_drvdata(dev, dpu);

	ret = sprd_dpu_context_init(dpu, dev);
	if (ret)
		return ret;

	return 0;
}

static const struct component_ops dpu_component_ops = {
	.bind = sprd_dpu_bind,
};

static const struct of_device_id dpu_match_table[] = {
	{ .compatible = "sprd,sharkl3-dpu" },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, dpu_match_table);

static int sprd_dpu_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &dpu_component_ops);
}

static int sprd_dpu_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &dpu_component_ops);

	return 0;
}

struct platform_driver sprd_dpu_driver = {
	.probe = sprd_dpu_probe,
	.remove = sprd_dpu_remove,
	.driver = {
		.name = "sprd-dpu-drv",
		.of_match_table = dpu_match_table,
	},
};

MODULE_AUTHOR("Leon He <leon.he@unisoc.com>");
MODULE_AUTHOR("Kevin Tang <kevin.tang@unisoc.com>");
MODULE_DESCRIPTION("Unisoc Display Controller Driver");
MODULE_LICENSE("GPL v2");