1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (C) 2012 Texas Instruments
4  * Author: Rob Clark <robdclark@gmail.com>
5  */
6 
7 #include <linux/delay.h>
8 #include <linux/dma-mapping.h>
9 #include <linux/of_graph.h>
10 #include <linux/pm_runtime.h>
11 
12 #include <drm/drm_atomic.h>
13 #include <drm/drm_atomic_helper.h>
14 #include <drm/drm_crtc.h>
15 #include <drm/drm_fb_cma_helper.h>
16 #include <drm/drm_fourcc.h>
17 #include <drm/drm_gem_cma_helper.h>
18 #include <drm/drm_modeset_helper_vtables.h>
19 #include <drm/drm_print.h>
20 #include <drm/drm_vblank.h>
21 
22 #include "tilcdc_drv.h"
23 #include "tilcdc_regs.h"
24 
25 #define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
26 #define TILCDC_PALETTE_SIZE			32
27 #define TILCDC_PALETTE_FIRST_ENTRY		0x4000
28 
/*
 * Per-CRTC driver state, embedding the DRM CRTC object.
 * Use to_tilcdc_crtc() to map a struct drm_crtc back to this wrapper.
 */
struct tilcdc_crtc {
	struct drm_crtc base;

	struct drm_plane primary;		/* the CRTC's single (primary) plane */
	const struct tilcdc_panel_info *info;	/* set via tilcdc_crtc_set_panel_info() */
	struct drm_pending_vblank_event *event;	/* pending flip event, sent at next EOF irq */
	struct mutex enable_lock;		/* serializes enable/disable/shutdown */
	bool enabled;
	bool shutdown;				/* once set, the CRTC is never re-enabled */
	wait_queue_head_t frame_done_wq;	/* woken by the FRAME_DONE interrupt */
	bool frame_done;
	spinlock_t irq_lock;			/* protects last_vblank/next_fb vs. irq handler */

	unsigned int lcd_fck_rate;		/* cached LCDC functional clock rate (Hz) */

	ktime_t last_vblank;			/* time stamp of the last end-of-frame */
	unsigned int hvtotal_us;		/* frame period in microseconds */

	struct drm_framebuffer *next_fb;	/* fb to program at next vblank (deferred flip) */

	/* Only set if an external encoder is connected */
	bool simulate_vesa_sync;

	int sync_lost_count;			/* consecutive SYNC_LOST irqs (flood detection) */
	bool frame_intact;
	struct work_struct recover_work;	/* restarts the CRTC after a sync-lost flood */

	dma_addr_t palette_dma_handle;		/* DMA address of the 32-byte dummy palette */
	u16 *palette_base;			/* CPU address of the palette buffer */
	struct completion palette_loaded;	/* completed on the PL_LOAD_DONE interrupt */
};
#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
61 
/*
 * Program the DMA framebuffer window (base and ceiling addresses) for
 * the given fb, honouring the CRTC x/y panning offsets. Callers either
 * hold tilcdc_crtc->irq_lock or run before raster DMA is enabled.
 */
static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct drm_gem_cma_object *gem;
	dma_addr_t start, end;
	u64 dma_base_and_ceiling;

	gem = drm_fb_cma_get_gem_obj(fb, 0);

	/* DMA address of the first visible pixel, including panning. */
	start = gem->paddr + fb->offsets[0] +
		crtc->y * fb->pitches[0] +
		crtc->x * fb->format->cpp[0];

	end = start + (crtc->mode.vdisplay * fb->pitches[0]);

	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
	 * with a single instruction, if available. This should make it more
	 * unlikely that LCDC would fetch the DMA addresses in the middle of
	 * an update.
	 */
	if (priv->rev == 1)
		end -= 1;	/* rev 1 apparently wants an inclusive end address */

	dma_base_and_ceiling = (u64)end << 32 | start;
	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
}
89 
90 /*
91  * The driver currently only supports only true color formats. For
92  * true color the palette block is bypassed, but a 32 byte palette
93  * should still be loaded. The first 16-bit entry must be 0x4000 while
94  * all other entries must be zeroed.
95  */
/*
 * Load the 32-byte dummy palette: point the DMA engine at the palette
 * buffer, switch to palette-only load mode, kick raster DMA and wait
 * (max 50 ms) for the palette-loaded interrupt. Raster DMA and the
 * palette-load interrupt are left disabled on return.
 */
static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	reinit_completion(&tilcdc_crtc->palette_loaded);

	/* Tell the LCDC where the palette is located. */
	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
		     tilcdc_crtc->palette_dma_handle);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
		     (u32) tilcdc_crtc->palette_dma_handle +
		     TILCDC_PALETTE_SIZE - 1);

	/* Set dma load mode for palette loading only. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* Enable DMA Palette Loaded Interrupt */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);

	/* Enable LCDC DMA and wait for palette to be loaded. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/* Completed by the irq handler on LCDC_PL_LOAD_DONE. */
	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
					  msecs_to_jiffies(50));
	if (ret == 0)
		dev_err(dev->dev, "%s: Palette loading timeout", __func__);

	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	if (priv->rev == 1)
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
}
139 
/*
 * Enable the status interrupts (sync lost, frame done, FIFO underflow)
 * used while the raster is running. The end-of-frame (vblank) interrupt
 * is managed separately by the enable_vblank/disable_vblank hooks.
 */
static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* Drop any stale status bits before enabling. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);

	if (priv->rev == 1) {
		/* rev 1: interrupt enables live in the raster control register */
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA);
	} else {
		/* rev 2: dedicated interrupt-enable set register */
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
			LCDC_V2_UNDERFLOW_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}
156 
/*
 * Disable every interrupt this driver may have enabled, including the
 * end-of-frame and palette-load ones, as part of powering the CRTC down.
 */
static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* disable irqs that we might have enabled: */
	if (priv->rev == 1) {
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		/* rev 2: write the bits into the dedicated clear register */
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}
175 
/*
 * Pulse the LCDC main reset. Only rev 2 has the clock reset register;
 * on rev 1 this is a no-op.
 */
static void reset(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;

	if (priv->rev != 2)
		return;

	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
	usleep_range(250, 1000);	/* let the reset settle before releasing */
	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
}
188 
189 /*
190  * Calculate the percentage difference between the requested pixel clock rate
191  * and the effective rate resulting from calculating the clock divider value.
192  */
193 static unsigned int tilcdc_pclk_diff(unsigned long rate,
194 				     unsigned long real_rate)
195 {
196 	int r = rate / 100, rr = real_rate / 100;
197 
198 	return (unsigned int)(abs(((rr - r) * 100) / r));
199 }
200 
/*
 * Program the pixel clock for the current mode. First try to set the
 * functional clock to mode->clock * 2 with a fixed divider of 2; if the
 * clk framework cannot deliver a rate within 5%, fall back to keeping
 * the current clock rate and recomputing the divider from it. Finally
 * program the LCDC clock divisor (and, on rev 2, the core clock enables).
 */
static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long clk_rate, real_rate, req_rate;
	unsigned int clkdiv;
	int ret;

	clkdiv = 2; /* first try using a standard divider of 2 */

	/* mode.clock is in KHz, set_rate wants parameter in Hz */
	req_rate = crtc->mode.clock * 1000;

	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
	clk_rate = clk_get_rate(priv->clk);
	if (ret < 0 || tilcdc_pclk_diff(req_rate, clk_rate) > 5) {
		/*
		 * If we fail to set the clock rate (some architectures don't
		 * use the common clock framework yet and may not implement
		 * all the clk API calls for every clock), try the next best
		 * thing: adjusting the clock divider, unless clk_get_rate()
		 * failed as well.
		 */
		if (!clk_rate) {
			/* Nothing more we can do. Just bail out. */
			dev_err(dev->dev,
				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
			return;
		}

		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);

		/*
		 * Emit a warning if the real clock rate resulting from the
		 * calculated divider differs much from the requested rate.
		 *
		 * 5% is an arbitrary value - LCDs are usually quite tolerant
		 * about pixel clock rates.
		 */
		real_rate = clkdiv * req_rate;

		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
			dev_warn(dev->dev,
				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
				 clk_rate, real_rate);
		}
	}

	/* Cache the rate so tilcdc_crtc_update_clk() can detect changes. */
	tilcdc_crtc->lcd_fck_rate = clk_rate;

	DBG("lcd_clk=%u, mode clock=%d, div=%u",
	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
		     LCDC_RASTER_MODE);

	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
				LCDC_V2_CORE_CLK_EN);
}
264 
265 static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
266 {
267 	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
268 			      mode->clock);
269 }
270 
271 static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
272 {
273 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
274 	struct drm_device *dev = crtc->dev;
275 	struct tilcdc_drm_private *priv = dev->dev_private;
276 	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
277 	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
278 	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
279 	struct drm_framebuffer *fb = crtc->primary->state->fb;
280 
281 	if (WARN_ON(!info))
282 		return;
283 
284 	if (WARN_ON(!fb))
285 		return;
286 
287 	/* Configure the Burst Size and fifo threshold of DMA: */
288 	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
289 	switch (info->dma_burst_sz) {
290 	case 1:
291 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
292 		break;
293 	case 2:
294 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
295 		break;
296 	case 4:
297 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
298 		break;
299 	case 8:
300 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
301 		break;
302 	case 16:
303 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
304 		break;
305 	default:
306 		dev_err(dev->dev, "invalid burst size\n");
307 		return;
308 	}
309 	reg |= (info->fifo_th << 8);
310 	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
311 
312 	/* Configure timings: */
313 	hbp = mode->htotal - mode->hsync_end;
314 	hfp = mode->hsync_start - mode->hdisplay;
315 	hsw = mode->hsync_end - mode->hsync_start;
316 	vbp = mode->vtotal - mode->vsync_end;
317 	vfp = mode->vsync_start - mode->vdisplay;
318 	vsw = mode->vsync_end - mode->vsync_start;
319 
320 	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
321 	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
322 
323 	/* Set AC Bias Period and Number of Transitions per Interrupt: */
324 	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
325 	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
326 		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
327 
328 	/*
329 	 * subtract one from hfp, hbp, hsw because the hardware uses
330 	 * a value of 0 as 1
331 	 */
332 	if (priv->rev == 2) {
333 		/* clear bits we're going to set */
334 		reg &= ~0x78000033;
335 		reg |= ((hfp-1) & 0x300) >> 8;
336 		reg |= ((hbp-1) & 0x300) >> 4;
337 		reg |= ((hsw-1) & 0x3c0) << 21;
338 	}
339 	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
340 
341 	reg = (((mode->hdisplay >> 4) - 1) << 4) |
342 		(((hbp-1) & 0xff) << 24) |
343 		(((hfp-1) & 0xff) << 16) |
344 		(((hsw-1) & 0x3f) << 10);
345 	if (priv->rev == 2)
346 		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
347 	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
348 
349 	reg = ((mode->vdisplay - 1) & 0x3ff) |
350 		((vbp & 0xff) << 24) |
351 		((vfp & 0xff) << 16) |
352 		(((vsw-1) & 0x3f) << 10);
353 	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
354 
355 	/*
356 	 * be sure to set Bit 10 for the V2 LCDC controller,
357 	 * otherwise limited to 1024 pixels width, stopping
358 	 * 1920x1080 being supported.
359 	 */
360 	if (priv->rev == 2) {
361 		if ((mode->vdisplay - 1) & 0x400) {
362 			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
363 				LCDC_LPP_B10);
364 		} else {
365 			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
366 				LCDC_LPP_B10);
367 		}
368 	}
369 
370 	/* Configure display type: */
371 	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
372 		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
373 		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
374 		  0x000ff000 /* Palette Loading Delay bits */);
375 	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
376 	if (info->tft_alt_mode)
377 		reg |= LCDC_TFT_ALT_ENABLE;
378 	if (priv->rev == 2) {
379 		switch (fb->format->format) {
380 		case DRM_FORMAT_BGR565:
381 		case DRM_FORMAT_RGB565:
382 			break;
383 		case DRM_FORMAT_XBGR8888:
384 		case DRM_FORMAT_XRGB8888:
385 			reg |= LCDC_V2_TFT_24BPP_UNPACK;
386 			fallthrough;
387 		case DRM_FORMAT_BGR888:
388 		case DRM_FORMAT_RGB888:
389 			reg |= LCDC_V2_TFT_24BPP_MODE;
390 			break;
391 		default:
392 			dev_err(dev->dev, "invalid pixel format\n");
393 			return;
394 		}
395 	}
396 	reg |= info->fdd < 12;
397 	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
398 
399 	if (info->invert_pxl_clk)
400 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
401 	else
402 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
403 
404 	if (info->sync_ctrl)
405 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
406 	else
407 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
408 
409 	if (info->sync_edge)
410 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
411 	else
412 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
413 
414 	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
415 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
416 	else
417 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
418 
419 	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
420 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
421 	else
422 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
423 
424 	if (info->raster_order)
425 		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
426 	else
427 		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
428 
429 	tilcdc_crtc_set_clk(crtc);
430 
431 	tilcdc_crtc_load_palette(crtc);
432 
433 	set_scanout(crtc, fb);
434 
435 	crtc->hwmode = crtc->state->adjusted_mode;
436 
437 	tilcdc_crtc->hvtotal_us =
438 		tilcdc_mode_hvtotal(&crtc->hwmode);
439 }
440 
/*
 * Power up and start the CRTC: runtime-PM get, reset the LCDC, program
 * mode/clock/palette, enable the status interrupts and start raster
 * DMA. No-op when already enabled or after tilcdc_crtc_shutdown().
 */
static void tilcdc_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long flags;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}

	pm_runtime_get_sync(dev->dev);

	reset(crtc);

	tilcdc_crtc_set_mode(crtc);

	tilcdc_crtc_enable_irqs(dev);

	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* There is no real chance for a race here as the time stamp
	 * is taken before the raster DMA is started. The spin-lock is
	 * taken to have a memory barrier after taking the time-stamp
	 * and to avoid a context switch between taking the stamp and
	 * enabling the raster.
	 */
	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
	tilcdc_crtc->last_vblank = ktime_get();
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	drm_crtc_vblank_on(crtc);

	tilcdc_crtc->enabled = true;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
482 
/* Atomic helper .atomic_enable hook; the atomic state is not needed. */
static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_atomic_state *state)
{
	tilcdc_crtc_enable(crtc);
}
488 
/*
 * Stop the CRTC: disable raster DMA, wait (max 500 ms) for the final
 * FRAME_DONE interrupt so the hardware is quiescent, then disable irqs
 * and drop the runtime-PM reference. When @shutdown is true the CRTC is
 * additionally latched off and can never be re-enabled (teardown path).
 */
static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (shutdown)
		tilcdc_crtc->shutdown = true;
	if (!tilcdc_crtc->enabled) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}
	tilcdc_crtc->frame_done = false;
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/*
	 * Wait for framedone irq which will still come before putting
	 * things to sleep..
	 */
	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
				 tilcdc_crtc->frame_done,
				 msecs_to_jiffies(500));
	if (ret == 0)
		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
			__func__);

	drm_crtc_vblank_off(crtc);

	tilcdc_crtc_disable_irqs(dev);

	pm_runtime_put_sync(dev->dev);

	tilcdc_crtc->enabled = false;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
525 
/* Disable the CRTC without latching the shutdown flag (can re-enable). */
static void tilcdc_crtc_disable(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, false);
}
530 
/* Atomic helper .atomic_disable hook; the atomic state is not needed. */
static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_atomic_state *state)
{
	tilcdc_crtc_disable(crtc);
}
536 
/*
 * Atomic helper .atomic_flush hook: complete the new state's event, if
 * any, under the event lock.
 *
 * NOTE(review): the event is sent immediately at flush time rather than
 * at the next vblank - presumably page-flip completion is handled
 * separately via tilcdc_crtc_update_fb()/the EOF interrupt; verify
 * against the plane update path.
 */
static void tilcdc_crtc_atomic_flush(struct drm_crtc *crtc,
				     struct drm_crtc_state *old_state)
{
	if (!crtc->state->event)
		return;

	spin_lock_irq(&crtc->dev->event_lock);
	drm_crtc_send_vblank_event(crtc, crtc->state->event);
	crtc->state->event = NULL;
	spin_unlock_irq(&crtc->dev->event_lock);
}
548 
/* Permanently disable the CRTC (driver unload/shutdown path). */
void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, true);
}
553 
554 static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
555 {
556 	return crtc->state && crtc->state->enable && crtc->state->active;
557 }
558 
/*
 * Worker queued by the irq handler after a sync-lost flood: restart the
 * CRTC with a full disable/enable cycle, under the modeset lock, unless
 * the CRTC has been turned off in the meantime.
 */
static void tilcdc_crtc_recover_work(struct work_struct *work)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, recover_work);
	struct drm_crtc *crtc = &tilcdc_crtc->base;

	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);

	drm_modeset_lock(&crtc->mutex, NULL);

	if (!tilcdc_crtc_is_on(crtc))
		goto out;

	tilcdc_crtc_disable(crtc);
	tilcdc_crtc_enable(crtc);
out:
	drm_modeset_unlock(&crtc->mutex);
}
577 
/*
 * DRM .destroy hook: shut the CRTC down, flush any pending recover
 * work on the driver workqueue, drop the OF port reference and clean
 * up the DRM CRTC object. The tilcdc_crtc itself is devm-allocated.
 */
static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;

	tilcdc_crtc_shutdown(crtc);

	flush_workqueue(priv->wq);

	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
}
589 
/*
 * Queue a page flip to @fb. If the next vblank is closer than
 * TILCDC_VBLANK_SAFETY_THRESHOLD_US, defer programming the scanout
 * address to the EOF interrupt handler (via next_fb) so the LCDC does
 * not fetch a half-updated address pair; otherwise program it now.
 * @event is completed from the EOF interrupt handler.
 *
 * Returns 0, or -EBUSY if a flip is already pending.
 *
 * NOTE(review): tilcdc_crtc->event is written before taking any lock
 * and before the enabled check - presumably serialized by the atomic
 * commit machinery; verify against the caller in the plane code.
 */
int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	tilcdc_crtc->event = event;

	mutex_lock(&tilcdc_crtc->enable_lock);

	if (tilcdc_crtc->enabled) {
		unsigned long flags;
		ktime_t next_vblank;
		s64 tdiff;

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		/* Time remaining until the next end-of-frame. */
		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
					   tilcdc_crtc->hvtotal_us);
		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
			tilcdc_crtc->next_fb = fb;	/* too close: defer to irq */
		else
			set_scanout(crtc, fb);

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
	}

	mutex_unlock(&tilcdc_crtc->enable_lock);

	return 0;
}
629 
630 static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
631 		const struct drm_display_mode *mode,
632 		struct drm_display_mode *adjusted_mode)
633 {
634 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
635 
636 	if (!tilcdc_crtc->simulate_vesa_sync)
637 		return true;
638 
639 	/*
640 	 * tilcdc does not generate VESA-compliant sync but aligns
641 	 * VS on the second edge of HS instead of first edge.
642 	 * We use adjusted_mode, to fixup sync by aligning both rising
643 	 * edges and add HSKEW offset to fix the sync.
644 	 */
645 	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
646 	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
647 
648 	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
649 		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
650 		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
651 	} else {
652 		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
653 		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
654 	}
655 
656 	return true;
657 }
658 
/*
 * Helper .atomic_check hook: an active CRTC state must carry a primary
 * plane state attached to this CRTC (the LCDC cannot scan out without
 * a framebuffer). Returns 0 or -EINVAL.
 */
static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_crtc_state *state)
{
	/* If we are not active we don't care */
	if (!state->active)
		return 0;

	if (state->state->planes[0].ptr != crtc->primary ||
	    state->state->planes[0].state == NULL ||
	    state->state->planes[0].state->crtc != crtc) {
		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
		return -EINVAL;
	}

	return 0;
}
675 
/*
 * DRM .enable_vblank hook: clear any stale end-of-frame status and
 * enable the EOF interrupt, which serves as the vblank source.
 */
static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	unsigned long flags;

	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

	/* Drop a possibly pending EOF status so we don't fire immediately. */
	tilcdc_clear_irqstatus(dev, LCDC_END_OF_FRAME0);

	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
			   LCDC_V1_END_OF_FRAME_INT_ENA);
	else
		tilcdc_set(dev, LCDC_INT_ENABLE_SET_REG,
			   LCDC_V2_END_OF_FRAME0_INT_ENA);

	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	return 0;
}
698 
699 static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
700 {
701 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
702 	struct drm_device *dev = crtc->dev;
703 	struct tilcdc_drm_private *priv = dev->dev_private;
704 	unsigned long flags;
705 
706 	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
707 
708 	if (priv->rev == 1)
709 		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
710 			     LCDC_V1_END_OF_FRAME_INT_ENA);
711 	else
712 		tilcdc_clear(dev, LCDC_INT_ENABLE_SET_REG,
713 			     LCDC_V2_END_OF_FRAME0_INT_ENA);
714 
715 	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
716 }
717 
/*
 * DRM .reset hook: reset the atomic state, then make sure the raster is
 * actually off (it may have been left running for some reason, e.g.
 * before the driver took over - assumption, see comment below), waiting
 * for the final FRAME_DONE so the hardware is quiescent.
 */
static void tilcdc_crtc_reset(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	drm_atomic_helper_crtc_reset(crtc);

	/* Turn the raster off if it for some reason is on. */
	pm_runtime_get_sync(dev->dev);
	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
		/* Enable DMA Frame Done Interrupt */
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
		tilcdc_clear_irqstatus(dev, 0xffffffff);

		tilcdc_crtc->frame_done = false;
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
					 tilcdc_crtc->frame_done,
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
				__func__);
	}
	pm_runtime_put_sync(dev->dev);
}
745 
/* CRTC core hooks; modeset and page flips go through the atomic helpers. */
static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
	.destroy        = tilcdc_crtc_destroy,
	.set_config     = drm_atomic_helper_set_config,
	.page_flip      = drm_atomic_helper_page_flip,
	.reset		= tilcdc_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= tilcdc_crtc_enable_vblank,
	.disable_vblank	= tilcdc_crtc_disable_vblank,
};
756 
757 static enum drm_mode_status
758 tilcdc_crtc_mode_valid(struct drm_crtc *crtc,
759 		       const struct drm_display_mode *mode)
760 {
761 	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
762 	unsigned int bandwidth;
763 	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
764 
765 	/*
766 	 * check to see if the width is within the range that
767 	 * the LCD Controller physically supports
768 	 */
769 	if (mode->hdisplay > priv->max_width)
770 		return MODE_VIRTUAL_X;
771 
772 	/* width must be multiple of 16 */
773 	if (mode->hdisplay & 0xf)
774 		return MODE_VIRTUAL_X;
775 
776 	if (mode->vdisplay > 2048)
777 		return MODE_VIRTUAL_Y;
778 
779 	DBG("Processing mode %dx%d@%d with pixel clock %d",
780 		mode->hdisplay, mode->vdisplay,
781 		drm_mode_vrefresh(mode), mode->clock);
782 
783 	hbp = mode->htotal - mode->hsync_end;
784 	hfp = mode->hsync_start - mode->hdisplay;
785 	hsw = mode->hsync_end - mode->hsync_start;
786 	vbp = mode->vtotal - mode->vsync_end;
787 	vfp = mode->vsync_start - mode->vdisplay;
788 	vsw = mode->vsync_end - mode->vsync_start;
789 
790 	if ((hbp-1) & ~0x3ff) {
791 		DBG("Pruning mode: Horizontal Back Porch out of range");
792 		return MODE_HBLANK_WIDE;
793 	}
794 
795 	if ((hfp-1) & ~0x3ff) {
796 		DBG("Pruning mode: Horizontal Front Porch out of range");
797 		return MODE_HBLANK_WIDE;
798 	}
799 
800 	if ((hsw-1) & ~0x3ff) {
801 		DBG("Pruning mode: Horizontal Sync Width out of range");
802 		return MODE_HSYNC_WIDE;
803 	}
804 
805 	if (vbp & ~0xff) {
806 		DBG("Pruning mode: Vertical Back Porch out of range");
807 		return MODE_VBLANK_WIDE;
808 	}
809 
810 	if (vfp & ~0xff) {
811 		DBG("Pruning mode: Vertical Front Porch out of range");
812 		return MODE_VBLANK_WIDE;
813 	}
814 
815 	if ((vsw-1) & ~0x3f) {
816 		DBG("Pruning mode: Vertical Sync Width out of range");
817 		return MODE_VSYNC_WIDE;
818 	}
819 
820 	/*
821 	 * some devices have a maximum allowed pixel clock
822 	 * configured from the DT
823 	 */
824 	if (mode->clock > priv->max_pixelclock) {
825 		DBG("Pruning mode: pixel clock too high");
826 		return MODE_CLOCK_HIGH;
827 	}
828 
829 	/*
830 	 * some devices further limit the max horizontal resolution
831 	 * configured from the DT
832 	 */
833 	if (mode->hdisplay > priv->max_width)
834 		return MODE_BAD_WIDTH;
835 
836 	/* filter out modes that would require too much memory bandwidth: */
837 	bandwidth = mode->hdisplay * mode->vdisplay *
838 		drm_mode_vrefresh(mode);
839 	if (bandwidth > priv->max_bandwidth) {
840 		DBG("Pruning mode: exceeds defined bandwidth limit");
841 		return MODE_BAD;
842 	}
843 
844 	return MODE_OK;
845 }
846 
/* Atomic modeset helper hooks for the CRTC. */
static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
	.mode_valid	= tilcdc_crtc_mode_valid,
	.mode_fixup	= tilcdc_crtc_mode_fixup,
	.atomic_check	= tilcdc_crtc_atomic_check,
	.atomic_enable	= tilcdc_crtc_atomic_enable,
	.atomic_disable	= tilcdc_crtc_atomic_disable,
	.atomic_flush	= tilcdc_crtc_atomic_flush,
};
855 
856 void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
857 		const struct tilcdc_panel_info *info)
858 {
859 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
860 	tilcdc_crtc->info = info;
861 }
862 
863 void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
864 					bool simulate_vesa_sync)
865 {
866 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
867 
868 	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
869 }
870 
/*
 * Re-apply the pixel clock configuration when the cached LCDC
 * functional clock rate no longer matches clk_get_rate(). The raster
 * must not be running while the divisor changes, so an active CRTC is
 * disabled and re-enabled around the update.
 */
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	drm_modeset_lock(&crtc->mutex, NULL);
	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
		if (tilcdc_crtc_is_on(crtc)) {
			/* Keep the device powered across the off/on cycle. */
			pm_runtime_get_sync(dev->dev);
			tilcdc_crtc_disable(crtc);

			tilcdc_crtc_set_clk(crtc);

			tilcdc_crtc_enable(crtc);
			pm_runtime_put_sync(dev->dev);
		}
	}
	drm_modeset_unlock(&crtc->mutex);
}
891 
/* Consecutive SYNC_LOST interrupts before we give up and reset the CRTC. */
#define SYNC_LOST_COUNT_LIMIT 50

/*
 * LCDC interrupt handler. Handles end-of-frame (vblank + deferred page
 * flips), FIFO underflow, palette-load completion, sync-lost (with
 * flood detection on rev 2) and frame-done events.
 */
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat, reg;

	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		unsigned long flags;
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		tilcdc_crtc->last_vblank = now;

		/* Program a flip that was deferred by tilcdc_crtc_update_fb(). */
		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			/* flip lands only next frame; hold the event until then */
			skip_event = true;
		}

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

		drm_crtc_handle_vblank(crtc);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			spin_lock_irqsave(&dev->event_lock, flags);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock_irqrestore(&dev->event_lock, flags);
		}

		/* A clean frame resets the sync-lost flood counter. */
		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	if (stat & LCDC_PL_LOAD_DONE) {
		/* Wake tilcdc_crtc_load_palette() and stop further PL irqs. */
		complete(&tilcdc_crtc->palette_loaded);
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_PL_INT_ENA);
		else
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_V2_PL_INT_ENA);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (priv->rev == 1) {
			/* rev 1: restart the raster to resynchronize */
			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
			if (reg & LCDC_RASTER_ENABLE) {
				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
					     LCDC_RASTER_ENABLE);
				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
					   LCDC_RASTER_ENABLE);
			}
		} else {
			/* rev 2: too many in a row -> full CRTC reset in a worker */
			if (tilcdc_crtc->sync_lost_count++ >
			    SYNC_LOST_COUNT_LIMIT) {
				dev_err(dev->dev,
					"%s(0x%08x): Sync lost flood detected, recovering",
					__func__, stat);
				queue_work(system_wq,
					   &tilcdc_crtc->recover_work);
				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
					     LCDC_SYNC_LOST);
				tilcdc_crtc->sync_lost_count = 0;
			}
		}
	}

	if (stat & LCDC_FRAME_DONE) {
		/* Wake waiters in tilcdc_crtc_off()/tilcdc_crtc_reset(). */
		tilcdc_crtc->frame_done = true;
		wake_up(&tilcdc_crtc->frame_done_wq);
		/* rev 1 lcdc appears to hang if irq is not disabled here */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_FRAME_DONE_INT_ENA);
	}

	/* For revision 2 only */
	if (priv->rev == 2) {
		/* Indicate to LCDC that the interrupt service routine has
		 * completed, see 13.3.6.1.6 in AM335x TRM.
		 */
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	return IRQ_HANDLED;
}
1004 
/*
 * Allocate and register the tilcdc CRTC: set up the dummy palette
 * buffer, the primary plane, locks, wait queues and the recover worker,
 * then register the CRTC with DRM and (for componentized devices) look
 * up its OF graph port. Returns 0 or a negative errno.
 *
 * NOTE(review): the fail path calls tilcdc_crtc_destroy() even when
 * drm_crtc_init_with_planes() has not run yet; drm_crtc_cleanup() on a
 * never-initialized CRTC looks suspect - confirm it is safe here.
 */
int tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc)
		return -ENOMEM;

	/* 32-byte dummy palette: first entry 0x4000, rest zeroed. */
	init_completion(&tilcdc_crtc->palette_loaded);
	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
					TILCDC_PALETTE_SIZE,
					&tilcdc_crtc->palette_dma_handle,
					GFP_KERNEL | __GFP_ZERO);
	if (!tilcdc_crtc->palette_base)
		return -ENOMEM;
	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;

	crtc = &tilcdc_crtc->base;

	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
	if (ret < 0)
		goto fail;

	mutex_init(&tilcdc_crtc->enable_lock);

	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	spin_lock_init(&tilcdc_crtc->irq_lock);
	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);

	ret = drm_crtc_init_with_planes(dev, crtc,
					&tilcdc_crtc->primary,
					NULL,
					&tilcdc_crtc_funcs,
					"tilcdc crtc");
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	if (priv->is_componentized) {
		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %pOF\n",
				dev->dev->of_node);
			ret = -EINVAL;
			goto fail;
		}
	}

	priv->crtc = crtc;
	return 0;

fail:
	tilcdc_crtc_destroy(crtc);
	return ret;
}
1065