1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (C) 2012 Texas Instruments
4  * Author: Rob Clark <robdclark@gmail.com>
5  */
6 
7 #include <linux/delay.h>
8 #include <linux/dma-mapping.h>
9 #include <linux/of_graph.h>
10 #include <linux/pm_runtime.h>
11 
12 #include <drm/drm_atomic.h>
13 #include <drm/drm_atomic_helper.h>
14 #include <drm/drm_crtc.h>
15 #include <drm/drm_fb_cma_helper.h>
16 #include <drm/drm_fourcc.h>
17 #include <drm/drm_gem_cma_helper.h>
18 #include <drm/drm_modeset_helper_vtables.h>
19 #include <drm/drm_print.h>
20 #include <drm/drm_vblank.h>
21 
22 #include "tilcdc_drv.h"
23 #include "tilcdc_regs.h"
24 
25 #define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
26 #define TILCDC_PALETTE_SIZE			32
27 #define TILCDC_PALETTE_FIRST_ENTRY		0x4000
28 
/* Per-CRTC driver state, embedding the DRM CRTC object. */
struct tilcdc_crtc {
	struct drm_crtc base;

	/* The single (primary) plane driven by this CRTC. */
	struct drm_plane primary;
	/* Panel timing/config; set via tilcdc_crtc_set_panel_info(). */
	const struct tilcdc_panel_info *info;
	/* Pending page-flip event, delivered on the next end-of-frame irq. */
	struct drm_pending_vblank_event *event;
	/* Serializes enable/disable/shutdown transitions. */
	struct mutex enable_lock;
	bool enabled;
	bool shutdown;
	/* Woken by the FRAME_DONE irq once raster DMA has stopped. */
	wait_queue_head_t frame_done_wq;
	bool frame_done;
	/* Protects last_vblank/next_fb against the irq handler. */
	spinlock_t irq_lock;

	/* Cached LCD functional clock rate in Hz. */
	unsigned int lcd_fck_rate;

	/* Timestamp of the most recent end-of-frame interrupt. */
	ktime_t last_vblank;
	/* Frame period in microseconds (htotal * vtotal / pixel clock). */
	unsigned int hvtotal_us;

	/* Framebuffer to program at the next vblank (deferred flip). */
	struct drm_framebuffer *next_fb;

	/* Only set if an external encoder is connected */
	bool simulate_vesa_sync;

	/* Consecutive SYNC_LOST irqs; used for rev-2 flood detection. */
	int sync_lost_count;
	bool frame_intact;
	/* Full disable/enable recovery, see tilcdc_crtc_recover_work(). */
	struct work_struct recover_work;

	/* 32-byte dummy palette required even for true-color modes. */
	dma_addr_t palette_dma_handle;
	u16 *palette_base;
	struct completion palette_loaded;
};
#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
61 
/*
 * Program the LCDC DMA base and ceiling registers so the raster engine
 * scans out @fb, honouring the CRTC's current x/y panning offset.
 * Caller must hold irq_lock or otherwise guarantee no concurrent update.
 */
static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct drm_gem_cma_object *gem;
	dma_addr_t start, end;
	u64 dma_base_and_ceiling;

	gem = drm_fb_cma_get_gem_obj(fb, 0);

	/* Physical address of the first visible pixel of plane 0. */
	start = gem->paddr + fb->offsets[0] +
		crtc->y * fb->pitches[0] +
		crtc->x * fb->format->cpp[0];

	end = start + (crtc->mode.vdisplay * fb->pitches[0]);

	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
	 * with a single instruction, if available. This should make it more
	 * unlikely that LCDC would fetch the DMA addresses in the middle of
	 * an update.
	 */
	if (priv->rev == 1)
		end -= 1;

	dma_base_and_ceiling = (u64)end << 32 | start;
	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
}
89 
90 /*
91  * The driver currently only supports only true color formats. For
92  * true color the palette block is bypassed, but a 32 byte palette
93  * should still be loaded. The first 16-bit entry must be 0x4000 while
94  * all other entries must be zeroed.
95  */
static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	reinit_completion(&tilcdc_crtc->palette_loaded);

	/* Tell the LCDC where the palette is located. */
	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
		     tilcdc_crtc->palette_dma_handle);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
		     (u32) tilcdc_crtc->palette_dma_handle +
		     TILCDC_PALETTE_SIZE - 1);

	/* Set dma load mode for palette loading only. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* Enable DMA Palette Loaded Interrupt */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);

	/* Enable LCDC DMA and wait for palette to be loaded. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/* The PL_LOAD_DONE irq (tilcdc_crtc_irq) completes this. */
	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
					  msecs_to_jiffies(50));
	if (ret == 0)
		dev_err(dev->dev, "%s: Palette loading timeout", __func__);

	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	if (priv->rev == 1)
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
}
139 
/* Clear stale interrupt status and enable the error/frame-done irqs. */
static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	tilcdc_clear_irqstatus(dev, 0xffffffff);

	/* rev 1 keeps enables in RASTER_CTRL; rev 2 has a SET register. */
	if (priv->rev == 1) {
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
			LCDC_V2_UNDERFLOW_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}
156 
/* Disable every interrupt source this driver may have enabled. */
static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* disable irqs that we might have enabled: */
	if (priv->rev == 1) {
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		/* rev 2: writing 1s to the CLR register disables sources. */
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}
175 
176 static void reset(struct drm_crtc *crtc)
177 {
178 	struct drm_device *dev = crtc->dev;
179 	struct tilcdc_drm_private *priv = dev->dev_private;
180 
181 	if (priv->rev != 2)
182 		return;
183 
184 	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
185 	usleep_range(250, 1000);
186 	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
187 }
188 
189 /*
190  * Calculate the percentage difference between the requested pixel clock rate
191  * and the effective rate resulting from calculating the clock divider value.
192  */
/*
 * Return the absolute percentage difference between the requested pixel
 * clock rate and the rate actually achieved by the clock framework.
 */
static unsigned int tilcdc_pclk_diff(unsigned long rate,
				     unsigned long real_rate)
{
	int requested = rate / 100;
	int actual = real_rate / 100;
	int delta_percent = ((actual - requested) * 100) / requested;

	return (unsigned int)abs(delta_percent);
}
200 
/*
 * Program the LCD pixel clock for the CRTC's current mode: set the
 * functional clock via the clock framework when possible, otherwise fall
 * back to adjusting the LCDC's own clock divider.
 */
static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long clk_rate, real_rate, req_rate;
	unsigned int clkdiv;
	int ret;

	clkdiv = 2; /* first try using a standard divider of 2 */

	/* mode.clock is in KHz, set_rate wants parameter in Hz */
	req_rate = crtc->mode.clock * 1000;

	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
	clk_rate = clk_get_rate(priv->clk);
	if (ret < 0 || tilcdc_pclk_diff(req_rate, clk_rate) > 5) {
		/*
		 * If we fail to set the clock rate (some architectures don't
		 * use the common clock framework yet and may not implement
		 * all the clk API calls for every clock), try the next best
		 * thing: adjusting the clock divider, unless clk_get_rate()
		 * failed as well.
		 */
		if (!clk_rate) {
			/* Nothing more we can do. Just bail out. */
			dev_err(dev->dev,
				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
			return;
		}

		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);

		/*
		 * Emit a warning if the real clock rate resulting from the
		 * calculated divider differs much from the requested rate.
		 *
		 * 5% is an arbitrary value - LCDs are usually quite tolerant
		 * about pixel clock rates.
		 */
		real_rate = clkdiv * req_rate;

		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
			dev_warn(dev->dev,
				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
				 clk_rate, real_rate);
		}
	}

	/* Cache the rate so tilcdc_crtc_update_clk() can detect changes. */
	tilcdc_crtc->lcd_fck_rate = clk_rate;

	DBG("lcd_clk=%u, mode clock=%d, div=%u",
	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
		     LCDC_RASTER_MODE);

	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
				LCDC_V2_CORE_CLK_EN);
}
264 
/*
 * Frame period of @mode in microseconds: total pixels per frame divided
 * by the pixel clock (mode->clock is in kHz, hence the factor of 1000).
 */
static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
{
	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
			      mode->clock);
}
270 
271 static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
272 {
273 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
274 	struct drm_device *dev = crtc->dev;
275 	struct tilcdc_drm_private *priv = dev->dev_private;
276 	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
277 	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
278 	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
279 	struct drm_framebuffer *fb = crtc->primary->state->fb;
280 
281 	if (WARN_ON(!info))
282 		return;
283 
284 	if (WARN_ON(!fb))
285 		return;
286 
287 	/* Configure the Burst Size and fifo threshold of DMA: */
288 	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
289 	switch (info->dma_burst_sz) {
290 	case 1:
291 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
292 		break;
293 	case 2:
294 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
295 		break;
296 	case 4:
297 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
298 		break;
299 	case 8:
300 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
301 		break;
302 	case 16:
303 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
304 		break;
305 	default:
306 		dev_err(dev->dev, "invalid burst size\n");
307 		return;
308 	}
309 	reg |= (info->fifo_th << 8);
310 	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
311 
312 	/* Configure timings: */
313 	hbp = mode->htotal - mode->hsync_end;
314 	hfp = mode->hsync_start - mode->hdisplay;
315 	hsw = mode->hsync_end - mode->hsync_start;
316 	vbp = mode->vtotal - mode->vsync_end;
317 	vfp = mode->vsync_start - mode->vdisplay;
318 	vsw = mode->vsync_end - mode->vsync_start;
319 
320 	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
321 	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
322 
323 	/* Set AC Bias Period and Number of Transitions per Interrupt: */
324 	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
325 	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
326 		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
327 
328 	/*
329 	 * subtract one from hfp, hbp, hsw because the hardware uses
330 	 * a value of 0 as 1
331 	 */
332 	if (priv->rev == 2) {
333 		/* clear bits we're going to set */
334 		reg &= ~0x78000033;
335 		reg |= ((hfp-1) & 0x300) >> 8;
336 		reg |= ((hbp-1) & 0x300) >> 4;
337 		reg |= ((hsw-1) & 0x3c0) << 21;
338 	}
339 	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
340 
341 	reg = (((mode->hdisplay >> 4) - 1) << 4) |
342 		(((hbp-1) & 0xff) << 24) |
343 		(((hfp-1) & 0xff) << 16) |
344 		(((hsw-1) & 0x3f) << 10);
345 	if (priv->rev == 2)
346 		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
347 	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
348 
349 	reg = ((mode->vdisplay - 1) & 0x3ff) |
350 		((vbp & 0xff) << 24) |
351 		((vfp & 0xff) << 16) |
352 		(((vsw-1) & 0x3f) << 10);
353 	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
354 
355 	/*
356 	 * be sure to set Bit 10 for the V2 LCDC controller,
357 	 * otherwise limited to 1024 pixels width, stopping
358 	 * 1920x1080 being supported.
359 	 */
360 	if (priv->rev == 2) {
361 		if ((mode->vdisplay - 1) & 0x400) {
362 			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
363 				LCDC_LPP_B10);
364 		} else {
365 			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
366 				LCDC_LPP_B10);
367 		}
368 	}
369 
370 	/* Configure display type: */
371 	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
372 		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
373 		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
374 		  0x000ff000 /* Palette Loading Delay bits */);
375 	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
376 	if (info->tft_alt_mode)
377 		reg |= LCDC_TFT_ALT_ENABLE;
378 	if (priv->rev == 2) {
379 		switch (fb->format->format) {
380 		case DRM_FORMAT_BGR565:
381 		case DRM_FORMAT_RGB565:
382 			break;
383 		case DRM_FORMAT_XBGR8888:
384 		case DRM_FORMAT_XRGB8888:
385 			reg |= LCDC_V2_TFT_24BPP_UNPACK;
386 			fallthrough;
387 		case DRM_FORMAT_BGR888:
388 		case DRM_FORMAT_RGB888:
389 			reg |= LCDC_V2_TFT_24BPP_MODE;
390 			break;
391 		default:
392 			dev_err(dev->dev, "invalid pixel format\n");
393 			return;
394 		}
395 	}
396 	reg |= info->fdd < 12;
397 	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
398 
399 	if (info->invert_pxl_clk)
400 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
401 	else
402 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
403 
404 	if (info->sync_ctrl)
405 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
406 	else
407 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
408 
409 	if (info->sync_edge)
410 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
411 	else
412 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
413 
414 	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
415 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
416 	else
417 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
418 
419 	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
420 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
421 	else
422 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
423 
424 	if (info->raster_order)
425 		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
426 	else
427 		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
428 
429 	tilcdc_crtc_set_clk(crtc);
430 
431 	tilcdc_crtc_load_palette(crtc);
432 
433 	set_scanout(crtc, fb);
434 
435 	crtc->hwmode = crtc->state->adjusted_mode;
436 
437 	tilcdc_crtc->hvtotal_us =
438 		tilcdc_mode_hvtotal(&crtc->hwmode);
439 }
440 
/*
 * Power up and start the CRTC: runtime-resume, reset, program the mode,
 * enable interrupts and raster DMA. No-op if already enabled or if the
 * CRTC has been shut down.
 */
static void tilcdc_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long flags;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}

	pm_runtime_get_sync(dev->dev);

	reset(crtc);

	tilcdc_crtc_set_mode(crtc);

	tilcdc_crtc_enable_irqs(dev);

	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	/* Palette was loaded by set_mode; switch DMA back to pixel data. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* There is no real chance for a race here as the time stamp
	 * is taken before the raster DMA is started. The spin-lock is
	 * taken to have a memory barrier after taking the time-stamp
	 * and to avoid a context switch between taking the stamp and
	 * enabling the raster.
	 */
	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
	tilcdc_crtc->last_vblank = ktime_get();
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	drm_crtc_vblank_on(crtc);

	tilcdc_crtc->enabled = true;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
482 
/* drm_crtc_helper_funcs.atomic_enable hook. */
static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_atomic_state *state)
{
	tilcdc_crtc_enable(crtc);
}
488 
/*
 * Stop the CRTC: disable raster DMA, wait for the FRAME_DONE irq so the
 * hardware finishes the current frame, then disable irqs and runtime-
 * suspend. If @shutdown is true, also latch the shutdown flag so any
 * later enable attempt is refused.
 */
static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (shutdown)
		tilcdc_crtc->shutdown = true;
	if (!tilcdc_crtc->enabled) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}
	tilcdc_crtc->frame_done = false;
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/*
	 * Wait for framedone irq which will still come before putting
	 * things to sleep..
	 */
	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
				 tilcdc_crtc->frame_done,
				 msecs_to_jiffies(500));
	if (ret == 0)
		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
			__func__);

	drm_crtc_vblank_off(crtc);

	tilcdc_crtc_disable_irqs(dev);

	pm_runtime_put_sync(dev->dev);

	tilcdc_crtc->enabled = false;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
525 
/* Disable the CRTC without latching the shutdown flag. */
static void tilcdc_crtc_disable(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, false);
}
530 
/* drm_crtc_helper_funcs.atomic_disable hook. */
static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_atomic_state *state)
{
	tilcdc_crtc_disable(crtc);
}
536 
/*
 * drm_crtc_helper_funcs.atomic_flush hook: deliver the commit's vblank
 * event (if any) immediately. Page-flip events are handled separately in
 * tilcdc_crtc_update_fb()/tilcdc_crtc_irq().
 */
static void tilcdc_crtc_atomic_flush(struct drm_crtc *crtc,
				     struct drm_atomic_state *state)
{
	if (!crtc->state->event)
		return;

	/* event_lock protects event delivery against the irq handler. */
	spin_lock_irq(&crtc->dev->event_lock);
	drm_crtc_send_vblank_event(crtc, crtc->state->event);
	crtc->state->event = NULL;
	spin_unlock_irq(&crtc->dev->event_lock);
}
548 
/* Permanently turn the CRTC off; further enables are refused. */
void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, true);
}
553 
554 static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
555 {
556 	return crtc->state && crtc->state->enable && crtc->state->active;
557 }
558 
/*
 * Worker queued from the irq handler on a SYNC_LOST flood: recover the
 * controller with a full disable/enable cycle under the modeset lock.
 */
static void tilcdc_crtc_recover_work(struct work_struct *work)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, recover_work);
	struct drm_crtc *crtc = &tilcdc_crtc->base;

	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);

	drm_modeset_lock(&crtc->mutex, NULL);

	/* The CRTC may have been turned off while the work was queued. */
	if (!tilcdc_crtc_is_on(crtc))
		goto out;

	tilcdc_crtc_disable(crtc);
	tilcdc_crtc_enable(crtc);
out:
	drm_modeset_unlock(&crtc->mutex);
}
577 
/*
 * drm_crtc_funcs.destroy hook: shut the CRTC down, flush any queued
 * recovery work, and release DRM/OF resources.
 */
static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;

	tilcdc_crtc_shutdown(crtc);

	/* Make sure recover_work (queued on priv->wq?) is not running.
	 * NOTE(review): recover_work is queued on system_wq in
	 * tilcdc_crtc_irq; confirm flushing priv->wq covers it. */
	flush_workqueue(priv->wq);

	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
}
589 
/*
 * Queue a page flip to @fb. If the next vblank is imminent (within
 * TILCDC_VBLANK_SAFETY_THRESHOLD_US) the scanout update is deferred to
 * the end-of-frame irq to avoid tearing; otherwise the DMA registers are
 * reprogrammed immediately. @event is delivered from the irq handler.
 *
 * Returns 0 on success, -EBUSY if a flip is already pending.
 */
int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	tilcdc_crtc->event = event;

	mutex_lock(&tilcdc_crtc->enable_lock);

	if (tilcdc_crtc->enabled) {
		unsigned long flags;
		ktime_t next_vblank;
		s64 tdiff;

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		/* Estimate the next vblank from the last one plus one
		 * frame period. */
		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
					   tilcdc_crtc->hvtotal_us);
		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
			tilcdc_crtc->next_fb = fb;
		else
			set_scanout(crtc, fb);

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
	}

	mutex_unlock(&tilcdc_crtc->enable_lock);

	return 0;
}
629 
630 static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
631 		const struct drm_display_mode *mode,
632 		struct drm_display_mode *adjusted_mode)
633 {
634 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
635 
636 	if (!tilcdc_crtc->simulate_vesa_sync)
637 		return true;
638 
639 	/*
640 	 * tilcdc does not generate VESA-compliant sync but aligns
641 	 * VS on the second edge of HS instead of first edge.
642 	 * We use adjusted_mode, to fixup sync by aligning both rising
643 	 * edges and add HSKEW offset to fix the sync.
644 	 */
645 	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
646 	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
647 
648 	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
649 		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
650 		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
651 	} else {
652 		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
653 		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
654 	}
655 
656 	return true;
657 }
658 
/*
 * drm_crtc_helper_funcs.atomic_check hook: an active CRTC must have its
 * primary plane present in the commit, since the LCDC always scans out
 * exactly one framebuffer.
 */
static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
									  crtc);
	/* If we are not active we don't care */
	if (!crtc_state->active)
		return 0;

	/* The primary plane is always planes[0] for this single-plane
	 * driver. */
	if (state->planes[0].ptr != crtc->primary ||
	    state->planes[0].state == NULL ||
	    state->planes[0].state->crtc != crtc) {
		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
		return -EINVAL;
	}

	return 0;
}
677 
/* drm_crtc_funcs.enable_vblank hook: enable the end-of-frame irq. */
static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	unsigned long flags;

	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

	/* Discard any already-latched end-of-frame status. */
	tilcdc_clear_irqstatus(dev, LCDC_END_OF_FRAME0);

	/* rev 1 keeps the enable in DMA_CTRL; rev 2 uses the SET register. */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
			   LCDC_V1_END_OF_FRAME_INT_ENA);
	else
		tilcdc_set(dev, LCDC_INT_ENABLE_SET_REG,
			   LCDC_V2_END_OF_FRAME0_INT_ENA);

	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	return 0;
}
700 
701 static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
702 {
703 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
704 	struct drm_device *dev = crtc->dev;
705 	struct tilcdc_drm_private *priv = dev->dev_private;
706 	unsigned long flags;
707 
708 	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
709 
710 	if (priv->rev == 1)
711 		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
712 			     LCDC_V1_END_OF_FRAME_INT_ENA);
713 	else
714 		tilcdc_clear(dev, LCDC_INT_ENABLE_SET_REG,
715 			     LCDC_V2_END_OF_FRAME0_INT_ENA);
716 
717 	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
718 }
719 
/*
 * drm_crtc_funcs.reset hook: reset the atomic state and make sure the
 * raster engine is actually stopped (e.g. left running by a bootloader
 * splash screen), waiting for the frame-done irq before powering down.
 */
static void tilcdc_crtc_reset(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	drm_atomic_helper_crtc_reset(crtc);

	/* Turn the raster off if it for some reason is on. */
	pm_runtime_get_sync(dev->dev);
	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
		/* Enable DMA Frame Done Interrupt */
		/* NOTE(review): INT_ENABLE_SET_REG is a rev-2 register;
		 * confirm this path is not reached on rev-1 hardware. */
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
		tilcdc_clear_irqstatus(dev, 0xffffffff);

		tilcdc_crtc->frame_done = false;
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
					 tilcdc_crtc->frame_done,
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
				__func__);
	}
	pm_runtime_put_sync(dev->dev);
}
747 
/* CRTC vfuncs; atomic helpers are used for everything stateless. */
static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
	.destroy        = tilcdc_crtc_destroy,
	.set_config     = drm_atomic_helper_set_config,
	.page_flip      = drm_atomic_helper_page_flip,
	.reset		= tilcdc_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= tilcdc_crtc_enable_vblank,
	.disable_vblank	= tilcdc_crtc_disable_vblank,
};
758 
759 static enum drm_mode_status
760 tilcdc_crtc_mode_valid(struct drm_crtc *crtc,
761 		       const struct drm_display_mode *mode)
762 {
763 	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
764 	unsigned int bandwidth;
765 	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
766 
767 	/*
768 	 * check to see if the width is within the range that
769 	 * the LCD Controller physically supports
770 	 */
771 	if (mode->hdisplay > priv->max_width)
772 		return MODE_VIRTUAL_X;
773 
774 	/* width must be multiple of 16 */
775 	if (mode->hdisplay & 0xf)
776 		return MODE_VIRTUAL_X;
777 
778 	if (mode->vdisplay > 2048)
779 		return MODE_VIRTUAL_Y;
780 
781 	DBG("Processing mode %dx%d@%d with pixel clock %d",
782 		mode->hdisplay, mode->vdisplay,
783 		drm_mode_vrefresh(mode), mode->clock);
784 
785 	hbp = mode->htotal - mode->hsync_end;
786 	hfp = mode->hsync_start - mode->hdisplay;
787 	hsw = mode->hsync_end - mode->hsync_start;
788 	vbp = mode->vtotal - mode->vsync_end;
789 	vfp = mode->vsync_start - mode->vdisplay;
790 	vsw = mode->vsync_end - mode->vsync_start;
791 
792 	if ((hbp-1) & ~0x3ff) {
793 		DBG("Pruning mode: Horizontal Back Porch out of range");
794 		return MODE_HBLANK_WIDE;
795 	}
796 
797 	if ((hfp-1) & ~0x3ff) {
798 		DBG("Pruning mode: Horizontal Front Porch out of range");
799 		return MODE_HBLANK_WIDE;
800 	}
801 
802 	if ((hsw-1) & ~0x3ff) {
803 		DBG("Pruning mode: Horizontal Sync Width out of range");
804 		return MODE_HSYNC_WIDE;
805 	}
806 
807 	if (vbp & ~0xff) {
808 		DBG("Pruning mode: Vertical Back Porch out of range");
809 		return MODE_VBLANK_WIDE;
810 	}
811 
812 	if (vfp & ~0xff) {
813 		DBG("Pruning mode: Vertical Front Porch out of range");
814 		return MODE_VBLANK_WIDE;
815 	}
816 
817 	if ((vsw-1) & ~0x3f) {
818 		DBG("Pruning mode: Vertical Sync Width out of range");
819 		return MODE_VSYNC_WIDE;
820 	}
821 
822 	/*
823 	 * some devices have a maximum allowed pixel clock
824 	 * configured from the DT
825 	 */
826 	if (mode->clock > priv->max_pixelclock) {
827 		DBG("Pruning mode: pixel clock too high");
828 		return MODE_CLOCK_HIGH;
829 	}
830 
831 	/*
832 	 * some devices further limit the max horizontal resolution
833 	 * configured from the DT
834 	 */
835 	if (mode->hdisplay > priv->max_width)
836 		return MODE_BAD_WIDTH;
837 
838 	/* filter out modes that would require too much memory bandwidth: */
839 	bandwidth = mode->hdisplay * mode->vdisplay *
840 		drm_mode_vrefresh(mode);
841 	if (bandwidth > priv->max_bandwidth) {
842 		DBG("Pruning mode: exceeds defined bandwidth limit");
843 		return MODE_BAD;
844 	}
845 
846 	return MODE_OK;
847 }
848 
/* Helper vfuncs wiring the atomic modeset pipeline to this driver. */
static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
	.mode_valid	= tilcdc_crtc_mode_valid,
	.mode_fixup	= tilcdc_crtc_mode_fixup,
	.atomic_check	= tilcdc_crtc_atomic_check,
	.atomic_enable	= tilcdc_crtc_atomic_enable,
	.atomic_disable	= tilcdc_crtc_atomic_disable,
	.atomic_flush	= tilcdc_crtc_atomic_flush,
};
857 
858 void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
859 		const struct tilcdc_panel_info *info)
860 {
861 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
862 	tilcdc_crtc->info = info;
863 }
864 
865 void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
866 					bool simulate_vesa_sync)
867 {
868 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
869 
870 	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
871 }
872 
/*
 * React to an external change of the LCD functional clock rate: if the
 * cached rate is stale and the CRTC is on, reprogram the clock divider
 * with a full disable/enable cycle under the modeset lock.
 */
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	drm_modeset_lock(&crtc->mutex, NULL);
	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
		if (tilcdc_crtc_is_on(crtc)) {
			/* Keep the device powered across the off/on cycle. */
			pm_runtime_get_sync(dev->dev);
			tilcdc_crtc_disable(crtc);

			tilcdc_crtc_set_clk(crtc);

			tilcdc_crtc_enable(crtc);
			pm_runtime_put_sync(dev->dev);
		}
	}
	drm_modeset_unlock(&crtc->mutex);
}
893 
894 #define SYNC_LOST_COUNT_LIMIT 50
895 
/*
 * LCDC interrupt handler: services end-of-frame (vblank, deferred flips,
 * flip events), FIFO underflow, palette-loaded, sync-lost (with flood
 * recovery on rev 2) and frame-done conditions.
 */
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat, reg;

	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		unsigned long flags;
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		tilcdc_crtc->last_vblank = now;

		/* Program a flip deferred by tilcdc_crtc_update_fb(); its
		 * event is then held back until the following frame. */
		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			skip_event = true;
		}

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

		drm_crtc_handle_vblank(crtc);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			spin_lock_irqsave(&dev->event_lock, flags);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock_irqrestore(&dev->event_lock, flags);
		}

		/* A full frame without SYNC_LOST resets the flood counter. */
		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	if (stat & LCDC_PL_LOAD_DONE) {
		/* Wakes tilcdc_crtc_load_palette(). */
		complete(&tilcdc_crtc->palette_loaded);
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_PL_INT_ENA);
		else
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_V2_PL_INT_ENA);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (priv->rev == 1) {
			/* rev 1: recover by toggling the raster enable. */
			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
			if (reg & LCDC_RASTER_ENABLE) {
				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
					     LCDC_RASTER_ENABLE);
				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
					   LCDC_RASTER_ENABLE);
			}
		} else {
			/* rev 2: on a flood of sync-lost irqs, mask the irq
			 * and schedule a full disable/enable recovery. */
			if (tilcdc_crtc->sync_lost_count++ >
			    SYNC_LOST_COUNT_LIMIT) {
				dev_err(dev->dev,
					"%s(0x%08x): Sync lost flood detected, recovering",
					__func__, stat);
				queue_work(system_wq,
					   &tilcdc_crtc->recover_work);
				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
					     LCDC_SYNC_LOST);
				tilcdc_crtc->sync_lost_count = 0;
			}
		}
	}

	if (stat & LCDC_FRAME_DONE) {
		/* Wakes waiters in tilcdc_crtc_off()/tilcdc_crtc_reset(). */
		tilcdc_crtc->frame_done = true;
		wake_up(&tilcdc_crtc->frame_done_wq);
		/* rev 1 lcdc appears to hang if irq is not disabled here */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_FRAME_DONE_INT_ENA);
	}

	/* For revision 2 only */
	if (priv->rev == 2) {
		/* Indicate to LCDC that the interrupt service routine has
		 * completed, see 13.3.6.1.6 in AM335x TRM.
		 */
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	return IRQ_HANDLED;
}
1006 
/*
 * Allocate and register the tilcdc CRTC: dummy true-color palette, the
 * primary plane, locks/waitqueues, the DRM CRTC object and (for
 * componentized devices) the OF graph port.
 *
 * Returns 0 on success or a negative errno.
 */
int tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc)
		return -ENOMEM;

	init_completion(&tilcdc_crtc->palette_loaded);
	/* 32-byte coherent buffer: first entry 0x4000, rest zeroed, as
	 * required for true-color palette loads. */
	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
					TILCDC_PALETTE_SIZE,
					&tilcdc_crtc->palette_dma_handle,
					GFP_KERNEL | __GFP_ZERO);
	if (!tilcdc_crtc->palette_base)
		return -ENOMEM;
	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;

	crtc = &tilcdc_crtc->base;

	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
	if (ret < 0)
		goto fail;

	mutex_init(&tilcdc_crtc->enable_lock);

	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	spin_lock_init(&tilcdc_crtc->irq_lock);
	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);

	ret = drm_crtc_init_with_planes(dev, crtc,
					&tilcdc_crtc->primary,
					NULL,
					&tilcdc_crtc_funcs,
					"tilcdc crtc");
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	if (priv->is_componentized) {
		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %pOF\n",
				dev->dev->of_node);
			ret = -EINVAL;
			goto fail;
		}
	}

	priv->crtc = crtc;
	return 0;

fail:
	/* NOTE(review): on the pre-init failure paths this calls
	 * drm_crtc_cleanup() on a CRTC that was never initialized —
	 * confirm drm_crtc_cleanup tolerates that. */
	tilcdc_crtc_destroy(crtc);
	return ret;
}
1067