1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (C) 2012 Texas Instruments
4  * Author: Rob Clark <robdclark@gmail.com>
5  */
6 
7 #include <linux/delay.h>
8 #include <linux/dma-mapping.h>
9 #include <linux/of_graph.h>
10 #include <linux/pm_runtime.h>
11 
12 #include <drm/drm_atomic.h>
13 #include <drm/drm_atomic_helper.h>
14 #include <drm/drm_crtc.h>
15 #include <drm/drm_fb_cma_helper.h>
16 #include <drm/drm_fourcc.h>
17 #include <drm/drm_gem_cma_helper.h>
18 #include <drm/drm_modeset_helper_vtables.h>
19 #include <drm/drm_print.h>
20 #include <drm/drm_vblank.h>
21 
22 #include "tilcdc_drv.h"
23 #include "tilcdc_regs.h"
24 
25 #define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
26 #define TILCDC_PALETTE_SIZE			32
27 #define TILCDC_PALETTE_FIRST_ENTRY		0x4000
28 
29 struct tilcdc_crtc {
30 	struct drm_crtc base;
31 
32 	struct drm_plane primary;
33 	const struct tilcdc_panel_info *info;
34 	struct drm_pending_vblank_event *event;
35 	struct mutex enable_lock;
36 	bool enabled;
37 	bool shutdown;
38 	wait_queue_head_t frame_done_wq;
39 	bool frame_done;
40 	spinlock_t irq_lock;
41 
42 	unsigned int lcd_fck_rate;
43 
44 	ktime_t last_vblank;
45 	unsigned int hvtotal_us;
46 
47 	struct drm_framebuffer *next_fb;
48 
49 	/* Only set if an external encoder is connected */
50 	bool simulate_vesa_sync;
51 
52 	int sync_lost_count;
53 	bool frame_intact;
54 	struct work_struct recover_work;
55 
56 	dma_addr_t palette_dma_handle;
57 	u16 *palette_base;
58 	struct completion palette_loaded;
59 };
60 #define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
61 
/*
 * Program the DMA framebuffer base and ceiling registers so the raster
 * engine scans out @fb, honouring the CRTC x/y panning offsets.
 */
static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct drm_gem_cma_object *gem;
	dma_addr_t start, end;
	u64 dma_base_and_ceiling;

	gem = drm_fb_cma_get_gem_obj(fb, 0);

	/* First visible byte: buffer origin plus the pan offset. */
	start = gem->paddr + fb->offsets[0] +
		crtc->y * fb->pitches[0] +
		crtc->x * fb->format->cpp[0];

	/* One pitch-sized line per visible row; exclusive end address. */
	end = start + (crtc->mode.vdisplay * fb->pitches[0]);

	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
	 * with a single instruction, if available. This should make it more
	 * unlikely that LCDC would fetch the DMA addresses in the middle of
	 * an update.
	 */
	/*
	 * NOTE(review): rev 1 makes the ceiling inclusive (end - 1) while
	 * rev 2 writes the exclusive end address. The palette-load path
	 * always programs an inclusive ceiling for both revisions — TODO
	 * confirm against the LCDC TRM that rev 2 really wants the
	 * exclusive address here.
	 */
	if (priv->rev == 1)
		end -= 1;

	dma_base_and_ceiling = (u64)end << 32 | start;
	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
}
89 
90 /*
91  * The driver currently only supports only true color formats. For
92  * true color the palette block is bypassed, but a 32 byte palette
93  * should still be loaded. The first 16-bit entry must be 0x4000 while
94  * all other entries must be zeroed.
95  */
static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	/* Arm the completion before enabling the interrupt that signals it. */
	reinit_completion(&tilcdc_crtc->palette_loaded);

	/* Tell the LCDC where the palette is located. */
	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
		     tilcdc_crtc->palette_dma_handle);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
		     (u32) tilcdc_crtc->palette_dma_handle +
		     TILCDC_PALETTE_SIZE - 1);

	/* Set dma load mode for palette loading only. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* Enable DMA Palette Loaded Interrupt */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);

	/* Enable LCDC DMA and wait for palette to be loaded. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/* The IRQ handler completes palette_loaded on LCDC_PL_LOAD_DONE. */
	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
					  msecs_to_jiffies(50));
	if (ret == 0)
		dev_err(dev->dev, "%s: Palette loading timeout", __func__);

	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	if (priv->rev == 1)
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
}
139 
/*
 * Enable the error/status interrupts used while the CRTC is active.
 * Rev 1 enables live in LCDC_RASTER_CTRL_REG; rev 2 uses the dedicated
 * interrupt set register.
 */
static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* Drop any stale status bits before enabling. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);

	if (priv->rev == 1) {
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
			LCDC_V2_UNDERFLOW_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}
156 
/*
 * Disable every interrupt this driver may have enabled, including the
 * palette-load and end-of-frame (vblank) interrupts.
 */
static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* disable irqs that we might have enabled: */
	if (priv->rev == 1) {
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		/* Rev 2 disables by writing the bits to the CLR register. */
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}
175 
176 static void reset(struct drm_crtc *crtc)
177 {
178 	struct drm_device *dev = crtc->dev;
179 	struct tilcdc_drm_private *priv = dev->dev_private;
180 
181 	if (priv->rev != 2)
182 		return;
183 
184 	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
185 	usleep_range(250, 1000);
186 	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
187 }
188 
189 /*
190  * Calculate the percentage difference between the requested pixel clock rate
191  * and the effective rate resulting from calculating the clock divider value.
192  */
193 static unsigned int tilcdc_pclk_diff(unsigned long rate,
194 				     unsigned long real_rate)
195 {
196 	int r = rate / 100, rr = real_rate / 100;
197 
198 	return (unsigned int)(abs(((rr - r) * 100) / r));
199 }
200 
/*
 * Configure the functional clock and the LCD clock divider so the pixel
 * clock matches the current mode. Tries clk_set_rate() with a divider of 2
 * first; if the resulting rate is off by more than 5%, falls back to
 * adjusting only the divider against whatever rate the clock is running at.
 */
static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long clk_rate, real_rate, req_rate;
	unsigned int clkdiv;
	int ret;

	clkdiv = 2; /* first try using a standard divider of 2 */

	/* mode.clock is in KHz, set_rate wants parameter in Hz */
	req_rate = crtc->mode.clock * 1000;

	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
	clk_rate = clk_get_rate(priv->clk);
	if (ret < 0 || tilcdc_pclk_diff(req_rate, clk_rate) > 5) {
		/*
		 * If we fail to set the clock rate (some architectures don't
		 * use the common clock framework yet and may not implement
		 * all the clk API calls for every clock), try the next best
		 * thing: adjusting the clock divider, unless clk_get_rate()
		 * failed as well.
		 */
		if (!clk_rate) {
			/* Nothing more we can do. Just bail out. */
			dev_err(dev->dev,
				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
			return;
		}

		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);

		/*
		 * Emit a warning if the real clock rate resulting from the
		 * calculated divider differs much from the requested rate.
		 *
		 * 5% is an arbitrary value - LCDs are usually quite tolerant
		 * about pixel clock rates.
		 */
		real_rate = clkdiv * req_rate;

		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
			dev_warn(dev->dev,
				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
				 clk_rate, real_rate);
		}
	}

	/* Remember the rate so tilcdc_crtc_update_clk() can detect changes. */
	tilcdc_crtc->lcd_fck_rate = clk_rate;

	DBG("lcd_clk=%u, mode clock=%d, div=%u",
	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
		     LCDC_RASTER_MODE);

	/* Rev 2 additionally gates the internal DMA/LIDD/core clocks. */
	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
				LCDC_V2_CORE_CLK_EN);
}
264 
265 static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
266 {
267 	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
268 			      mode->clock);
269 }
270 
/*
 * Program the LCDC for the CRTC's adjusted mode: DMA burst/FIFO settings,
 * raster timings, display type and pixel format, sync polarities, the pixel
 * clock and the palette. Must run with the raster disabled; the caller
 * (tilcdc_crtc_enable()) starts scanout afterwards.
 */
static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
	struct drm_framebuffer *fb = crtc->primary->state->fb;

	/* Panel info is set via tilcdc_crtc_set_panel_info() at probe time. */
	if (WARN_ON(!info))
		return;

	if (WARN_ON(!fb))
		return;

	/* Configure the Burst Size and fifo threshold of DMA: */
	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
	switch (info->dma_burst_sz) {
	case 1:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
		break;
	case 2:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
		break;
	case 4:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
		break;
	case 8:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
		break;
	case 16:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
		break;
	default:
		dev_err(dev->dev, "invalid burst size\n");
		return;
	}
	reg |= (info->fifo_th << 8);
	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);

	/* Configure timings: */
	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);

	/* Set AC Bias Period and Number of Transitions per Interrupt: */
	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);

	/*
	 * subtract one from hfp, hbp, hsw because the hardware uses
	 * a value of 0 as 1
	 */
	if (priv->rev == 2) {
		/* clear bits we're going to set */
		reg &= ~0x78000033;
		/* Rev 2 carries the high bits of the timings in TIMING_2. */
		reg |= ((hfp-1) & 0x300) >> 8;
		reg |= ((hbp-1) & 0x300) >> 4;
		reg |= ((hsw-1) & 0x3c0) << 21;
	}
	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);

	/* Horizontal timings: active width plus low bits of hbp/hfp/hsw. */
	reg = (((mode->hdisplay >> 4) - 1) << 4) |
		(((hbp-1) & 0xff) << 24) |
		(((hfp-1) & 0xff) << 16) |
		(((hsw-1) & 0x3f) << 10);
	if (priv->rev == 2)
		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);

	/* Vertical timings: lines per panel plus vbp/vfp/vsw. */
	reg = ((mode->vdisplay - 1) & 0x3ff) |
		((vbp & 0xff) << 24) |
		((vfp & 0xff) << 16) |
		(((vsw-1) & 0x3f) << 10);
	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);

	/*
	 * be sure to set Bit 10 for the V2 LCDC controller,
	 * otherwise limited to 1024 pixels width, stopping
	 * 1920x1080 being supported.
	 */
	if (priv->rev == 2) {
		if ((mode->vdisplay - 1) & 0x400) {
			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		} else {
			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		}
	}

	/* Configure display type: */
	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
		  0x000ff000 /* Palette Loading Delay bits */);
	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
	if (info->tft_alt_mode)
		reg |= LCDC_TFT_ALT_ENABLE;
	if (priv->rev == 2) {
		switch (fb->format->format) {
		case DRM_FORMAT_BGR565:
		case DRM_FORMAT_RGB565:
			break;
		case DRM_FORMAT_XBGR8888:
		case DRM_FORMAT_XRGB8888:
			reg |= LCDC_V2_TFT_24BPP_UNPACK;
			fallthrough;
		case DRM_FORMAT_BGR888:
		case DRM_FORMAT_RGB888:
			reg |= LCDC_V2_TFT_24BPP_MODE;
			break;
		default:
			dev_err(dev->dev, "invalid pixel format\n");
			return;
		}
	}
	reg |= info->fdd << 12;
	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);

	/* Sync polarity / edge configuration from panel info and mode flags. */
	if (info->invert_pxl_clk)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);

	if (info->sync_ctrl)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);

	if (info->sync_edge)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);

	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);

	if (info->raster_order)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
	else
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);

	tilcdc_crtc_set_clk(crtc);

	tilcdc_crtc_load_palette(crtc);

	set_scanout(crtc, fb);

	crtc->hwmode = crtc->state->adjusted_mode;

	/* Cache the frame period for the page-flip race check in update_fb. */
	tilcdc_crtc->hvtotal_us =
		tilcdc_mode_hvtotal(&crtc->hwmode);
}
440 
/*
 * Power up the LCDC, program the current mode and start scanout.
 * Serialized against disable/shutdown by enable_lock; a no-op if the CRTC
 * is already enabled or has been shut down.
 */
static void tilcdc_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long flags;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}

	pm_runtime_get_sync(dev->dev);

	reset(crtc);

	tilcdc_crtc_set_mode(crtc);

	tilcdc_crtc_enable_irqs(dev);

	/* Single buffer, normal data-only DMA from here on. */
	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* There is no real chance for a race here as the time stamp
	 * is taken before the raster DMA is started. The spin-lock is
	 * taken to have a memory barrier after taking the time-stamp
	 * and to avoid a context switch between taking the stamp and
	 * enabling the raster.
	 */
	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
	tilcdc_crtc->last_vblank = ktime_get();
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	drm_crtc_vblank_on(crtc);

	tilcdc_crtc->enabled = true;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
482 
/* Atomic helper hook: @state is unused, all data comes from crtc->state. */
static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_atomic_state *state)
{
	tilcdc_crtc_enable(crtc);
}
488 
/*
 * Stop scanout and power the LCDC down. With @shutdown set the CRTC is
 * additionally marked so it can never be re-enabled (driver unload path).
 * Serialized against enable by enable_lock.
 */
static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (shutdown)
		tilcdc_crtc->shutdown = true;
	if (!tilcdc_crtc->enabled) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}
	tilcdc_crtc->frame_done = false;
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/*
	 * Wait for framedone irq which will still come before putting
	 * things to sleep..
	 */
	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
				 tilcdc_crtc->frame_done,
				 msecs_to_jiffies(500));
	if (ret == 0)
		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
			__func__);

	drm_crtc_vblank_off(crtc);

	/* Flush any vblank event still queued in the state; vblank is off
	 * now, so the IRQ handler will never deliver it.
	 */
	spin_lock_irq(&crtc->dev->event_lock);

	if (crtc->state->event) {
		drm_crtc_send_vblank_event(crtc, crtc->state->event);
		crtc->state->event = NULL;
	}

	spin_unlock_irq(&crtc->dev->event_lock);

	tilcdc_crtc_disable_irqs(dev);

	pm_runtime_put_sync(dev->dev);

	tilcdc_crtc->enabled = false;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}
534 
/* Regular disable: the CRTC may be enabled again later. */
static void tilcdc_crtc_disable(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, false);
}
539 
/* Atomic helper hook: @state is unused, all data comes from crtc->state. */
static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_atomic_state *state)
{
	tilcdc_crtc_disable(crtc);
}
545 
546 static void tilcdc_crtc_atomic_flush(struct drm_crtc *crtc,
547 				     struct drm_atomic_state *state)
548 {
549 	if (!crtc->state->event)
550 		return;
551 
552 	spin_lock_irq(&crtc->dev->event_lock);
553 	drm_crtc_send_vblank_event(crtc, crtc->state->event);
554 	crtc->state->event = NULL;
555 	spin_unlock_irq(&crtc->dev->event_lock);
556 }
557 
/* Final disable on driver teardown; the CRTC cannot be re-enabled after. */
void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, true);
}
562 
563 static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
564 {
565 	return crtc->state && crtc->state->enable && crtc->state->active;
566 }
567 
/*
 * Worker queued by the IRQ handler after a sync-lost flood: recover the
 * controller by a full disable/enable cycle under the modeset lock.
 */
static void tilcdc_crtc_recover_work(struct work_struct *work)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, recover_work);
	struct drm_crtc *crtc = &tilcdc_crtc->base;

	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);

	drm_modeset_lock(&crtc->mutex, NULL);

	/* The CRTC may have been turned off since the work was queued. */
	if (!tilcdc_crtc_is_on(crtc))
		goto out;

	tilcdc_crtc_disable(crtc);
	tilcdc_crtc_enable(crtc);
out:
	drm_modeset_unlock(&crtc->mutex);
}
586 
/*
 * Tear down the CRTC: permanently shut it down, drain pending work
 * (including recover_work) and release DRM/OF resources.
 */
static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;

	tilcdc_crtc_shutdown(crtc);

	flush_workqueue(priv->wq);

	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
}
598 
/*
 * Queue a page flip to @fb. If the next vblank is imminent (within
 * TILCDC_VBLANK_SAFETY_THRESHOLD_US) the scanout update is deferred to the
 * vblank IRQ handler to avoid tearing; otherwise it is applied immediately.
 * Returns -EBUSY if a flip is already pending.
 */
int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	/* The IRQ handler sends this event once the flip has taken effect. */
	tilcdc_crtc->event = event;

	mutex_lock(&tilcdc_crtc->enable_lock);

	if (tilcdc_crtc->enabled) {
		unsigned long flags;
		ktime_t next_vblank;
		s64 tdiff;

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		/* Estimate when the next vblank occurs from the last one
		 * plus the cached frame period.
		 */
		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
					   tilcdc_crtc->hvtotal_us);
		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
			tilcdc_crtc->next_fb = fb;
		else
			set_scanout(crtc, fb);

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
	}

	mutex_unlock(&tilcdc_crtc->enable_lock);

	return 0;
}
638 
639 static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
640 		const struct drm_display_mode *mode,
641 		struct drm_display_mode *adjusted_mode)
642 {
643 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
644 
645 	if (!tilcdc_crtc->simulate_vesa_sync)
646 		return true;
647 
648 	/*
649 	 * tilcdc does not generate VESA-compliant sync but aligns
650 	 * VS on the second edge of HS instead of first edge.
651 	 * We use adjusted_mode, to fixup sync by aligning both rising
652 	 * edges and add HSKEW offset to fix the sync.
653 	 */
654 	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
655 	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
656 
657 	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
658 		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
659 		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
660 	} else {
661 		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
662 		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
663 	}
664 
665 	return true;
666 }
667 
/*
 * Validate the CRTC's new atomic state: an active CRTC must have its
 * primary plane present and attached in the same commit (the plane state
 * is expected at index 0 of the global state).
 */
static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_atomic_state *state)
{
	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
									  crtc);
	/* If we are not active we don't care */
	if (!crtc_state->active)
		return 0;

	if (state->planes[0].ptr != crtc->primary ||
	    state->planes[0].state == NULL ||
	    state->planes[0].state->crtc != crtc) {
		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
		return -EINVAL;
	}

	return 0;
}
686 
/*
 * Enable the end-of-frame (vblank) interrupt. Rev 1 keeps the enable bit
 * in the DMA control register; rev 2 uses the interrupt set register.
 */
static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	unsigned long flags;

	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

	/* Clear any stale end-of-frame status before enabling. */
	tilcdc_clear_irqstatus(dev, LCDC_END_OF_FRAME0);

	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
			   LCDC_V1_END_OF_FRAME_INT_ENA);
	else
		/*
		 * NOTE(review): a read-modify-write of the rev 2 SET
		 * register works (presumably writing 0s is a no-op on a
		 * set-only register — confirm in the TRM), but a plain
		 * tilcdc_write() would match the style used elsewhere.
		 */
		tilcdc_set(dev, LCDC_INT_ENABLE_SET_REG,
			   LCDC_V2_END_OF_FRAME0_INT_ENA);

	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	return 0;
}
709 
710 static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
711 {
712 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
713 	struct drm_device *dev = crtc->dev;
714 	struct tilcdc_drm_private *priv = dev->dev_private;
715 	unsigned long flags;
716 
717 	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
718 
719 	if (priv->rev == 1)
720 		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
721 			     LCDC_V1_END_OF_FRAME_INT_ENA);
722 	else
723 		tilcdc_clear(dev, LCDC_INT_ENABLE_SET_REG,
724 			     LCDC_V2_END_OF_FRAME0_INT_ENA);
725 
726 	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
727 }
728 
/*
 * DRM reset hook: reset the software state and make sure the raster is
 * actually off in hardware (e.g. left running by the bootloader splash),
 * waiting for the frame-done interrupt before powering down.
 */
static void tilcdc_crtc_reset(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	drm_atomic_helper_crtc_reset(crtc);

	/* Turn the raster off if it for some reason is on. */
	pm_runtime_get_sync(dev->dev);
	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
		/* Enable DMA Frame Done Interrupt */
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
		tilcdc_clear_irqstatus(dev, 0xffffffff);

		tilcdc_crtc->frame_done = false;
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

		/* The IRQ handler sets frame_done and wakes us. */
		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
					 tilcdc_crtc->frame_done,
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
				__func__);
	}
	pm_runtime_put_sync(dev->dev);
}
756 
/* DRM core CRTC callbacks; state handling uses the atomic helpers. */
static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
	.destroy        = tilcdc_crtc_destroy,
	.set_config     = drm_atomic_helper_set_config,
	.page_flip      = drm_atomic_helper_page_flip,
	.reset		= tilcdc_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= tilcdc_crtc_enable_vblank,
	.disable_vblank	= tilcdc_crtc_disable_vblank,
};
767 
768 static enum drm_mode_status
769 tilcdc_crtc_mode_valid(struct drm_crtc *crtc,
770 		       const struct drm_display_mode *mode)
771 {
772 	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
773 	unsigned int bandwidth;
774 	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
775 
776 	/*
777 	 * check to see if the width is within the range that
778 	 * the LCD Controller physically supports
779 	 */
780 	if (mode->hdisplay > priv->max_width)
781 		return MODE_VIRTUAL_X;
782 
783 	/* width must be multiple of 16 */
784 	if (mode->hdisplay & 0xf)
785 		return MODE_VIRTUAL_X;
786 
787 	if (mode->vdisplay > 2048)
788 		return MODE_VIRTUAL_Y;
789 
790 	DBG("Processing mode %dx%d@%d with pixel clock %d",
791 		mode->hdisplay, mode->vdisplay,
792 		drm_mode_vrefresh(mode), mode->clock);
793 
794 	hbp = mode->htotal - mode->hsync_end;
795 	hfp = mode->hsync_start - mode->hdisplay;
796 	hsw = mode->hsync_end - mode->hsync_start;
797 	vbp = mode->vtotal - mode->vsync_end;
798 	vfp = mode->vsync_start - mode->vdisplay;
799 	vsw = mode->vsync_end - mode->vsync_start;
800 
801 	if ((hbp-1) & ~0x3ff) {
802 		DBG("Pruning mode: Horizontal Back Porch out of range");
803 		return MODE_HBLANK_WIDE;
804 	}
805 
806 	if ((hfp-1) & ~0x3ff) {
807 		DBG("Pruning mode: Horizontal Front Porch out of range");
808 		return MODE_HBLANK_WIDE;
809 	}
810 
811 	if ((hsw-1) & ~0x3ff) {
812 		DBG("Pruning mode: Horizontal Sync Width out of range");
813 		return MODE_HSYNC_WIDE;
814 	}
815 
816 	if (vbp & ~0xff) {
817 		DBG("Pruning mode: Vertical Back Porch out of range");
818 		return MODE_VBLANK_WIDE;
819 	}
820 
821 	if (vfp & ~0xff) {
822 		DBG("Pruning mode: Vertical Front Porch out of range");
823 		return MODE_VBLANK_WIDE;
824 	}
825 
826 	if ((vsw-1) & ~0x3f) {
827 		DBG("Pruning mode: Vertical Sync Width out of range");
828 		return MODE_VSYNC_WIDE;
829 	}
830 
831 	/*
832 	 * some devices have a maximum allowed pixel clock
833 	 * configured from the DT
834 	 */
835 	if (mode->clock > priv->max_pixelclock) {
836 		DBG("Pruning mode: pixel clock too high");
837 		return MODE_CLOCK_HIGH;
838 	}
839 
840 	/*
841 	 * some devices further limit the max horizontal resolution
842 	 * configured from the DT
843 	 */
844 	if (mode->hdisplay > priv->max_width)
845 		return MODE_BAD_WIDTH;
846 
847 	/* filter out modes that would require too much memory bandwidth: */
848 	bandwidth = mode->hdisplay * mode->vdisplay *
849 		drm_mode_vrefresh(mode);
850 	if (bandwidth > priv->max_bandwidth) {
851 		DBG("Pruning mode: exceeds defined bandwidth limit");
852 		return MODE_BAD;
853 	}
854 
855 	return MODE_OK;
856 }
857 
/* Atomic modeset helper callbacks for this CRTC. */
static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
	.mode_valid	= tilcdc_crtc_mode_valid,
	.mode_fixup	= tilcdc_crtc_mode_fixup,
	.atomic_check	= tilcdc_crtc_atomic_check,
	.atomic_enable	= tilcdc_crtc_atomic_enable,
	.atomic_disable	= tilcdc_crtc_atomic_disable,
	.atomic_flush	= tilcdc_crtc_atomic_flush,
};
866 
/* Attach the panel timing/DMA parameters used by tilcdc_crtc_set_mode(). */
void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
		const struct tilcdc_panel_info *info)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	tilcdc_crtc->info = info;
}
873 
/*
 * Request VESA-style sync simulation in mode_fixup(); set when an external
 * encoder that expects VESA-compliant sync is connected.
 */
void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
					bool simulate_vesa_sync)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
}
881 
/*
 * React to an external change of the functional clock rate: if it no
 * longer matches the cached rate and the CRTC is active, reprogram the
 * divider across a disable/enable cycle.
 */
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	drm_modeset_lock(&crtc->mutex, NULL);
	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
		if (tilcdc_crtc_is_on(crtc)) {
			/* Keep the device powered across the off/on cycle. */
			pm_runtime_get_sync(dev->dev);
			tilcdc_crtc_disable(crtc);

			tilcdc_crtc_set_clk(crtc);

			tilcdc_crtc_enable(crtc);
			pm_runtime_put_sync(dev->dev);
		}
	}
	drm_modeset_unlock(&crtc->mutex);
}
902 
/* Consecutive sync-lost IRQs tolerated before triggering full recovery. */
#define SYNC_LOST_COUNT_LIMIT 50

/*
 * LCDC interrupt handler. Handles end-of-frame (vblank + deferred page
 * flips), FIFO underflow, palette-load completion, sync-lost (with flood
 * recovery) and frame-done events.
 */
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat, reg;

	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		spin_lock(&tilcdc_crtc->irq_lock);

		tilcdc_crtc->last_vblank = now;

		/* Apply a page flip that was deferred by update_fb(); the
		 * pending event is then sent on the NEXT vblank, once the
		 * new buffer is actually being scanned out.
		 */
		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			skip_event = true;
		}

		spin_unlock(&tilcdc_crtc->irq_lock);

		drm_crtc_handle_vblank(crtc);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			spin_lock(&dev->event_lock);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock(&dev->event_lock);
		}

		/* A clean frame resets the sync-lost flood counter. */
		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	/* Palette DMA finished: wake tilcdc_crtc_load_palette() and mask
	 * the one-shot palette-loaded interrupt.
	 */
	if (stat & LCDC_PL_LOAD_DONE) {
		complete(&tilcdc_crtc->palette_loaded);
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_PL_INT_ENA);
		else
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_V2_PL_INT_ENA);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (priv->rev == 1) {
			/* Rev 1: restart the raster to resynchronize. */
			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
			if (reg & LCDC_RASTER_ENABLE) {
				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
					     LCDC_RASTER_ENABLE);
				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
					   LCDC_RASTER_ENABLE);
			}
		} else {
			/* Rev 2: after too many consecutive sync losses,
			 * mask the interrupt and schedule a full CRTC
			 * reset from process context.
			 */
			if (tilcdc_crtc->sync_lost_count++ >
			    SYNC_LOST_COUNT_LIMIT) {
				dev_err(dev->dev,
					"%s(0x%08x): Sync lost flood detected, recovering",
					__func__, stat);
				queue_work(system_wq,
					   &tilcdc_crtc->recover_work);
				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
					     LCDC_SYNC_LOST);
				tilcdc_crtc->sync_lost_count = 0;
			}
		}
	}

	/* Raster fully stopped: wake waiters in tilcdc_crtc_off()/reset(). */
	if (stat & LCDC_FRAME_DONE) {
		tilcdc_crtc->frame_done = true;
		wake_up(&tilcdc_crtc->frame_done_wq);
		/* rev 1 lcdc appears to hang if irq is not disabled here */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_FRAME_DONE_INT_ENA);
	}

	/* For revision 2 only */
	if (priv->rev == 2) {
		/* Indicate to LCDC that the interrupt service routine has
		 * completed, see 13.3.6.1.6 in AM335x TRM.
		 */
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	return IRQ_HANDLED;
}
1014 
/*
 * Allocate and register the tilcdc CRTC: set up the dummy true-color
 * palette buffer, the primary plane, locks/waitqueues and the OF graph
 * port (for componentized setups). Returns 0 on success or a negative
 * errno; on failure the partially initialized CRTC is destroyed.
 */
int tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc)
		return -ENOMEM;

	/* Coherent palette buffer: first entry 0x4000, rest zero (see the
	 * comment above tilcdc_crtc_load_palette()).
	 */
	init_completion(&tilcdc_crtc->palette_loaded);
	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
					TILCDC_PALETTE_SIZE,
					&tilcdc_crtc->palette_dma_handle,
					GFP_KERNEL | __GFP_ZERO);
	if (!tilcdc_crtc->palette_base)
		return -ENOMEM;
	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;

	crtc = &tilcdc_crtc->base;

	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
	if (ret < 0)
		goto fail;

	mutex_init(&tilcdc_crtc->enable_lock);

	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	spin_lock_init(&tilcdc_crtc->irq_lock);
	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);

	ret = drm_crtc_init_with_planes(dev, crtc,
					&tilcdc_crtc->primary,
					NULL,
					&tilcdc_crtc_funcs,
					"tilcdc crtc");
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	if (priv->is_componentized) {
		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %pOF\n",
				dev->dev->of_node);
			ret = -EINVAL;
			goto fail;
		}
	}

	priv->crtc = crtc;
	return 0;

fail:
	tilcdc_crtc_destroy(crtc);
	return ret;
}
1075