/*
 * Copyright (C) 2012 Texas Instruments
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_flip_work.h>
#include <drm/drm_plane_helper.h>
#include <linux/workqueue.h>
#include <linux/completion.h>
#include <linux/dma-mapping.h>
#include <linux/of_graph.h>
#include <linux/math64.h>

#include "tilcdc_drv.h"
#include "tilcdc_regs.h"

#define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
#define TILCDC_PALETTE_SIZE			32
#define TILCDC_PALETTE_FIRST_ENTRY		0x4000

struct tilcdc_crtc {
	struct drm_crtc base;

	struct drm_plane primary;
	const struct tilcdc_panel_info *info;
	struct drm_pending_vblank_event *event;
	struct mutex enable_lock;
	bool enabled;
	bool shutdown;
	wait_queue_head_t frame_done_wq;
	bool frame_done;
	spinlock_t irq_lock;

	unsigned int lcd_fck_rate;

	ktime_t last_vblank;
	unsigned int hvtotal_us;

	struct drm_framebuffer *curr_fb;
	struct drm_framebuffer *next_fb;

	/* for deferred fb unref's: */
	struct drm_flip_work unref_work;

	/* Only set if an external encoder is connected */
	bool simulate_vesa_sync;

	int sync_lost_count;
	bool frame_intact;
	struct work_struct recover_work;

	dma_addr_t palette_dma_handle;
	u16 *palette_base;
	struct completion palette_loaded;
};
#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)

static void unref_worker(struct drm_flip_work *work, void *val)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, unref_work);
	struct drm_device *dev = tilcdc_crtc->base.dev;

	mutex_lock(&dev->mode_config.mutex);
	drm_framebuffer_put(val);
	mutex_unlock(&dev->mode_config.mutex);
}

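/*
 * Program the DMA base and ceiling registers for the given framebuffer
 * and queue the previously scanned-out framebuffer for a deferred unref.
 */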
static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct drm_gem_cma_object *gem;
	dma_addr_t start, end;
	u64 dma_base_and_ceiling;

	gem = drm_fb_cma_get_gem_obj(fb, 0);

	start = gem->paddr + fb->offsets[0] +
		crtc->y * fb->pitches[0] +
		crtc->x * fb->format->cpp[0];

	end = start + (crtc->mode.vdisplay * fb->pitches[0]);

	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
	 * with a single instruction, if available. This should make it less
	 * likely that the LCDC fetches the DMA addresses in the middle of
	 * an update.
	 */
	if (priv->rev == 1)
		end -= 1;

	dma_base_and_ceiling = (u64)end << 32 | start;
	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);

	if (tilcdc_crtc->curr_fb)
		drm_flip_work_queue(&tilcdc_crtc->unref_work,
			tilcdc_crtc->curr_fb);

	tilcdc_crtc->curr_fb = fb;
}

/*
 * The driver currently supports only true color formats. For true
 * color the palette block is bypassed, but a 32-byte palette should
 * still be loaded. The first 16-bit entry must be 0x4000, while all
 * other entries must be zeroed.
 */
static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	reinit_completion(&tilcdc_crtc->palette_loaded);

	/* Tell the LCDC where the palette is located. */
	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
		     tilcdc_crtc->palette_dma_handle);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
		     (u32) tilcdc_crtc->palette_dma_handle +
		     TILCDC_PALETTE_SIZE - 1);

	/* Set dma load mode for palette loading only. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* Enable DMA Palette Loaded Interrupt */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);

	/* Enable LCDC DMA and wait for palette to be loaded. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
					  msecs_to_jiffies(50));
	if (ret == 0)
		dev_err(dev->dev, "%s: Palette loading timeout", __func__);

	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	if (priv->rev == 1)
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
}

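/*
 * Enable the interrupts needed during normal operation (sync lost,
 * frame done, FIFO underflow and end of frame), using the register
 * layout of the detected LCDC revision.
 */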
static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	tilcdc_clear_irqstatus(dev, 0xffffffff);

	if (priv->rev == 1) {
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA);
		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
			LCDC_V2_UNDERFLOW_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}

static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* disable irqs that we might have enabled: */
	if (priv->rev == 1) {
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}

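/*
 * Toggle the main reset bit in the clock reset register. Only revision
 * 2 has this register; on revision 1 this is a no-op.
 */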
static void reset(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;

	if (priv->rev != 2)
		return;

	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
	usleep_range(250, 1000);
	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
}

/*
 * Calculate the percentage difference between the requested pixel clock rate
 * and the effective rate resulting from calculating the clock divider value.
 */
static unsigned int tilcdc_pclk_diff(unsigned long rate,
				     unsigned long real_rate)
{
	int r = rate / 100, rr = real_rate / 100;

	return (unsigned int)(abs(((rr - r) * 100) / r));
}

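/*
 * Set the LCD functional clock to twice the pixel clock of the current
 * mode and program the matching clock divider. If the rate cannot be
 * changed, fall back to adjusting only the divider.
 */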
static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long clk_rate, real_rate, req_rate;
	unsigned int clkdiv;
	int ret;

	clkdiv = 2; /* first try using a standard divider of 2 */

	/* mode.clock is in kHz, set_rate wants parameter in Hz */
	req_rate = crtc->mode.clock * 1000;

	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
	clk_rate = clk_get_rate(priv->clk);
	if (ret < 0) {
		/*
		 * If we fail to set the clock rate (some architectures don't
		 * use the common clock framework yet and may not implement
		 * all the clk API calls for every clock), try the next best
		 * thing: adjusting the clock divider, unless clk_get_rate()
		 * failed as well.
		 */
		if (!clk_rate) {
			/* Nothing more we can do. Just bail out. */
			dev_err(dev->dev,
				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
			return;
		}

		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);

		/*
		 * Emit a warning if the real clock rate resulting from the
		 * calculated divider differs much from the requested rate.
		 *
		 * 5% is an arbitrary value - LCDs are usually quite tolerant
		 * about pixel clock rates.
		 */
		real_rate = clkdiv * req_rate;

		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
			dev_warn(dev->dev,
				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
				 clk_rate, real_rate);
		}
	}

	tilcdc_crtc->lcd_fck_rate = clk_rate;

	DBG("lcd_clk=%u, mode clock=%d, div=%u",
	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
		     LCDC_RASTER_MODE);

	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
				LCDC_V2_CORE_CLK_EN);
}

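/* Return the duration of one full frame (htotal * vtotal) in microseconds. */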
static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
{
	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
			      mode->clock);
}

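/*
 * Program the LCDC for the adjusted mode of the current CRTC state:
 * DMA burst size and FIFO threshold, raster timings, display type and
 * pixel format, pixel clock and palette, and the initial scanout buffer.
 */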
static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
	struct drm_framebuffer *fb = crtc->primary->state->fb;

	if (WARN_ON(!info))
		return;

	if (WARN_ON(!fb))
		return;

	/* Configure the Burst Size and fifo threshold of DMA: */
	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
	switch (info->dma_burst_sz) {
	case 1:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
		break;
	case 2:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
		break;
	case 4:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
		break;
	case 8:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
		break;
	case 16:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
		break;
	default:
		dev_err(dev->dev, "invalid burst size\n");
		return;
	}
	reg |= (info->fifo_th << 8);
	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);

	/* Configure timings: */
	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);

	/* Set AC Bias Period and Number of Transitions per Interrupt: */
	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);

	/*
	 * subtract one from hfp, hbp, hsw because the hardware uses
	 * a value of 0 as 1
	 */
	if (priv->rev == 2) {
		/* clear bits we're going to set */
		reg &= ~0x78000033;
		reg |= ((hfp-1) & 0x300) >> 8;
		reg |= ((hbp-1) & 0x300) >> 4;
		reg |= ((hsw-1) & 0x3c0) << 21;
	}
	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);

	reg = (((mode->hdisplay >> 4) - 1) << 4) |
		(((hbp-1) & 0xff) << 24) |
		(((hfp-1) & 0xff) << 16) |
		(((hsw-1) & 0x3f) << 10);
	if (priv->rev == 2)
		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);

	reg = ((mode->vdisplay - 1) & 0x3ff) |
		((vbp & 0xff) << 24) |
		((vfp & 0xff) << 16) |
		(((vsw-1) & 0x3f) << 10);
	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);

	/*
	 * Be sure to set bit 10 (LPP_B10) for the V2 LCDC controller,
	 * otherwise the number of active lines is limited to 1024,
	 * preventing modes such as 1920x1080 from being supported.
	 */
	if (priv->rev == 2) {
		if ((mode->vdisplay - 1) & 0x400) {
			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		} else {
			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		}
	}

	/* Configure display type: */
	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
		  0x000ff000 /* Palette Loading Delay bits */);
	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
	if (info->tft_alt_mode)
		reg |= LCDC_TFT_ALT_ENABLE;
	if (priv->rev == 2) {
		switch (fb->format->format) {
		case DRM_FORMAT_BGR565:
		case DRM_FORMAT_RGB565:
			break;
		case DRM_FORMAT_XBGR8888:
		case DRM_FORMAT_XRGB8888:
			reg |= LCDC_V2_TFT_24BPP_UNPACK;
			/* fallthrough */
		case DRM_FORMAT_BGR888:
		case DRM_FORMAT_RGB888:
			reg |= LCDC_V2_TFT_24BPP_MODE;
			break;
		default:
			dev_err(dev->dev, "invalid pixel format\n");
			return;
		}
	}
	reg |= info->fdd << 12;
	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);

	if (info->invert_pxl_clk)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);

	if (info->sync_ctrl)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);

	if (info->sync_edge)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);

	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);

	if (info->raster_order)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
	else
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);

	tilcdc_crtc_set_clk(crtc);

	tilcdc_crtc_load_palette(crtc);

	set_scanout(crtc, fb);

	drm_framebuffer_get(fb);

	crtc->hwmode = crtc->state->adjusted_mode;

	tilcdc_crtc->hvtotal_us =
		tilcdc_mode_hvtotal(&crtc->hwmode);
}

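/*
 * Power up the LCDC, program the mode and start raster DMA. Does
 * nothing if the CRTC is already enabled or has been shut down.
 */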
static void tilcdc_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long flags;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}

	pm_runtime_get_sync(dev->dev);

	reset(crtc);

	tilcdc_crtc_set_mode(crtc);

	tilcdc_crtc_enable_irqs(dev);

	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* There is no real chance for a race here as the time stamp
	 * is taken before the raster DMA is started. The spin-lock is
	 * taken to have a memory barrier after taking the time-stamp
	 * and to avoid a context switch between taking the stamp and
	 * enabling the raster.
	 */
	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
	tilcdc_crtc->last_vblank = ktime_get();
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	drm_crtc_vblank_on(crtc);

	tilcdc_crtc->enabled = true;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}

static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_crtc_state *old_state)
{
	tilcdc_crtc_enable(crtc);
}

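/*
 * Stop the raster and wait for the frame-done interrupt before cutting
 * power. With shutdown set, the CRTC is also marked so that it cannot
 * be re-enabled.
 */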
static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (shutdown)
		tilcdc_crtc->shutdown = true;
	if (!tilcdc_crtc->enabled) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}
	tilcdc_crtc->frame_done = false;
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/*
	 * Wait for the frame-done irq, which will still come before putting
	 * things to sleep.
	 */
	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
				 tilcdc_crtc->frame_done,
				 msecs_to_jiffies(500));
	if (ret == 0)
		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
			__func__);

	drm_crtc_vblank_off(crtc);

	tilcdc_crtc_disable_irqs(dev);

	pm_runtime_put_sync(dev->dev);

	if (tilcdc_crtc->next_fb) {
		drm_flip_work_queue(&tilcdc_crtc->unref_work,
				    tilcdc_crtc->next_fb);
		tilcdc_crtc->next_fb = NULL;
	}

	if (tilcdc_crtc->curr_fb) {
		drm_flip_work_queue(&tilcdc_crtc->unref_work,
				    tilcdc_crtc->curr_fb);
		tilcdc_crtc->curr_fb = NULL;
	}

	drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);

	tilcdc_crtc->enabled = false;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}

static void tilcdc_crtc_disable(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, false);
}

static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_crtc_state *old_state)
{
	tilcdc_crtc_disable(crtc);
}

void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, true);
}

static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
{
	return crtc->state && crtc->state->enable && crtc->state->active;
}

static void tilcdc_crtc_recover_work(struct work_struct *work)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, recover_work);
	struct drm_crtc *crtc = &tilcdc_crtc->base;

	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);

	drm_modeset_lock(&crtc->mutex, NULL);

	if (!tilcdc_crtc_is_on(crtc))
		goto out;

	tilcdc_crtc_disable(crtc);
	tilcdc_crtc_enable(crtc);
out:
	drm_modeset_unlock(&crtc->mutex);
}

static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;

	tilcdc_crtc_shutdown(crtc);

	flush_workqueue(priv->wq);

	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
	drm_flip_work_cleanup(&tilcdc_crtc->unref_work);
}

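/*
 * Queue a page flip. If the next vblank is too close to update the
 * scanout address safely, defer the flip to the end-of-frame interrupt
 * handler; otherwise update the scanout registers immediately.
 */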
int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	drm_framebuffer_get(fb);

	crtc->primary->fb = fb;
	tilcdc_crtc->event = event;

	mutex_lock(&tilcdc_crtc->enable_lock);

	if (tilcdc_crtc->enabled) {
		unsigned long flags;
		ktime_t next_vblank;
		s64 tdiff;

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
					   tilcdc_crtc->hvtotal_us);
		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
			tilcdc_crtc->next_fb = fb;
		else
			set_scanout(crtc, fb);

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
	}

	mutex_unlock(&tilcdc_crtc->enable_lock);

	return 0;
}

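/*
 * When simulate_vesa_sync is set (an external encoder is connected),
 * adjust the mode so the sync matches what a VESA-compliant encoder
 * expects; see the comment in the function body.
 */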
static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adjusted_mode)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	if (!tilcdc_crtc->simulate_vesa_sync)
		return true;

	/*
	 * tilcdc does not generate VESA-compliant sync: it aligns VS on
	 * the second edge of HS instead of the first. Use adjusted_mode
	 * to fix up the sync by aligning both rising edges, and add an
	 * HSKEW offset to compensate.
	 */
	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;

	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
	} else {
		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
	}

	return true;
}

static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_crtc_state *state)
{
	struct drm_display_mode *mode = &state->mode;
	int ret;

	/* If we are not active we don't care */
	if (!state->active)
		return 0;

	if (state->state->planes[0].ptr != crtc->primary ||
	    state->state->planes[0].state == NULL ||
	    state->state->planes[0].state->crtc != crtc) {
		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
		return -EINVAL;
	}

	ret = tilcdc_crtc_mode_valid(crtc, mode);
	if (ret) {
		dev_dbg(crtc->dev->dev, "Mode \"%s\" not valid", mode->name);
		return -EINVAL;
	}

	return 0;
}

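/*
 * The end-of-frame interrupt is enabled for as long as the CRTC is on,
 * so there is nothing to do when vblank events are (un)requested.
 */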
static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
{
	return 0;
}

static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
{
}

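/*
 * Reset the CRTC state and make sure the raster DMA is stopped before
 * the driver takes over, waiting for the frame-done interrupt if the
 * raster was found enabled.
 */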
static void tilcdc_crtc_reset(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	drm_atomic_helper_crtc_reset(crtc);

	/* Turn the raster off if it is on for some reason. */
	pm_runtime_get_sync(dev->dev);
	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
		/* Enable DMA Frame Done Interrupt */
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
		tilcdc_clear_irqstatus(dev, 0xffffffff);

		tilcdc_crtc->frame_done = false;
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
					 tilcdc_crtc->frame_done,
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
				__func__);
	}
	pm_runtime_put_sync(dev->dev);
}

static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
	.destroy        = tilcdc_crtc_destroy,
	.set_config     = drm_atomic_helper_set_config,
	.page_flip      = drm_atomic_helper_page_flip,
	.reset		= tilcdc_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= tilcdc_crtc_enable_vblank,
	.disable_vblank	= tilcdc_crtc_disable_vblank,
};

static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
	.mode_fixup     = tilcdc_crtc_mode_fixup,
	.atomic_check	= tilcdc_crtc_atomic_check,
	.atomic_enable	= tilcdc_crtc_atomic_enable,
	.atomic_disable	= tilcdc_crtc_atomic_disable,
};

int tilcdc_crtc_max_width(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int max_width = 0;

	if (priv->rev == 1)
		max_width = 1024;
	else if (priv->rev == 2)
		max_width = 2048;

	return max_width;
}

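/*
 * Check a mode against the LCDC limits: maximum width and height,
 * porch and sync-width register ranges, and the optional pixel clock,
 * width and memory bandwidth limits configured from the DT.
 */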
int tilcdc_crtc_mode_valid(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
	unsigned int bandwidth;
	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;

	/*
	 * check to see if the width is within the range that
	 * the LCD Controller physically supports
	 */
	if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
		return MODE_VIRTUAL_X;

	/* width must be multiple of 16 */
	if (mode->hdisplay & 0xf)
		return MODE_VIRTUAL_X;

	if (mode->vdisplay > 2048)
		return MODE_VIRTUAL_Y;

	DBG("Processing mode %dx%d@%d with pixel clock %d",
		mode->hdisplay, mode->vdisplay,
		drm_mode_vrefresh(mode), mode->clock);

	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	if ((hbp-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Back Porch out of range");
		return MODE_HBLANK_WIDE;
	}

	if ((hfp-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Front Porch out of range");
		return MODE_HBLANK_WIDE;
	}

	if ((hsw-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Sync Width out of range");
		return MODE_HSYNC_WIDE;
	}

	if (vbp & ~0xff) {
		DBG("Pruning mode: Vertical Back Porch out of range");
		return MODE_VBLANK_WIDE;
	}

	if (vfp & ~0xff) {
		DBG("Pruning mode: Vertical Front Porch out of range");
		return MODE_VBLANK_WIDE;
	}

	if ((vsw-1) & ~0x3f) {
		DBG("Pruning mode: Vertical Sync Width out of range");
		return MODE_VSYNC_WIDE;
	}

	/*
	 * some devices have a maximum allowed pixel clock
	 * configured from the DT
	 */
	if (mode->clock > priv->max_pixelclock) {
		DBG("Pruning mode: pixel clock too high");
		return MODE_CLOCK_HIGH;
	}

	/*
	 * some devices further limit the max horizontal resolution
	 * configured from the DT
	 */
	if (mode->hdisplay > priv->max_width)
		return MODE_BAD_WIDTH;

	/* filter out modes that would require too much memory bandwidth: */
	bandwidth = mode->hdisplay * mode->vdisplay *
		drm_mode_vrefresh(mode);
	if (bandwidth > priv->max_bandwidth) {
		DBG("Pruning mode: exceeds defined bandwidth limit");
		return MODE_BAD;
	}

	return MODE_OK;
}

void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
		const struct tilcdc_panel_info *info)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	tilcdc_crtc->info = info;
}

void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
					bool simulate_vesa_sync)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
}

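/*
 * Reprogram the pixel clock divider if the functional clock rate no
 * longer matches the cached rate, by briefly disabling and re-enabling
 * the CRTC.
 */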
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	drm_modeset_lock(&crtc->mutex, NULL);
	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
		if (tilcdc_crtc_is_on(crtc)) {
			pm_runtime_get_sync(dev->dev);
			tilcdc_crtc_disable(crtc);

			tilcdc_crtc_set_clk(crtc);

			tilcdc_crtc_enable(crtc);
			pm_runtime_put_sync(dev->dev);
		}
	}
	drm_modeset_unlock(&crtc->mutex);
}

#define SYNC_LOST_COUNT_LIMIT 50

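/*
 * LCDC interrupt handler: handles end of frame (vblank and deferred
 * page flips), palette load completion, frame done, FIFO underflow and
 * sync lost conditions, including the sync-lost flood recovery.
 */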
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat, reg;

	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		unsigned long flags;
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		tilcdc_crtc->last_vblank = now;

		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			skip_event = true;
		}

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

		drm_crtc_handle_vblank(crtc);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			spin_lock_irqsave(&dev->event_lock, flags);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock_irqrestore(&dev->event_lock, flags);
		}

		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	if (stat & LCDC_PL_LOAD_DONE) {
		complete(&tilcdc_crtc->palette_loaded);
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_PL_INT_ENA);
		else
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_V2_PL_INT_ENA);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (priv->rev == 1) {
			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
			if (reg & LCDC_RASTER_ENABLE) {
				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
					     LCDC_RASTER_ENABLE);
				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
					   LCDC_RASTER_ENABLE);
			}
		} else {
			if (tilcdc_crtc->sync_lost_count++ >
			    SYNC_LOST_COUNT_LIMIT) {
				dev_err(dev->dev,
					"%s(0x%08x): Sync lost flood detected, recovering",
					__func__, stat);
				queue_work(system_wq,
					   &tilcdc_crtc->recover_work);
				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
					     LCDC_SYNC_LOST);
				tilcdc_crtc->sync_lost_count = 0;
			}
		}
	}

	if (stat & LCDC_FRAME_DONE) {
		tilcdc_crtc->frame_done = true;
		wake_up(&tilcdc_crtc->frame_done_wq);
		/* rev 1 lcdc appears to hang if the irq is not disabled here */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_FRAME_DONE_INT_ENA);
	}

	/* For revision 2 only */
	if (priv->rev == 2) {
		/* Indicate to LCDC that the interrupt service routine has
		 * completed, see 13.3.6.1.6 in AM335x TRM.
		 */
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	return IRQ_HANDLED;
}

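/*
 * Allocate and initialize the CRTC, its primary plane, the palette
 * buffer and the deferred-unref machinery, and register it with DRM.
 */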
int tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc) {
		dev_err(dev->dev, "allocation failed\n");
		return -ENOMEM;
	}

	init_completion(&tilcdc_crtc->palette_loaded);
	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
					TILCDC_PALETTE_SIZE,
					&tilcdc_crtc->palette_dma_handle,
					GFP_KERNEL | __GFP_ZERO);
	if (!tilcdc_crtc->palette_base)
		return -ENOMEM;
	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;

	crtc = &tilcdc_crtc->base;

	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
	if (ret < 0)
		goto fail;

	mutex_init(&tilcdc_crtc->enable_lock);

	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	drm_flip_work_init(&tilcdc_crtc->unref_work,
			"unref", unref_worker);

	spin_lock_init(&tilcdc_crtc->irq_lock);
	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);

	ret = drm_crtc_init_with_planes(dev, crtc,
					&tilcdc_crtc->primary,
					NULL,
					&tilcdc_crtc_funcs,
					"tilcdc crtc");
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	if (priv->is_componentized) {
		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %pOF\n",
				dev->dev->of_node);
			ret = -EINVAL;
			goto fail;
		}
	}

	priv->crtc = crtc;
	return 0;

fail:
	tilcdc_crtc_destroy(crtc);
	return ret;
}