/*
 * Copyright (C) 2012 Texas Instruments
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_flip_work.h>
#include <drm/drm_plane_helper.h>
#include <linux/workqueue.h>
#include <linux/completion.h>
#include <linux/dma-mapping.h>
#include <linux/of_graph.h>

#include "tilcdc_drv.h"
#include "tilcdc_regs.h"

#define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
#define TILCDC_PALETTE_SIZE			32
#define TILCDC_PALETTE_FIRST_ENTRY		0x4000

struct tilcdc_crtc {
	struct drm_crtc base;

	struct drm_plane primary;
	const struct tilcdc_panel_info *info;
	struct drm_pending_vblank_event *event;
	struct mutex enable_lock;
	bool enabled;
	bool shutdown;
	wait_queue_head_t frame_done_wq;
	bool frame_done;
	spinlock_t irq_lock;

	unsigned int lcd_fck_rate;

	ktime_t last_vblank;

	struct drm_framebuffer *curr_fb;
	struct drm_framebuffer *next_fb;

	/* for deferred fb unref's: */
	struct drm_flip_work unref_work;

	/* Only set if an external encoder is connected */
	bool simulate_vesa_sync;

	int sync_lost_count;
	bool frame_intact;
	struct work_struct recover_work;

	dma_addr_t palette_dma_handle;
	u16 *palette_base;
	struct completion palette_loaded;
};
#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)

static void unref_worker(struct drm_flip_work *work, void *val)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, unref_work);
	struct drm_device *dev = tilcdc_crtc->base.dev;

	mutex_lock(&dev->mode_config.mutex);
	drm_framebuffer_unreference(val);
	mutex_unlock(&dev->mode_config.mutex);
}

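/*
 * Point the LCDC DMA engine at a new framebuffer. The previous
 * framebuffer, if any, is queued for a deferred unref so it is only
 * released once it is no longer being scanned out.
 */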
static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct drm_gem_cma_object *gem;
	dma_addr_t start, end;
	u64 dma_base_and_ceiling;

	gem = drm_fb_cma_get_gem_obj(fb, 0);

	start = gem->paddr + fb->offsets[0] +
		crtc->y * fb->pitches[0] +
		crtc->x * fb->format->cpp[0];

	end = start + (crtc->mode.vdisplay * fb->pitches[0]);

	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
	 * with a single instruction, if available. This makes it less likely
	 * that the LCDC fetches the DMA addresses in the middle of an update.
	 */
	if (priv->rev == 1)
		end -= 1;

	dma_base_and_ceiling = (u64)end << 32 | start;
	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);

	if (tilcdc_crtc->curr_fb)
		drm_flip_work_queue(&tilcdc_crtc->unref_work,
			tilcdc_crtc->curr_fb);

	tilcdc_crtc->curr_fb = fb;
}

/*
 * The driver currently only supports true color formats. For
 * true color the palette block is bypassed, but a 32 byte palette
 * should still be loaded. The first 16-bit entry must be 0x4000 while
 * all other entries must be zeroed.
 */
static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	reinit_completion(&tilcdc_crtc->palette_loaded);

	/* Tell the LCDC where the palette is located. */
	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
		     tilcdc_crtc->palette_dma_handle);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
		     (u32) tilcdc_crtc->palette_dma_handle +
		     TILCDC_PALETTE_SIZE - 1);

	/* Set dma load mode for palette loading only. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* Enable DMA Palette Loaded Interrupt */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);

	/* Enable LCDC DMA and wait for palette to be loaded. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
					  msecs_to_jiffies(50));
	if (ret == 0)
		dev_err(dev->dev, "%s: Palette loading timeout", __func__);

	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	if (priv->rev == 1)
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
}

static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	tilcdc_clear_irqstatus(dev, 0xffffffff);

	if (priv->rev == 1) {
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA);
		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
			LCDC_V2_UNDERFLOW_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}

static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* disable irqs that we might have enabled: */
	if (priv->rev == 1) {
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}

static void reset(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;

	if (priv->rev != 2)
		return;

	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
	usleep_range(250, 1000);
	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
}

/*
 * Calculate the percentage difference between the requested pixel clock rate
 * and the effective rate resulting from calculating the clock divider value.
 */
static unsigned int tilcdc_pclk_diff(unsigned long rate,
				     unsigned long real_rate)
{
	int r = rate / 100, rr = real_rate / 100;

	return (unsigned int)(abs(((rr - r) * 100) / r));
}

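/*
 * Program the pixel clock. The LCDC functional clock is requested at
 * twice the mode clock so the standard divider of 2 can be used; if the
 * clock framework cannot provide that rate, a divider is derived from
 * whatever rate the clock is actually running at. For example, with a
 * 148.5 MHz functional clock and a 74.25 MHz mode clock the divider
 * stays at 2.
 */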
static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long clk_rate, real_rate, req_rate;
	unsigned int clkdiv;
	int ret;

	clkdiv = 2; /* first try using a standard divider of 2 */

	/* mode.clock is in KHz, set_rate wants parameter in Hz */
	req_rate = crtc->mode.clock * 1000;

	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
	clk_rate = clk_get_rate(priv->clk);
	if (ret < 0) {
		/*
		 * If we fail to set the clock rate (some architectures don't
		 * use the common clock framework yet and may not implement
		 * all the clk API calls for every clock), try the next best
		 * thing: adjusting the clock divider, unless clk_get_rate()
		 * failed as well.
		 */
		if (!clk_rate) {
			/* Nothing more we can do. Just bail out. */
			dev_err(dev->dev,
				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
			return;
		}

		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);

		/*
		 * Emit a warning if the real clock rate resulting from the
		 * calculated divider differs much from the requested rate.
		 *
		 * 5% is an arbitrary value - LCDs are usually quite tolerant
		 * about pixel clock rates.
		 */
		real_rate = clkdiv * req_rate;

		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
			dev_warn(dev->dev,
				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
				 clk_rate, real_rate);
		}
	}

	tilcdc_crtc->lcd_fck_rate = clk_rate;

	DBG("lcd_clk=%u, mode clock=%d, div=%u",
	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
		     LCDC_RASTER_MODE);

	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
				LCDC_V2_CORE_CLK_EN);
}

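/*
 * Program the LCDC for the adjusted mode in the current atomic state:
 * DMA burst size and FIFO threshold, raster timings, display type and
 * pixel format, sync polarities, pixel clock, palette and the initial
 * scanout address. Called from tilcdc_crtc_enable() before the raster
 * is enabled.
 */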
static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
	struct drm_framebuffer *fb = crtc->primary->state->fb;

	if (WARN_ON(!info))
		return;

	if (WARN_ON(!fb))
		return;

	/* Configure the Burst Size and fifo threshold of DMA: */
	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
	switch (info->dma_burst_sz) {
	case 1:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
		break;
	case 2:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
		break;
	case 4:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
		break;
	case 8:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
		break;
	case 16:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
		break;
	default:
		dev_err(dev->dev, "invalid burst size\n");
		return;
	}
	reg |= (info->fifo_th << 8);
	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);

	/* Configure timings: */
	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);

	/* Set AC Bias Period and Number of Transitions per Interrupt: */
	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);

	/*
	 * subtract one from hfp, hbp, hsw because the hardware uses
	 * a value of 0 as 1
	 */
	if (priv->rev == 2) {
		/* clear bits we're going to set */
		reg &= ~0x78000033;
		reg |= ((hfp-1) & 0x300) >> 8;
		reg |= ((hbp-1) & 0x300) >> 4;
		reg |= ((hsw-1) & 0x3c0) << 21;
	}
	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);

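	/*
	 * LCDC_RASTER_TIMING_0 packing, as programmed below: pixels per
	 * line is written as (hdisplay / 16) - 1 in bits 9:4, HSW in bits
	 * 15:10, HFP in bits 23:16 and HBP in bits 31:24 (see the raster
	 * timing registers in the AM335x TRM). On rev 2 the extra
	 * pixels-per-line bit lands in register bit 3.
	 */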
	reg = (((mode->hdisplay >> 4) - 1) << 4) |
		(((hbp-1) & 0xff) << 24) |
		(((hfp-1) & 0xff) << 16) |
		(((hsw-1) & 0x3f) << 10);
	if (priv->rev == 2)
		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);

	reg = ((mode->vdisplay - 1) & 0x3ff) |
		((vbp & 0xff) << 24) |
		((vfp & 0xff) << 16) |
		(((vsw-1) & 0x3f) << 10);
	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);

	/*
	 * Be sure to set bit 10 (LPP_B10) for the V2 LCDC controller;
	 * otherwise the panel height is limited to 1024 lines, which
	 * would prevent 1920x1080 from being supported.
	 */
	if (priv->rev == 2) {
		if ((mode->vdisplay - 1) & 0x400) {
			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		} else {
			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		}
	}

	/* Configure display type: */
	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
		  0x000ff000 /* Palette Loading Delay bits */);
	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
	if (info->tft_alt_mode)
		reg |= LCDC_TFT_ALT_ENABLE;
	if (priv->rev == 2) {
		switch (fb->format->format) {
		case DRM_FORMAT_BGR565:
		case DRM_FORMAT_RGB565:
			break;
		case DRM_FORMAT_XBGR8888:
		case DRM_FORMAT_XRGB8888:
			reg |= LCDC_V2_TFT_24BPP_UNPACK;
			/* fallthrough */
		case DRM_FORMAT_BGR888:
		case DRM_FORMAT_RGB888:
			reg |= LCDC_V2_TFT_24BPP_MODE;
			break;
		default:
			dev_err(dev->dev, "invalid pixel format\n");
			return;
		}
	}
	/* FDD: FIFO DMA request delay, programmed into bits 19:12 */
	reg |= info->fdd << 12;
	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);

	if (info->invert_pxl_clk)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);

	if (info->sync_ctrl)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);

	if (info->sync_edge)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);

	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);

	if (info->raster_order)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
	else
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);

	tilcdc_crtc_set_clk(crtc);

	tilcdc_crtc_load_palette(crtc);

	set_scanout(crtc, fb);

	drm_framebuffer_reference(fb);

	crtc->hwmode = crtc->state->adjusted_mode;
}

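/*
 * Power up and start the CRTC: runtime-resume the device, reset the
 * LCDC, program the mode, enable the interrupts and finally enable the
 * raster. Guarded by enable_lock so enable and disable cannot race,
 * and a no-op once the shutdown flag has been set.
 */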
static void tilcdc_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long flags;

	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
	mutex_lock(&tilcdc_crtc->enable_lock);
	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}

	pm_runtime_get_sync(dev->dev);

	reset(crtc);

	tilcdc_crtc_set_mode(crtc);

	tilcdc_crtc_enable_irqs(dev);

	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* There is no real chance for a race here as the time stamp
	 * is taken before the raster DMA is started. The spin-lock is
	 * taken to have a memory barrier after taking the time-stamp
	 * and to avoid a context switch between taking the stamp and
	 * enabling the raster.
	 */
	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
	tilcdc_crtc->last_vblank = ktime_get();
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	drm_crtc_vblank_on(crtc);

	tilcdc_crtc->enabled = true;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}

static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_crtc_state *old_state)
{
	tilcdc_crtc_enable(crtc);
}

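/*
 * Stop the CRTC: disable the raster and wait for the frame-done irq so
 * the current frame finishes cleanly before interrupts are disabled and
 * the device is runtime-suspended. When called with shutdown=true the
 * CRTC is additionally marked as shut down and can no longer be
 * re-enabled.
 */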
static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (shutdown)
		tilcdc_crtc->shutdown = true;
	if (!tilcdc_crtc->enabled) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}
	tilcdc_crtc->frame_done = false;
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/*
	 * Wait for the framedone irq, which will still arrive before we
	 * put things to sleep.
	 */
	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
				 tilcdc_crtc->frame_done,
				 msecs_to_jiffies(500));
	if (ret == 0)
		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
			__func__);

	drm_crtc_vblank_off(crtc);

	tilcdc_crtc_disable_irqs(dev);

	pm_runtime_put_sync(dev->dev);

	if (tilcdc_crtc->next_fb) {
		drm_flip_work_queue(&tilcdc_crtc->unref_work,
				    tilcdc_crtc->next_fb);
		tilcdc_crtc->next_fb = NULL;
	}

	if (tilcdc_crtc->curr_fb) {
		drm_flip_work_queue(&tilcdc_crtc->unref_work,
				    tilcdc_crtc->curr_fb);
		tilcdc_crtc->curr_fb = NULL;
	}

	drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);

	tilcdc_crtc->enabled = false;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}

static void tilcdc_crtc_disable(struct drm_crtc *crtc)
{
	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
	tilcdc_crtc_off(crtc, false);
}

static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_crtc_state *old_state)
{
	tilcdc_crtc_disable(crtc);
}

void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, true);
}

static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
{
	return crtc->state && crtc->state->enable && crtc->state->active;
}

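/*
 * Worker used to recover from a flood of sync-lost interrupts (see
 * tilcdc_crtc_irq()): perform a full disable/enable cycle of the CRTC
 * under the modeset lock.
 */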
static void tilcdc_crtc_recover_work(struct work_struct *work)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, recover_work);
	struct drm_crtc *crtc = &tilcdc_crtc->base;

	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);

	drm_modeset_lock(&crtc->mutex, NULL);

	if (!tilcdc_crtc_is_on(crtc))
		goto out;

	tilcdc_crtc_disable(crtc);
	tilcdc_crtc_enable(crtc);
out:
	drm_modeset_unlock(&crtc->mutex);
}

static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;

	drm_modeset_lock(&crtc->mutex, NULL);
	tilcdc_crtc_disable(crtc);
	drm_modeset_unlock(&crtc->mutex);

	flush_workqueue(priv->wq);

	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
	drm_flip_work_cleanup(&tilcdc_crtc->unref_work);
}

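/*
 * Queue a page flip. If the next vblank is still further away than
 * TILCDC_VBLANK_SAFETY_THRESHOLD_US the new scanout address is written
 * immediately; otherwise it is deferred to the end-of-frame interrupt
 * to avoid racing with the hardware latching the DMA addresses.
 */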
int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	drm_framebuffer_reference(fb);

	crtc->primary->fb = fb;
	tilcdc_crtc->event = event;

	mutex_lock(&tilcdc_crtc->enable_lock);

	if (tilcdc_crtc->enabled) {
		unsigned long flags;
		ktime_t next_vblank;
		s64 tdiff;

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
					   1000000 / crtc->hwmode.vrefresh);
		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
			tilcdc_crtc->next_fb = fb;
		else
			set_scanout(crtc, fb);

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
	}

	mutex_unlock(&tilcdc_crtc->enable_lock);

	return 0;
}

static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adjusted_mode)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	if (!tilcdc_crtc->simulate_vesa_sync)
		return true;

	/*
	 * tilcdc does not generate VESA-compliant sync: it aligns VS on
	 * the second edge of HS instead of the first. Use adjusted_mode
	 * to align both rising edges and add an HSKEW offset to
	 * compensate for the shifted sync.
	 */
	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;

	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
	} else {
		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
	}

	return true;
}

static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_crtc_state *state)
{
	struct drm_display_mode *mode = &state->mode;
	int ret;

	/* If we are not active we don't care */
	if (!state->active)
		return 0;

	if (state->state->planes[0].ptr != crtc->primary ||
	    state->state->planes[0].state == NULL ||
	    state->state->planes[0].state->crtc != crtc) {
		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
		return -EINVAL;
	}

	ret = tilcdc_crtc_mode_valid(crtc, mode);
	if (ret) {
		dev_dbg(crtc->dev->dev, "Mode \"%s\" not valid", mode->name);
		return -EINVAL;
	}

	return 0;
}

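/*
 * The LCDC end-of-frame interrupt stays enabled for as long as the CRTC
 * is enabled (it also drives the page flip machinery), so there is
 * nothing to do in the vblank enable/disable hooks.
 */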
static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
{
	return 0;
}

static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
{
}

static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
	.destroy        = tilcdc_crtc_destroy,
	.set_config     = drm_atomic_helper_set_config,
	.page_flip      = drm_atomic_helper_page_flip,
	.reset		= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= tilcdc_crtc_enable_vblank,
	.disable_vblank	= tilcdc_crtc_disable_vblank,
};

static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
		.mode_fixup     = tilcdc_crtc_mode_fixup,
		.atomic_check	= tilcdc_crtc_atomic_check,
		.atomic_enable	= tilcdc_crtc_atomic_enable,
		.atomic_disable	= tilcdc_crtc_atomic_disable,
};

int tilcdc_crtc_max_width(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int max_width = 0;

	if (priv->rev == 1)
		max_width = 1024;
	else if (priv->rev == 2)
		max_width = 2048;

	return max_width;
}

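/*
 * Validate a mode against the LCDC limits: maximum width per IP
 * revision, width a multiple of 16, porch and sync widths that fit in
 * their raster timing register fields, and the optional DT-provided
 * pixel clock, width and memory bandwidth limits.
 */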
int tilcdc_crtc_mode_valid(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
	unsigned int bandwidth;
	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;

	/*
	 * check to see if the width is within the range that
	 * the LCD Controller physically supports
	 */
	if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
		return MODE_VIRTUAL_X;

	/* width must be multiple of 16 */
	if (mode->hdisplay & 0xf)
		return MODE_VIRTUAL_X;

	if (mode->vdisplay > 2048)
		return MODE_VIRTUAL_Y;

	DBG("Processing mode %dx%d@%d with pixel clock %d",
		mode->hdisplay, mode->vdisplay,
		drm_mode_vrefresh(mode), mode->clock);

	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	if ((hbp-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Back Porch out of range");
		return MODE_HBLANK_WIDE;
	}

	if ((hfp-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Front Porch out of range");
		return MODE_HBLANK_WIDE;
	}

	if ((hsw-1) & ~0x3ff) {
		DBG("Pruning mode: Horizontal Sync Width out of range");
		return MODE_HSYNC_WIDE;
	}

	if (vbp & ~0xff) {
		DBG("Pruning mode: Vertical Back Porch out of range");
		return MODE_VBLANK_WIDE;
	}

	if (vfp & ~0xff) {
		DBG("Pruning mode: Vertical Front Porch out of range");
		return MODE_VBLANK_WIDE;
	}

	if ((vsw-1) & ~0x3f) {
		DBG("Pruning mode: Vertical Sync Width out of range");
		return MODE_VSYNC_WIDE;
	}

	/*
	 * some devices have a maximum allowed pixel clock
	 * configured from the DT
	 */
	if (mode->clock > priv->max_pixelclock) {
		DBG("Pruning mode: pixel clock too high");
		return MODE_CLOCK_HIGH;
	}

	/*
	 * some devices further limit the max horizontal resolution
	 * configured from the DT
	 */
	if (mode->hdisplay > priv->max_width)
		return MODE_BAD_WIDTH;

	/* filter out modes that would require too much memory bandwidth: */
	bandwidth = mode->hdisplay * mode->vdisplay *
		drm_mode_vrefresh(mode);
	if (bandwidth > priv->max_bandwidth) {
		DBG("Pruning mode: exceeds defined bandwidth limit");
		return MODE_BAD;
	}

	return MODE_OK;
}

void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
		const struct tilcdc_panel_info *info)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	tilcdc_crtc->info = info;
}

void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
					bool simulate_vesa_sync)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
}

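/*
 * Re-program the pixel clock divider if the LCDC functional clock rate
 * has changed behind our back (e.g. after a clock rate transition
 * notification); this requires a full disable/enable cycle of the
 * raster.
 */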
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	drm_modeset_lock(&crtc->mutex, NULL);
	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
		if (tilcdc_crtc_is_on(crtc)) {
			pm_runtime_get_sync(dev->dev);
			tilcdc_crtc_disable(crtc);

			tilcdc_crtc_set_clk(crtc);

			tilcdc_crtc_enable(crtc);
			pm_runtime_put_sync(dev->dev);
		}
	}
	drm_modeset_unlock(&crtc->mutex);
}

#define SYNC_LOST_COUNT_LIMIT 50

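/*
 * Main LCDC interrupt handler: acknowledges the status bits and handles
 * end-of-frame (vblank, deferred page flips and vblank events), FIFO
 * underflow, palette-load completion, sync-lost recovery and frame-done
 * wakeups.
 */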
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat, reg;

	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		unsigned long flags;
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		tilcdc_crtc->last_vblank = now;

		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			skip_event = true;
		}

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

		drm_crtc_handle_vblank(crtc);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			spin_lock_irqsave(&dev->event_lock, flags);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock_irqrestore(&dev->event_lock, flags);
		}

		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	if (stat & LCDC_PL_LOAD_DONE) {
		complete(&tilcdc_crtc->palette_loaded);
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_PL_INT_ENA);
		else
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_V2_PL_INT_ENA);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (priv->rev == 1) {
			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
			if (reg & LCDC_RASTER_ENABLE) {
				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
					     LCDC_RASTER_ENABLE);
				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
					   LCDC_RASTER_ENABLE);
			}
		} else {
			if (tilcdc_crtc->sync_lost_count++ >
			    SYNC_LOST_COUNT_LIMIT) {
				dev_err(dev->dev,
					"%s(0x%08x): Sync lost flood detected, recovering",
					__func__, stat);
				queue_work(system_wq,
					   &tilcdc_crtc->recover_work);
				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
					     LCDC_SYNC_LOST);
				tilcdc_crtc->sync_lost_count = 0;
			}
		}
	}

	if (stat & LCDC_FRAME_DONE) {
		tilcdc_crtc->frame_done = true;
		wake_up(&tilcdc_crtc->frame_done_wq);
		/* rev 1 lcdc appears to hang if the irq is not disabled here */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_FRAME_DONE_INT_ENA);
	}

	/* For revision 2 only */
	if (priv->rev == 2) {
		/* Indicate to LCDC that the interrupt service routine has
		 * completed, see 13.3.6.1.6 in AM335x TRM.
		 */
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	return IRQ_HANDLED;
}

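/*
 * Create the single CRTC of the LCDC along with its primary plane and
 * the 32-byte DMA-coherent palette buffer that
 * tilcdc_crtc_load_palette() feeds to the hardware.
 */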
int tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc) {
		dev_err(dev->dev, "allocation failed\n");
		return -ENOMEM;
	}

	init_completion(&tilcdc_crtc->palette_loaded);
	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
					TILCDC_PALETTE_SIZE,
					&tilcdc_crtc->palette_dma_handle,
					GFP_KERNEL | __GFP_ZERO);
	if (!tilcdc_crtc->palette_base)
		return -ENOMEM;
	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;

	crtc = &tilcdc_crtc->base;

	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
	if (ret < 0)
		goto fail;

	mutex_init(&tilcdc_crtc->enable_lock);

	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	drm_flip_work_init(&tilcdc_crtc->unref_work,
			"unref", unref_worker);

	spin_lock_init(&tilcdc_crtc->irq_lock);
	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);

	ret = drm_crtc_init_with_planes(dev, crtc,
					&tilcdc_crtc->primary,
					NULL,
					&tilcdc_crtc_funcs,
					"tilcdc crtc");
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	if (priv->is_componentized) {
		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %pOF\n",
				dev->dev->of_node);
			ret = -EINVAL;
			goto fail;
		}
	}

	priv->crtc = crtc;
	return 0;

fail:
	tilcdc_crtc_destroy(crtc);
	return ret;
}