1 /*
2  * Copyright (C) 2012 Texas Instruments
3  * Author: Rob Clark <robdclark@gmail.com>
4  *
5  * This program is free software; you can redistribute it and/or modify it
6  * under the terms of the GNU General Public License version 2 as published by
7  * the Free Software Foundation.
8  *
9  * This program is distributed in the hope that it will be useful, but WITHOUT
10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
12  * more details.
13  *
14  * You should have received a copy of the GNU General Public License along with
15  * this program.  If not, see <http://www.gnu.org/licenses/>.
16  */
17 
18 #include <drm/drm_atomic.h>
19 #include <drm/drm_atomic_helper.h>
20 #include <drm/drm_crtc.h>
21 #include <drm/drm_flip_work.h>
22 #include <drm/drm_plane_helper.h>
23 #include <linux/workqueue.h>
24 #include <linux/completion.h>
25 #include <linux/dma-mapping.h>
26 
27 #include "tilcdc_drv.h"
28 #include "tilcdc_regs.h"
29 
30 #define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
31 #define TILCDC_PALETTE_SIZE			32
32 #define TILCDC_PALETTE_FIRST_ENTRY		0x4000
33 
34 struct tilcdc_crtc {
35 	struct drm_crtc base;
36 
37 	struct drm_plane primary;
38 	const struct tilcdc_panel_info *info;
39 	struct drm_pending_vblank_event *event;
40 	struct mutex enable_lock;
41 	bool enabled;
42 	bool shutdown;
43 	wait_queue_head_t frame_done_wq;
44 	bool frame_done;
45 	spinlock_t irq_lock;
46 
47 	unsigned int lcd_fck_rate;
48 
49 	ktime_t last_vblank;
50 
51 	struct drm_framebuffer *curr_fb;
52 	struct drm_framebuffer *next_fb;
53 
54 	/* for deferred fb unrefs: */
55 	struct drm_flip_work unref_work;
56 
57 	/* Only set if an external encoder is connected */
58 	bool simulate_vesa_sync;
59 
60 	int sync_lost_count;
61 	bool frame_intact;
62 	struct work_struct recover_work;
63 
64 	dma_addr_t palette_dma_handle;
65 	u16 *palette_base;
66 	struct completion palette_loaded;
67 };
68 #define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)
69 
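/*
 * Deferred framebuffer unref: drm_flip_work runs this on the driver
 * workqueue so the final unreference never happens in interrupt context.
 */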
70 static void unref_worker(struct drm_flip_work *work, void *val)
71 {
72 	struct tilcdc_crtc *tilcdc_crtc =
73 		container_of(work, struct tilcdc_crtc, unref_work);
74 	struct drm_device *dev = tilcdc_crtc->base.dev;
75 
76 	mutex_lock(&dev->mode_config.mutex);
77 	drm_framebuffer_unreference(val);
78 	mutex_unlock(&dev->mode_config.mutex);
79 }
80 
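/*
 * Point the LCDC DMA engine at the given framebuffer by programming the
 * frame buffer base and ceiling addresses, and queue the framebuffer that
 * was previously being scanned out for a deferred unref.
 */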
81 static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
82 {
83 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
84 	struct drm_device *dev = crtc->dev;
85 	struct tilcdc_drm_private *priv = dev->dev_private;
86 	struct drm_gem_cma_object *gem;
87 	dma_addr_t start, end;
88 	u64 dma_base_and_ceiling;
89 
90 	gem = drm_fb_cma_get_gem_obj(fb, 0);
91 
92 	start = gem->paddr + fb->offsets[0] +
93 		crtc->y * fb->pitches[0] +
94 		crtc->x * drm_format_plane_cpp(fb->pixel_format, 0);
95 
96 	end = start + (crtc->mode.vdisplay * fb->pitches[0]);
97 
98 	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
99 	 * with a single instruction, if available. This should make it less
100 	 * likely that LCDC would fetch the DMA addresses in the middle of
101 	 * an update.
102 	 */
103 	if (priv->rev == 1)
104 		end -= 1;
105 
106 	dma_base_and_ceiling = (u64)end << 32 | start;
107 	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
108 
109 	if (tilcdc_crtc->curr_fb)
110 		drm_flip_work_queue(&tilcdc_crtc->unref_work,
111 			tilcdc_crtc->curr_fb);
112 
113 	tilcdc_crtc->curr_fb = fb;
114 }
115 
116 /*
117  * The driver currently supports only true color formats. For
118  * true color the palette block is bypassed, but a 32-byte palette
119  * should still be loaded. The first 16-bit entry must be 0x4000 while
120  * all other entries must be zeroed.
121  */
122 static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
123 {
124 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
125 	struct drm_device *dev = crtc->dev;
126 	struct tilcdc_drm_private *priv = dev->dev_private;
127 	int ret;
128 
129 	reinit_completion(&tilcdc_crtc->palette_loaded);
130 
131 	/* Tell the LCDC where the palette is located. */
132 	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
133 		     tilcdc_crtc->palette_dma_handle);
134 	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
135 		     (u32) tilcdc_crtc->palette_dma_handle +
136 		     TILCDC_PALETTE_SIZE - 1);
137 
138 	/* Set dma load mode for palette loading only. */
139 	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
140 			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
141 			  LCDC_PALETTE_LOAD_MODE_MASK);
142 
143 	/* Enable DMA Palette Loaded Interrupt */
144 	if (priv->rev == 1)
145 		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
146 	else
147 		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);
148 
149 	/* Enable LCDC DMA and wait for palette to be loaded. */
150 	tilcdc_clear_irqstatus(dev, 0xffffffff);
151 	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
152 
153 	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
154 					  msecs_to_jiffies(50));
155 	if (ret == 0)
156 		dev_err(dev->dev, "%s: Palette loading timeout", __func__);
157 
158 	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
159 	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
160 	if (priv->rev == 1)
161 		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
162 	else
163 		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
164 }
165 
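/*
 * Enable the sync lost, frame done, underflow and end-of-frame interrupts,
 * using the rev 1 or rev 2 interrupt register layout as appropriate.
 */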
166 static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
167 {
168 	struct tilcdc_drm_private *priv = dev->dev_private;
169 
170 	tilcdc_clear_irqstatus(dev, 0xffffffff);
171 
172 	if (priv->rev == 1) {
173 		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
174 			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
175 			LCDC_V1_UNDERFLOW_INT_ENA);
176 		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
177 			LCDC_V1_END_OF_FRAME_INT_ENA);
178 	} else {
179 		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
180 			LCDC_V2_UNDERFLOW_INT_ENA |
181 			LCDC_V2_END_OF_FRAME0_INT_ENA |
182 			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
183 	}
184 }
185 
186 static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
187 {
188 	struct tilcdc_drm_private *priv = dev->dev_private;
189 
190 	/* disable irqs that we might have enabled: */
191 	if (priv->rev == 1) {
192 		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
193 			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
194 			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
195 		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
196 			LCDC_V1_END_OF_FRAME_INT_ENA);
197 	} else {
198 		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
199 			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
200 			LCDC_V2_END_OF_FRAME0_INT_ENA |
201 			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
202 	}
203 }
204 
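/*
 * Pulse the LCDC main reset bit. Only the rev 2 IP has LCDC_CLK_RESET_REG,
 * so this is a no-op for rev 1.
 */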
205 static void reset(struct drm_crtc *crtc)
206 {
207 	struct drm_device *dev = crtc->dev;
208 	struct tilcdc_drm_private *priv = dev->dev_private;
209 
210 	if (priv->rev != 2)
211 		return;
212 
213 	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
214 	usleep_range(250, 1000);
215 	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
216 }
217 
218 /*
219  * Calculate the percentage difference between the requested pixel clock rate
220  * and the effective rate resulting from calculating the clock divider value.
221  */
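/*
 * For example, rate = 74250000 and real_rate = 72000000 give
 * abs((720000 - 742500) * 100 / 742500) = 3, i.e. roughly a 3% deviation.
 */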
222 static unsigned int tilcdc_pclk_diff(unsigned long rate,
223 				     unsigned long real_rate)
224 {
225 	int r = rate / 100, rr = real_rate / 100;
226 
227 	return (unsigned int)(abs(((rr - r) * 100) / r));
228 }
229 
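/*
 * Configure the LCD functional clock and the raster clock divider so that
 * the resulting pixel clock matches crtc->mode.clock as closely as
 * possible, preferring a fixed divider of 2 when clk_set_rate() succeeds.
 */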
230 static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
231 {
232 	struct drm_device *dev = crtc->dev;
233 	struct tilcdc_drm_private *priv = dev->dev_private;
234 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
235 	unsigned long clk_rate, real_rate, req_rate;
236 	unsigned int clkdiv;
237 	int ret;
238 
239 	clkdiv = 2; /* first try using a standard divider of 2 */
240 
241 	/* mode.clock is in kHz, clk_set_rate() wants the rate in Hz */
242 	req_rate = crtc->mode.clock * 1000;
243 
244 	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
245 	clk_rate = clk_get_rate(priv->clk);
246 	if (ret < 0) {
247 		/*
248 		 * If we fail to set the clock rate (some architectures don't
249 		 * use the common clock framework yet and may not implement
250 		 * all the clk API calls for every clock), try the next best
251 		 * thing: adjusting the clock divider, unless clk_get_rate()
252 		 * failed as well.
253 		 */
254 		if (!clk_rate) {
255 			/* Nothing more we can do. Just bail out. */
256 			dev_err(dev->dev,
257 				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
258 			return;
259 		}
260 
261 		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);
262 
263 		/*
264 		 * Emit a warning if the real clock rate resulting from the
265 		 * calculated divider differs much from the requested rate.
266 		 *
267 		 * 5% is an arbitrary value - LCDs are usually quite tolerant
268 		 * about pixel clock rates.
269 		 */
270 		real_rate = clkdiv * req_rate;
271 
272 		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
273 			dev_warn(dev->dev,
274 				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
275 				 clk_rate, real_rate);
276 		}
277 	}
278 
279 	tilcdc_crtc->lcd_fck_rate = clk_rate;
280 
281 	DBG("lcd_clk=%u, mode clock=%d, div=%u",
282 	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);
283 
284 	/* Configure the LCD clock divisor. */
285 	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
286 		     LCDC_RASTER_MODE);
287 
288 	if (priv->rev == 2)
289 		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
290 				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
291 				LCDC_V2_CORE_CLK_EN);
292 }
293 
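/*
 * Program the complete raster configuration for the current mode: DMA
 * burst size and FIFO threshold, horizontal and vertical timings, display
 * type and pixel format, sync polarities, pixel clock and the initial
 * scanout buffer.
 */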
294 static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
295 {
296 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
297 	struct drm_device *dev = crtc->dev;
298 	struct tilcdc_drm_private *priv = dev->dev_private;
299 	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
300 	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
301 	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
302 	struct drm_framebuffer *fb = crtc->primary->state->fb;
303 
304 	if (WARN_ON(!info))
305 		return;
306 
307 	if (WARN_ON(!fb))
308 		return;
309 
310 	/* Configure the DMA burst size and FIFO threshold: */
311 	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
312 	switch (info->dma_burst_sz) {
313 	case 1:
314 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
315 		break;
316 	case 2:
317 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
318 		break;
319 	case 4:
320 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
321 		break;
322 	case 8:
323 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
324 		break;
325 	case 16:
326 		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
327 		break;
328 	default:
329 		dev_err(dev->dev, "invalid burst size\n");
330 		return;
331 	}
332 	reg |= (info->fifo_th << 8);
333 	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);
334 
335 	/* Configure timings: */
336 	hbp = mode->htotal - mode->hsync_end;
337 	hfp = mode->hsync_start - mode->hdisplay;
338 	hsw = mode->hsync_end - mode->hsync_start;
339 	vbp = mode->vtotal - mode->vsync_end;
340 	vfp = mode->vsync_start - mode->vdisplay;
341 	vsw = mode->vsync_end - mode->vsync_start;
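	/*
	 * As an example, the standard 1920x1080@60 CEA mode (hsync 2008-2052,
	 * htotal 2200, vsync 1084-1089, vtotal 1125) gives hfp=88, hsw=44,
	 * hbp=148, vfp=4, vsw=5 and vbp=36.
	 */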
342 
343 	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
344 	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);
345 
346 	/* Set AC Bias Period and Number of Transitions per Interrupt: */
347 	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
348 	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
349 		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);
350 
351 	/*
352 	 * subtract one from hfp, hbp, hsw because the hardware uses
353 	 * a value of 0 as 1
354 	 */
355 	if (priv->rev == 2) {
356 		/* clear bits we're going to set */
357 		reg &= ~0x78000033;
358 		reg |= ((hfp-1) & 0x300) >> 8;
359 		reg |= ((hbp-1) & 0x300) >> 4;
360 		reg |= ((hsw-1) & 0x3c0) << 21;
361 	}
362 	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);
363 
364 	reg = (((mode->hdisplay >> 4) - 1) << 4) |
365 		(((hbp-1) & 0xff) << 24) |
366 		(((hfp-1) & 0xff) << 16) |
367 		(((hsw-1) & 0x3f) << 10);
368 	if (priv->rev == 2)
369 		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
370 	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);
371 
372 	reg = ((mode->vdisplay - 1) & 0x3ff) |
373 		((vbp & 0xff) << 24) |
374 		((vfp & 0xff) << 16) |
375 		(((vsw-1) & 0x3f) << 10);
376 	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);
377 
378 	/*
379 	 * be sure to set Bit 10 for the V2 LCDC controller,
380 	 * otherwise the active video height is limited to 1024 lines,
381 	 * which prevents modes such as 1920x1080 from being supported.
382 	 */
383 	if (priv->rev == 2) {
384 		if ((mode->vdisplay - 1) & 0x400) {
385 			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
386 				LCDC_LPP_B10);
387 		} else {
388 			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
389 				LCDC_LPP_B10);
390 		}
391 	}
392 
393 	/* Configure display type: */
394 	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
395 		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
396 		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
397 		  0x000ff000 /* Palette Loading Delay bits */);
398 	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
399 	if (info->tft_alt_mode)
400 		reg |= LCDC_TFT_ALT_ENABLE;
401 	if (priv->rev == 2) {
402 		switch (fb->pixel_format) {
403 		case DRM_FORMAT_BGR565:
404 		case DRM_FORMAT_RGB565:
405 			break;
406 		case DRM_FORMAT_XBGR8888:
407 		case DRM_FORMAT_XRGB8888:
408 			reg |= LCDC_V2_TFT_24BPP_UNPACK;
409 			/* fallthrough */
410 		case DRM_FORMAT_BGR888:
411 		case DRM_FORMAT_RGB888:
412 			reg |= LCDC_V2_TFT_24BPP_MODE;
413 			break;
414 		default:
415 			dev_err(dev->dev, "invalid pixel format\n");
416 			return;
417 		}
418 	}
419 	reg |= info->fdd << 12;
420 	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);
421 
422 	if (info->invert_pxl_clk)
423 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
424 	else
425 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
426 
427 	if (info->sync_ctrl)
428 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
429 	else
430 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
431 
432 	if (info->sync_edge)
433 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
434 	else
435 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
436 
437 	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
438 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
439 	else
440 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
441 
442 	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
443 		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
444 	else
445 		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
446 
447 	if (info->raster_order)
448 		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
449 	else
450 		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
451 
452 	tilcdc_crtc_set_clk(crtc);
453 
454 	tilcdc_crtc_load_palette(crtc);
455 
456 	set_scanout(crtc, fb);
457 
458 	drm_framebuffer_reference(fb);
459 
460 	crtc->hwmode = crtc->state->adjusted_mode;
461 }
462 
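/*
 * Power up and start the raster engine. Serialized against disable and
 * shutdown through enable_lock; does nothing if the CRTC is already
 * enabled or has been shut down.
 */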
463 static void tilcdc_crtc_enable(struct drm_crtc *crtc)
464 {
465 	struct drm_device *dev = crtc->dev;
466 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
467 
468 	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
469 	mutex_lock(&tilcdc_crtc->enable_lock);
470 	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
471 		mutex_unlock(&tilcdc_crtc->enable_lock);
472 		return;
473 	}
474 
475 	pm_runtime_get_sync(dev->dev);
476 
477 	reset(crtc);
478 
479 	tilcdc_crtc_set_mode(crtc);
480 
481 	tilcdc_crtc_enable_irqs(dev);
482 
483 	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
484 	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
485 			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
486 			  LCDC_PALETTE_LOAD_MODE_MASK);
487 	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
488 
489 	drm_crtc_vblank_on(crtc);
490 
491 	tilcdc_crtc->enabled = true;
492 	mutex_unlock(&tilcdc_crtc->enable_lock);
493 }
494 
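/*
 * Stop the raster engine and wait for the final frame done interrupt
 * before disabling interrupts and dropping the runtime PM reference.
 * When called with shutdown set, any later enable is refused as well.
 */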
495 static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
496 {
497 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
498 	struct drm_device *dev = crtc->dev;
499 	struct tilcdc_drm_private *priv = dev->dev_private;
500 	int ret;
501 
502 	mutex_lock(&tilcdc_crtc->enable_lock);
503 	if (shutdown)
504 		tilcdc_crtc->shutdown = true;
505 	if (!tilcdc_crtc->enabled) {
506 		mutex_unlock(&tilcdc_crtc->enable_lock);
507 		return;
508 	}
509 	tilcdc_crtc->frame_done = false;
510 	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
511 
512 	/*
513 	 * Wait for the framedone irq, which will still arrive before
514 	 * putting things to sleep.
515 	 */
516 	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
517 				 tilcdc_crtc->frame_done,
518 				 msecs_to_jiffies(500));
519 	if (ret == 0)
520 		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
521 			__func__);
522 
523 	drm_crtc_vblank_off(crtc);
524 
525 	tilcdc_crtc_disable_irqs(dev);
526 
527 	pm_runtime_put_sync(dev->dev);
528 
529 	if (tilcdc_crtc->next_fb) {
530 		drm_flip_work_queue(&tilcdc_crtc->unref_work,
531 				    tilcdc_crtc->next_fb);
532 		tilcdc_crtc->next_fb = NULL;
533 	}
534 
535 	if (tilcdc_crtc->curr_fb) {
536 		drm_flip_work_queue(&tilcdc_crtc->unref_work,
537 				    tilcdc_crtc->curr_fb);
538 		tilcdc_crtc->curr_fb = NULL;
539 	}
540 
541 	drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);
542 	tilcdc_crtc->last_vblank = 0;
543 
544 	tilcdc_crtc->enabled = false;
545 	mutex_unlock(&tilcdc_crtc->enable_lock);
546 }
547 
548 static void tilcdc_crtc_disable(struct drm_crtc *crtc)
549 {
550 	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
551 	tilcdc_crtc_off(crtc, false);
552 }
553 
554 void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
555 {
556 	tilcdc_crtc_off(crtc, true);
557 }
558 
559 static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
560 {
561 	return crtc->state && crtc->state->enable && crtc->state->active;
562 }
563 
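/*
 * Worker used to recover from a sync lost interrupt flood by running a
 * full disable/enable cycle of the CRTC from process context.
 */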
564 static void tilcdc_crtc_recover_work(struct work_struct *work)
565 {
566 	struct tilcdc_crtc *tilcdc_crtc =
567 		container_of(work, struct tilcdc_crtc, recover_work);
568 	struct drm_crtc *crtc = &tilcdc_crtc->base;
569 
570 	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);
571 
572 	drm_modeset_lock_crtc(crtc, NULL);
573 
574 	if (!tilcdc_crtc_is_on(crtc))
575 		goto out;
576 
577 	tilcdc_crtc_disable(crtc);
578 	tilcdc_crtc_enable(crtc);
579 out:
580 	drm_modeset_unlock_crtc(crtc);
581 }
582 
583 static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
584 {
585 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
586 	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
587 
588 	drm_modeset_lock_crtc(crtc, NULL);
589 	tilcdc_crtc_disable(crtc);
590 	drm_modeset_unlock_crtc(crtc);
591 
592 	flush_workqueue(priv->wq);
593 
594 	of_node_put(crtc->port);
595 	drm_crtc_cleanup(crtc);
596 	drm_flip_work_cleanup(&tilcdc_crtc->unref_work);
597 }
598 
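/*
 * Queue a page flip. The new framebuffer is programmed immediately unless
 * the call lands within TILCDC_VBLANK_SAFETY_THRESHOLD_US of the next
 * expected vblank, in which case the update is deferred to the
 * end-of-frame interrupt so the DMA registers are not changed while the
 * hardware may be latching them.
 */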
599 int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
600 		struct drm_framebuffer *fb,
601 		struct drm_pending_vblank_event *event)
602 {
603 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
604 	struct drm_device *dev = crtc->dev;
605 	unsigned long flags;
606 
607 	WARN_ON(!drm_modeset_is_locked(&crtc->mutex));
608 
609 	if (tilcdc_crtc->event) {
610 		dev_err(dev->dev, "already pending page flip!\n");
611 		return -EBUSY;
612 	}
613 
614 	drm_framebuffer_reference(fb);
615 
616 	crtc->primary->fb = fb;
617 
618 	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
619 
620 	if (crtc->hwmode.vrefresh && ktime_to_ns(tilcdc_crtc->last_vblank)) {
621 		ktime_t next_vblank;
622 		s64 tdiff;
623 
624 		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
625 			1000000 / crtc->hwmode.vrefresh);
626 
627 		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));
628 
629 		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
630 			tilcdc_crtc->next_fb = fb;
631 	}
632 
633 	if (tilcdc_crtc->next_fb != fb)
634 		set_scanout(crtc, fb);
635 
636 	tilcdc_crtc->event = event;
637 
638 	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
639 
640 	return 0;
641 }
642 
643 static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
644 		const struct drm_display_mode *mode,
645 		struct drm_display_mode *adjusted_mode)
646 {
647 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
648 
649 	if (!tilcdc_crtc->simulate_vesa_sync)
650 		return true;
651 
652 	/*
653 	 * tilcdc does not generate VESA-compliant sync: it aligns
654 	 * VS on the second edge of HS instead of the first edge.
655 	 * Use adjusted_mode to fix up the sync by aligning both rising
656 	 * edges and adding an HSKEW offset to compensate.
657 	 */
658 	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
659 	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;
660 
661 	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
662 		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
663 		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
664 	} else {
665 		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
666 		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
667 	}
668 
669 	return true;
670 }
671 
672 static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
673 				    struct drm_crtc_state *state)
674 {
675 	struct drm_display_mode *mode = &state->mode;
676 	int ret;
677 
678 	/* If we are not active we don't care */
679 	if (!state->active)
680 		return 0;
681 
682 	if (state->state->planes[0].ptr != crtc->primary ||
683 	    state->state->planes[0].state == NULL ||
684 	    state->state->planes[0].state->crtc != crtc) {
685 		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
686 		return -EINVAL;
687 	}
688 
689 	ret = tilcdc_crtc_mode_valid(crtc, mode);
690 	if (ret) {
691 		dev_dbg(crtc->dev->dev, "Mode \"%s\" not valid", mode->name);
692 		return -EINVAL;
693 	}
694 
695 	return 0;
696 }
697 
698 static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
699 	.destroy        = tilcdc_crtc_destroy,
700 	.set_config     = drm_atomic_helper_set_config,
701 	.page_flip      = drm_atomic_helper_page_flip,
702 	.reset		= drm_atomic_helper_crtc_reset,
703 	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
704 	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
705 };
706 
707 static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
708 		.mode_fixup     = tilcdc_crtc_mode_fixup,
709 		.enable		= tilcdc_crtc_enable,
710 		.disable	= tilcdc_crtc_disable,
711 		.atomic_check	= tilcdc_crtc_atomic_check,
712 };
713 
714 int tilcdc_crtc_max_width(struct drm_crtc *crtc)
715 {
716 	struct drm_device *dev = crtc->dev;
717 	struct tilcdc_drm_private *priv = dev->dev_private;
718 	int max_width = 0;
719 
720 	if (priv->rev == 1)
721 		max_width = 1024;
722 	else if (priv->rev == 2)
723 		max_width = 2048;
724 
725 	return max_width;
726 }
727 
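/*
 * Validate a mode against the LCDC hardware limits: maximum width and
 * height, the timing register field widths, and the optional pixel clock,
 * width and memory bandwidth limits configured from the DT.
 */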
728 int tilcdc_crtc_mode_valid(struct drm_crtc *crtc, struct drm_display_mode *mode)
729 {
730 	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
731 	unsigned int bandwidth;
732 	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
733 
734 	/*
735 	 * check to see if the width is within the range that
736 	 * the LCD Controller physically supports
737 	 */
738 	if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
739 		return MODE_VIRTUAL_X;
740 
741 	/* width must be multiple of 16 */
742 	if (mode->hdisplay & 0xf)
743 		return MODE_VIRTUAL_X;
744 
745 	if (mode->vdisplay > 2048)
746 		return MODE_VIRTUAL_Y;
747 
748 	DBG("Processing mode %dx%d@%d with pixel clock %d",
749 		mode->hdisplay, mode->vdisplay,
750 		drm_mode_vrefresh(mode), mode->clock);
751 
752 	hbp = mode->htotal - mode->hsync_end;
753 	hfp = mode->hsync_start - mode->hdisplay;
754 	hsw = mode->hsync_end - mode->hsync_start;
755 	vbp = mode->vtotal - mode->vsync_end;
756 	vfp = mode->vsync_start - mode->vdisplay;
757 	vsw = mode->vsync_end - mode->vsync_start;
758 
759 	if ((hbp-1) & ~0x3ff) {
760 		DBG("Pruning mode: Horizontal Back Porch out of range");
761 		return MODE_HBLANK_WIDE;
762 	}
763 
764 	if ((hfp-1) & ~0x3ff) {
765 		DBG("Pruning mode: Horizontal Front Porch out of range");
766 		return MODE_HBLANK_WIDE;
767 	}
768 
769 	if ((hsw-1) & ~0x3ff) {
770 		DBG("Pruning mode: Horizontal Sync Width out of range");
771 		return MODE_HSYNC_WIDE;
772 	}
773 
774 	if (vbp & ~0xff) {
775 		DBG("Pruning mode: Vertical Back Porch out of range");
776 		return MODE_VBLANK_WIDE;
777 	}
778 
779 	if (vfp & ~0xff) {
780 		DBG("Pruning mode: Vertical Front Porch out of range");
781 		return MODE_VBLANK_WIDE;
782 	}
783 
784 	if ((vsw-1) & ~0x3f) {
785 		DBG("Pruning mode: Vertical Sync Width out of range");
786 		return MODE_VSYNC_WIDE;
787 	}
788 
789 	/*
790 	 * some devices have a maximum allowed pixel clock
791 	 * configured from the DT
792 	 */
793 	if (mode->clock > priv->max_pixelclock) {
794 		DBG("Pruning mode: pixel clock too high");
795 		return MODE_CLOCK_HIGH;
796 	}
797 
798 	/*
799 	 * some devices further limit the max horizontal resolution
800 	 * configured from the DT
801 	 */
802 	if (mode->hdisplay > priv->max_width)
803 		return MODE_BAD_WIDTH;
804 
805 	/* filter out modes that would require too much memory bandwidth: */
806 	bandwidth = mode->hdisplay * mode->vdisplay *
807 		drm_mode_vrefresh(mode);
808 	if (bandwidth > priv->max_bandwidth) {
809 		DBG("Pruning mode: exceeds defined bandwidth limit");
810 		return MODE_BAD;
811 	}
812 
813 	return MODE_OK;
814 }
815 
816 void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
817 		const struct tilcdc_panel_info *info)
818 {
819 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
820 	tilcdc_crtc->info = info;
821 }
822 
823 void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
824 					bool simulate_vesa_sync)
825 {
826 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
827 
828 	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
829 }
830 
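/*
 * If the LCD functional clock rate no longer matches the cached
 * lcd_fck_rate and the CRTC is active, reprogram the clock divider with a
 * full disable/enable cycle.
 */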
831 void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
832 {
833 	struct drm_device *dev = crtc->dev;
834 	struct tilcdc_drm_private *priv = dev->dev_private;
835 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
836 
837 	drm_modeset_lock_crtc(crtc, NULL);
838 	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
839 		if (tilcdc_crtc_is_on(crtc)) {
840 			pm_runtime_get_sync(dev->dev);
841 			tilcdc_crtc_disable(crtc);
842 
843 			tilcdc_crtc_set_clk(crtc);
844 
845 			tilcdc_crtc_enable(crtc);
846 			pm_runtime_put_sync(dev->dev);
847 		}
848 	}
849 	drm_modeset_unlock_crtc(crtc);
850 }
851 
852 #define SYNC_LOST_COUNT_LIMIT 50
853 
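/*
 * Main LCDC interrupt handler: latches deferred page flips and sends
 * vblank events on end-of-frame, completes palette loading, wakes the
 * frame done waiter, logs FIFO underflows and handles sync lost recovery.
 */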
854 irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
855 {
856 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
857 	struct drm_device *dev = crtc->dev;
858 	struct tilcdc_drm_private *priv = dev->dev_private;
859 	uint32_t stat, reg;
860 
861 	stat = tilcdc_read_irqstatus(dev);
862 	tilcdc_clear_irqstatus(dev, stat);
863 
864 	if (stat & LCDC_END_OF_FRAME0) {
865 		unsigned long flags;
866 		bool skip_event = false;
867 		ktime_t now;
868 
869 		now = ktime_get();
870 
871 		drm_flip_work_commit(&tilcdc_crtc->unref_work, priv->wq);
872 
873 		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
874 
875 		tilcdc_crtc->last_vblank = now;
876 
877 		if (tilcdc_crtc->next_fb) {
878 			set_scanout(crtc, tilcdc_crtc->next_fb);
879 			tilcdc_crtc->next_fb = NULL;
880 			skip_event = true;
881 		}
882 
883 		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
884 
885 		drm_crtc_handle_vblank(crtc);
886 
887 		if (!skip_event) {
888 			struct drm_pending_vblank_event *event;
889 
890 			spin_lock_irqsave(&dev->event_lock, flags);
891 
892 			event = tilcdc_crtc->event;
893 			tilcdc_crtc->event = NULL;
894 			if (event)
895 				drm_crtc_send_vblank_event(crtc, event);
896 
897 			spin_unlock_irqrestore(&dev->event_lock, flags);
898 		}
899 
900 		if (tilcdc_crtc->frame_intact)
901 			tilcdc_crtc->sync_lost_count = 0;
902 		else
903 			tilcdc_crtc->frame_intact = true;
904 	}
905 
906 	if (stat & LCDC_FIFO_UNDERFLOW)
907 		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
908 				    __func__, stat);
909 
910 	if (stat & LCDC_PL_LOAD_DONE) {
911 		complete(&tilcdc_crtc->palette_loaded);
912 		if (priv->rev == 1)
913 			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
914 				     LCDC_V1_PL_INT_ENA);
915 		else
916 			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
917 				     LCDC_V2_PL_INT_ENA);
918 	}
919 
920 	if (stat & LCDC_SYNC_LOST) {
921 		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
922 				    __func__, stat);
923 		tilcdc_crtc->frame_intact = false;
924 		if (priv->rev == 1) {
925 			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
926 			if (reg & LCDC_RASTER_ENABLE) {
927 				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
928 					     LCDC_RASTER_ENABLE);
929 				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
930 					   LCDC_RASTER_ENABLE);
931 			}
932 		} else {
933 			if (tilcdc_crtc->sync_lost_count++ >
934 			    SYNC_LOST_COUNT_LIMIT) {
935 				dev_err(dev->dev,
936 					"%s(0x%08x): Sync lost flood detected, recovering",
937 					__func__, stat);
938 				queue_work(system_wq,
939 					   &tilcdc_crtc->recover_work);
940 				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
941 					     LCDC_SYNC_LOST);
942 				tilcdc_crtc->sync_lost_count = 0;
943 			}
944 		}
945 	}
946 
947 	if (stat & LCDC_FRAME_DONE) {
948 		tilcdc_crtc->frame_done = true;
949 		wake_up(&tilcdc_crtc->frame_done_wq);
950 		/* rev 1 lcdc appears to hang if irq is not disabled here */
951 		if (priv->rev == 1)
952 			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
953 				     LCDC_V1_FRAME_DONE_INT_ENA);
954 	}
955 
956 	/* For revision 2 only */
957 	if (priv->rev == 2) {
958 		/* Indicate to LCDC that the interrupt service routine has
959 		 * completed, see 13.3.6.1.6 in AM335x TRM.
960 		 */
961 		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
962 	}
963 
964 	return IRQ_HANDLED;
965 }
966 
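/*
 * Allocate and register the single tilcdc CRTC together with its primary
 * plane, the palette DMA buffer and the deferred-unref flip work.
 */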
967 int tilcdc_crtc_create(struct drm_device *dev)
968 {
969 	struct tilcdc_drm_private *priv = dev->dev_private;
970 	struct tilcdc_crtc *tilcdc_crtc;
971 	struct drm_crtc *crtc;
972 	int ret;
973 
974 	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
975 	if (!tilcdc_crtc) {
976 		dev_err(dev->dev, "allocation failed\n");
977 		return -ENOMEM;
978 	}
979 
980 	init_completion(&tilcdc_crtc->palette_loaded);
981 	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
982 					TILCDC_PALETTE_SIZE,
983 					&tilcdc_crtc->palette_dma_handle,
984 					GFP_KERNEL | __GFP_ZERO);
985 	if (!tilcdc_crtc->palette_base)
986 		return -ENOMEM;
987 	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;
988 
989 	crtc = &tilcdc_crtc->base;
990 
991 	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
992 	if (ret < 0)
993 		goto fail;
994 
995 	mutex_init(&tilcdc_crtc->enable_lock);
996 
997 	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);
998 
999 	drm_flip_work_init(&tilcdc_crtc->unref_work,
1000 			"unref", unref_worker);
1001 
1002 	spin_lock_init(&tilcdc_crtc->irq_lock);
1003 	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);
1004 
1005 	ret = drm_crtc_init_with_planes(dev, crtc,
1006 					&tilcdc_crtc->primary,
1007 					NULL,
1008 					&tilcdc_crtc_funcs,
1009 					"tilcdc crtc");
1010 	if (ret < 0)
1011 		goto fail;
1012 
1013 	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);
1014 
1015 	if (priv->is_componentized) {
1016 		struct device_node *ports =
1017 			of_get_child_by_name(dev->dev->of_node, "ports");
1018 
1019 		if (ports) {
1020 			crtc->port = of_get_child_by_name(ports, "port");
1021 			of_node_put(ports);
1022 		} else {
1023 			crtc->port =
1024 				of_get_child_by_name(dev->dev->of_node, "port");
1025 		}
1026 		if (!crtc->port) { /* This should never happen */
1027 			dev_err(dev->dev, "Port node not found in %s\n",
1028 				dev->dev->of_node->full_name);
1029 			ret = -EINVAL;
1030 			goto fail;
1031 		}
1032 	}
1033 
1034 	priv->crtc = crtc;
1035 	return 0;
1036 
1037 fail:
1038 	tilcdc_crtc_destroy(crtc);
1039 	return ret;
1040 }
1041