/*
 * Copyright (C) 2012 Texas Instruments
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_flip_work.h>
#include <drm/drm_plane_helper.h>
#include <linux/workqueue.h>
#include <linux/completion.h>
#include <linux/dma-mapping.h>
#include <linux/of_graph.h>
#include <linux/math64.h>

#include "tilcdc_drv.h"
#include "tilcdc_regs.h"

#define TILCDC_VBLANK_SAFETY_THRESHOLD_US	1000
#define TILCDC_PALETTE_SIZE			32
#define TILCDC_PALETTE_FIRST_ENTRY		0x4000

struct tilcdc_crtc {
	struct drm_crtc base;

	struct drm_plane primary;
	const struct tilcdc_panel_info *info;
	struct drm_pending_vblank_event *event;
	struct mutex enable_lock;
	bool enabled;
	bool shutdown;
	wait_queue_head_t frame_done_wq;
	bool frame_done;
	spinlock_t irq_lock;

	unsigned int lcd_fck_rate;

	ktime_t last_vblank;
	unsigned int hvtotal_us;

	struct drm_framebuffer *next_fb;

	/* Only set if an external encoder is connected */
	bool simulate_vesa_sync;

	int sync_lost_count;
	bool frame_intact;
	struct work_struct recover_work;

	dma_addr_t palette_dma_handle;
	u16 *palette_base;
	struct completion palette_loaded;
};
#define to_tilcdc_crtc(x) container_of(x, struct tilcdc_crtc, base)

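/*
 * Program the DMA framebuffer base and ceiling registers for the given
 * framebuffer, taking the CRTC x/y panning offset into account. Rev 1
 * appears to treat the ceiling address as inclusive, hence the "end - 1"
 * below.
 */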
static void set_scanout(struct drm_crtc *crtc, struct drm_framebuffer *fb)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct drm_gem_cma_object *gem;
	dma_addr_t start, end;
	u64 dma_base_and_ceiling;

	gem = drm_fb_cma_get_gem_obj(fb, 0);

	start = gem->paddr + fb->offsets[0] +
		crtc->y * fb->pitches[0] +
		crtc->x * fb->format->cpp[0];

	end = start + (crtc->mode.vdisplay * fb->pitches[0]);

	/* Write LCDC_DMA_FB_BASE_ADDR_0_REG and LCDC_DMA_FB_CEILING_ADDR_0_REG
	 * with a single instruction, if available. This should make it less
	 * likely that the LCDC fetches the DMA addresses in the middle of
	 * an update.
	 */
	if (priv->rev == 1)
		end -= 1;

	dma_base_and_ceiling = (u64)end << 32 | start;
	tilcdc_write64(dev, LCDC_DMA_FB_BASE_ADDR_0_REG, dma_base_and_ceiling);
}

/*
 * The driver currently supports only true color formats. For
 * true color the palette block is bypassed, but a 32-byte palette
 * should still be loaded. The first 16-bit entry must be 0x4000 while
 * all other entries must be zeroed.
 */
static void tilcdc_crtc_load_palette(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int ret;

	reinit_completion(&tilcdc_crtc->palette_loaded);

	/* Tell the LCDC where the palette is located. */
	tilcdc_write(dev, LCDC_DMA_FB_BASE_ADDR_0_REG,
		     tilcdc_crtc->palette_dma_handle);
	tilcdc_write(dev, LCDC_DMA_FB_CEILING_ADDR_0_REG,
		     (u32) tilcdc_crtc->palette_dma_handle +
		     TILCDC_PALETTE_SIZE - 1);

	/* Set dma load mode for palette loading only. */
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(PALETTE_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* Enable DMA Palette Loaded Interrupt */
	if (priv->rev == 1)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_V2_PL_INT_ENA);

	/* Enable LCDC DMA and wait for palette to be loaded. */
	tilcdc_clear_irqstatus(dev, 0xffffffff);
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	ret = wait_for_completion_timeout(&tilcdc_crtc->palette_loaded,
					  msecs_to_jiffies(50));
	if (ret == 0)
		dev_err(dev->dev, "%s: Palette loading timeout", __func__);

	/* Disable LCDC DMA and DMA Palette Loaded Interrupt. */
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	if (priv->rev == 1)
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_V1_PL_INT_ENA);
	else
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG, LCDC_V2_PL_INT_ENA);
}

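/*
 * Enable the interrupts needed during normal scanout: sync lost, frame done,
 * FIFO underflow and end of frame. The enable bits live in different
 * registers on rev 1 and rev 2 of the LCDC.
 */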
static void tilcdc_crtc_enable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	tilcdc_clear_irqstatus(dev, 0xffffffff);

	if (priv->rev == 1) {
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA);
		tilcdc_set(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG,
			LCDC_V2_UNDERFLOW_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}

static void tilcdc_crtc_disable_irqs(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;

	/* disable irqs that we might have enabled: */
	if (priv->rev == 1) {
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
			LCDC_V1_SYNC_LOST_INT_ENA | LCDC_V1_FRAME_DONE_INT_ENA |
			LCDC_V1_UNDERFLOW_INT_ENA | LCDC_V1_PL_INT_ENA);
		tilcdc_clear(dev, LCDC_DMA_CTRL_REG,
			LCDC_V1_END_OF_FRAME_INT_ENA);
	} else {
		tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
			LCDC_V2_UNDERFLOW_INT_ENA | LCDC_V2_PL_INT_ENA |
			LCDC_V2_END_OF_FRAME0_INT_ENA |
			LCDC_FRAME_DONE | LCDC_SYNC_LOST);
	}
}

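/*
 * Software reset of the LCDC main module. Only rev 2 has the clock reset
 * register, so this is a no-op on rev 1.
 */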
static void reset(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;

	if (priv->rev != 2)
		return;

	tilcdc_set(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
	usleep_range(250, 1000);
	tilcdc_clear(dev, LCDC_CLK_RESET_REG, LCDC_CLK_MAIN_RESET);
}

/*
 * Calculate the percentage difference between the requested pixel clock rate
 * and the effective rate resulting from calculating the clock divider value.
 */
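/* For example, a requested 25175000 Hz against an achieved 26000000 Hz is ~3%. */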
static unsigned int tilcdc_pclk_diff(unsigned long rate,
				     unsigned long real_rate)
{
	int r = rate / 100, rr = real_rate / 100;

	return (unsigned int)(abs(((rr - r) * 100) / r));
}

static void tilcdc_crtc_set_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long clk_rate, real_rate, req_rate;
	unsigned int clkdiv;
	int ret;

	clkdiv = 2; /* first try using a standard divider of 2 */

	/* mode.clock is in kHz, set_rate wants parameter in Hz */
	req_rate = crtc->mode.clock * 1000;

	ret = clk_set_rate(priv->clk, req_rate * clkdiv);
	clk_rate = clk_get_rate(priv->clk);
	if (ret < 0 || tilcdc_pclk_diff(req_rate, clk_rate) > 5) {
		/*
		 * If we fail to set the clock rate (some architectures don't
		 * use the common clock framework yet and may not implement
		 * all the clk API calls for every clock), try the next best
		 * thing: adjusting the clock divider, unless clk_get_rate()
		 * failed as well.
		 */
		if (!clk_rate) {
			/* Nothing more we can do. Just bail out. */
			dev_err(dev->dev,
				"failed to set the pixel clock - unable to read current lcdc clock rate\n");
			return;
		}

		clkdiv = DIV_ROUND_CLOSEST(clk_rate, req_rate);

		/*
		 * Emit a warning if the real clock rate resulting from the
		 * calculated divider differs much from the requested rate.
		 *
		 * 5% is an arbitrary value - LCDs are usually quite tolerant
		 * about pixel clock rates.
		 */
		real_rate = clkdiv * req_rate;

		if (tilcdc_pclk_diff(clk_rate, real_rate) > 5) {
			dev_warn(dev->dev,
				 "effective pixel clock rate (%luHz) differs from the calculated rate (%luHz)\n",
				 clk_rate, real_rate);
		}
	}

	tilcdc_crtc->lcd_fck_rate = clk_rate;

	DBG("lcd_clk=%u, mode clock=%d, div=%u",
	    tilcdc_crtc->lcd_fck_rate, crtc->mode.clock, clkdiv);

	/* Configure the LCD clock divisor. */
	tilcdc_write(dev, LCDC_CTRL_REG, LCDC_CLK_DIVISOR(clkdiv) |
		     LCDC_RASTER_MODE);

	if (priv->rev == 2)
		tilcdc_set(dev, LCDC_CLK_ENABLE_REG,
				LCDC_V2_DMA_CLK_EN | LCDC_V2_LIDD_CLK_EN |
				LCDC_V2_CORE_CLK_EN);
}

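/*
 * Duration of one full frame in microseconds: htotal * vtotal pixel periods
 * at a pixel clock of mode->clock kHz, i.e. 1000 * htotal * vtotal / clock.
 */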
static uint tilcdc_mode_hvtotal(const struct drm_display_mode *mode)
{
	return (uint) div_u64(1000llu * mode->htotal * mode->vtotal,
			      mode->clock);
}

static void tilcdc_crtc_set_mode(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	const struct tilcdc_panel_info *info = tilcdc_crtc->info;
	uint32_t reg, hbp, hfp, hsw, vbp, vfp, vsw;
	struct drm_display_mode *mode = &crtc->state->adjusted_mode;
	struct drm_framebuffer *fb = crtc->primary->state->fb;

	if (WARN_ON(!info))
		return;

	if (WARN_ON(!fb))
		return;

	/* Configure the Burst Size and fifo threshold of DMA: */
	reg = tilcdc_read(dev, LCDC_DMA_CTRL_REG) & ~0x00000770;
	switch (info->dma_burst_sz) {
	case 1:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_1);
		break;
	case 2:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_2);
		break;
	case 4:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_4);
		break;
	case 8:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_8);
		break;
	case 16:
		reg |= LCDC_DMA_BURST_SIZE(LCDC_DMA_BURST_16);
		break;
	default:
		dev_err(dev->dev, "invalid burst size\n");
		return;
	}
	reg |= (info->fifo_th << 8);
	tilcdc_write(dev, LCDC_DMA_CTRL_REG, reg);

	/* Configure timings: */
	hbp = mode->htotal - mode->hsync_end;
	hfp = mode->hsync_start - mode->hdisplay;
	hsw = mode->hsync_end - mode->hsync_start;
	vbp = mode->vtotal - mode->vsync_end;
	vfp = mode->vsync_start - mode->vdisplay;
	vsw = mode->vsync_end - mode->vsync_start;

	DBG("%dx%d, hbp=%u, hfp=%u, hsw=%u, vbp=%u, vfp=%u, vsw=%u",
	    mode->hdisplay, mode->vdisplay, hbp, hfp, hsw, vbp, vfp, vsw);

	/* Set AC Bias Period and Number of Transitions per Interrupt: */
	reg = tilcdc_read(dev, LCDC_RASTER_TIMING_2_REG) & ~0x000fff00;
	reg |= LCDC_AC_BIAS_FREQUENCY(info->ac_bias) |
		LCDC_AC_BIAS_TRANSITIONS_PER_INT(info->ac_bias_intrpt);

	/*
	 * Subtract one from hfp, hbp, hsw because the hardware interprets
	 * a register value of 0 as 1.
	 */
	if (priv->rev == 2) {
		/* clear bits we're going to set */
		reg &= ~0x78000033;
		reg |= ((hfp-1) & 0x300) >> 8;
		reg |= ((hbp-1) & 0x300) >> 4;
		reg |= ((hsw-1) & 0x3c0) << 21;
	}
	tilcdc_write(dev, LCDC_RASTER_TIMING_2_REG, reg);

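	/*
	 * RASTER_TIMING_0 holds pixels-per-line and the low-order bits of
	 * the horizontal porches and sync width; rev 2 keeps the extra
	 * porch/sync MSBs in RASTER_TIMING_2 (set above) and the
	 * pixels-per-line MSB in bit 3 here.
	 */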
	reg = (((mode->hdisplay >> 4) - 1) << 4) |
		(((hbp-1) & 0xff) << 24) |
		(((hfp-1) & 0xff) << 16) |
		(((hsw-1) & 0x3f) << 10);
	if (priv->rev == 2)
		reg |= (((mode->hdisplay >> 4) - 1) & 0x40) >> 3;
	tilcdc_write(dev, LCDC_RASTER_TIMING_0_REG, reg);

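	/* RASTER_TIMING_1: lines-per-panel minus one plus the vertical
	 * porches and sync width.
	 */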
	reg = ((mode->vdisplay - 1) & 0x3ff) |
		((vbp & 0xff) << 24) |
		((vfp & 0xff) << 16) |
		(((vsw-1) & 0x3f) << 10);
	tilcdc_write(dev, LCDC_RASTER_TIMING_1_REG, reg);

	/*
	 * Be sure to set bit 10 of lines-per-panel for the V2 LCDC
	 * controller, otherwise the vertical resolution is limited to
	 * 1024 lines and 1920x1080 cannot be supported.
	 */
	if (priv->rev == 2) {
		if ((mode->vdisplay - 1) & 0x400) {
			tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		} else {
			tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG,
				LCDC_LPP_B10);
		}
	}

	/* Configure display type: */
	reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG) &
		~(LCDC_TFT_MODE | LCDC_MONO_8BIT_MODE | LCDC_MONOCHROME_MODE |
		  LCDC_V2_TFT_24BPP_MODE | LCDC_V2_TFT_24BPP_UNPACK |
		  0x000ff000 /* Palette Loading Delay bits */);
	reg |= LCDC_TFT_MODE; /* no monochrome/passive support */
	if (info->tft_alt_mode)
		reg |= LCDC_TFT_ALT_ENABLE;
	if (priv->rev == 2) {
		switch (fb->format->format) {
		case DRM_FORMAT_BGR565:
		case DRM_FORMAT_RGB565:
			break;
		case DRM_FORMAT_XBGR8888:
		case DRM_FORMAT_XRGB8888:
			reg |= LCDC_V2_TFT_24BPP_UNPACK;
			/* fallthrough */
		case DRM_FORMAT_BGR888:
		case DRM_FORMAT_RGB888:
			reg |= LCDC_V2_TFT_24BPP_MODE;
			break;
		default:
			dev_err(dev->dev, "invalid pixel format\n");
			return;
		}
	}
	reg |= info->fdd << 12; /* FDD (FIFO DMA delay) field, bits 19:12 */
	tilcdc_write(dev, LCDC_RASTER_CTRL_REG, reg);

	if (info->invert_pxl_clk)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_PIXEL_CLOCK);

	if (info->sync_ctrl)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_CTRL);

	if (info->sync_edge)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_SYNC_EDGE);

	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_HSYNC);

	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		tilcdc_set(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);
	else
		tilcdc_clear(dev, LCDC_RASTER_TIMING_2_REG, LCDC_INVERT_VSYNC);

	if (info->raster_order)
		tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);
	else
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ORDER);

	tilcdc_crtc_set_clk(crtc);

	tilcdc_crtc_load_palette(crtc);

	set_scanout(crtc, fb);

	crtc->hwmode = crtc->state->adjusted_mode;

	tilcdc_crtc->hvtotal_us =
		tilcdc_mode_hvtotal(&crtc->hwmode);
}

static void tilcdc_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	unsigned long flags;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (tilcdc_crtc->enabled || tilcdc_crtc->shutdown) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}

	pm_runtime_get_sync(dev->dev);

	reset(crtc);

	tilcdc_crtc_set_mode(crtc);

	tilcdc_crtc_enable_irqs(dev);

	tilcdc_clear(dev, LCDC_DMA_CTRL_REG, LCDC_DUAL_FRAME_BUFFER_ENABLE);
	tilcdc_write_mask(dev, LCDC_RASTER_CTRL_REG,
			  LCDC_PALETTE_LOAD_MODE(DATA_ONLY),
			  LCDC_PALETTE_LOAD_MODE_MASK);

	/* There is no real chance for a race here as the time stamp
	 * is taken before the raster DMA is started. The spin-lock is
	 * taken to have a memory barrier after taking the time-stamp
	 * and to avoid a context switch between taking the stamp and
	 * enabling the raster.
	 */
	spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);
	tilcdc_crtc->last_vblank = ktime_get();
	tilcdc_set(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);
	spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

	drm_crtc_vblank_on(crtc);

	tilcdc_crtc->enabled = true;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}

static void tilcdc_crtc_atomic_enable(struct drm_crtc *crtc,
				      struct drm_crtc_state *old_state)
{
	tilcdc_crtc_enable(crtc);
}

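/*
 * Common teardown path for atomic disable and driver shutdown. When
 * "shutdown" is set the CRTC is also marked so that it cannot be
 * re-enabled afterwards.
 */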
static void tilcdc_crtc_off(struct drm_crtc *crtc, bool shutdown)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	mutex_lock(&tilcdc_crtc->enable_lock);
	if (shutdown)
		tilcdc_crtc->shutdown = true;
	if (!tilcdc_crtc->enabled) {
		mutex_unlock(&tilcdc_crtc->enable_lock);
		return;
	}
	tilcdc_crtc->frame_done = false;
	tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

	/*
	 * Wait for the framedone irq, which will still arrive before
	 * putting things to sleep.
	 */
	ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
				 tilcdc_crtc->frame_done,
				 msecs_to_jiffies(500));
	if (ret == 0)
		dev_err(dev->dev, "%s: timeout waiting for framedone\n",
			__func__);

	drm_crtc_vblank_off(crtc);

	tilcdc_crtc_disable_irqs(dev);

	pm_runtime_put_sync(dev->dev);

	tilcdc_crtc->enabled = false;
	mutex_unlock(&tilcdc_crtc->enable_lock);
}

static void tilcdc_crtc_disable(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, false);
}

static void tilcdc_crtc_atomic_disable(struct drm_crtc *crtc,
				       struct drm_crtc_state *old_state)
{
	tilcdc_crtc_disable(crtc);
}

void tilcdc_crtc_shutdown(struct drm_crtc *crtc)
{
	tilcdc_crtc_off(crtc, true);
}

static bool tilcdc_crtc_is_on(struct drm_crtc *crtc)
{
	return crtc->state && crtc->state->enable && crtc->state->active;
}

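/*
 * Worker used to recover from a flood of "sync lost" interrupts (see
 * tilcdc_crtc_irq()) by turning the CRTC off and back on.
 */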
static void tilcdc_crtc_recover_work(struct work_struct *work)
{
	struct tilcdc_crtc *tilcdc_crtc =
		container_of(work, struct tilcdc_crtc, recover_work);
	struct drm_crtc *crtc = &tilcdc_crtc->base;

	dev_info(crtc->dev->dev, "%s: Reset CRTC", __func__);

	drm_modeset_lock(&crtc->mutex, NULL);

	if (!tilcdc_crtc_is_on(crtc))
		goto out;

	tilcdc_crtc_disable(crtc);
	tilcdc_crtc_enable(crtc);
out:
	drm_modeset_unlock(&crtc->mutex);
}

static void tilcdc_crtc_destroy(struct drm_crtc *crtc)
{
	struct tilcdc_drm_private *priv = crtc->dev->dev_private;

	tilcdc_crtc_shutdown(crtc);

	flush_workqueue(priv->wq);

	of_node_put(crtc->port);
	drm_crtc_cleanup(crtc);
}

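/*
 * Queue a new framebuffer for scanout. If the next vblank is imminent
 * (closer than TILCDC_VBLANK_SAFETY_THRESHOLD_US) the flip is deferred to
 * the end-of-frame interrupt handler; otherwise the DMA registers are
 * updated immediately.
 */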
int tilcdc_crtc_update_fb(struct drm_crtc *crtc,
		struct drm_framebuffer *fb,
		struct drm_pending_vblank_event *event)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	if (tilcdc_crtc->event) {
		dev_err(dev->dev, "already pending page flip!\n");
		return -EBUSY;
	}

	tilcdc_crtc->event = event;

	mutex_lock(&tilcdc_crtc->enable_lock);

	if (tilcdc_crtc->enabled) {
		unsigned long flags;
		ktime_t next_vblank;
		s64 tdiff;

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		next_vblank = ktime_add_us(tilcdc_crtc->last_vblank,
					   tilcdc_crtc->hvtotal_us);
		tdiff = ktime_to_us(ktime_sub(next_vblank, ktime_get()));

		if (tdiff < TILCDC_VBLANK_SAFETY_THRESHOLD_US)
			tilcdc_crtc->next_fb = fb;
		else
			set_scanout(crtc, fb);

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);
	}

	mutex_unlock(&tilcdc_crtc->enable_lock);

	return 0;
}

static bool tilcdc_crtc_mode_fixup(struct drm_crtc *crtc,
		const struct drm_display_mode *mode,
		struct drm_display_mode *adjusted_mode)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	if (!tilcdc_crtc->simulate_vesa_sync)
		return true;

	/*
	 * tilcdc does not generate VESA-compliant sync: it aligns VS to
	 * the second edge of HS instead of the first. Use adjusted_mode
	 * to fix up the sync by aligning both rising edges and adding an
	 * HSKEW offset.
	 */
	adjusted_mode->hskew = mode->hsync_end - mode->hsync_start;
	adjusted_mode->flags |= DRM_MODE_FLAG_HSKEW;

	if (mode->flags & DRM_MODE_FLAG_NHSYNC) {
		adjusted_mode->flags |= DRM_MODE_FLAG_PHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_NHSYNC;
	} else {
		adjusted_mode->flags |= DRM_MODE_FLAG_NHSYNC;
		adjusted_mode->flags &= ~DRM_MODE_FLAG_PHSYNC;
	}

	return true;
}

static int tilcdc_crtc_atomic_check(struct drm_crtc *crtc,
				    struct drm_crtc_state *state)
{
	struct drm_display_mode *mode = &state->mode;
	int ret;

	/* If we are not active we don't care */
	if (!state->active)
		return 0;

	if (state->state->planes[0].ptr != crtc->primary ||
	    state->state->planes[0].state == NULL ||
	    state->state->planes[0].state->crtc != crtc) {
		dev_dbg(crtc->dev->dev, "CRTC primary plane must be present");
		return -EINVAL;
	}

	ret = tilcdc_crtc_mode_valid(crtc, mode);
	if (ret) {
		dev_dbg(crtc->dev->dev, "Mode \"%s\" not valid", mode->name);
		return -EINVAL;
	}

	return 0;
}

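/*
 * The end-of-frame interrupt is always enabled while the CRTC is on and
 * vblank events are delivered from tilcdc_crtc_irq(), so there is nothing
 * to do in these hooks.
 */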
static int tilcdc_crtc_enable_vblank(struct drm_crtc *crtc)
{
	return 0;
}

static void tilcdc_crtc_disable_vblank(struct drm_crtc *crtc)
{
}

static void tilcdc_crtc_reset(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	int ret;

	drm_atomic_helper_crtc_reset(crtc);

	/* Turn the raster off if it is on for some reason. */
	pm_runtime_get_sync(dev->dev);
	if (tilcdc_read(dev, LCDC_RASTER_CTRL_REG) & LCDC_RASTER_ENABLE) {
		/* Enable DMA Frame Done Interrupt */
		tilcdc_write(dev, LCDC_INT_ENABLE_SET_REG, LCDC_FRAME_DONE);
		tilcdc_clear_irqstatus(dev, 0xffffffff);

		tilcdc_crtc->frame_done = false;
		tilcdc_clear(dev, LCDC_RASTER_CTRL_REG, LCDC_RASTER_ENABLE);

		ret = wait_event_timeout(tilcdc_crtc->frame_done_wq,
					 tilcdc_crtc->frame_done,
					 msecs_to_jiffies(500));
		if (ret == 0)
			dev_err(dev->dev, "%s: timeout waiting for framedone\n",
				__func__);
	}
	pm_runtime_put_sync(dev->dev);
}

static const struct drm_crtc_funcs tilcdc_crtc_funcs = {
	.destroy        = tilcdc_crtc_destroy,
	.set_config     = drm_atomic_helper_set_config,
	.page_flip      = drm_atomic_helper_page_flip,
	.reset		= tilcdc_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= tilcdc_crtc_enable_vblank,
	.disable_vblank	= tilcdc_crtc_disable_vblank,
};

static const struct drm_crtc_helper_funcs tilcdc_crtc_helper_funcs = {
		.mode_fixup     = tilcdc_crtc_mode_fixup,
		.atomic_check	= tilcdc_crtc_atomic_check,
		.atomic_enable	= tilcdc_crtc_atomic_enable,
		.atomic_disable	= tilcdc_crtc_atomic_disable,
};

int tilcdc_crtc_max_width(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	int max_width = 0;

	if (priv->rev == 1)
		max_width = 1024;
	else if (priv->rev == 2)
		max_width = 2048;

	return max_width;
}

751 
752 int tilcdc_crtc_mode_valid(struct drm_crtc *crtc, struct drm_display_mode *mode)
753 {
754 	struct tilcdc_drm_private *priv = crtc->dev->dev_private;
755 	unsigned int bandwidth;
756 	uint32_t hbp, hfp, hsw, vbp, vfp, vsw;
757 
758 	/*
759 	 * check to see if the width is within the range that
760 	 * the LCD Controller physically supports
761 	 */
762 	if (mode->hdisplay > tilcdc_crtc_max_width(crtc))
763 		return MODE_VIRTUAL_X;
764 
765 	/* width must be multiple of 16 */
766 	if (mode->hdisplay & 0xf)
767 		return MODE_VIRTUAL_X;
768 
769 	if (mode->vdisplay > 2048)
770 		return MODE_VIRTUAL_Y;
771 
772 	DBG("Processing mode %dx%d@%d with pixel clock %d",
773 		mode->hdisplay, mode->vdisplay,
774 		drm_mode_vrefresh(mode), mode->clock);
775 
776 	hbp = mode->htotal - mode->hsync_end;
777 	hfp = mode->hsync_start - mode->hdisplay;
778 	hsw = mode->hsync_end - mode->hsync_start;
779 	vbp = mode->vtotal - mode->vsync_end;
780 	vfp = mode->vsync_start - mode->vdisplay;
781 	vsw = mode->vsync_end - mode->vsync_start;
782 
783 	if ((hbp-1) & ~0x3ff) {
784 		DBG("Pruning mode: Horizontal Back Porch out of range");
785 		return MODE_HBLANK_WIDE;
786 	}
787 
788 	if ((hfp-1) & ~0x3ff) {
789 		DBG("Pruning mode: Horizontal Front Porch out of range");
790 		return MODE_HBLANK_WIDE;
791 	}
792 
793 	if ((hsw-1) & ~0x3ff) {
794 		DBG("Pruning mode: Horizontal Sync Width out of range");
795 		return MODE_HSYNC_WIDE;
796 	}
797 
798 	if (vbp & ~0xff) {
799 		DBG("Pruning mode: Vertical Back Porch out of range");
800 		return MODE_VBLANK_WIDE;
801 	}
802 
803 	if (vfp & ~0xff) {
804 		DBG("Pruning mode: Vertical Front Porch out of range");
805 		return MODE_VBLANK_WIDE;
806 	}
807 
808 	if ((vsw-1) & ~0x3f) {
809 		DBG("Pruning mode: Vertical Sync Width out of range");
810 		return MODE_VSYNC_WIDE;
811 	}
812 
813 	/*
814 	 * some devices have a maximum allowed pixel clock
815 	 * configured from the DT
816 	 */
817 	if (mode->clock > priv->max_pixelclock) {
818 		DBG("Pruning mode: pixel clock too high");
819 		return MODE_CLOCK_HIGH;
820 	}
821 
822 	/*
823 	 * some devices further limit the max horizontal resolution
824 	 * configured from the DT
825 	 */
826 	if (mode->hdisplay > priv->max_width)
827 		return MODE_BAD_WIDTH;
828 
829 	/* filter out modes that would require too much memory bandwidth: */
830 	bandwidth = mode->hdisplay * mode->vdisplay *
831 		drm_mode_vrefresh(mode);
832 	if (bandwidth > priv->max_bandwidth) {
833 		DBG("Pruning mode: exceeds defined bandwidth limit");
834 		return MODE_BAD;
835 	}
836 
837 	return MODE_OK;
838 }
839 
840 void tilcdc_crtc_set_panel_info(struct drm_crtc *crtc,
841 		const struct tilcdc_panel_info *info)
842 {
843 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
844 	tilcdc_crtc->info = info;
845 }
846 
847 void tilcdc_crtc_set_simulate_vesa_sync(struct drm_crtc *crtc,
848 					bool simulate_vesa_sync)
849 {
850 	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
851 
852 	tilcdc_crtc->simulate_vesa_sync = simulate_vesa_sync;
853 }
854 
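/*
 * Called when the LCDC functional clock rate may have changed. If the
 * cached rate no longer matches, the CRTC is briefly disabled, the pixel
 * clock divider reprogrammed, and the CRTC re-enabled.
 */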
void tilcdc_crtc_update_clk(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);

	drm_modeset_lock(&crtc->mutex, NULL);
	if (tilcdc_crtc->lcd_fck_rate != clk_get_rate(priv->clk)) {
		if (tilcdc_crtc_is_on(crtc)) {
			pm_runtime_get_sync(dev->dev);
			tilcdc_crtc_disable(crtc);

			tilcdc_crtc_set_clk(crtc);

			tilcdc_crtc_enable(crtc);
			pm_runtime_put_sync(dev->dev);
		}
	}
	drm_modeset_unlock(&crtc->mutex);
}

#define SYNC_LOST_COUNT_LIMIT 50

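/*
 * Main LCDC interrupt handler: handles end of frame (vblank and deferred
 * page flips), FIFO underflow, palette-load completion, sync-lost recovery
 * and frame-done signalling.
 */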
irqreturn_t tilcdc_crtc_irq(struct drm_crtc *crtc)
{
	struct tilcdc_crtc *tilcdc_crtc = to_tilcdc_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct tilcdc_drm_private *priv = dev->dev_private;
	uint32_t stat, reg;

	stat = tilcdc_read_irqstatus(dev);
	tilcdc_clear_irqstatus(dev, stat);

	if (stat & LCDC_END_OF_FRAME0) {
		unsigned long flags;
		bool skip_event = false;
		ktime_t now;

		now = ktime_get();

		spin_lock_irqsave(&tilcdc_crtc->irq_lock, flags);

		tilcdc_crtc->last_vblank = now;

		if (tilcdc_crtc->next_fb) {
			set_scanout(crtc, tilcdc_crtc->next_fb);
			tilcdc_crtc->next_fb = NULL;
			skip_event = true;
		}

		spin_unlock_irqrestore(&tilcdc_crtc->irq_lock, flags);

		drm_crtc_handle_vblank(crtc);

		if (!skip_event) {
			struct drm_pending_vblank_event *event;

			spin_lock_irqsave(&dev->event_lock, flags);

			event = tilcdc_crtc->event;
			tilcdc_crtc->event = NULL;
			if (event)
				drm_crtc_send_vblank_event(crtc, event);

			spin_unlock_irqrestore(&dev->event_lock, flags);
		}

		if (tilcdc_crtc->frame_intact)
			tilcdc_crtc->sync_lost_count = 0;
		else
			tilcdc_crtc->frame_intact = true;
	}

	if (stat & LCDC_FIFO_UNDERFLOW)
		dev_err_ratelimited(dev->dev, "%s(0x%08x): FIFO underflow",
				    __func__, stat);

	if (stat & LCDC_PL_LOAD_DONE) {
		complete(&tilcdc_crtc->palette_loaded);
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_PL_INT_ENA);
		else
			tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
				     LCDC_V2_PL_INT_ENA);
	}

	if (stat & LCDC_SYNC_LOST) {
		dev_err_ratelimited(dev->dev, "%s(0x%08x): Sync lost",
				    __func__, stat);
		tilcdc_crtc->frame_intact = false;
		if (priv->rev == 1) {
			reg = tilcdc_read(dev, LCDC_RASTER_CTRL_REG);
			if (reg & LCDC_RASTER_ENABLE) {
				tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
					     LCDC_RASTER_ENABLE);
				tilcdc_set(dev, LCDC_RASTER_CTRL_REG,
					   LCDC_RASTER_ENABLE);
			}
		} else {
			if (tilcdc_crtc->sync_lost_count++ >
			    SYNC_LOST_COUNT_LIMIT) {
				dev_err(dev->dev,
					"%s(0x%08x): Sync lost flood detected, recovering",
					__func__, stat);
				queue_work(system_wq,
					   &tilcdc_crtc->recover_work);
				tilcdc_write(dev, LCDC_INT_ENABLE_CLR_REG,
					     LCDC_SYNC_LOST);
				tilcdc_crtc->sync_lost_count = 0;
			}
		}
	}

	if (stat & LCDC_FRAME_DONE) {
		tilcdc_crtc->frame_done = true;
		wake_up(&tilcdc_crtc->frame_done_wq);
		/* rev 1 lcdc appears to hang if irq is not disabled here */
		if (priv->rev == 1)
			tilcdc_clear(dev, LCDC_RASTER_CTRL_REG,
				     LCDC_V1_FRAME_DONE_INT_ENA);
	}

	/* For revision 2 only */
	if (priv->rev == 2) {
		/* Indicate to LCDC that the interrupt service routine has
		 * completed, see 13.3.6.1.6 in AM335x TRM.
		 */
		tilcdc_write(dev, LCDC_END_OF_INT_IND_REG, 0);
	}

	return IRQ_HANDLED;
}

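/*
 * Allocate and initialize the CRTC, its primary plane and the coherent
 * buffer used for palette loading, then register the CRTC with DRM. For
 * componentized setups the OF graph port node is looked up as well.
 */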
int tilcdc_crtc_create(struct drm_device *dev)
{
	struct tilcdc_drm_private *priv = dev->dev_private;
	struct tilcdc_crtc *tilcdc_crtc;
	struct drm_crtc *crtc;
	int ret;

	tilcdc_crtc = devm_kzalloc(dev->dev, sizeof(*tilcdc_crtc), GFP_KERNEL);
	if (!tilcdc_crtc)
		return -ENOMEM;

	init_completion(&tilcdc_crtc->palette_loaded);
	tilcdc_crtc->palette_base = dmam_alloc_coherent(dev->dev,
					TILCDC_PALETTE_SIZE,
					&tilcdc_crtc->palette_dma_handle,
					GFP_KERNEL | __GFP_ZERO);
	if (!tilcdc_crtc->palette_base)
		return -ENOMEM;
	*tilcdc_crtc->palette_base = TILCDC_PALETTE_FIRST_ENTRY;

	crtc = &tilcdc_crtc->base;

	ret = tilcdc_plane_init(dev, &tilcdc_crtc->primary);
	if (ret < 0)
		goto fail;

	mutex_init(&tilcdc_crtc->enable_lock);

	init_waitqueue_head(&tilcdc_crtc->frame_done_wq);

	spin_lock_init(&tilcdc_crtc->irq_lock);
	INIT_WORK(&tilcdc_crtc->recover_work, tilcdc_crtc_recover_work);

	ret = drm_crtc_init_with_planes(dev, crtc,
					&tilcdc_crtc->primary,
					NULL,
					&tilcdc_crtc_funcs,
					"tilcdc crtc");
	if (ret < 0)
		goto fail;

	drm_crtc_helper_add(crtc, &tilcdc_crtc_helper_funcs);

	if (priv->is_componentized) {
		crtc->port = of_graph_get_port_by_id(dev->dev->of_node, 0);
		if (!crtc->port) { /* This should never happen */
			dev_err(dev->dev, "Port node not found in %pOF\n",
				dev->dev->of_node);
			ret = -EINVAL;
			goto fail;
		}
	}

	priv->crtc = crtc;
	return 0;

fail:
	tilcdc_crtc_destroy(crtc);
	return ret;
}