/*
 * Copyright (C) 2014 Free Electrons
 * Copyright (C) 2014 Atmel
 *
 * Author: Boris BREZILLON <boris.brezillon@free-electrons.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "atmel_hlcdc_dc.h"

/**
 * Atmel HLCDC Plane state structure.
 *
 * @base: DRM plane state
 * @crtc_x: x position of the plane relative to the CRTC
 * @crtc_y: y position of the plane relative to the CRTC
 * @crtc_w: visible width of the plane
 * @crtc_h: visible height of the plane
 * @src_x: x buffer position
 * @src_y: y buffer position
 * @src_w: buffer width
 * @src_h: buffer height
 * @alpha: alpha blending of the plane
 * @disc_x: x discard position
 * @disc_y: y discard position
 * @disc_w: discard width
 * @disc_h: discard height
 * @ahb_id: ID of the AHB master interface used to fetch the framebuffer
 * @bpp: bytes per pixel deduced from pixel_format
 * @offsets: offsets to apply to the GEM buffers
 * @xstride: value to add to the pixel pointer between each line
 * @pstride: value to add to the pixel pointer between each pixel
 * @nplanes: number of planes (deduced from pixel_format)
 * @dscrs: DMA descriptors
 */
struct atmel_hlcdc_plane_state {
	struct drm_plane_state base;
	int crtc_x;
	int crtc_y;
	unsigned int crtc_w;
	unsigned int crtc_h;
	uint32_t src_x;
	uint32_t src_y;
	uint32_t src_w;
	uint32_t src_h;

	u8 alpha;

	int disc_x;
	int disc_y;
	int disc_w;
	int disc_h;

	int ahb_id;

	/* These fields are private and should not be touched */
	int bpp[ATMEL_HLCDC_LAYER_MAX_PLANES];
	unsigned int offsets[ATMEL_HLCDC_LAYER_MAX_PLANES];
	int xstride[ATMEL_HLCDC_LAYER_MAX_PLANES];
	int pstride[ATMEL_HLCDC_LAYER_MAX_PLANES];
	int nplanes;

	/* DMA descriptors. */
	struct atmel_hlcdc_dma_channel_dscr *dscrs[ATMEL_HLCDC_LAYER_MAX_PLANES];
};

static inline struct atmel_hlcdc_plane_state *
drm_plane_state_to_atmel_hlcdc_plane_state(struct drm_plane_state *s)
{
	return container_of(s, struct atmel_hlcdc_plane_state, base);
}

#define SUBPIXEL_MASK			0xffff

static uint32_t rgb_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_XRGB4444,
	DRM_FORMAT_ARGB4444,
	DRM_FORMAT_RGBA4444,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_RGBA8888,
};

struct atmel_hlcdc_formats atmel_hlcdc_plane_rgb_formats = {
	.formats = rgb_formats,
	.nformats = ARRAY_SIZE(rgb_formats),
};

static uint32_t rgb_and_yuv_formats[] = {
	DRM_FORMAT_C8,
	DRM_FORMAT_XRGB4444,
	DRM_FORMAT_ARGB4444,
	DRM_FORMAT_RGBA4444,
	DRM_FORMAT_ARGB1555,
	DRM_FORMAT_RGB565,
	DRM_FORMAT_RGB888,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_AYUV,
	DRM_FORMAT_YUYV,
	DRM_FORMAT_UYVY,
	DRM_FORMAT_YVYU,
	DRM_FORMAT_VYUY,
	DRM_FORMAT_NV21,
	DRM_FORMAT_NV61,
	DRM_FORMAT_YUV422,
	DRM_FORMAT_YUV420,
};

struct atmel_hlcdc_formats atmel_hlcdc_plane_rgb_and_yuv_formats = {
	.formats = rgb_and_yuv_formats,
	.nformats = ARRAY_SIZE(rgb_and_yuv_formats),
};

static int atmel_hlcdc_format_to_plane_mode(u32 format, u32 *mode)
{
	switch (format) {
	case DRM_FORMAT_C8:
		*mode = ATMEL_HLCDC_C8_MODE;
		break;
	case DRM_FORMAT_XRGB4444:
		*mode = ATMEL_HLCDC_XRGB4444_MODE;
		break;
	case DRM_FORMAT_ARGB4444:
		*mode = ATMEL_HLCDC_ARGB4444_MODE;
		break;
	case DRM_FORMAT_RGBA4444:
		*mode = ATMEL_HLCDC_RGBA4444_MODE;
		break;
	case DRM_FORMAT_RGB565:
		*mode = ATMEL_HLCDC_RGB565_MODE;
		break;
	case DRM_FORMAT_RGB888:
		*mode = ATMEL_HLCDC_RGB888_MODE;
		break;
	case DRM_FORMAT_ARGB1555:
		*mode = ATMEL_HLCDC_ARGB1555_MODE;
		break;
	case DRM_FORMAT_XRGB8888:
		*mode = ATMEL_HLCDC_XRGB8888_MODE;
		break;
	case DRM_FORMAT_ARGB8888:
		*mode = ATMEL_HLCDC_ARGB8888_MODE;
		break;
	case DRM_FORMAT_RGBA8888:
		*mode = ATMEL_HLCDC_RGBA8888_MODE;
		break;
	case DRM_FORMAT_AYUV:
		*mode = ATMEL_HLCDC_AYUV_MODE;
		break;
	case DRM_FORMAT_YUYV:
		*mode = ATMEL_HLCDC_YUYV_MODE;
		break;
	case DRM_FORMAT_UYVY:
		*mode = ATMEL_HLCDC_UYVY_MODE;
		break;
	case DRM_FORMAT_YVYU:
		*mode = ATMEL_HLCDC_YVYU_MODE;
		break;
	case DRM_FORMAT_VYUY:
		*mode = ATMEL_HLCDC_VYUY_MODE;
		break;
	case DRM_FORMAT_NV21:
		*mode = ATMEL_HLCDC_NV21_MODE;
		break;
	case DRM_FORMAT_NV61:
		*mode = ATMEL_HLCDC_NV61_MODE;
		break;
	case DRM_FORMAT_YUV420:
		*mode = ATMEL_HLCDC_YUV420_MODE;
		break;
	case DRM_FORMAT_YUV422:
		*mode = ATMEL_HLCDC_YUV422_MODE;
		break;
	default:
		return -ENOTSUPP;
	}

	return 0;
}

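/*
 * Polyphase filter coefficient tables used by the HEO layer scaler and
 * written to the layer config registers by
 * atmel_hlcdc_plane_scaler_set_phicoeff(). There is one down-scaling and
 * one up-scaling table per direction; the exact tap layout inside each
 * 32-bit word is hardware specific (see the HLCDC datasheet).
 */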
197 static u32 heo_downscaling_xcoef[] = {
198 	0x11343311,
199 	0x000000f7,
200 	0x1635300c,
201 	0x000000f9,
202 	0x1b362c08,
203 	0x000000fb,
204 	0x1f372804,
205 	0x000000fe,
206 	0x24382400,
207 	0x00000000,
208 	0x28371ffe,
209 	0x00000004,
210 	0x2c361bfb,
211 	0x00000008,
212 	0x303516f9,
213 	0x0000000c,
214 };
215 
216 static u32 heo_downscaling_ycoef[] = {
217 	0x00123737,
218 	0x00173732,
219 	0x001b382d,
220 	0x001f3928,
221 	0x00243824,
222 	0x0028391f,
223 	0x002d381b,
224 	0x00323717,
225 };
226 
227 static u32 heo_upscaling_xcoef[] = {
228 	0xf74949f7,
229 	0x00000000,
230 	0xf55f33fb,
231 	0x000000fe,
232 	0xf5701efe,
233 	0x000000ff,
234 	0xf87c0dff,
235 	0x00000000,
236 	0x00800000,
237 	0x00000000,
238 	0x0d7cf800,
239 	0x000000ff,
240 	0x1e70f5ff,
241 	0x000000fe,
242 	0x335ff5fe,
243 	0x000000fb,
244 };
245 
246 static u32 heo_upscaling_ycoef[] = {
247 	0x00004040,
248 	0x00075920,
249 	0x00056f0c,
250 	0x00027b03,
251 	0x00008000,
252 	0x00037b02,
253 	0x000c6f05,
254 	0x00205907,
255 };
256 
257 #define ATMEL_HLCDC_XPHIDEF	4
258 #define ATMEL_HLCDC_YPHIDEF	4
259 
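/*
 * Compute the phi scaler factor for one direction. The factor is a
 * fixed-point ratio between the source and destination sizes, roughly
 * 2048 * (srcsize - 1) / (dstsize - 1), corrected by the default phase
 * phidef. For instance, upscaling 320 source pixels to 640 destination
 * pixels with phidef = 4 yields a factor of 1020, i.e. close to 2048 / 2.
 * The factor is decremented by one when the resulting memory fetch size
 * would exceed the source size, presumably so the scaler never fetches
 * past the end of the source buffer.
 */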
static u32 atmel_hlcdc_plane_phiscaler_get_factor(u32 srcsize,
						  u32 dstsize,
						  u32 phidef)
{
	u32 factor, max_memsize;

	factor = (256 * ((8 * (srcsize - 1)) - phidef)) / (dstsize - 1);
	max_memsize = ((factor * (dstsize - 1)) + (256 * phidef)) / 2048;

	if (max_memsize > srcsize - 1)
		factor--;

	return factor;
}

static void
atmel_hlcdc_plane_scaler_set_phicoeff(struct atmel_hlcdc_plane *plane,
				      const u32 *coeff_tab, int size,
				      unsigned int cfg_offs)
{
	int i;

	for (i = 0; i < size; i++)
		atmel_hlcdc_layer_write_cfg(&plane->layer, cfg_offs + i,
					    coeff_tab[i]);
}

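/*
 * Program the layer scaler. Layers providing phi coefficient registers
 * (currently the HEO layer) get the polyphase coefficient tables and a
 * phi factor per direction; other scaler-capable layers fall back to a
 * simple 1024-based fractional factor. When the source and destination
 * sizes match, the scaler is simply disabled.
 */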
void atmel_hlcdc_plane_setup_scaler(struct atmel_hlcdc_plane *plane,
				    struct atmel_hlcdc_plane_state *state)
{
	const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
	u32 xfactor, yfactor;

	if (!desc->layout.scaler_config)
		return;

	if (state->crtc_w == state->src_w && state->crtc_h == state->src_h) {
		atmel_hlcdc_layer_write_cfg(&plane->layer,
					    desc->layout.scaler_config, 0);
		return;
	}

	if (desc->layout.phicoeffs.x) {
		xfactor = atmel_hlcdc_plane_phiscaler_get_factor(state->src_w,
							state->crtc_w,
							ATMEL_HLCDC_XPHIDEF);

		yfactor = atmel_hlcdc_plane_phiscaler_get_factor(state->src_h,
							state->crtc_h,
							ATMEL_HLCDC_YPHIDEF);

		atmel_hlcdc_plane_scaler_set_phicoeff(plane,
				state->crtc_w < state->src_w ?
				heo_downscaling_xcoef :
				heo_upscaling_xcoef,
				ARRAY_SIZE(heo_upscaling_xcoef),
				desc->layout.phicoeffs.x);

		atmel_hlcdc_plane_scaler_set_phicoeff(plane,
				state->crtc_h < state->src_h ?
				heo_downscaling_ycoef :
				heo_upscaling_ycoef,
				ARRAY_SIZE(heo_upscaling_ycoef),
				desc->layout.phicoeffs.y);
	} else {
		xfactor = (1024 * state->src_w) / state->crtc_w;
		yfactor = (1024 * state->src_h) / state->crtc_h;
	}

	atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.scaler_config,
				    ATMEL_HLCDC_LAYER_SCALER_ENABLE |
				    ATMEL_HLCDC_LAYER_SCALER_FACTORS(xfactor,
								     yfactor));
}

static void
atmel_hlcdc_plane_update_pos_and_size(struct atmel_hlcdc_plane *plane,
				      struct atmel_hlcdc_plane_state *state)
{
	const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;

	if (desc->layout.size)
		atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.size,
					ATMEL_HLCDC_LAYER_SIZE(state->crtc_w,
							       state->crtc_h));

	if (desc->layout.memsize)
		atmel_hlcdc_layer_write_cfg(&plane->layer,
					desc->layout.memsize,
					ATMEL_HLCDC_LAYER_SIZE(state->src_w,
							       state->src_h));

	if (desc->layout.pos)
		atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.pos,
					ATMEL_HLCDC_LAYER_POS(state->crtc_x,
							      state->crtc_y));

	atmel_hlcdc_plane_setup_scaler(plane, state);
}

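/*
 * Configure the DMA and blender settings of the layer. Overlay and
 * cursor planes are blended on top of the underlying layers: formats
 * with an alpha channel use per-pixel (local) alpha, all other formats
 * use the global alpha value exposed through the "alpha" plane property.
 * The discard-area enable bit and the RGB888 rotation quirk are also
 * handled here.
 */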
static void
atmel_hlcdc_plane_update_general_settings(struct atmel_hlcdc_plane *plane,
					struct atmel_hlcdc_plane_state *state)
{
	unsigned int cfg = ATMEL_HLCDC_LAYER_DMA_BLEN_INCR16 | state->ahb_id;
	const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
	const struct drm_format_info *format = state->base.fb->format;

	/*
	 * The rotation optimization does not work on RGB888: rotation still
	 * works, but without any optimization.
	 */
	if (format->format == DRM_FORMAT_RGB888)
		cfg |= ATMEL_HLCDC_LAYER_DMA_ROTDIS;

	atmel_hlcdc_layer_write_cfg(&plane->layer, ATMEL_HLCDC_LAYER_DMA_CFG,
				    cfg);

	cfg = ATMEL_HLCDC_LAYER_DMA;

	if (plane->base.type != DRM_PLANE_TYPE_PRIMARY) {
		cfg |= ATMEL_HLCDC_LAYER_OVR | ATMEL_HLCDC_LAYER_ITER2BL |
		       ATMEL_HLCDC_LAYER_ITER;

		if (format->has_alpha)
			cfg |= ATMEL_HLCDC_LAYER_LAEN;
		else
			cfg |= ATMEL_HLCDC_LAYER_GAEN |
			       ATMEL_HLCDC_LAYER_GA(state->alpha);
	}

	if (state->disc_h && state->disc_w)
		cfg |= ATMEL_HLCDC_LAYER_DISCEN;

	atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.general_config,
				    cfg);
}

static void atmel_hlcdc_plane_update_format(struct atmel_hlcdc_plane *plane,
					struct atmel_hlcdc_plane_state *state)
{
	u32 cfg;
	int ret;

	ret = atmel_hlcdc_format_to_plane_mode(state->base.fb->format->format,
					       &cfg);
	if (ret)
		return;

	if ((state->base.fb->format->format == DRM_FORMAT_YUV422 ||
	     state->base.fb->format->format == DRM_FORMAT_NV61) &&
	    drm_rotation_90_or_270(state->base.rotation))
		cfg |= ATMEL_HLCDC_YUV422ROT;

	atmel_hlcdc_layer_write_cfg(&plane->layer,
				    ATMEL_HLCDC_LAYER_FORMAT_CFG, cfg);
}

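/*
 * Load the layer CLUT from the CRTC gamma LUT when it has been updated.
 * Each drm_color_lut entry stores 16 bits per component; only the 8 most
 * significant bits of each component are kept and packed as 0xRRGGBB.
 */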
static void atmel_hlcdc_plane_update_clut(struct atmel_hlcdc_plane *plane)
{
	struct drm_crtc *crtc = plane->base.crtc;
	struct drm_color_lut *lut;
	int idx;

	if (!crtc || !crtc->state)
		return;

	if (!crtc->state->color_mgmt_changed || !crtc->state->gamma_lut)
		return;

	lut = (struct drm_color_lut *)crtc->state->gamma_lut->data;

	for (idx = 0; idx < ATMEL_HLCDC_CLUT_SIZE; idx++, lut++) {
		u32 val = ((lut->red << 8) & 0xff0000) |
			(lut->green & 0xff00) |
			(lut->blue >> 8);

		atmel_hlcdc_layer_write_clut(&plane->layer, idx, val);
	}
}

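/*
 * Update the DMA descriptors of each plane of the framebuffer. If the
 * channel is not enabled yet, the descriptor is also programmed directly
 * in the ADDR/CTRL/NEXT registers; otherwise only the HEAD register is
 * updated and the hardware fetches the new descriptor on the next frame.
 * Per-plane line (xstride) and pixel (pstride) increments are programmed
 * when the layer provides the matching config registers.
 */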
static void atmel_hlcdc_plane_update_buffers(struct atmel_hlcdc_plane *plane,
					struct atmel_hlcdc_plane_state *state)
{
	const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
	struct drm_framebuffer *fb = state->base.fb;
	u32 sr;
	int i;

	sr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHSR);

	for (i = 0; i < state->nplanes; i++) {
		struct drm_gem_cma_object *gem = drm_fb_cma_get_gem_obj(fb, i);

		state->dscrs[i]->addr = gem->paddr + state->offsets[i];

		atmel_hlcdc_layer_write_reg(&plane->layer,
					    ATMEL_HLCDC_LAYER_PLANE_HEAD(i),
					    state->dscrs[i]->self);

		if (!(sr & ATMEL_HLCDC_LAYER_EN)) {
			atmel_hlcdc_layer_write_reg(&plane->layer,
					ATMEL_HLCDC_LAYER_PLANE_ADDR(i),
					state->dscrs[i]->addr);
			atmel_hlcdc_layer_write_reg(&plane->layer,
					ATMEL_HLCDC_LAYER_PLANE_CTRL(i),
					state->dscrs[i]->ctrl);
			atmel_hlcdc_layer_write_reg(&plane->layer,
					ATMEL_HLCDC_LAYER_PLANE_NEXT(i),
					state->dscrs[i]->self);
		}

		if (desc->layout.xstride[i])
			atmel_hlcdc_layer_write_cfg(&plane->layer,
						    desc->layout.xstride[i],
						    state->xstride[i]);

		if (desc->layout.pstride[i])
			atmel_hlcdc_layer_write_cfg(&plane->layer,
						    desc->layout.pstride[i],
						    state->pstride[i]);
	}
}

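/*
 * Balance the memory fetch load between the two AHB master interfaces of
 * the HLCDC. For each plane attached to the CRTC, the number of bytes
 * fetched per frame (visible pixels minus the discarded area, times the
 * per-plane bytes per pixel) is estimated, and the plane is routed to
 * the least loaded interface.
 */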
int atmel_hlcdc_plane_prepare_ahb_routing(struct drm_crtc_state *c_state)
{
	unsigned int ahb_load[2] = { };
	struct drm_plane *plane;

	drm_atomic_crtc_state_for_each_plane(plane, c_state) {
		struct atmel_hlcdc_plane_state *plane_state;
		struct drm_plane_state *plane_s;
		unsigned int pixels, load = 0;
		int i;

		plane_s = drm_atomic_get_plane_state(c_state->state, plane);
		if (IS_ERR(plane_s))
			return PTR_ERR(plane_s);

		plane_state =
			drm_plane_state_to_atmel_hlcdc_plane_state(plane_s);

		pixels = (plane_state->src_w * plane_state->src_h) -
			 (plane_state->disc_w * plane_state->disc_h);

		for (i = 0; i < plane_state->nplanes; i++)
			load += pixels * plane_state->bpp[i];

		if (ahb_load[0] <= ahb_load[1])
			plane_state->ahb_id = 0;
		else
			plane_state->ahb_id = 1;

		ahb_load[plane_state->ahb_id] += load;
	}

	return 0;
}

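/*
 * Compute the discard area of the primary plane: when an overlay with no
 * alpha (neither per-pixel nor global) completely hides a rectangle of
 * the primary plane, the primary layer can skip fetching that rectangle
 * and save memory bandwidth. Only the biggest such overlay rectangle is
 * considered (see the TODO about smarter hidden area detection below).
 */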
int
atmel_hlcdc_plane_prepare_disc_area(struct drm_crtc_state *c_state)
{
	int disc_x = 0, disc_y = 0, disc_w = 0, disc_h = 0;
	const struct atmel_hlcdc_layer_cfg_layout *layout;
	struct atmel_hlcdc_plane_state *primary_state;
	struct drm_plane_state *primary_s;
	struct atmel_hlcdc_plane *primary;
	struct drm_plane *ovl;

	primary = drm_plane_to_atmel_hlcdc_plane(c_state->crtc->primary);
	layout = &primary->layer.desc->layout;
	if (!layout->disc_pos || !layout->disc_size)
		return 0;

	primary_s = drm_atomic_get_plane_state(c_state->state,
					       &primary->base);
	if (IS_ERR(primary_s))
		return PTR_ERR(primary_s);

	primary_state = drm_plane_state_to_atmel_hlcdc_plane_state(primary_s);

	drm_atomic_crtc_state_for_each_plane(ovl, c_state) {
		struct atmel_hlcdc_plane_state *ovl_state;
		struct drm_plane_state *ovl_s;

		if (ovl == c_state->crtc->primary)
			continue;

		ovl_s = drm_atomic_get_plane_state(c_state->state, ovl);
		if (IS_ERR(ovl_s))
			return PTR_ERR(ovl_s);

		ovl_state = drm_plane_state_to_atmel_hlcdc_plane_state(ovl_s);

		if (!ovl_s->fb ||
		    ovl_s->fb->format->has_alpha ||
		    ovl_state->alpha != 255)
			continue;

		/* TODO: implement a smarter hidden area detection */
		if (ovl_state->crtc_h * ovl_state->crtc_w < disc_h * disc_w)
			continue;

		disc_x = ovl_state->crtc_x;
		disc_y = ovl_state->crtc_y;
		disc_h = ovl_state->crtc_h;
		disc_w = ovl_state->crtc_w;
	}

	primary_state->disc_x = disc_x;
	primary_state->disc_y = disc_y;
	primary_state->disc_w = disc_w;
	primary_state->disc_h = disc_h;

	return 0;
}

static void
atmel_hlcdc_plane_update_disc_area(struct atmel_hlcdc_plane *plane,
				   struct atmel_hlcdc_plane_state *state)
{
	const struct atmel_hlcdc_layer_cfg_layout *layout;

	layout = &plane->layer.desc->layout;
	if (!layout->disc_pos || !layout->disc_size)
		return;

	atmel_hlcdc_layer_write_cfg(&plane->layer, layout->disc_pos,
				ATMEL_HLCDC_LAYER_DISC_POS(state->disc_x,
							   state->disc_y));

	atmel_hlcdc_layer_write_cfg(&plane->layer, layout->disc_size,
				ATMEL_HLCDC_LAYER_DISC_SIZE(state->disc_w,
							    state->disc_h));
}

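/*
 * Check and precompute the plane configuration: convert the 16.16 fixed
 * point source coordinates to integers, swap dimensions for 90/270
 * degree rotation, clip the plane to the CRTC display area, and derive
 * the per-plane buffer offsets, line strides (xstride) and pixel strides
 * (pstride) matching the requested rotation.
 */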
static int atmel_hlcdc_plane_atomic_check(struct drm_plane *p,
					  struct drm_plane_state *s)
{
	struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
	struct atmel_hlcdc_plane_state *state =
				drm_plane_state_to_atmel_hlcdc_plane_state(s);
	const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
	struct drm_framebuffer *fb = state->base.fb;
	const struct drm_display_mode *mode;
	struct drm_crtc_state *crtc_state;
	unsigned int patched_crtc_w;
	unsigned int patched_crtc_h;
	unsigned int patched_src_w;
	unsigned int patched_src_h;
	unsigned int tmp;
	int x_offset = 0;
	int y_offset = 0;
	int hsub = 1;
	int vsub = 1;
	int i;

	if (!state->base.crtc || !fb)
		return 0;

	crtc_state = drm_atomic_get_existing_crtc_state(s->state, s->crtc);
	mode = &crtc_state->adjusted_mode;

	state->src_x = s->src_x;
	state->src_y = s->src_y;
	state->src_h = s->src_h;
	state->src_w = s->src_w;
	state->crtc_x = s->crtc_x;
	state->crtc_y = s->crtc_y;
	state->crtc_h = s->crtc_h;
	state->crtc_w = s->crtc_w;
	if ((state->src_x | state->src_y | state->src_w | state->src_h) &
	    SUBPIXEL_MASK)
		return -EINVAL;

	state->src_x >>= 16;
	state->src_y >>= 16;
	state->src_w >>= 16;
	state->src_h >>= 16;

	state->nplanes = fb->format->num_planes;
	if (state->nplanes > ATMEL_HLCDC_LAYER_MAX_PLANES)
		return -EINVAL;

	/*
	 * Swap width and height in case of 90 or 270 degrees rotation
	 */
	if (drm_rotation_90_or_270(state->base.rotation)) {
		tmp = state->crtc_w;
		state->crtc_w = state->crtc_h;
		state->crtc_h = tmp;
		tmp = state->src_w;
		state->src_w = state->src_h;
		state->src_h = tmp;
	}

	if (state->crtc_x + state->crtc_w > mode->hdisplay)
		patched_crtc_w = mode->hdisplay - state->crtc_x;
	else
		patched_crtc_w = state->crtc_w;

	if (state->crtc_x < 0) {
		patched_crtc_w += state->crtc_x;
		x_offset = -state->crtc_x;
		state->crtc_x = 0;
	}

	if (state->crtc_y + state->crtc_h > mode->vdisplay)
		patched_crtc_h = mode->vdisplay - state->crtc_y;
	else
		patched_crtc_h = state->crtc_h;

	if (state->crtc_y < 0) {
		patched_crtc_h += state->crtc_y;
		y_offset = -state->crtc_y;
		state->crtc_y = 0;
	}

	patched_src_w = DIV_ROUND_CLOSEST(patched_crtc_w * state->src_w,
					  state->crtc_w);
	patched_src_h = DIV_ROUND_CLOSEST(patched_crtc_h * state->src_h,
					  state->crtc_h);

	hsub = drm_format_horz_chroma_subsampling(fb->format->format);
	vsub = drm_format_vert_chroma_subsampling(fb->format->format);

	for (i = 0; i < state->nplanes; i++) {
		unsigned int offset = 0;
		int xdiv = i ? hsub : 1;
		int ydiv = i ? vsub : 1;

		state->bpp[i] = fb->format->cpp[i];
		if (!state->bpp[i])
			return -EINVAL;

		switch (state->base.rotation & DRM_MODE_ROTATE_MASK) {
		case DRM_MODE_ROTATE_90:
			offset = ((y_offset + state->src_y + patched_src_w - 1) /
				  ydiv) * fb->pitches[i];
			offset += ((x_offset + state->src_x) / xdiv) *
				  state->bpp[i];
			state->xstride[i] = ((patched_src_w - 1) / ydiv) *
					  fb->pitches[i];
			state->pstride[i] = -fb->pitches[i] - state->bpp[i];
			break;
		case DRM_MODE_ROTATE_180:
			offset = ((y_offset + state->src_y + patched_src_h - 1) /
				  ydiv) * fb->pitches[i];
			offset += ((x_offset + state->src_x + patched_src_w - 1) /
				   xdiv) * state->bpp[i];
			state->xstride[i] = ((((patched_src_w - 1) / xdiv) - 1) *
					   state->bpp[i]) - fb->pitches[i];
			state->pstride[i] = -2 * state->bpp[i];
			break;
		case DRM_MODE_ROTATE_270:
			offset = ((y_offset + state->src_y) / ydiv) *
				 fb->pitches[i];
			offset += ((x_offset + state->src_x + patched_src_h - 1) /
				   xdiv) * state->bpp[i];
			state->xstride[i] = -(((patched_src_w - 1) / ydiv) *
					    fb->pitches[i]) -
					  (2 * state->bpp[i]);
			state->pstride[i] = fb->pitches[i] - state->bpp[i];
			break;
		case DRM_MODE_ROTATE_0:
		default:
			offset = ((y_offset + state->src_y) / ydiv) *
				 fb->pitches[i];
			offset += ((x_offset + state->src_x) / xdiv) *
				  state->bpp[i];
			state->xstride[i] = fb->pitches[i] -
					  ((patched_src_w / xdiv) *
					   state->bpp[i]);
			state->pstride[i] = 0;
			break;
		}

		state->offsets[i] = offset + fb->offsets[i];
	}

	state->src_w = patched_src_w;
	state->src_h = patched_src_h;
	state->crtc_w = patched_crtc_w;
	state->crtc_h = patched_crtc_h;

	if (!desc->layout.size &&
	    (mode->hdisplay != state->crtc_w ||
	     mode->vdisplay != state->crtc_h))
		return -EINVAL;

	if (desc->max_height && state->crtc_h > desc->max_height)
		return -EINVAL;

	if (desc->max_width && state->crtc_w > desc->max_width)
		return -EINVAL;

	if ((state->crtc_h != state->src_h || state->crtc_w != state->src_w) &&
	    (!desc->layout.memsize ||
	     state->base.fb->format->has_alpha))
		return -EINVAL;

	if (state->crtc_x < 0 || state->crtc_y < 0)
		return -EINVAL;

	if (state->crtc_w + state->crtc_x > mode->hdisplay ||
	    state->crtc_h + state->crtc_y > mode->vdisplay)
		return -EINVAL;

	return 0;
}

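/*
 * Apply the new plane state to the hardware. The layer configuration
 * registers are shadowed: the values written here are latched with the
 * UPDATE bit and, for an already running channel, queued (A2Q) so they
 * take effect at the next start-of-frame; a disabled channel is simply
 * enabled instead.
 */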
static void atmel_hlcdc_plane_atomic_update(struct drm_plane *p,
					    struct drm_plane_state *old_s)
{
	struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
	struct atmel_hlcdc_plane_state *state =
			drm_plane_state_to_atmel_hlcdc_plane_state(p->state);
	u32 sr;

	if (!p->state->crtc || !p->state->fb)
		return;

	atmel_hlcdc_plane_update_pos_and_size(plane, state);
	atmel_hlcdc_plane_update_general_settings(plane, state);
	atmel_hlcdc_plane_update_format(plane, state);
	atmel_hlcdc_plane_update_clut(plane);
	atmel_hlcdc_plane_update_buffers(plane, state);
	atmel_hlcdc_plane_update_disc_area(plane, state);

	/* Enable the overrun interrupts. */
	atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_IER,
				    ATMEL_HLCDC_LAYER_OVR_IRQ(0) |
				    ATMEL_HLCDC_LAYER_OVR_IRQ(1) |
				    ATMEL_HLCDC_LAYER_OVR_IRQ(2));

	/* Apply the new config at the next SOF event. */
	sr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHSR);
	atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHER,
			ATMEL_HLCDC_LAYER_UPDATE |
			(sr & ATMEL_HLCDC_LAYER_EN ?
			 ATMEL_HLCDC_LAYER_A2Q : ATMEL_HLCDC_LAYER_EN));
}

static void atmel_hlcdc_plane_atomic_disable(struct drm_plane *p,
					     struct drm_plane_state *old_state)
{
	struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);

	/* Disable interrupts */
	atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_IDR,
				    0xffffffff);

	/* Disable the layer */
	atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHDR,
				    ATMEL_HLCDC_LAYER_RST |
				    ATMEL_HLCDC_LAYER_A2Q |
				    ATMEL_HLCDC_LAYER_UPDATE);

	/* Clear all pending interrupts */
	atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR);
}

static void atmel_hlcdc_plane_destroy(struct drm_plane *p)
{
	struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);

	if (plane->base.fb)
		drm_framebuffer_put(plane->base.fb);

	drm_plane_cleanup(p);
}

static int atmel_hlcdc_plane_atomic_set_property(struct drm_plane *p,
						 struct drm_plane_state *s,
						 struct drm_property *property,
						 uint64_t val)
{
	struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
	struct atmel_hlcdc_plane_properties *props = plane->properties;
	struct atmel_hlcdc_plane_state *state =
			drm_plane_state_to_atmel_hlcdc_plane_state(s);

	if (property == props->alpha)
		state->alpha = val;
	else
		return -EINVAL;

	return 0;
}

static int atmel_hlcdc_plane_atomic_get_property(struct drm_plane *p,
					const struct drm_plane_state *s,
					struct drm_property *property,
					uint64_t *val)
{
	struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
	struct atmel_hlcdc_plane_properties *props = plane->properties;
	const struct atmel_hlcdc_plane_state *state =
		container_of(s, const struct atmel_hlcdc_plane_state, base);

	if (property == props->alpha)
		*val = state->alpha;
	else
		return -EINVAL;

	return 0;
}

static int atmel_hlcdc_plane_init_properties(struct atmel_hlcdc_plane *plane,
				struct atmel_hlcdc_plane_properties *props)
{
	const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;

	if (desc->type == ATMEL_HLCDC_OVERLAY_LAYER ||
	    desc->type == ATMEL_HLCDC_CURSOR_LAYER)
		drm_object_attach_property(&plane->base.base,
					   props->alpha, 255);

	if (desc->layout.xstride && desc->layout.pstride) {
		int ret;

		ret = drm_plane_create_rotation_property(&plane->base,
							 DRM_MODE_ROTATE_0,
							 DRM_MODE_ROTATE_0 |
							 DRM_MODE_ROTATE_90 |
							 DRM_MODE_ROTATE_180 |
							 DRM_MODE_ROTATE_270);
		if (ret)
			return ret;
	}

	if (desc->layout.csc) {
		/*
		 * TODO: declare a "yuv-to-rgb-conv-factors" property to let
		 * userspace modify these factors (using a BLOB property?).
		 */
		atmel_hlcdc_layer_write_cfg(&plane->layer,
					    desc->layout.csc,
					    0x4c900091);
		atmel_hlcdc_layer_write_cfg(&plane->layer,
					    desc->layout.csc + 1,
					    0x7a5f5090);
		atmel_hlcdc_layer_write_cfg(&plane->layer,
					    desc->layout.csc + 2,
					    0x40040890);
	}

	return 0;
}

void atmel_hlcdc_plane_irq(struct atmel_hlcdc_plane *plane)
{
	const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
	u32 isr;

	isr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR);

	/*
	 * There's not much we can do in case of overrun except informing
	 * the user. However, we are in interrupt context here, hence the
	 * use of dev_dbg().
	 */
	if (isr &
	    (ATMEL_HLCDC_LAYER_OVR_IRQ(0) | ATMEL_HLCDC_LAYER_OVR_IRQ(1) |
	     ATMEL_HLCDC_LAYER_OVR_IRQ(2)))
		dev_dbg(plane->base.dev->dev, "overrun on plane %s\n",
			desc->name);
}

static const struct drm_plane_helper_funcs atmel_hlcdc_layer_plane_helper_funcs = {
	.atomic_check = atmel_hlcdc_plane_atomic_check,
	.atomic_update = atmel_hlcdc_plane_atomic_update,
	.atomic_disable = atmel_hlcdc_plane_atomic_disable,
};

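/*
 * Allocate one DMA channel descriptor per potential plane of the
 * framebuffer from the device dscrpool. Each descriptor points to itself
 * (next == self), so the DMA channel keeps looping on the same buffer
 * until a new descriptor head is programmed.
 */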
static int atmel_hlcdc_plane_alloc_dscrs(struct drm_plane *p,
					 struct atmel_hlcdc_plane_state *state)
{
	struct atmel_hlcdc_dc *dc = p->dev->dev_private;
	int i;

	for (i = 0; i < ARRAY_SIZE(state->dscrs); i++) {
		struct atmel_hlcdc_dma_channel_dscr *dscr;
		dma_addr_t dscr_dma;

		dscr = dma_pool_alloc(dc->dscrpool, GFP_KERNEL, &dscr_dma);
		if (!dscr)
			goto err;

		dscr->addr = 0;
		dscr->next = dscr_dma;
		dscr->self = dscr_dma;
		dscr->ctrl = ATMEL_HLCDC_LAYER_DFETCH;

		state->dscrs[i] = dscr;
	}

	return 0;

err:
	for (i--; i >= 0; i--) {
		dma_pool_free(dc->dscrpool, state->dscrs[i],
			      state->dscrs[i]->self);
	}

	return -ENOMEM;
}

static void atmel_hlcdc_plane_reset(struct drm_plane *p)
{
	struct atmel_hlcdc_plane_state *state;

	if (p->state) {
		state = drm_plane_state_to_atmel_hlcdc_plane_state(p->state);

		if (state->base.fb)
			drm_framebuffer_put(state->base.fb);

		kfree(state);
		p->state = NULL;
	}

	state = kzalloc(sizeof(*state), GFP_KERNEL);
	if (state) {
		if (atmel_hlcdc_plane_alloc_dscrs(p, state)) {
			kfree(state);
			dev_err(p->dev->dev,
				"Failed to allocate initial plane state\n");
			return;
		}

		state->alpha = 255;
		p->state = &state->base;
		p->state->plane = p;
	}
}

static struct drm_plane_state *
atmel_hlcdc_plane_atomic_duplicate_state(struct drm_plane *p)
{
	struct atmel_hlcdc_plane_state *state =
			drm_plane_state_to_atmel_hlcdc_plane_state(p->state);
	struct atmel_hlcdc_plane_state *copy;

	copy = kmemdup(state, sizeof(*state), GFP_KERNEL);
	if (!copy)
		return NULL;

	if (atmel_hlcdc_plane_alloc_dscrs(p, copy)) {
		kfree(copy);
		return NULL;
	}

	if (copy->base.fb)
		drm_framebuffer_get(copy->base.fb);

	return &copy->base;
}

static void atmel_hlcdc_plane_atomic_destroy_state(struct drm_plane *p,
						   struct drm_plane_state *s)
{
	struct atmel_hlcdc_plane_state *state =
			drm_plane_state_to_atmel_hlcdc_plane_state(s);
	struct atmel_hlcdc_dc *dc = p->dev->dev_private;
	int i;

	for (i = 0; i < ARRAY_SIZE(state->dscrs); i++) {
		dma_pool_free(dc->dscrpool, state->dscrs[i],
			      state->dscrs[i]->self);
	}

	if (s->fb)
		drm_framebuffer_put(s->fb);

	kfree(state);
}

static const struct drm_plane_funcs layer_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = atmel_hlcdc_plane_destroy,
	.reset = atmel_hlcdc_plane_reset,
	.atomic_duplicate_state = atmel_hlcdc_plane_atomic_duplicate_state,
	.atomic_destroy_state = atmel_hlcdc_plane_atomic_destroy_state,
	.atomic_set_property = atmel_hlcdc_plane_atomic_set_property,
	.atomic_get_property = atmel_hlcdc_plane_atomic_get_property,
};

static int atmel_hlcdc_plane_create(struct drm_device *dev,
				    const struct atmel_hlcdc_layer_desc *desc,
				    struct atmel_hlcdc_plane_properties *props)
{
	struct atmel_hlcdc_dc *dc = dev->dev_private;
	struct atmel_hlcdc_plane *plane;
	enum drm_plane_type type;
	int ret;

	plane = devm_kzalloc(dev->dev, sizeof(*plane), GFP_KERNEL);
	if (!plane)
		return -ENOMEM;

	atmel_hlcdc_layer_init(&plane->layer, desc, dc->hlcdc->regmap);
	plane->properties = props;

	if (desc->type == ATMEL_HLCDC_BASE_LAYER)
		type = DRM_PLANE_TYPE_PRIMARY;
	else if (desc->type == ATMEL_HLCDC_CURSOR_LAYER)
		type = DRM_PLANE_TYPE_CURSOR;
	else
		type = DRM_PLANE_TYPE_OVERLAY;

	ret = drm_universal_plane_init(dev, &plane->base, 0,
				       &layer_plane_funcs,
				       desc->formats->formats,
				       desc->formats->nformats,
				       NULL, type, NULL);
	if (ret)
		return ret;

	drm_plane_helper_add(&plane->base,
			     &atmel_hlcdc_layer_plane_helper_funcs);

	/* Set default property values */
	ret = atmel_hlcdc_plane_init_properties(plane, props);
	if (ret)
		return ret;

	dc->layers[desc->id] = &plane->layer;

	return 0;
}

static struct atmel_hlcdc_plane_properties *
atmel_hlcdc_plane_create_properties(struct drm_device *dev)
{
	struct atmel_hlcdc_plane_properties *props;

	props = devm_kzalloc(dev->dev, sizeof(*props), GFP_KERNEL);
	if (!props)
		return ERR_PTR(-ENOMEM);

	props->alpha = drm_property_create_range(dev, 0, "alpha", 0, 255);
	if (!props->alpha)
		return ERR_PTR(-ENOMEM);

	return props;
}

int atmel_hlcdc_create_planes(struct drm_device *dev)
{
	struct atmel_hlcdc_dc *dc = dev->dev_private;
	struct atmel_hlcdc_plane_properties *props;
	const struct atmel_hlcdc_layer_desc *descs = dc->desc->layers;
	int nlayers = dc->desc->nlayers;
	int i, ret;

	props = atmel_hlcdc_plane_create_properties(dev);
	if (IS_ERR(props))
		return PTR_ERR(props);

	dc->dscrpool = dmam_pool_create("atmel-hlcdc-dscr", dev->dev,
				sizeof(struct atmel_hlcdc_dma_channel_dscr),
				sizeof(u64), 0);
	if (!dc->dscrpool)
		return -ENOMEM;

	for (i = 0; i < nlayers; i++) {
		if (descs[i].type != ATMEL_HLCDC_BASE_LAYER &&
		    descs[i].type != ATMEL_HLCDC_OVERLAY_LAYER &&
		    descs[i].type != ATMEL_HLCDC_CURSOR_LAYER)
			continue;

		ret = atmel_hlcdc_plane_create(dev, &descs[i], props);
		if (ret)
			return ret;
	}

	return 0;
}