/*
 * Copyright (C) Fuzhou Rockchip Electronics Co.Ltd
 * Author: Jacob Chen <jacob-chen@iotwrt.com>
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/pm_runtime.h>

#include "rga-hw.h"
#include "rga.h"

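/*
 * Corner of the destination rectangle that the hardware uses as its drawing
 * start point, depending on the rotation and mirror mode.
 */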
enum e_rga_start_pos {
	LT = 0,
	LB = 1,
	RT = 2,
	RB = 3,
};

struct rga_addr_offset {
	unsigned int y_off;
	unsigned int u_off;
	unsigned int v_off;
};

struct rga_corners_addr_offset {
	struct rga_addr_offset left_top;
	struct rga_addr_offset right_top;
	struct rga_addr_offset left_bottom;
	struct rga_addr_offset right_bottom;
};

static unsigned int rga_get_scaling(unsigned int src, unsigned int dst)
{
	/*
	 * The RGA hardware scaling factor is a normalized inverse of the
	 * scaling factor.
	 * For example: when the source width is 100 and the destination
	 * width is 200 (scaling of 2x), the hardware factor is NC * 100 / 200.
	 * The normalization factor (NC) is 2^16 = 0x10000.
	 */

	return (src > dst) ? ((dst << 16) / src) : ((src << 16) / dst);
}

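/*
 * Compute the Y/U/V byte offsets of the four corners of a w x h rectangle
 * located at (x, y) inside the frame.  The chroma offsets depend on the
 * format's subsampling, described by x_div, y_div and uv_factor.
 */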
static struct rga_corners_addr_offset
rga_get_addr_offset(struct rga_frame *frm, unsigned int x, unsigned int y,
		    unsigned int w, unsigned int h)
{
	struct rga_corners_addr_offset offsets;
	struct rga_addr_offset *lt, *lb, *rt, *rb;
	unsigned int x_div = 0, y_div = 0;
	unsigned int uv_stride = 0, pixel_width = 0, uv_factor = 0;

	lt = &offsets.left_top;
	lb = &offsets.left_bottom;
	rt = &offsets.right_top;
	rb = &offsets.right_bottom;

	x_div = frm->fmt->x_div;
	y_div = frm->fmt->y_div;
	uv_factor = frm->fmt->uv_factor;
	uv_stride = frm->stride / x_div;
	pixel_width = frm->stride / frm->width;

	lt->y_off = y * frm->stride + x * pixel_width;
	lt->u_off =
		frm->width * frm->height + (y / y_div) * uv_stride + x / x_div;
	lt->v_off = lt->u_off + frm->width * frm->height / uv_factor;

	lb->y_off = lt->y_off + (h - 1) * frm->stride;
	lb->u_off = lt->u_off + (h / y_div - 1) * uv_stride;
	lb->v_off = lt->v_off + (h / y_div - 1) * uv_stride;

	rt->y_off = lt->y_off + (w - 1) * pixel_width;
	rt->u_off = lt->u_off + w / x_div - 1;
	rt->v_off = lt->v_off + w / x_div - 1;

	rb->y_off = lb->y_off + (w - 1) * pixel_width;
	rb->u_off = lb->u_off + w / x_div - 1;
	rb->v_off = lb->v_off + w / x_div - 1;

	return offsets;
}

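/*
 * Select the corner of the destination rectangle that the hardware must use
 * as its drawing start point for the given rotation and mirror mode, so that
 * the transformed image still lands inside the requested rectangle.
 */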
static struct rga_addr_offset *
rga_lookup_draw_pos(struct rga_corners_addr_offset *offsets,
		    u32 rotate_mode, u32 mirr_mode)
{
	static enum e_rga_start_pos rot_mir_point_matrix[4][4] = {
		{ LT, RT, LB, RB },
		{ RT, LT, RB, LB },
		{ RB, LB, RT, LT },
		{ LB, RB, LT, RT },
	};

	if (!offsets)
		return NULL;

	switch (rot_mir_point_matrix[rotate_mode][mirr_mode]) {
	case LT:
		return &offsets->left_top;
	case LB:
		return &offsets->left_bottom;
	case RT:
		return &offsets->right_top;
	case RB:
		return &offsets->right_bottom;
	}

	return NULL;
}

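/*
 * The rga_cmd_set_*_addr() helpers point the RGA MMU at the page table built
 * for each buffer (the base registers appear to take the physical address in
 * 16-byte units, hence the shift by 4) and enable the corresponding MMU
 * channel in RGA_MMU_CTRL1.
 */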
static void rga_cmd_set_src_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_SRC_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7;
}

static void rga_cmd_set_src1_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_SRC1_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7 << 4;
}

static void rga_cmd_set_dst_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_DST_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7 << 8;
}

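/*
 * Fill in the transform part of the command buffer: source/destination
 * formats, colorspace conversion, mirror/rotation mode, scaling factors,
 * virtual/active sizes and the plane base offsets of both frames.
 */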
static void rga_cmd_set_trans_info(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int scale_dst_w, scale_dst_h;
	unsigned int src_h, src_w, src_x, src_y, dst_h, dst_w, dst_x, dst_y;
	union rga_src_info src_info;
	union rga_dst_info dst_info;
	union rga_src_x_factor x_factor;
	union rga_src_y_factor y_factor;
	union rga_src_vir_info src_vir_info;
	union rga_src_act_info src_act_info;
	union rga_dst_vir_info dst_vir_info;
	union rga_dst_act_info dst_act_info;

	struct rga_addr_offset *dst_offset;
	struct rga_corners_addr_offset offsets;
	struct rga_corners_addr_offset src_offsets;

	src_h = ctx->in.crop.height;
	src_w = ctx->in.crop.width;
	src_x = ctx->in.crop.left;
	src_y = ctx->in.crop.top;
	dst_h = ctx->out.crop.height;
	dst_w = ctx->out.crop.width;
	dst_x = ctx->out.crop.left;
	dst_y = ctx->out.crop.top;

	src_info.val = dest[(RGA_SRC_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_info.val = dest[(RGA_DST_INFO - RGA_MODE_BASE_REG) >> 2];
	x_factor.val = dest[(RGA_SRC_X_FACTOR - RGA_MODE_BASE_REG) >> 2];
	y_factor.val = dest[(RGA_SRC_Y_FACTOR - RGA_MODE_BASE_REG) >> 2];
	src_vir_info.val = dest[(RGA_SRC_VIR_INFO - RGA_MODE_BASE_REG) >> 2];
	src_act_info.val = dest[(RGA_SRC_ACT_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_vir_info.val = dest[(RGA_DST_VIR_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_act_info.val = dest[(RGA_DST_ACT_INFO - RGA_MODE_BASE_REG) >> 2];

	src_info.data.format = ctx->in.fmt->hw_format;
	src_info.data.swap = ctx->in.fmt->color_swap;
	dst_info.data.format = ctx->out.fmt->hw_format;
	dst_info.data.swap = ctx->out.fmt->color_swap;

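	/*
	 * When reading YUV and writing RGB, enable YUV-to-RGB conversion on
	 * the source and pick the CSC matrix from the source colorspace.
	 */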
	if (ctx->in.fmt->hw_format >= RGA_COLOR_FMT_YUV422SP) {
		if (ctx->out.fmt->hw_format < RGA_COLOR_FMT_YUV422SP) {
			switch (ctx->in.colorspace) {
			case V4L2_COLORSPACE_REC709:
				src_info.data.csc_mode =
					RGA_SRC_CSC_MODE_BT709_R0;
				break;
			default:
				src_info.data.csc_mode =
					RGA_SRC_CSC_MODE_BT601_R0;
				break;
			}
		}
	}

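	/*
	 * When the destination is YUV, enable CSC on the destination and
	 * pick the matrix from the destination colorspace.
	 */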
	if (ctx->out.fmt->hw_format >= RGA_COLOR_FMT_YUV422SP) {
		switch (ctx->out.colorspace) {
		case V4L2_COLORSPACE_REC709:
			dst_info.data.csc_mode = RGA_SRC_CSC_MODE_BT709_R0;
			break;
		default:
			dst_info.data.csc_mode = RGA_DST_CSC_MODE_BT601_R0;
			break;
		}
	}

	if (ctx->vflip)
		src_info.data.mir_mode |= RGA_SRC_MIRR_MODE_X;

	if (ctx->hflip)
		src_info.data.mir_mode |= RGA_SRC_MIRR_MODE_Y;

	switch (ctx->rotate) {
	case 90:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_90_DEGREE;
		break;
	case 180:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_180_DEGREE;
		break;
	case 270:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_270_DEGREE;
		break;
	default:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_0_DEGREE;
		break;
	}

	/*
	 * Calculate the up/down scaling mode/factor.
	 *
	 * The hardware scales the picture first and rotates it second, so
	 * the width/height must be swapped when the rotation is 90 or 270
	 * degrees.
	 */
	if (src_info.data.rot_mode == RGA_SRC_ROT_MODE_90_DEGREE ||
	    src_info.data.rot_mode == RGA_SRC_ROT_MODE_270_DEGREE) {
		if (rga->version.major == 0 || rga->version.minor == 0) {
			if (dst_w == src_h)
				src_h -= 8;
			if (abs(src_w - dst_h) < 16)
				src_w -= 16;
		}

		scale_dst_h = dst_w;
		scale_dst_w = dst_h;
	} else {
		scale_dst_w = dst_w;
		scale_dst_h = dst_h;
	}

	if (src_w == scale_dst_w) {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_NO;
		x_factor.val = 0;
	} else if (src_w > scale_dst_w) {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_DOWN;
		x_factor.data.down_scale_factor =
			rga_get_scaling(src_w, scale_dst_w) + 1;
	} else {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_UP;
		x_factor.data.up_scale_factor =
			rga_get_scaling(src_w - 1, scale_dst_w - 1);
	}

	if (src_h == scale_dst_h) {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_NO;
		y_factor.val = 0;
	} else if (src_h > scale_dst_h) {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_DOWN;
		y_factor.data.down_scale_factor =
			rga_get_scaling(src_h, scale_dst_h) + 1;
	} else {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_UP;
		y_factor.data.up_scale_factor =
			rga_get_scaling(src_h - 1, scale_dst_h - 1);
	}

	/*
	 * Calculate the framebuffer virtual strides and active size.
	 * Note that vir_stride / vir_width are expressed in 32-bit words.
	 */
	src_vir_info.data.vir_stride = ctx->in.stride >> 2;
	src_vir_info.data.vir_width = ctx->in.stride >> 2;

	src_act_info.data.act_height = src_h - 1;
	src_act_info.data.act_width = src_w - 1;

	dst_vir_info.data.vir_stride = ctx->out.stride >> 2;
	dst_act_info.data.act_height = dst_h - 1;
	dst_act_info.data.act_width = dst_w - 1;

	/*
	 * Calculate the source framebuffer base address offsets for the
	 * cropped rectangle.
	 */
	src_offsets = rga_get_addr_offset(&ctx->in, src_x, src_y,
					  src_w, src_h);

	/*
	 * Calculate the destination framebuffer base address offsets and
	 * select the corner to start drawing from, based on the
	 * rotation/mirror mode.
	 */
	offsets = rga_get_addr_offset(&ctx->out, dst_x, dst_y, dst_w, dst_h);
	dst_offset = rga_lookup_draw_pos(&offsets, src_info.data.rot_mode,
					 src_info.data.mir_mode);

	dest[(RGA_SRC_Y_RGB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.y_off;
	dest[(RGA_SRC_CB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.u_off;
	dest[(RGA_SRC_CR_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.v_off;

	dest[(RGA_SRC_X_FACTOR - RGA_MODE_BASE_REG) >> 2] = x_factor.val;
	dest[(RGA_SRC_Y_FACTOR - RGA_MODE_BASE_REG) >> 2] = y_factor.val;
	dest[(RGA_SRC_VIR_INFO - RGA_MODE_BASE_REG) >> 2] = src_vir_info.val;
	dest[(RGA_SRC_ACT_INFO - RGA_MODE_BASE_REG) >> 2] = src_act_info.val;

	dest[(RGA_SRC_INFO - RGA_MODE_BASE_REG) >> 2] = src_info.val;

	dest[(RGA_DST_Y_RGB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->y_off;
	dest[(RGA_DST_CB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->u_off;
	dest[(RGA_DST_CR_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->v_off;

	dest[(RGA_DST_VIR_INFO - RGA_MODE_BASE_REG) >> 2] = dst_vir_info.val;
	dest[(RGA_DST_ACT_INFO - RGA_MODE_BASE_REG) >> 2] = dst_act_info.val;

	dest[(RGA_DST_INFO - RGA_MODE_BASE_REG) >> 2] = dst_info.val;
}

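/*
 * Program a plain bitblt (copy) from source to destination, with alpha
 * blending disabled.
 */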
static void rga_cmd_set_mode(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	union rga_mode_ctrl mode;
	union rga_alpha_ctrl0 alpha_ctrl0;
	union rga_alpha_ctrl1 alpha_ctrl1;

	mode.val = 0;
	alpha_ctrl0.val = 0;
	alpha_ctrl1.val = 0;

	mode.data.gradient_sat = 1;
	mode.data.render = RGA_MODE_RENDER_BITBLT;
	mode.data.bitblt = RGA_MODE_BITBLT_MODE_SRC_TO_DST;

	/* disable alpha blending */
	dest[(RGA_ALPHA_CTRL0 - RGA_MODE_BASE_REG) >> 2] = alpha_ctrl0.val;
	dest[(RGA_ALPHA_CTRL1 - RGA_MODE_BASE_REG) >> 2] = alpha_ctrl1.val;

	dest[(RGA_MODE_CTRL - RGA_MODE_BASE_REG) >> 2] = mode.val;
}

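/*
 * Build the complete command buffer for one job and hand its physical
 * address to the hardware.
 */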
static void rga_cmd_set(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;

	memset(rga->cmdbuf_virt, 0, RGA_CMDBUF_SIZE * 4);

	rga_cmd_set_src_addr(ctx, rga->src_mmu_pages);
	/*
	 * Due to a hardware bug, the src1 MMU must also be configured
	 * when alpha blending is used.
	 */
	rga_cmd_set_src1_addr(ctx, rga->dst_mmu_pages);

	rga_cmd_set_dst_addr(ctx, rga->dst_mmu_pages);
	rga_cmd_set_mode(ctx);

	rga_cmd_set_trans_info(ctx);

	rga_write(rga, RGA_CMD_BASE, rga->cmdbuf_phy);

	/* sync CMD buf for RGA */
	dma_sync_single_for_device(rga->dev, rga->cmdbuf_phy,
				   PAGE_SIZE, DMA_BIDIRECTIONAL);
}

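/*
 * Build the command buffer for the current context, program the system
 * control and interrupt registers, and kick off the operation by writing
 * RGA_CMD_CTRL.
 */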
void rga_hw_start(struct rockchip_rga *rga)
{
	struct rga_ctx *ctx = rga->curr;

	rga_cmd_set(ctx);

	rga_write(rga, RGA_SYS_CTRL, 0x00);

	rga_write(rga, RGA_SYS_CTRL, 0x22);

	rga_write(rga, RGA_INT, 0x600);

	rga_write(rga, RGA_CMD_CTRL, 0x1);
}