// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) Fuzhou Rockchip Electronics Co.Ltd
 * Author: Jacob Chen <jacob-chen@iotwrt.com>
 */

#include <linux/pm_runtime.h>

#include "rga-hw.h"
#include "rga.h"

enum e_rga_start_pos {
	LT = 0,
	LB = 1,
	RT = 2,
	RB = 3,
};

struct rga_addr_offset {
	unsigned int y_off;
	unsigned int u_off;
	unsigned int v_off;
};

struct rga_corners_addr_offset {
	struct rga_addr_offset left_top;
	struct rga_addr_offset right_top;
	struct rga_addr_offset left_bottom;
	struct rga_addr_offset right_bottom;
};

static unsigned int rga_get_scaling(unsigned int src, unsigned int dst)
{
	/*
	 * The RGA hardware scaling factor is a normalized inverse of the
	 * scaling factor.
	 * For example: When source width is 100 and destination width is 200
	 * (scaling of 2x), then the hw factor is NC * 100 / 200.
	 * The normalization factor (NC) is 2^16 = 0x10000.
	 */

	return (src > dst) ? ((dst << 16) / src) : ((src << 16) / dst);
}

static struct rga_corners_addr_offset
rga_get_addr_offset(struct rga_frame *frm, unsigned int x, unsigned int y,
		    unsigned int w, unsigned int h)
{
	struct rga_corners_addr_offset offsets;
	struct rga_addr_offset *lt, *lb, *rt, *rb;
	unsigned int x_div = 0,
		     y_div = 0, uv_stride = 0, pixel_width = 0, uv_factor = 0;

	lt = &offsets.left_top;
	lb = &offsets.left_bottom;
	rt = &offsets.right_top;
	rb = &offsets.right_bottom;

	x_div = frm->fmt->x_div;
	y_div = frm->fmt->y_div;
	uv_factor = frm->fmt->uv_factor;
	uv_stride = frm->stride / x_div;
	pixel_width = frm->stride / frm->width;

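	/*
	 * Offsets of the four crop corners within each plane. The chroma
	 * planes are assumed to follow the luma plane contiguously: the U
	 * plane starts width * height bytes in, and the V plane a further
	 * width * height / uv_factor bytes after that, with x_div and
	 * y_div giving the chroma subsampling of the format.
	 */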
	lt->y_off = y * frm->stride + x * pixel_width;
	lt->u_off =
		frm->width * frm->height + (y / y_div) * uv_stride + x / x_div;
	lt->v_off = lt->u_off + frm->width * frm->height / uv_factor;

	lb->y_off = lt->y_off + (h - 1) * frm->stride;
	lb->u_off = lt->u_off + (h / y_div - 1) * uv_stride;
	lb->v_off = lt->v_off + (h / y_div - 1) * uv_stride;

	rt->y_off = lt->y_off + (w - 1) * pixel_width;
	rt->u_off = lt->u_off + w / x_div - 1;
	rt->v_off = lt->v_off + w / x_div - 1;

	rb->y_off = lb->y_off + (w - 1) * pixel_width;
	rb->u_off = lb->u_off + w / x_div - 1;
	rb->v_off = lb->v_off + w / x_div - 1;

	return offsets;
}

static struct rga_addr_offset *
rga_lookup_draw_pos(struct rga_corners_addr_offset *offsets,
		    u32 rotate_mode, u32 mirr_mode)
{
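	/*
	 * Select which corner of the destination rectangle must be used as
	 * the base address: depending on the rotation and mirror modes the
	 * hardware starts drawing at a different corner. Rows are indexed
	 * by the rotation mode (0/90/180/270 degrees), columns by the
	 * mirror mode (none, X, Y, X+Y).
	 */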
	static enum e_rga_start_pos rot_mir_point_matrix[4][4] = {
		{
			LT, RT, LB, RB,
		},
		{
			RT, LT, RB, LB,
		},
		{
			RB, LB, RT, LT,
		},
		{
			LB, RB, LT, RT,
		},
	};

	if (!offsets)
		return NULL;

	switch (rot_mir_point_matrix[rotate_mode][mirr_mode]) {
	case LT:
		return &offsets->left_top;
	case LB:
		return &offsets->left_bottom;
	case RT:
		return &offsets->right_top;
	case RB:
		return &offsets->right_bottom;
	}

	return NULL;
}

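/*
 * The following helpers point the RGA MMU at the page tables built for
 * each channel: the table's physical address is written (shifted right by
 * four bits) to the channel's MMU base register, and the channel's control
 * bits in RGA_MMU_CTRL1 are set (bits [2:0] for src, [6:4] for src1,
 * [10:8] for dst).
 */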
static void rga_cmd_set_src_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_SRC_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7;
}

static void rga_cmd_set_src1_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_SRC1_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7 << 4;
}

static void rga_cmd_set_dst_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_DST_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7 << 8;
}

static void rga_cmd_set_trans_info(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int scale_dst_w, scale_dst_h;
	unsigned int src_h, src_w, src_x, src_y, dst_h, dst_w, dst_x, dst_y;
	union rga_src_info src_info;
	union rga_dst_info dst_info;
	union rga_src_x_factor x_factor;
	union rga_src_y_factor y_factor;
	union rga_src_vir_info src_vir_info;
	union rga_src_act_info src_act_info;
	union rga_dst_vir_info dst_vir_info;
	union rga_dst_act_info dst_act_info;

	struct rga_addr_offset *dst_offset;
	struct rga_corners_addr_offset offsets;
	struct rga_corners_addr_offset src_offsets;

	src_h = ctx->in.crop.height;
	src_w = ctx->in.crop.width;
	src_x = ctx->in.crop.left;
	src_y = ctx->in.crop.top;
	dst_h = ctx->out.crop.height;
	dst_w = ctx->out.crop.width;
	dst_x = ctx->out.crop.left;
	dst_y = ctx->out.crop.top;

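	/*
	 * Start from the register values already staged in the command
	 * buffer so that only the fields updated below are changed.
	 */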
	src_info.val = dest[(RGA_SRC_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_info.val = dest[(RGA_DST_INFO - RGA_MODE_BASE_REG) >> 2];
	x_factor.val = dest[(RGA_SRC_X_FACTOR - RGA_MODE_BASE_REG) >> 2];
	y_factor.val = dest[(RGA_SRC_Y_FACTOR - RGA_MODE_BASE_REG) >> 2];
	src_vir_info.val = dest[(RGA_SRC_VIR_INFO - RGA_MODE_BASE_REG) >> 2];
	src_act_info.val = dest[(RGA_SRC_ACT_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_vir_info.val = dest[(RGA_DST_VIR_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_act_info.val = dest[(RGA_DST_ACT_INFO - RGA_MODE_BASE_REG) >> 2];

	src_info.data.format = ctx->in.fmt->hw_format;
	src_info.data.swap = ctx->in.fmt->color_swap;
	dst_info.data.format = ctx->out.fmt->hw_format;
	dst_info.data.swap = ctx->out.fmt->color_swap;

	/*
	 * CSC mode must only be set when the colorspace families differ between
	 * input and output. It must remain unset (zeroed) if both are the same.
	 */

	if (RGA_COLOR_FMT_IS_YUV(ctx->in.fmt->hw_format) &&
	    RGA_COLOR_FMT_IS_RGB(ctx->out.fmt->hw_format)) {
		switch (ctx->in.colorspace) {
		case V4L2_COLORSPACE_REC709:
			src_info.data.csc_mode = RGA_SRC_CSC_MODE_BT709_R0;
			break;
		default:
			src_info.data.csc_mode = RGA_SRC_CSC_MODE_BT601_R0;
			break;
		}
	}

	if (RGA_COLOR_FMT_IS_RGB(ctx->in.fmt->hw_format) &&
	    RGA_COLOR_FMT_IS_YUV(ctx->out.fmt->hw_format)) {
		switch (ctx->out.colorspace) {
		case V4L2_COLORSPACE_REC709:
			dst_info.data.csc_mode = RGA_SRC_CSC_MODE_BT709_R0;
			break;
		default:
			dst_info.data.csc_mode = RGA_DST_CSC_MODE_BT601_R0;
			break;
		}
	}

	if (ctx->vflip)
		src_info.data.mir_mode |= RGA_SRC_MIRR_MODE_X;

	if (ctx->hflip)
		src_info.data.mir_mode |= RGA_SRC_MIRR_MODE_Y;

	switch (ctx->rotate) {
	case 90:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_90_DEGREE;
		break;
	case 180:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_180_DEGREE;
		break;
	case 270:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_270_DEGREE;
		break;
	default:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_0_DEGREE;
		break;
	}

	/*
	 * Calculate the up/down scaling mode/factor.
	 *
	 * The RGA hardware scales the picture first and rotates it
	 * afterwards, so the width/height must be swapped when the rotation
	 * is 90 or 270 degrees.
	 */
	if (src_info.data.rot_mode == RGA_SRC_ROT_MODE_90_DEGREE ||
	    src_info.data.rot_mode == RGA_SRC_ROT_MODE_270_DEGREE) {
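		/*
		 * Workaround for early hardware revisions: the rotated
		 * scaling path appears to misbehave when the source size
		 * (almost) matches the swapped destination size, so shrink
		 * the source dimensions slightly in that case.
		 */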
		if (rga->version.major == 0 || rga->version.minor == 0) {
			if (dst_w == src_h)
				src_h -= 8;
			if (abs(src_w - dst_h) < 16)
				src_w -= 16;
		}

		scale_dst_h = dst_w;
		scale_dst_w = dst_h;
	} else {
		scale_dst_w = dst_w;
		scale_dst_h = dst_h;
	}

	if (src_w == scale_dst_w) {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_NO;
		x_factor.val = 0;
	} else if (src_w > scale_dst_w) {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_DOWN;
		x_factor.data.down_scale_factor =
			rga_get_scaling(src_w, scale_dst_w) + 1;
	} else {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_UP;
		x_factor.data.up_scale_factor =
			rga_get_scaling(src_w - 1, scale_dst_w - 1);
	}

	if (src_h == scale_dst_h) {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_NO;
		y_factor.val = 0;
	} else if (src_h > scale_dst_h) {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_DOWN;
		y_factor.data.down_scale_factor =
			rga_get_scaling(src_h, scale_dst_h) + 1;
	} else {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_UP;
		y_factor.data.up_scale_factor =
			rga_get_scaling(src_h - 1, scale_dst_h - 1);
	}

	/*
	 * Calculate the framebuffer virtual strides and active size;
	 * note that vir_stride and vir_width are expressed in 4-byte words.
	 */
	src_vir_info.data.vir_stride = ctx->in.stride >> 2;
	src_vir_info.data.vir_width = ctx->in.stride >> 2;

	src_act_info.data.act_height = src_h - 1;
	src_act_info.data.act_width = src_w - 1;

	dst_vir_info.data.vir_stride = ctx->out.stride >> 2;
	dst_act_info.data.act_height = dst_h - 1;
	dst_act_info.data.act_width = dst_w - 1;

	/*
	 * Calculate the source framebuffer base address offsets for the
	 * crop position.
	 */
	src_offsets = rga_get_addr_offset(&ctx->in, src_x, src_y,
					  src_w, src_h);

	/*
	 * Calculate the destination framebuffer offsets and pick the
	 * corner that must be used as the base address for the current
	 * rotation/mirror mode.
	 */
	offsets = rga_get_addr_offset(&ctx->out, dst_x, dst_y, dst_w, dst_h);
	dst_offset = rga_lookup_draw_pos(&offsets, src_info.data.rot_mode,
					 src_info.data.mir_mode);

	dest[(RGA_SRC_Y_RGB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.y_off;
	dest[(RGA_SRC_CB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.u_off;
	dest[(RGA_SRC_CR_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.v_off;

	dest[(RGA_SRC_X_FACTOR - RGA_MODE_BASE_REG) >> 2] = x_factor.val;
	dest[(RGA_SRC_Y_FACTOR - RGA_MODE_BASE_REG) >> 2] = y_factor.val;
	dest[(RGA_SRC_VIR_INFO - RGA_MODE_BASE_REG) >> 2] = src_vir_info.val;
	dest[(RGA_SRC_ACT_INFO - RGA_MODE_BASE_REG) >> 2] = src_act_info.val;

	dest[(RGA_SRC_INFO - RGA_MODE_BASE_REG) >> 2] = src_info.val;

	dest[(RGA_DST_Y_RGB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->y_off;
	dest[(RGA_DST_CB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->u_off;
	dest[(RGA_DST_CR_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->v_off;

	dest[(RGA_DST_VIR_INFO - RGA_MODE_BASE_REG) >> 2] = dst_vir_info.val;
	dest[(RGA_DST_ACT_INFO - RGA_MODE_BASE_REG) >> 2] = dst_act_info.val;

	dest[(RGA_DST_INFO - RGA_MODE_BASE_REG) >> 2] = dst_info.val;
}

static void rga_cmd_set_mode(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	union rga_mode_ctrl mode;
	union rga_alpha_ctrl0 alpha_ctrl0;
	union rga_alpha_ctrl1 alpha_ctrl1;

	mode.val = 0;
	alpha_ctrl0.val = 0;
	alpha_ctrl1.val = 0;

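	/* Plain source-to-destination bitblt; no blending or color fill. */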
	mode.data.gradient_sat = 1;
	mode.data.render = RGA_MODE_RENDER_BITBLT;
	mode.data.bitblt = RGA_MODE_BITBLT_MODE_SRC_TO_DST;

	/* disable alpha blending */
	dest[(RGA_ALPHA_CTRL0 - RGA_MODE_BASE_REG) >> 2] = alpha_ctrl0.val;
	dest[(RGA_ALPHA_CTRL1 - RGA_MODE_BASE_REG) >> 2] = alpha_ctrl1.val;

	dest[(RGA_MODE_CTRL - RGA_MODE_BASE_REG) >> 2] = mode.val;
}

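/*
 * Build the complete command buffer for the current job: MMU tables for
 * all channels, the operation mode and the transform parameters, then
 * hand the buffer's physical address to the hardware.
 */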
static void rga_cmd_set(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;

	memset(rga->cmdbuf_virt, 0, RGA_CMDBUF_SIZE * 4);

	rga_cmd_set_src_addr(ctx, rga->src_mmu_pages);
	/*
	 * Due to a hardware bug, the src1 MMU must also be configured
	 * when alpha blending is used.
	 */
	rga_cmd_set_src1_addr(ctx, rga->dst_mmu_pages);

	rga_cmd_set_dst_addr(ctx, rga->dst_mmu_pages);
	rga_cmd_set_mode(ctx);

	rga_cmd_set_trans_info(ctx);

	rga_write(rga, RGA_CMD_BASE, rga->cmdbuf_phy);

	/* sync CMD buf for RGA */
	dma_sync_single_for_device(rga->dev, rga->cmdbuf_phy,
		PAGE_SIZE, DMA_BIDIRECTIONAL);
}

void rga_hw_start(struct rockchip_rga *rga)
{
	struct rga_ctx *ctx = rga->curr;

	rga_cmd_set(ctx);

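	/*
	 * Kick off the job: reset and reconfigure the system control
	 * register, set up the interrupt register, and start execution of
	 * the command buffer. The magic values follow the vendor register
	 * layout.
	 */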
	rga_write(rga, RGA_SYS_CTRL, 0x00);

	rga_write(rga, RGA_SYS_CTRL, 0x22);

	rga_write(rga, RGA_INT, 0x600);

	rga_write(rga, RGA_CMD_CTRL, 0x1);
}