// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) Fuzhou Rockchip Electronics Co.Ltd
 * Author: Jacob Chen <jacob-chen@iotwrt.com>
 */

#include <linux/pm_runtime.h>

#include "rga-hw.h"
#include "rga.h"

/*
 * Start-corner identifiers used to pick which corner of the destination
 * buffer the hardware begins drawing from, depending on the configured
 * rotation/mirror mode (see rga_lookup_draw_pos()).
 */
enum e_rga_start_pos {
	LT = 0,	/* left-top */
	LB = 1,	/* left-bottom */
	RT = 2,	/* right-top */
	RB = 3,	/* right-bottom */
};

/* Byte offsets of the Y, U and V planes for one corner of a frame. */
struct rga_addr_offset {
	unsigned int y_off;
	unsigned int u_off;
	unsigned int v_off;
};

/* Plane offsets for all four corners of a crop rectangle. */
struct rga_corners_addr_offset {
	struct rga_addr_offset left_top;
	struct rga_addr_offset right_top;
	struct rga_addr_offset left_bottom;
	struct rga_addr_offset right_bottom;
};

/*
 * Compute the 16.16 fixed-point hardware scaling factor for one axis.
 * Works for both up- and down-scaling: the smaller value is always the
 * numerator, so the result stays below 0x10000.
 */
static unsigned int rga_get_scaling(unsigned int src, unsigned int dst)
{
	/*
	 * The rga hw scaling factor is a normalized inverse of the
	 * scaling factor.
	 * For example: When source width is 100 and destination width is 200
	 * (scaling of 2x), then the hw factor is NC * 100 / 200.
	 * The normalization factor (NC) is 2^16 = 0x10000.
	 */

	return (src > dst) ? ((dst << 16) / src) : ((src << 16) / dst);
}

/*
 * Compute the Y/U/V plane byte offsets of the four corners of the crop
 * rectangle (x, y, w, h) inside frame @frm.
 *
 * Layout assumptions visible from the math: planar/semi-planar frames with
 * the U plane starting at width * height bytes and the V plane a further
 * width * height / uv_factor bytes in; x_div/y_div are the chroma
 * subsampling divisors taken from the format descriptor.
 *
 * NOTE(review): pixel_width is derived as stride / width, i.e. assumes the
 * stride is an exact multiple of the width — TODO confirm the format setup
 * guarantees this.
 */
static struct rga_corners_addr_offset
rga_get_addr_offset(struct rga_frame *frm, unsigned int x, unsigned int y,
		    unsigned int w, unsigned int h)
{
	struct rga_corners_addr_offset offsets;
	struct rga_addr_offset *lt, *lb, *rt, *rb;
	unsigned int x_div = 0,
		     y_div = 0, uv_stride = 0, pixel_width = 0, uv_factor = 0;

	lt = &offsets.left_top;
	lb = &offsets.left_bottom;
	rt = &offsets.right_top;
	rb = &offsets.right_bottom;

	x_div = frm->fmt->x_div;
	y_div = frm->fmt->y_div;
	uv_factor = frm->fmt->uv_factor;
	uv_stride = frm->stride / x_div;
	pixel_width = frm->stride / frm->width;

	/* Left-top corner: the base offset all other corners derive from. */
	lt->y_off = y * frm->stride + x * pixel_width;
	lt->u_off =
		frm->width * frm->height + (y / y_div) * uv_stride + x / x_div;
	lt->v_off = lt->u_off + frm->width * frm->height / uv_factor;

	/* Left-bottom: left-top moved down by (h - 1) lines. */
	lb->y_off = lt->y_off + (h - 1) * frm->stride;
	lb->u_off = lt->u_off + (h / y_div - 1) * uv_stride;
	lb->v_off = lt->v_off + (h / y_div - 1) * uv_stride;

	/* Right-top: left-top moved right by (w - 1) pixels. */
	rt->y_off = lt->y_off + (w - 1) * pixel_width;
	rt->u_off = lt->u_off + w / x_div - 1;
	rt->v_off = lt->v_off + w / x_div - 1;

	/* Right-bottom: left-bottom moved right by (w - 1) pixels. */
	rb->y_off = lb->y_off + (w - 1) * pixel_width;
	rb->u_off = lb->u_off + w / x_div - 1;
	rb->v_off = lb->v_off + w / x_div - 1;

	return offsets;
}

/*
 * Select which corner of @offsets the hardware must start writing at for
 * the given rotation and mirror configuration, or NULL when @offsets is
 * NULL (no entry of the lookup table itself maps outside the enum, so the
 * trailing return NULL is only reachable defensively).
 *
 * NOTE(review): rotate_mode and mirr_mode are used unchecked as indices
 * into a 4x4 table — callers must pass values in [0, 3]; the only caller
 * here derives them from 2-bit hardware fields, which satisfies that.
 */
static struct rga_addr_offset *rga_lookup_draw_pos(struct
						   rga_corners_addr_offset
						   * offsets, u32 rotate_mode,
						   u32 mirr_mode)
{
	/* Rows: rotation mode 0/90/180/270; columns: mirror mode. */
	static enum e_rga_start_pos rot_mir_point_matrix[4][4] = {
		{
			LT, RT, LB, RB,
		},
		{
			RT, LT, RB, LB,
		},
		{
			RB, LB, RT, LT,
		},
		{
			LB, RB, LT, RT,
		},
	};

	if (!offsets)
		return NULL;

	switch (rot_mir_point_matrix[rotate_mode][mirr_mode]) {
	case LT:
		return &offsets->left_top;
	case LB:
		return &offsets->left_bottom;
	case RT:
		return &offsets->right_top;
	case RB:
		return &offsets->right_bottom;
	}

	return NULL;
}

/*
 * Point the source channel of the RGA MMU at @mmu_pages (a page table in
 * CPU memory) and enable the source MMU in the command buffer.
 * The hardware takes the physical page-table address shifted right by 4.
 */
static void rga_cmd_set_src_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_SRC_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7;
}

/*
 * Same as rga_cmd_set_src_addr() but for the src1 channel; its enable bits
 * live at bits [6:4] of RGA_MMU_CTRL1.
 */
static void rga_cmd_set_src1_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_SRC1_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7 << 4;
}

/*
 * Same as rga_cmd_set_src_addr() but for the destination channel; its
 * enable bits live at bits [10:8] of RGA_MMU_CTRL1.
 */
static void rga_cmd_set_dst_addr(struct rga_ctx *ctx, void *mmu_pages)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int reg;

	reg = RGA_MMU_DST_BASE - RGA_MODE_BASE_REG;
	dest[reg >> 2] = virt_to_phys(mmu_pages) >> 4;

	reg = RGA_MMU_CTRL1 - RGA_MODE_BASE_REG;
	dest[reg >> 2] |= 0x7 << 8;
}

/*
 * Fill the command buffer with the per-job transform configuration:
 * pixel formats, colorspace conversion, mirror/rotation, scaling factors,
 * virtual strides, active sizes and the plane base-address offsets for the
 * current source and destination crops.
 */
static void rga_cmd_set_trans_info(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	unsigned int scale_dst_w, scale_dst_h;
	unsigned int src_h, src_w, src_x, src_y, dst_h, dst_w, dst_x, dst_y;
	union rga_src_info src_info;
	union rga_dst_info dst_info;
	union rga_src_x_factor x_factor;
	union rga_src_y_factor y_factor;
	union rga_src_vir_info src_vir_info;
	union rga_src_act_info src_act_info;
	union rga_dst_vir_info dst_vir_info;
	union rga_dst_act_info dst_act_info;

	struct rga_addr_offset *dst_offset;
	struct rga_corners_addr_offset offsets;
	struct rga_corners_addr_offset src_offsets;

	src_h = ctx->in.crop.height;
	src_w = ctx->in.crop.width;
	src_x = ctx->in.crop.left;
	src_y = ctx->in.crop.top;
	dst_h = ctx->out.crop.height;
	dst_w = ctx->out.crop.width;
	dst_x = ctx->out.crop.left;
	dst_y = ctx->out.crop.top;

	/*
	 * Read-modify-write: start from whatever is already in the command
	 * buffer (zeroed by rga_cmd_set()) so fields set elsewhere survive.
	 */
	src_info.val = dest[(RGA_SRC_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_info.val = dest[(RGA_DST_INFO - RGA_MODE_BASE_REG) >> 2];
	x_factor.val = dest[(RGA_SRC_X_FACTOR - RGA_MODE_BASE_REG) >> 2];
	y_factor.val = dest[(RGA_SRC_Y_FACTOR - RGA_MODE_BASE_REG) >> 2];
	src_vir_info.val = dest[(RGA_SRC_VIR_INFO - RGA_MODE_BASE_REG) >> 2];
	src_act_info.val = dest[(RGA_SRC_ACT_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_vir_info.val = dest[(RGA_DST_VIR_INFO - RGA_MODE_BASE_REG) >> 2];
	dst_act_info.val = dest[(RGA_DST_ACT_INFO - RGA_MODE_BASE_REG) >> 2];

	src_info.data.format = ctx->in.fmt->hw_format;
	src_info.data.swap = ctx->in.fmt->color_swap;
	dst_info.data.format = ctx->out.fmt->hw_format;
	dst_info.data.swap = ctx->out.fmt->color_swap;

	/*
	 * YUV source converted to an RGB destination: enable CSC on the
	 * source path, choosing the matrix from the input colorspace.
	 */
	if (ctx->in.fmt->hw_format >= RGA_COLOR_FMT_YUV422SP) {
		if (ctx->out.fmt->hw_format < RGA_COLOR_FMT_YUV422SP) {
			switch (ctx->in.colorspace) {
			case V4L2_COLORSPACE_REC709:
				src_info.data.csc_mode =
					RGA_SRC_CSC_MODE_BT709_R0;
				break;
			default:
				src_info.data.csc_mode =
					RGA_SRC_CSC_MODE_BT601_R0;
				break;
			}
		}
	}

	/* YUV destination: enable CSC on the output path. */
	if (ctx->out.fmt->hw_format >= RGA_COLOR_FMT_YUV422SP) {
		switch (ctx->out.colorspace) {
		case V4L2_COLORSPACE_REC709:
			/*
			 * NOTE(review): uses the RGA_SRC_ CSC constant in a
			 * dst_info field while the default branch uses the
			 * RGA_DST_ one — verify against rga-hw.h whether the
			 * two BT709 encodings share the same value.
			 */
			dst_info.data.csc_mode = RGA_SRC_CSC_MODE_BT709_R0;
			break;
		default:
			dst_info.data.csc_mode = RGA_DST_CSC_MODE_BT601_R0;
			break;
		}
	}

	if (ctx->vflip)
		src_info.data.mir_mode |= RGA_SRC_MIRR_MODE_X;

	if (ctx->hflip)
		src_info.data.mir_mode |= RGA_SRC_MIRR_MODE_Y;

	switch (ctx->rotate) {
	case 90:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_90_DEGREE;
		break;
	case 180:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_180_DEGREE;
		break;
	case 270:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_270_DEGREE;
		break;
	default:
		src_info.data.rot_mode = RGA_SRC_ROT_MODE_0_DEGREE;
		break;
	}

	/*
	 * Calculate the up/down scaling mode/factor.
	 *
	 * RGA used to scale the picture first, and then rotate second,
	 * so we need to swap the w/h when rotate degree is 90/270.
	 */
	if (src_info.data.rot_mode == RGA_SRC_ROT_MODE_90_DEGREE ||
	    src_info.data.rot_mode == RGA_SRC_ROT_MODE_270_DEGREE) {
		/*
		 * NOTE(review): hardware-quirk workaround for old IP
		 * revisions; the `major == 0 || minor == 0` condition also
		 * matches any x.0 version — confirm against the intended
		 * silicon revisions. The 8/16 pixel shrink values look
		 * empirical; do not "simplify" without hardware testing.
		 */
		if (rga->version.major == 0 || rga->version.minor == 0) {
			if (dst_w == src_h)
				src_h -= 8;
			if (abs(src_w - dst_h) < 16)
				src_w -= 16;
		}

		scale_dst_h = dst_w;
		scale_dst_w = dst_h;
	} else {
		scale_dst_w = dst_w;
		scale_dst_h = dst_h;
	}

	/* Horizontal scaler: none / down (+1 rounding bias) / up. */
	if (src_w == scale_dst_w) {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_NO;
		x_factor.val = 0;
	} else if (src_w > scale_dst_w) {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_DOWN;
		x_factor.data.down_scale_factor =
			rga_get_scaling(src_w, scale_dst_w) + 1;
	} else {
		src_info.data.hscl_mode = RGA_SRC_HSCL_MODE_UP;
		x_factor.data.up_scale_factor =
			rga_get_scaling(src_w - 1, scale_dst_w - 1);
	}

	/* Vertical scaler: same scheme as horizontal. */
	if (src_h == scale_dst_h) {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_NO;
		y_factor.val = 0;
	} else if (src_h > scale_dst_h) {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_DOWN;
		y_factor.data.down_scale_factor =
			rga_get_scaling(src_h, scale_dst_h) + 1;
	} else {
		src_info.data.vscl_mode = RGA_SRC_VSCL_MODE_UP;
		y_factor.data.up_scale_factor =
			rga_get_scaling(src_h - 1, scale_dst_h - 1);
	}

	/*
	 * Calculate the framebuffer virtual strides and active size,
	 * note that the step of vir_stride / vir_width is 4 byte words
	 */
	src_vir_info.data.vir_stride = ctx->in.stride >> 2;
	src_vir_info.data.vir_width = ctx->in.stride >> 2;

	/* Active sizes are programmed as (size - 1). */
	src_act_info.data.act_height = src_h - 1;
	src_act_info.data.act_width = src_w - 1;

	dst_vir_info.data.vir_stride = ctx->out.stride >> 2;
	dst_act_info.data.act_height = dst_h - 1;
	dst_act_info.data.act_width = dst_w - 1;

	/*
	 * Calculate the source framebuffer base address with offset pixel.
	 */
	src_offsets = rga_get_addr_offset(&ctx->in, src_x, src_y,
					  src_w, src_h);

	/*
	 * Configure the dest framebuffer base address with pixel offset.
	 * The start corner depends on the rotate/mirror configuration.
	 */
	offsets = rga_get_addr_offset(&ctx->out, dst_x, dst_y, dst_w, dst_h);
	dst_offset = rga_lookup_draw_pos(&offsets, src_info.data.rot_mode,
					 src_info.data.mir_mode);

	dest[(RGA_SRC_Y_RGB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.y_off;
	dest[(RGA_SRC_CB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.u_off;
	dest[(RGA_SRC_CR_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		src_offsets.left_top.v_off;

	dest[(RGA_SRC_X_FACTOR - RGA_MODE_BASE_REG) >> 2] = x_factor.val;
	dest[(RGA_SRC_Y_FACTOR - RGA_MODE_BASE_REG) >> 2] = y_factor.val;
	dest[(RGA_SRC_VIR_INFO - RGA_MODE_BASE_REG) >> 2] = src_vir_info.val;
	dest[(RGA_SRC_ACT_INFO - RGA_MODE_BASE_REG) >> 2] = src_act_info.val;

	dest[(RGA_SRC_INFO - RGA_MODE_BASE_REG) >> 2] = src_info.val;

	/*
	 * NOTE(review): dst_offset is dereferenced without a NULL check;
	 * rga_lookup_draw_pos() only returns NULL for a NULL argument or an
	 * out-of-enum table entry, neither of which can happen here.
	 */
	dest[(RGA_DST_Y_RGB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->y_off;
	dest[(RGA_DST_CB_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->u_off;
	dest[(RGA_DST_CR_BASE_ADDR - RGA_MODE_BASE_REG) >> 2] =
		dst_offset->v_off;

	dest[(RGA_DST_VIR_INFO - RGA_MODE_BASE_REG) >> 2] = dst_vir_info.val;
	dest[(RGA_DST_ACT_INFO - RGA_MODE_BASE_REG) >> 2] = dst_act_info.val;

	dest[(RGA_DST_INFO - RGA_MODE_BASE_REG) >> 2] = dst_info.val;
}

/*
 * Program the static operating mode for this driver's only use case:
 * a plain source-to-destination bitblt with alpha blending disabled.
 */
static void rga_cmd_set_mode(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;
	u32 *dest = rga->cmdbuf_virt;
	union rga_mode_ctrl mode;
	union rga_alpha_ctrl0 alpha_ctrl0;
	union rga_alpha_ctrl1 alpha_ctrl1;

	mode.val = 0;
	alpha_ctrl0.val = 0;
	alpha_ctrl1.val = 0;

	mode.data.gradient_sat = 1;
	mode.data.render = RGA_MODE_RENDER_BITBLT;
	mode.data.bitblt = RGA_MODE_BITBLT_MODE_SRC_TO_DST;

	/* disable alpha blending */
	dest[(RGA_ALPHA_CTRL0 - RGA_MODE_BASE_REG) >> 2] = alpha_ctrl0.val;
	dest[(RGA_ALPHA_CTRL1 - RGA_MODE_BASE_REG) >> 2] = alpha_ctrl1.val;

	dest[(RGA_MODE_CTRL - RGA_MODE_BASE_REG) >> 2] = mode.val;
}

/*
 * Build the full command buffer for the current job and hand its physical
 * address to the hardware, syncing the buffer for device access.
 */
static void rga_cmd_set(struct rga_ctx *ctx)
{
	struct rockchip_rga *rga = ctx->rga;

	memset(rga->cmdbuf_virt, 0, RGA_CMDBUF_SIZE * 4);

	rga_cmd_set_src_addr(ctx, rga->src_mmu_pages);
	/*
	 * Due to hardware bug,
	 * src1 mmu also should be configured when using alpha blending.
	 */
	rga_cmd_set_src1_addr(ctx, rga->dst_mmu_pages);

	rga_cmd_set_dst_addr(ctx, rga->dst_mmu_pages);
	rga_cmd_set_mode(ctx);

	rga_cmd_set_trans_info(ctx);

	rga_write(rga, RGA_CMD_BASE, rga->cmdbuf_phy);

	/* sync CMD buf for RGA */
	dma_sync_single_for_device(rga->dev, rga->cmdbuf_phy,
				   PAGE_SIZE, DMA_BIDIRECTIONAL);
}

/*
 * Kick off the job currently selected in rga->curr: build the command
 * buffer, reset/configure the engine, unmask interrupts and start command
 * fetch.
 *
 * NOTE(review): the 0x00/0x22/0x600/0x1 register values are undocumented
 * magic here — they match the SYS_CTRL reset + cmd-mode setup, INT
 * enable bits and CMD_CTRL start bit per the RGA TRM; verify against the
 * register definitions in rga-hw.h before changing.
 */
void rga_hw_start(struct rockchip_rga *rga)
{
	struct rga_ctx *ctx = rga->curr;

	rga_cmd_set(ctx);

	rga_write(rga, RGA_SYS_CTRL, 0x00);

	rga_write(rga, RGA_SYS_CTRL, 0x22);

	rga_write(rga, RGA_INT, 0x600);

	rga_write(rga, RGA_CMD_CTRL, 0x1);
}