// SPDX-License-Identifier: GPL-2.0
/*
 * camss-vfe-4-8.c
 *
 * Qualcomm MSM Camera Subsystem - VFE (Video Front End) Module v4.8
 *
 * Copyright (c) 2013-2015, The Linux Foundation. All rights reserved.
 * Copyright (C) 2015-2021 Linaro Ltd.
 */

#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>

#include "camss.h"
#include "camss-vfe.h"
#include "camss-vfe-gen1.h"

#define VFE_0_HW_VERSION		0x000

#define VFE_0_GLOBAL_RESET_CMD		0x018
#define VFE_0_GLOBAL_RESET_CMD_CORE	BIT(0)
#define VFE_0_GLOBAL_RESET_CMD_CAMIF	BIT(1)
#define VFE_0_GLOBAL_RESET_CMD_BUS	BIT(2)
#define VFE_0_GLOBAL_RESET_CMD_BUS_BDG	BIT(3)
#define VFE_0_GLOBAL_RESET_CMD_REGISTER	BIT(4)
#define VFE_0_GLOBAL_RESET_CMD_PM	BIT(5)
#define VFE_0_GLOBAL_RESET_CMD_BUS_MISR	BIT(6)
#define VFE_0_GLOBAL_RESET_CMD_TESTGEN	BIT(7)
#define VFE_0_GLOBAL_RESET_CMD_DSP	BIT(8)
#define VFE_0_GLOBAL_RESET_CMD_IDLE_CGC	BIT(9)

#define VFE_0_MODULE_LENS_EN		0x040
#define VFE_0_MODULE_LENS_EN_DEMUX		BIT(2)
#define VFE_0_MODULE_LENS_EN_CHROMA_UPSAMPLE	BIT(3)

#define VFE_0_MODULE_ZOOM_EN		0x04c
#define VFE_0_MODULE_ZOOM_EN_SCALE_ENC		BIT(1)
#define VFE_0_MODULE_ZOOM_EN_CROP_ENC		BIT(2)
#define VFE_0_MODULE_ZOOM_EN_REALIGN_BUF	BIT(9)

#define VFE_0_CORE_CFG			0x050
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR	0x4
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB	0x5
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY	0x6
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY	0x7
#define VFE_0_CORE_CFG_COMPOSITE_REG_UPDATE_EN	BIT(4)

#define VFE_0_IRQ_CMD			0x058
#define VFE_0_IRQ_CMD_GLOBAL_CLEAR	BIT(0)

#define VFE_0_IRQ_MASK_0		0x05c
#define VFE_0_IRQ_MASK_0_CAMIF_SOF	BIT(0)
#define VFE_0_IRQ_MASK_0_CAMIF_EOF	BIT(1)
#define VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n)	BIT((n) + 5)
#define VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(n)	\
	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
#define VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
#define VFE_0_IRQ_MASK_0_RESET_ACK	BIT(31)
#define VFE_0_IRQ_MASK_1		0x060
#define VFE_0_IRQ_MASK_1_CAMIF_ERROR	BIT(0)
#define VFE_0_IRQ_MASK_1_VIOLATION	BIT(7)
#define VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK	BIT(8)
#define VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(n)	BIT((n) + 9)
#define VFE_0_IRQ_MASK_1_RDIn_SOF(n)	BIT((n) + 29)

#define VFE_0_IRQ_CLEAR_0		0x064
#define VFE_0_IRQ_CLEAR_1		0x068
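
/*
 * The IRQ status registers below mirror the bit layout of the corresponding
 * IRQ mask registers above.
 */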
#define VFE_0_IRQ_STATUS_0		0x06c
#define VFE_0_IRQ_STATUS_0_CAMIF_SOF	BIT(0)
#define VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n)	BIT((n) + 5)
#define VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(n)	\
	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
#define VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
#define VFE_0_IRQ_STATUS_0_RESET_ACK	BIT(31)
#define VFE_0_IRQ_STATUS_1		0x070
#define VFE_0_IRQ_STATUS_1_VIOLATION	BIT(7)
#define VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK	BIT(8)
#define VFE_0_IRQ_STATUS_1_RDIn_SOF(n)	BIT((n) + 29)

#define VFE_0_IRQ_COMPOSITE_MASK_0	0x074
#define VFE_0_VIOLATION_STATUS		0x07c

#define VFE_0_BUS_CMD			0x80
#define VFE_0_BUS_CMD_Mx_RLD_CMD(x)	BIT(x)

#define VFE_0_BUS_CFG			0x084

#define VFE_0_BUS_XBAR_CFG_x(x)		(0x90 + 0x4 * ((x) / 2))
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN			BIT(2)
#define VFE_0_BUS_XBAR_CFG_x_M_REALIGN_BUF_EN			BIT(3)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTRA		(0x1 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER		(0x2 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA	(0x3 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT		8
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA		0x0
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0	0xc
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1	0xd
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2	0xe

#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(n)		(0x0a0 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT	0
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(n)	(0x0a4 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(n)	(0x0ac + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(n)		(0x0b4 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_BASED_SHIFT	1
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT	2
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK	(0x1f << 2)
#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(n)		(0x0b8 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT	16
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(n)	(0x0bc + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(n)	(0x0c0 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(n)	\
	(0x0c4 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(n)	\
	(0x0c8 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF	0xffffffff

#define VFE_0_BUS_PING_PONG_STATUS	0x338

#define VFE_0_BUS_BDG_CMD		0x400
#define VFE_0_BUS_BDG_CMD_HALT_REQ	1

#define VFE_0_BUS_BDG_QOS_CFG_0		0x404
#define VFE_0_BUS_BDG_QOS_CFG_0_CFG	0xaaa5aaa5
#define VFE_0_BUS_BDG_QOS_CFG_1		0x408
#define VFE_0_BUS_BDG_QOS_CFG_2		0x40c
#define VFE_0_BUS_BDG_QOS_CFG_3		0x410
#define VFE_0_BUS_BDG_QOS_CFG_3_CFG	0xaa55aaa5
#define VFE_0_BUS_BDG_QOS_CFG_4		0x414
#define VFE_0_BUS_BDG_QOS_CFG_4_CFG	0xaa55aa55
#define VFE_0_BUS_BDG_QOS_CFG_5		0x418
#define VFE_0_BUS_BDG_QOS_CFG_6		0x41c
#define VFE_0_BUS_BDG_QOS_CFG_7		0x420
#define VFE_0_BUS_BDG_QOS_CFG_7_CFG	0x0005aa55

#define VFE_0_BUS_BDG_DS_CFG_0		0x424
#define VFE_0_BUS_BDG_DS_CFG_0_CFG	0xcccc1111
#define VFE_0_BUS_BDG_DS_CFG_1		0x428
#define VFE_0_BUS_BDG_DS_CFG_2		0x42c
#define VFE_0_BUS_BDG_DS_CFG_3		0x430
#define VFE_0_BUS_BDG_DS_CFG_4		0x434
#define VFE_0_BUS_BDG_DS_CFG_5		0x438
#define VFE_0_BUS_BDG_DS_CFG_6		0x43c
#define VFE_0_BUS_BDG_DS_CFG_7		0x440
#define VFE_0_BUS_BDG_DS_CFG_8		0x444
#define VFE_0_BUS_BDG_DS_CFG_9		0x448
#define VFE_0_BUS_BDG_DS_CFG_10		0x44c
#define VFE_0_BUS_BDG_DS_CFG_11		0x450
#define VFE_0_BUS_BDG_DS_CFG_12		0x454
#define VFE_0_BUS_BDG_DS_CFG_13		0x458
#define VFE_0_BUS_BDG_DS_CFG_14		0x45c
#define VFE_0_BUS_BDG_DS_CFG_15		0x460
#define VFE_0_BUS_BDG_DS_CFG_16		0x464
#define VFE_0_BUS_BDG_DS_CFG_16_CFG	0x00000110

#define VFE_0_RDI_CFG_x(x)		(0x46c + (0x4 * (x)))
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT	28
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK	(0xf << 28)
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT	4
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK		(0xf << 4)
#define VFE_0_RDI_CFG_x_RDI_EN_BIT		BIT(2)
#define VFE_0_RDI_CFG_x_MIPI_EN_BITS		0x3

#define VFE_0_CAMIF_CMD				0x478
#define VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY	0
#define VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY	1
#define VFE_0_CAMIF_CMD_NO_CHANGE		3
#define VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS	BIT(2)
#define VFE_0_CAMIF_CFG				0x47c
#define VFE_0_CAMIF_CFG_VFE_OUTPUT_EN		BIT(6)
#define VFE_0_CAMIF_FRAME_CFG			0x484
#define VFE_0_CAMIF_WINDOW_WIDTH_CFG		0x488
#define VFE_0_CAMIF_WINDOW_HEIGHT_CFG		0x48c
#define VFE_0_CAMIF_SUBSAMPLE_CFG		0x490
#define VFE_0_CAMIF_IRQ_FRAMEDROP_PATTERN	0x498
#define VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN	0x49c
#define VFE_0_CAMIF_STATUS			0x4a4
#define VFE_0_CAMIF_STATUS_HALT			BIT(31)

#define VFE_0_REG_UPDATE		0x4ac
#define VFE_0_REG_UPDATE_RDIn(n)	BIT(1 + (n))
#define VFE_0_REG_UPDATE_line_n(n)	\
	((n) == VFE_LINE_PIX ? 1 : VFE_0_REG_UPDATE_RDIn(n))

#define VFE_0_DEMUX_CFG				0x560
#define VFE_0_DEMUX_CFG_PERIOD			0x3
#define VFE_0_DEMUX_GAIN_0			0x564
#define VFE_0_DEMUX_GAIN_0_CH0_EVEN		(0x80 << 0)
#define VFE_0_DEMUX_GAIN_0_CH0_ODD		(0x80 << 16)
#define VFE_0_DEMUX_GAIN_1			0x568
#define VFE_0_DEMUX_GAIN_1_CH1			(0x80 << 0)
#define VFE_0_DEMUX_GAIN_1_CH2			(0x80 << 16)
#define VFE_0_DEMUX_EVEN_CFG			0x574
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV	0x9cac
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU	0xac9c
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY	0xc9ca
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY	0xcac9
#define VFE_0_DEMUX_ODD_CFG			0x578
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV	0x9cac
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU	0xac9c
#define VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY	0xc9ca
#define VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY	0xcac9

#define VFE_0_SCALE_ENC_Y_CFG			0x91c
#define VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE		0x920
#define VFE_0_SCALE_ENC_Y_H_PHASE		0x924
#define VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE		0x934
#define VFE_0_SCALE_ENC_Y_V_PHASE		0x938
#define VFE_0_SCALE_ENC_CBCR_CFG		0x948
#define VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE	0x94c
#define VFE_0_SCALE_ENC_CBCR_H_PHASE		0x950
#define VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE	0x960
#define VFE_0_SCALE_ENC_CBCR_V_PHASE		0x964

#define VFE_0_CROP_ENC_Y_WIDTH		0x974
#define VFE_0_CROP_ENC_Y_HEIGHT		0x978
#define VFE_0_CROP_ENC_CBCR_WIDTH	0x97c
#define VFE_0_CROP_ENC_CBCR_HEIGHT	0x980

#define VFE_0_CLAMP_ENC_MAX_CFG		0x984
#define VFE_0_CLAMP_ENC_MAX_CFG_CH0	(0xff << 0)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH1	(0xff << 8)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH2	(0xff << 16)
#define VFE_0_CLAMP_ENC_MIN_CFG		0x988
#define VFE_0_CLAMP_ENC_MIN_CFG_CH0	(0x0 << 0)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH1	(0x0 << 8)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH2	(0x0 << 16)

#define VFE_0_REALIGN_BUF_CFG		0xaac
#define VFE_0_REALIGN_BUF_CFG_CB_ODD_PIXEL	BIT(2)
#define VFE_0_REALIGN_BUF_CFG_CR_ODD_PIXEL	BIT(3)
#define VFE_0_REALIGN_BUF_CFG_HSUB_ENABLE	BIT(4)

#define VFE_0_BUS_IMAGE_MASTER_CMD		0xcec
#define VFE_0_BUS_IMAGE_MASTER_n_SHIFT(x)	(2 * (x))

#define CAMIF_TIMEOUT_SLEEP_US 1000
#define CAMIF_TIMEOUT_ALL_US 1000000

#define MSM_VFE_VFE0_UB_SIZE 2047
#define MSM_VFE_VFE0_UB_SIZE_RDI (MSM_VFE_VFE0_UB_SIZE / 3)
#define MSM_VFE_VFE1_UB_SIZE 1535
#define MSM_VFE_VFE1_UB_SIZE_RDI (MSM_VFE_VFE1_UB_SIZE / 3)

static u32 vfe_hw_version(struct vfe_device *vfe)
{
	u32 hw_version = readl_relaxed(vfe->base + VFE_0_HW_VERSION);

	dev_dbg(vfe->camss->dev, "VFE HW Version = 0x%08x\n", hw_version);

	return hw_version;
}

static inline void vfe_reg_clr(struct vfe_device *vfe, u32 reg, u32 clr_bits)
{
	u32 bits = readl_relaxed(vfe->base + reg);

	writel_relaxed(bits & ~clr_bits, vfe->base + reg);
}

static inline void vfe_reg_set(struct vfe_device *vfe, u32 reg, u32 set_bits)
{
	u32 bits = readl_relaxed(vfe->base + reg);

	writel_relaxed(bits | set_bits, vfe->base + reg);
}

static void vfe_global_reset(struct vfe_device *vfe)
{
	u32 reset_bits = VFE_0_GLOBAL_RESET_CMD_IDLE_CGC	|
			 VFE_0_GLOBAL_RESET_CMD_DSP		|
			 VFE_0_GLOBAL_RESET_CMD_TESTGEN		|
			 VFE_0_GLOBAL_RESET_CMD_BUS_MISR	|
			 VFE_0_GLOBAL_RESET_CMD_PM		|
			 VFE_0_GLOBAL_RESET_CMD_REGISTER	|
			 VFE_0_GLOBAL_RESET_CMD_BUS_BDG		|
			 VFE_0_GLOBAL_RESET_CMD_BUS		|
			 VFE_0_GLOBAL_RESET_CMD_CAMIF		|
			 VFE_0_GLOBAL_RESET_CMD_CORE;

	writel_relaxed(BIT(31), vfe->base + VFE_0_IRQ_MASK_0);

	/* Enforce barrier between IRQ mask setup and global reset */
	wmb();
	writel_relaxed(reset_bits, vfe->base + VFE_0_GLOBAL_RESET_CMD);
}

static void vfe_halt_request(struct vfe_device *vfe)
{
	writel_relaxed(VFE_0_BUS_BDG_CMD_HALT_REQ,
		       vfe->base + VFE_0_BUS_BDG_CMD);
}

static void vfe_halt_clear(struct vfe_device *vfe)
{
	writel_relaxed(0x0, vfe->base + VFE_0_BUS_BDG_CMD);
}

static void vfe_wm_frame_based(struct vfe_device *vfe, u8 wm, u8 enable)
{
	if (enable)
		vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm),
			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_BASED_SHIFT);
	else
		vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm),
			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_BASED_SHIFT);
}

#define CALC_WORD(width, M, N) (((width) * (M) + (N) - 1) / (N))

static int vfe_word_per_line_by_pixel(u32 format, u32 pixel_per_line)
{
	int val = 0;

	switch (format) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		val = CALC_WORD(pixel_per_line, 1, 8);
		break;
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
	case V4L2_PIX_FMT_UYVY:
	case V4L2_PIX_FMT_VYUY:
		val = CALC_WORD(pixel_per_line, 2, 8);
		break;
	}

	return val;
}

static int vfe_word_per_line_by_bytes(u32 bytes_per_line)
{
	return CALC_WORD(bytes_per_line, 1, 8);
}

static void vfe_get_wm_sizes(struct v4l2_pix_format_mplane *pix, u8 plane,
			     u16 *width, u16 *height, u16 *bytesperline)
{
	switch (pix->pixelformat) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
		*width = pix->width;
		*height = pix->height;
		*bytesperline = pix->plane_fmt[0].bytesperline;
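		/* The NV12/NV21 chroma plane is vertically subsampled by two */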
		if (plane == 1)
			*height /= 2;
		break;
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		*width = pix->width;
		*height = pix->height;
		*bytesperline = pix->plane_fmt[0].bytesperline;
		break;
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
	case V4L2_PIX_FMT_VYUY:
	case V4L2_PIX_FMT_UYVY:
		*width = pix->width;
		*height = pix->height;
		*bytesperline = pix->plane_fmt[plane].bytesperline;
		break;
	}
}

static void vfe_wm_line_based(struct vfe_device *vfe, u32 wm,
			      struct v4l2_pix_format_mplane *pix,
			      u8 plane, u32 enable)
{
	u32 reg;

	if (enable) {
		u16 width = 0, height = 0, bytesperline = 0, wpl;

		vfe_get_wm_sizes(pix, plane, &width, &height, &bytesperline);

		wpl = vfe_word_per_line_by_pixel(pix->pixelformat, width);

		reg = height - 1;
		reg |= ((wpl + 3) / 4 - 1) << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));

		wpl = vfe_word_per_line_by_bytes(bytesperline);

		reg = 0x3;
		reg |= (height - 1) << 2;
		reg |= ((wpl + 1) / 2) << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	} else {
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	}
}

static void vfe_wm_set_framedrop_period(struct vfe_device *vfe, u8 wm, u8 per)
{
	u32 reg;

	reg = readl_relaxed(vfe->base +
			    VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));

	reg &= ~(VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK);

	reg |= (per << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT)
		& VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK;

	writel_relaxed(reg,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
}

static void vfe_wm_set_framedrop_pattern(struct vfe_device *vfe, u8 wm,
					 u32 pattern)
{
	writel_relaxed(pattern, vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(wm));
}

static void vfe_wm_set_ub_cfg(struct vfe_device *vfe, u8 wm,
			      u16 offset, u16 depth)
{
	u32 reg;

	reg = (offset << VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT) |
		depth;
	writel_relaxed(reg, vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(wm));
}

static void vfe_bus_reload_wm(struct vfe_device *vfe, u8 wm)
{
	/* Enforce barrier between any outstanding register write */
	wmb();

	writel_relaxed(VFE_0_BUS_CMD_Mx_RLD_CMD(wm), vfe->base + VFE_0_BUS_CMD);

	/* Use barrier to make sure bus reload is issued before anything else */
	wmb();
}

static void vfe_wm_set_ping_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
	writel_relaxed(addr,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(wm));
}

static void vfe_wm_set_pong_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
	writel_relaxed(addr,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(wm));
}

static int vfe_wm_get_ping_pong_status(struct vfe_device *vfe, u8 wm)
{
	u32 reg;

	reg = readl_relaxed(vfe->base + VFE_0_BUS_PING_PONG_STATUS);

	return (reg >> wm) & 0x1;
}

static void vfe_bus_enable_wr_if(struct vfe_device *vfe, u8 enable)
{
	if (enable)
		writel_relaxed(0x101, vfe->base + VFE_0_BUS_CFG);
	else
		writel_relaxed(0, vfe->base + VFE_0_BUS_CFG);
}
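
/*
 * vfe_bus_connect_wm_to_rdi - Route image write master @wm to RDI line @id
 *
 * Enables the RDI input, selects its stream and configures the bus XBAR so
 * that the write master captures the RDI's single stream.
 */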
static void vfe_bus_connect_wm_to_rdi(struct vfe_device *vfe, u8 wm,
				      enum vfe_line_id id)
{
	u32 reg;

	reg = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), reg);

	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	reg |= ((3 * id) << VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT) &
	       VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id), reg);

	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_set(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}

static void vfe_wm_set_subsample(struct vfe_device *vfe, u8 wm)
{
	writel_relaxed(VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF,
		vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(wm));
}

static void vfe_bus_disconnect_wm_from_rdi(struct vfe_device *vfe, u8 wm,
					   enum vfe_line_id id)
{
	u32 reg;

	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id), reg);

	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_clr(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}

static void vfe_set_xbar_cfg(struct vfe_device *vfe, struct vfe_output *output,
			     u8 enable)
{
	struct vfe_line *line = container_of(output, struct vfe_line, output);
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;

	switch (p) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;

		if (output->wm_idx[0] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);

		reg = VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;
		if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV16)
			reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;

		if (output->wm_idx[1] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[1]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[1]),
				    reg);
		break;
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
	case V4L2_PIX_FMT_VYUY:
	case V4L2_PIX_FMT_UYVY:
		reg = VFE_0_BUS_XBAR_CFG_x_M_REALIGN_BUF_EN;
		reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;

		if (p == V4L2_PIX_FMT_YUYV || p == V4L2_PIX_FMT_YVYU)
			reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;
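
		/*
		 * Odd-numbered write masters use the upper half of the
		 * XBAR_CFG register they share with the preceding master.
		 */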
		if (output->wm_idx[0] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);
		break;
	default:
		break;
	}
}

static void vfe_set_realign_cfg(struct vfe_device *vfe, struct vfe_line *line,
				u8 enable)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 val = VFE_0_MODULE_ZOOM_EN_REALIGN_BUF;

	if (p != V4L2_PIX_FMT_YUYV && p != V4L2_PIX_FMT_YVYU &&
	    p != V4L2_PIX_FMT_VYUY && p != V4L2_PIX_FMT_UYVY)
		return;

	if (enable) {
		vfe_reg_set(vfe, VFE_0_MODULE_ZOOM_EN, val);
	} else {
		vfe_reg_clr(vfe, VFE_0_MODULE_ZOOM_EN, val);
		return;
	}

	val = VFE_0_REALIGN_BUF_CFG_HSUB_ENABLE;

	if (p == V4L2_PIX_FMT_UYVY || p == V4L2_PIX_FMT_YUYV)
		val |= VFE_0_REALIGN_BUF_CFG_CR_ODD_PIXEL;
	else
		val |= VFE_0_REALIGN_BUF_CFG_CB_ODD_PIXEL;

	writel_relaxed(val, vfe->base + VFE_0_REALIGN_BUF_CFG);
}

static void vfe_set_rdi_cid(struct vfe_device *vfe, enum vfe_line_id id, u8 cid)
{
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id),
		    VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK);

	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id),
		    cid << VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT);
}

static void vfe_reg_update(struct vfe_device *vfe, enum vfe_line_id line_id)
{
	vfe->reg_update |= VFE_0_REG_UPDATE_line_n(line_id);

	/* Enforce barrier between line update and commit */
	wmb();

	writel_relaxed(vfe->reg_update, vfe->base + VFE_0_REG_UPDATE);

	/* Make sure register update is issued before further reg writes */
	wmb();
}

static inline void vfe_reg_update_clear(struct vfe_device *vfe,
					enum vfe_line_id line_id)
{
	vfe->reg_update &= ~VFE_0_REG_UPDATE_line_n(line_id);
}

static void vfe_enable_irq_wm_line(struct vfe_device *vfe, u8 wm,
				   enum vfe_line_id line_id, u8 enable)
{
	u32 irq_en0 = VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(wm) |
		      VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
	u32 irq_en1 = VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(wm) |
		      VFE_0_IRQ_MASK_1_RDIn_SOF(line_id);

	if (enable) {
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
	} else {
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
	}
}

static void vfe_enable_irq_pix_line(struct vfe_device *vfe, u8 comp,
				    enum vfe_line_id line_id, u8 enable)
{
	struct vfe_output *output = &vfe->line[line_id].output;
	unsigned int i;
	u32 irq_en0;
	u32 irq_en1;
	u32 comp_mask = 0;

	irq_en0 = VFE_0_IRQ_MASK_0_CAMIF_SOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_CAMIF_EOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(comp);
	irq_en0 |= VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
	irq_en1 = VFE_0_IRQ_MASK_1_CAMIF_ERROR;
	for (i = 0; i < output->wm_num; i++) {
		irq_en1 |= VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(output->wm_idx[i]);
		comp_mask |= (1 << output->wm_idx[i]) << comp * 8;
	}

	if (enable) {
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	} else {
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	}
}
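
/*
 * vfe_enable_irq_common - Enable the IRQs that are always of interest:
 * reset ack, violation and bus bridge halt ack.
 */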
static void vfe_enable_irq_common(struct vfe_device *vfe)
{
	u32 irq_en0 = VFE_0_IRQ_MASK_0_RESET_ACK;
	u32 irq_en1 = VFE_0_IRQ_MASK_1_VIOLATION |
		      VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK;

	vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
	vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
}

static void vfe_set_demux_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val, even_cfg, odd_cfg;

	writel_relaxed(VFE_0_DEMUX_CFG_PERIOD, vfe->base + VFE_0_DEMUX_CFG);

	val = VFE_0_DEMUX_GAIN_0_CH0_EVEN | VFE_0_DEMUX_GAIN_0_CH0_ODD;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_0);

	val = VFE_0_DEMUX_GAIN_1_CH1 | VFE_0_DEMUX_GAIN_1_CH2;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_1);

	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV;
		break;
	case MEDIA_BUS_FMT_YVYU8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU;
		break;
	case MEDIA_BUS_FMT_UYVY8_2X8:
	default:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY;
		break;
	case MEDIA_BUS_FMT_VYUY8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY;
		break;
	}

	writel_relaxed(even_cfg, vfe->base + VFE_0_DEMUX_EVEN_CFG);
	writel_relaxed(odd_cfg, vfe->base + VFE_0_DEMUX_ODD_CFG);
}

static void vfe_set_scale_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	u16 input, output;
	u8 interp_reso;
	u32 phase_mult;

	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_Y_CFG);

	input = line->fmt[MSM_VFE_PAD_SINK].width - 1;
	output = line->compose.width - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_PHASE);

	input = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	output = line->compose.height - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_PHASE);

	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_CBCR_CFG);

	input = line->fmt[MSM_VFE_PAD_SINK].width - 1;
	output = line->compose.width / 2 - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_PHASE);

	input = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	output = line->compose.height - 1;
	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21)
		output = line->compose.height / 2 - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_PHASE);
}

static void vfe_set_crop_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	u16 first, last;

	first = line->crop.left;
	last = line->crop.left + line->crop.width - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_WIDTH);

	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_HEIGHT);

	first = line->crop.left / 2;
	last = line->crop.left / 2 + line->crop.width / 2 - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_WIDTH);

	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21) {
		first = line->crop.top / 2;
		last = line->crop.top / 2 + line->crop.height / 2 - 1;
	}
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_HEIGHT);
}

static void vfe_set_clamp_cfg(struct vfe_device *vfe)
{
	u32 val = VFE_0_CLAMP_ENC_MAX_CFG_CH0 |
		  VFE_0_CLAMP_ENC_MAX_CFG_CH1 |
		  VFE_0_CLAMP_ENC_MAX_CFG_CH2;

	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MAX_CFG);

	val = VFE_0_CLAMP_ENC_MIN_CFG_CH0 |
	      VFE_0_CLAMP_ENC_MIN_CFG_CH1 |
	      VFE_0_CLAMP_ENC_MIN_CFG_CH2;

	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MIN_CFG);
}

static void vfe_set_cgc_override(struct vfe_device *vfe, u8 wm, u8 enable)
{
	/* empty */
}

static void vfe_set_camif_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val;

	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR;
		break;
	case MEDIA_BUS_FMT_YVYU8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB;
		break;
	case MEDIA_BUS_FMT_UYVY8_2X8:
	default:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY;
		break;
	case MEDIA_BUS_FMT_VYUY8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY;
		break;
	}

	val |= VFE_0_CORE_CFG_COMPOSITE_REG_UPDATE_EN;
	writel_relaxed(val, vfe->base + VFE_0_CORE_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
	val |= (line->fmt[MSM_VFE_PAD_SINK].height - 1) << 16;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_FRAME_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_WIDTH_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_HEIGHT_CFG);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_SUBSAMPLE_CFG);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_FRAMEDROP_PATTERN);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN);

	val = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), val);

	val = VFE_0_CAMIF_CFG_VFE_OUTPUT_EN;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_CFG);
}

static void vfe_set_camif_cmd(struct vfe_device *vfe, u8 enable)
{
	u32 cmd;

	cmd = VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS | VFE_0_CAMIF_CMD_NO_CHANGE;
	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);

	/* Make sure the CAMIF command is written before it is changed again */
	wmb();

	if (enable)
		cmd = VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY;
	else
		cmd = VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY;

	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
}

static void vfe_set_module_cfg(struct vfe_device *vfe, u8 enable)
{
	u32 val_lens = VFE_0_MODULE_LENS_EN_DEMUX |
		       VFE_0_MODULE_LENS_EN_CHROMA_UPSAMPLE;
	u32 val_zoom = VFE_0_MODULE_ZOOM_EN_SCALE_ENC |
		       VFE_0_MODULE_ZOOM_EN_CROP_ENC;

	if (enable) {
		vfe_reg_set(vfe, VFE_0_MODULE_LENS_EN, val_lens);
		vfe_reg_set(vfe, VFE_0_MODULE_ZOOM_EN, val_zoom);
	} else {
		vfe_reg_clr(vfe, VFE_0_MODULE_LENS_EN, val_lens);
		vfe_reg_clr(vfe, VFE_0_MODULE_ZOOM_EN, val_zoom);
	}
}

static int vfe_camif_wait_for_stop(struct vfe_device *vfe, struct device *dev)
{
	u32 val;
	int ret;

	ret = readl_poll_timeout(vfe->base + VFE_0_CAMIF_STATUS,
				 val,
				 (val & VFE_0_CAMIF_STATUS_HALT),
				 CAMIF_TIMEOUT_SLEEP_US,
				 CAMIF_TIMEOUT_ALL_US);
	if (ret < 0)
		dev_err(dev, "%s: camif stop timeout\n", __func__);

	return ret;
}

/*
 * vfe_isr - VFE module interrupt handler
 * @irq: Interrupt line
 * @dev: VFE device
 *
 * Return IRQ_HANDLED on success
 */
static irqreturn_t vfe_isr(int irq, void *dev)
{
	struct vfe_device *vfe = dev;
	u32 value0, value1;
	int i, j;

	vfe->ops->isr_read(vfe, &value0, &value1);

	dev_dbg(vfe->camss->dev, "VFE: status0 = 0x%08x, status1 = 0x%08x\n",
		value0, value1);

	if (value0 & VFE_0_IRQ_STATUS_0_RESET_ACK)
		vfe->isr_ops.reset_ack(vfe);

	if (value1 & VFE_0_IRQ_STATUS_1_VIOLATION)
		vfe->ops->violation_read(vfe);

	if (value1 & VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK)
		vfe->isr_ops.halt_ack(vfe);

	for (i = VFE_LINE_RDI0; i < vfe->line_num; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(i))
			vfe->isr_ops.reg_update(vfe, i);

	if (value0 & VFE_0_IRQ_STATUS_0_CAMIF_SOF)
		vfe->isr_ops.sof(vfe, VFE_LINE_PIX);

	for (i = VFE_LINE_RDI0; i <= VFE_LINE_RDI2; i++)
		if (value1 & VFE_0_IRQ_STATUS_1_RDIn_SOF(i))
			vfe->isr_ops.sof(vfe, i);

	for (i = 0; i < MSM_VFE_COMPOSITE_IRQ_NUM; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(i)) {
			vfe->isr_ops.comp_done(vfe, i);
			for (j = 0; j < ARRAY_SIZE(vfe->wm_output_map); j++)
				if (vfe->wm_output_map[j] == VFE_LINE_PIX)
					value0 &= ~VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(j);
		}

	for (i = 0; i < MSM_VFE_IMAGE_MASTERS_NUM; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(i))
			vfe->isr_ops.wm_done(vfe, i);

	return IRQ_HANDLED;
}

static u16 vfe_get_ub_size(u8 vfe_id)
{
	/* On VFE4.8 the ub-size is the same on both instances */
	return MSM_VFE_VFE0_UB_SIZE_RDI;
}

static void vfe_wm_enable(struct vfe_device *vfe, u8 wm, u8 enable)
{
	if (enable)
		writel_relaxed(2 << VFE_0_BUS_IMAGE_MASTER_n_SHIFT(wm),
			       vfe->base + VFE_0_BUS_IMAGE_MASTER_CMD);
	else
		writel_relaxed(1 << VFE_0_BUS_IMAGE_MASTER_n_SHIFT(wm),
			       vfe->base + VFE_0_BUS_IMAGE_MASTER_CMD);

	/* The WM must be enabled before sending other commands */
	wmb();
}
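
/* Program the fixed bus bridge QoS register values for this VFE revision */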
static void vfe_set_qos(struct vfe_device *vfe)
{
	u32 val = VFE_0_BUS_BDG_QOS_CFG_0_CFG;
	u32 val3 = VFE_0_BUS_BDG_QOS_CFG_3_CFG;
	u32 val4 = VFE_0_BUS_BDG_QOS_CFG_4_CFG;
	u32 val7 = VFE_0_BUS_BDG_QOS_CFG_7_CFG;

	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_0);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_1);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_2);
	writel_relaxed(val3, vfe->base + VFE_0_BUS_BDG_QOS_CFG_3);
	writel_relaxed(val4, vfe->base + VFE_0_BUS_BDG_QOS_CFG_4);
	writel_relaxed(val4, vfe->base + VFE_0_BUS_BDG_QOS_CFG_5);
	writel_relaxed(val4, vfe->base + VFE_0_BUS_BDG_QOS_CFG_6);
	writel_relaxed(val7, vfe->base + VFE_0_BUS_BDG_QOS_CFG_7);
}

static void vfe_set_ds(struct vfe_device *vfe)
{
	u32 val = VFE_0_BUS_BDG_DS_CFG_0_CFG;
	u32 val16 = VFE_0_BUS_BDG_DS_CFG_16_CFG;

	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_0);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_1);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_2);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_3);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_4);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_5);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_6);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_7);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_8);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_9);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_10);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_11);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_12);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_13);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_14);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_15);
	writel_relaxed(val16, vfe->base + VFE_0_BUS_BDG_DS_CFG_16);
}

static void vfe_isr_read(struct vfe_device *vfe, u32 *value0, u32 *value1)
{
	*value0 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_0);
	*value1 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_1);

	writel_relaxed(*value0, vfe->base + VFE_0_IRQ_CLEAR_0);
	writel_relaxed(*value1, vfe->base + VFE_0_IRQ_CLEAR_1);

	/* Enforce barrier between local & global IRQ clear */
	wmb();
	writel_relaxed(VFE_0_IRQ_CMD_GLOBAL_CLEAR, vfe->base + VFE_0_IRQ_CMD);
}

/*
 * vfe_pm_domain_off - Disable power domains specific to this VFE.
 * @vfe: VFE Device
 */
static void vfe_pm_domain_off(struct vfe_device *vfe)
{
	struct camss *camss = vfe->camss;

	device_link_del(camss->genpd_link[vfe->id]);
}

/*
 * vfe_pm_domain_on - Enable power domains specific to this VFE.
 * @vfe: VFE Device
 */
static int vfe_pm_domain_on(struct vfe_device *vfe)
{
	struct camss *camss = vfe->camss;
	enum vfe_line_id id = vfe->id;

	camss->genpd_link[id] = device_link_add(camss->dev, camss->genpd[id], DL_FLAG_STATELESS |
						DL_FLAG_PM_RUNTIME | DL_FLAG_RPM_ACTIVE);

	if (!camss->genpd_link[id]) {
		dev_err(vfe->camss->dev, "Failed to add VFE#%d to power domain\n", id);
		return -EINVAL;
	}

	return 0;
}

static void vfe_violation_read(struct vfe_device *vfe)
{
	u32 violation = readl_relaxed(vfe->base + VFE_0_VIOLATION_STATUS);

	pr_err_ratelimited("VFE: violation = 0x%08x\n", violation);
}

static const struct vfe_hw_ops_gen1 vfe_ops_gen1_4_8 = {
	.bus_connect_wm_to_rdi = vfe_bus_connect_wm_to_rdi,
	.bus_disconnect_wm_from_rdi = vfe_bus_disconnect_wm_from_rdi,
	.bus_enable_wr_if = vfe_bus_enable_wr_if,
	.bus_reload_wm = vfe_bus_reload_wm,
	.camif_wait_for_stop = vfe_camif_wait_for_stop,
	.enable_irq_common = vfe_enable_irq_common,
	.enable_irq_pix_line = vfe_enable_irq_pix_line,
	.enable_irq_wm_line = vfe_enable_irq_wm_line,
	.get_ub_size = vfe_get_ub_size,
	.halt_clear = vfe_halt_clear,
	.halt_request = vfe_halt_request,
	.set_camif_cfg = vfe_set_camif_cfg,
	.set_camif_cmd = vfe_set_camif_cmd,
	.set_cgc_override = vfe_set_cgc_override,
	.set_clamp_cfg = vfe_set_clamp_cfg,
	.set_crop_cfg = vfe_set_crop_cfg,
	.set_demux_cfg = vfe_set_demux_cfg,
	.set_ds = vfe_set_ds,
	.set_module_cfg = vfe_set_module_cfg,
	.set_qos = vfe_set_qos,
	.set_rdi_cid = vfe_set_rdi_cid,
	.set_realign_cfg = vfe_set_realign_cfg,
	.set_scale_cfg = vfe_set_scale_cfg,
	.set_xbar_cfg = vfe_set_xbar_cfg,
	.wm_enable = vfe_wm_enable,
	.wm_frame_based = vfe_wm_frame_based,
	.wm_get_ping_pong_status = vfe_wm_get_ping_pong_status,
	.wm_line_based = vfe_wm_line_based,
	.wm_set_framedrop_pattern = vfe_wm_set_framedrop_pattern,
	.wm_set_framedrop_period = vfe_wm_set_framedrop_period,
	.wm_set_ping_addr = vfe_wm_set_ping_addr,
	.wm_set_pong_addr = vfe_wm_set_pong_addr,
	.wm_set_subsample = vfe_wm_set_subsample,
	.wm_set_ub_cfg = vfe_wm_set_ub_cfg,
};

static void vfe_subdev_init(struct device *dev, struct vfe_device *vfe)
{
	vfe->isr_ops = vfe_isr_ops_gen1;
	vfe->ops_gen1 = &vfe_ops_gen1_4_8;
	vfe->video_ops = vfe_video_ops_gen1;

	vfe->line_num = VFE_LINE_NUM_GEN1;
}

const struct vfe_hw_ops vfe_ops_4_8 = {
	.global_reset = vfe_global_reset,
	.hw_version = vfe_hw_version,
	.isr_read = vfe_isr_read,
	.isr = vfe_isr,
	.pm_domain_off = vfe_pm_domain_off,
	.pm_domain_on = vfe_pm_domain_on,
	.reg_update_clear = vfe_reg_update_clear,
	.reg_update = vfe_reg_update,
	.subdev_init = vfe_subdev_init,
	.vfe_disable = vfe_gen1_disable,
	.vfe_enable = vfe_gen1_enable,
	.vfe_halt = vfe_gen1_halt,
	.violation_read = vfe_violation_read,
};