// SPDX-License-Identifier: GPL-2.0
/*
 * camss-vfe-4-1.c
 *
 * Qualcomm MSM Camera Subsystem - VFE (Video Front End) Module v4.1
 *
 * Copyright (c) 2013-2015, The Linux Foundation. All rights reserved.
 * Copyright (C) 2015-2018 Linaro Ltd.
 */

#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>

#include "camss.h"
#include "camss-vfe.h"
#include "camss-vfe-gen1.h"

#define VFE_0_HW_VERSION		0x000

#define VFE_0_GLOBAL_RESET_CMD		0x00c
#define VFE_0_GLOBAL_RESET_CMD_CORE	BIT(0)
#define VFE_0_GLOBAL_RESET_CMD_CAMIF	BIT(1)
#define VFE_0_GLOBAL_RESET_CMD_BUS	BIT(2)
#define VFE_0_GLOBAL_RESET_CMD_BUS_BDG	BIT(3)
#define VFE_0_GLOBAL_RESET_CMD_REGISTER	BIT(4)
#define VFE_0_GLOBAL_RESET_CMD_TIMER	BIT(5)
#define VFE_0_GLOBAL_RESET_CMD_PM	BIT(6)
#define VFE_0_GLOBAL_RESET_CMD_BUS_MISR	BIT(7)
#define VFE_0_GLOBAL_RESET_CMD_TESTGEN	BIT(8)

#define VFE_0_MODULE_CFG		0x018
#define VFE_0_MODULE_CFG_DEMUX			BIT(2)
#define VFE_0_MODULE_CFG_CHROMA_UPSAMPLE	BIT(3)
#define VFE_0_MODULE_CFG_SCALE_ENC		BIT(23)
#define VFE_0_MODULE_CFG_CROP_ENC		BIT(27)

#define VFE_0_CORE_CFG			0x01c
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR	0x4
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB	0x5
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY	0x6
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY	0x7

#define VFE_0_IRQ_CMD			0x024
#define VFE_0_IRQ_CMD_GLOBAL_CLEAR	BIT(0)

#define VFE_0_IRQ_MASK_0		0x028
#define VFE_0_IRQ_MASK_0_CAMIF_SOF			BIT(0)
#define VFE_0_IRQ_MASK_0_CAMIF_EOF			BIT(1)
#define VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n)		BIT((n) + 5)
#define VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(n)		\
	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
#define VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
#define VFE_0_IRQ_MASK_0_RESET_ACK			BIT(31)
#define VFE_0_IRQ_MASK_1		0x02c
#define VFE_0_IRQ_MASK_1_CAMIF_ERROR			BIT(0)
#define VFE_0_IRQ_MASK_1_VIOLATION			BIT(7)
#define VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK		BIT(8)
#define VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(n)	BIT((n) + 9)
#define VFE_0_IRQ_MASK_1_RDIn_SOF(n)			BIT((n) + 29)

#define VFE_0_IRQ_CLEAR_0		0x030
#define VFE_0_IRQ_CLEAR_1		0x034

#define VFE_0_IRQ_STATUS_0		0x038
#define VFE_0_IRQ_STATUS_0_CAMIF_SOF			BIT(0)
#define VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n)		BIT((n) + 5)
#define VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(n)		\
	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
#define VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
#define VFE_0_IRQ_STATUS_0_RESET_ACK			BIT(31)
#define VFE_0_IRQ_STATUS_1		0x03c
#define VFE_0_IRQ_STATUS_1_VIOLATION			BIT(7)
#define VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK		BIT(8)
#define VFE_0_IRQ_STATUS_1_RDIn_SOF(n)			BIT((n) + 29)

#define VFE_0_IRQ_COMPOSITE_MASK_0	0x40
#define VFE_0_VIOLATION_STATUS		0x48

#define VFE_0_BUS_CMD			0x4c
#define VFE_0_BUS_CMD_Mx_RLD_CMD(x)	BIT(x)

#define VFE_0_BUS_CFG			0x050

#define VFE_0_BUS_XBAR_CFG_x(x)		(0x58 + 0x4 * ((x) / 2))
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN			BIT(1)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA	(0x3 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT		8
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA		0
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0	5
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1	6
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2	7

#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(n)		(0x06c + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT	0
#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT	1
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(n)	(0x070 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(n)	(0x074 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(n)		(0x078 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT	2
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK	(0x1f << 2)

#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(n)		(0x07c + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT	16
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(n)	(0x080 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(n)	(0x084 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(n)	\
							(0x088 + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(n)	\
							(0x08c + 0x24 * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF	0xffffffff

#define VFE_0_BUS_PING_PONG_STATUS	0x268

#define VFE_0_BUS_BDG_CMD		0x2c0
#define VFE_0_BUS_BDG_CMD_HALT_REQ	1

#define VFE_0_BUS_BDG_QOS_CFG_0		0x2c4
#define VFE_0_BUS_BDG_QOS_CFG_0_CFG	0xaaa5aaa5
#define VFE_0_BUS_BDG_QOS_CFG_1		0x2c8
#define VFE_0_BUS_BDG_QOS_CFG_2		0x2cc
#define VFE_0_BUS_BDG_QOS_CFG_3		0x2d0
#define VFE_0_BUS_BDG_QOS_CFG_4		0x2d4
#define VFE_0_BUS_BDG_QOS_CFG_5		0x2d8
#define VFE_0_BUS_BDG_QOS_CFG_6		0x2dc
#define VFE_0_BUS_BDG_QOS_CFG_7		0x2e0
#define VFE_0_BUS_BDG_QOS_CFG_7_CFG	0x0001aaa5

#define VFE_0_RDI_CFG_x(x)		(0x2e8 + (0x4 * (x)))
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT	28
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK	(0xf << 28)
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT	4
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK		(0xf << 4)
#define VFE_0_RDI_CFG_x_RDI_EN_BIT		BIT(2)
#define VFE_0_RDI_CFG_x_MIPI_EN_BITS		0x3
#define VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(r)	BIT(16 + (r))

#define VFE_0_CAMIF_CMD				0x2f4
#define VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY	0
#define VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY	1
#define VFE_0_CAMIF_CMD_NO_CHANGE		3
#define VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS	BIT(2)
#define VFE_0_CAMIF_CFG				0x2f8
#define VFE_0_CAMIF_CFG_VFE_OUTPUT_EN		BIT(6)
#define VFE_0_CAMIF_FRAME_CFG			0x300
#define VFE_0_CAMIF_WINDOW_WIDTH_CFG		0x304
#define VFE_0_CAMIF_WINDOW_HEIGHT_CFG		0x308
#define VFE_0_CAMIF_SUBSAMPLE_CFG_0		0x30c
#define VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN	0x314
#define VFE_0_CAMIF_STATUS			0x31c
#define VFE_0_CAMIF_STATUS_HALT			BIT(31)

#define VFE_0_REG_UPDATE			0x378
#define VFE_0_REG_UPDATE_RDIn(n)		BIT(1 + (n))
#define VFE_0_REG_UPDATE_line_n(n)		\
			((n) == VFE_LINE_PIX ? 1 : VFE_0_REG_UPDATE_RDIn(n))

#define VFE_0_DEMUX_CFG				0x424
#define VFE_0_DEMUX_CFG_PERIOD			0x3
#define VFE_0_DEMUX_GAIN_0			0x428
#define VFE_0_DEMUX_GAIN_0_CH0_EVEN		(0x80 << 0)
#define VFE_0_DEMUX_GAIN_0_CH0_ODD		(0x80 << 16)
#define VFE_0_DEMUX_GAIN_1			0x42c
#define VFE_0_DEMUX_GAIN_1_CH1			(0x80 << 0)
#define VFE_0_DEMUX_GAIN_1_CH2			(0x80 << 16)
#define VFE_0_DEMUX_EVEN_CFG			0x438
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV	0x9cac
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU	0xac9c
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY	0xc9ca
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY	0xcac9
#define VFE_0_DEMUX_ODD_CFG			0x43c
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV	0x9cac
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU	0xac9c
#define VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY	0xc9ca
#define VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY	0xcac9

#define VFE_0_SCALE_ENC_Y_CFG			0x75c
#define VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE		0x760
#define VFE_0_SCALE_ENC_Y_H_PHASE		0x764
#define VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE		0x76c
#define VFE_0_SCALE_ENC_Y_V_PHASE		0x770
#define VFE_0_SCALE_ENC_CBCR_CFG		0x778
#define VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE	0x77c
#define VFE_0_SCALE_ENC_CBCR_H_PHASE		0x780
#define VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE	0x790
#define VFE_0_SCALE_ENC_CBCR_V_PHASE		0x794

#define VFE_0_CROP_ENC_Y_WIDTH			0x854
#define VFE_0_CROP_ENC_Y_HEIGHT			0x858
#define VFE_0_CROP_ENC_CBCR_WIDTH		0x85c
#define VFE_0_CROP_ENC_CBCR_HEIGHT		0x860

#define VFE_0_CLAMP_ENC_MAX_CFG			0x874
#define VFE_0_CLAMP_ENC_MAX_CFG_CH0		(0xff << 0)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH1		(0xff << 8)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH2		(0xff << 16)
#define VFE_0_CLAMP_ENC_MIN_CFG			0x878
#define VFE_0_CLAMP_ENC_MIN_CFG_CH0		(0x0 << 0)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH1		(0x0 << 8)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH2		(0x0 << 16)

#define VFE_0_CGC_OVERRIDE_1			0x974
#define VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(x)	BIT(x)

#define CAMIF_TIMEOUT_SLEEP_US 1000
#define CAMIF_TIMEOUT_ALL_US 1000000

#define MSM_VFE_VFE0_UB_SIZE 1023
#define MSM_VFE_VFE0_UB_SIZE_RDI (MSM_VFE_VFE0_UB_SIZE / 3)

static void vfe_hw_version_read(struct vfe_device *vfe, struct device *dev)
{
	u32 hw_version = readl_relaxed(vfe->base + VFE_0_HW_VERSION);

	dev_dbg(dev, "VFE HW Version = 0x%08x\n", hw_version);
}

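/*
 * Per-output UB allocation: the total write master buffer of VFE0 is
 * split evenly between the three RDI outputs.
 */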
static u16 vfe_get_ub_size(u8 vfe_id)
{
	if (vfe_id == 0)
		return MSM_VFE_VFE0_UB_SIZE_RDI;

	return 0;
}

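/* Read-modify-write helpers for clearing and setting bits in VFE registers */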
static inline void vfe_reg_clr(struct vfe_device *vfe, u32 reg, u32 clr_bits)
{
	u32 bits = readl_relaxed(vfe->base + reg);

	writel_relaxed(bits & ~clr_bits, vfe->base + reg);
}

static inline void vfe_reg_set(struct vfe_device *vfe, u32 reg, u32 set_bits)
{
	u32 bits = readl_relaxed(vfe->base + reg);

	writel_relaxed(bits | set_bits, vfe->base + reg);
}

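/*
 * Request a reset of all VFE blocks; completion is signalled by the
 * RESET_ACK interrupt handled in vfe_isr().
 */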
static void vfe_global_reset(struct vfe_device *vfe)
{
	u32 reset_bits = VFE_0_GLOBAL_RESET_CMD_TESTGEN		|
			 VFE_0_GLOBAL_RESET_CMD_BUS_MISR	|
			 VFE_0_GLOBAL_RESET_CMD_PM		|
			 VFE_0_GLOBAL_RESET_CMD_TIMER		|
			 VFE_0_GLOBAL_RESET_CMD_REGISTER	|
			 VFE_0_GLOBAL_RESET_CMD_BUS_BDG		|
			 VFE_0_GLOBAL_RESET_CMD_BUS		|
			 VFE_0_GLOBAL_RESET_CMD_CAMIF		|
			 VFE_0_GLOBAL_RESET_CMD_CORE;

	writel_relaxed(reset_bits, vfe->base + VFE_0_GLOBAL_RESET_CMD);
}

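/*
 * Request a bus bridge halt; completion is signalled by the
 * BUS_BDG_HALT_ACK interrupt.
 */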
static void vfe_halt_request(struct vfe_device *vfe)
{
	writel_relaxed(VFE_0_BUS_BDG_CMD_HALT_REQ,
		       vfe->base + VFE_0_BUS_BDG_CMD);
}

static void vfe_halt_clear(struct vfe_device *vfe)
{
	writel_relaxed(0x0, vfe->base + VFE_0_BUS_BDG_CMD);
}

static void vfe_wm_enable(struct vfe_device *vfe, u8 wm, u8 enable)
{
	if (enable)
		vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
	else
		vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
}

static void vfe_wm_frame_based(struct vfe_device *vfe, u8 wm, u8 enable)
{
	if (enable)
		vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
			1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
	else
		vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
			1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
}

static void vfe_get_wm_sizes(struct v4l2_pix_format_mplane *pix, u8 plane,
			     u16 *width, u16 *height, u16 *bytesperline)
{
	switch (pix->pixelformat) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
		*width = pix->width;
		*height = pix->height;
		*bytesperline = pix->plane_fmt[0].bytesperline;
		if (plane == 1)
			*height /= 2;
		break;
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		*width = pix->width;
		*height = pix->height;
		*bytesperline = pix->plane_fmt[0].bytesperline;
		break;
	}
}

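/*
 * Program the per write master image size and buffer config registers for
 * line based output. The words-per-line values are derived from the pixel
 * format via vfe_word_per_line().
 */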
static void vfe_wm_line_based(struct vfe_device *vfe, u32 wm,
			      struct v4l2_pix_format_mplane *pix,
			      u8 plane, u32 enable)
{
	u32 reg;

	if (enable) {
		u16 width = 0, height = 0, bytesperline = 0, wpl;

		vfe_get_wm_sizes(pix, plane, &width, &height, &bytesperline);

		wpl = vfe_word_per_line(pix->pixelformat, width);

		reg = height - 1;
		reg |= ((wpl + 1) / 2 - 1) << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));

		wpl = vfe_word_per_line(pix->pixelformat, bytesperline);

		reg = 0x3;
		reg |= (height - 1) << 4;
		reg |= wpl << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	} else {
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	}
}

static void vfe_wm_set_framedrop_period(struct vfe_device *vfe, u8 wm, u8 per)
{
	u32 reg;

	reg = readl_relaxed(vfe->base +
			    VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));

	reg &= ~(VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK);

	reg |= (per << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT)
		& VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK;

	writel_relaxed(reg,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
}

static void vfe_wm_set_framedrop_pattern(struct vfe_device *vfe, u8 wm,
					 u32 pattern)
{
	writel_relaxed(pattern,
	       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(wm));
}

static void vfe_wm_set_ub_cfg(struct vfe_device *vfe, u8 wm,
			      u16 offset, u16 depth)
{
	u32 reg;

	reg = (offset << VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT) |
		depth;
	writel_relaxed(reg, vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(wm));
}

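/*
 * Trigger a ping/pong address reload for write master wm. The barriers
 * ensure previously written buffer addresses are posted before the reload
 * command and that the command itself is not reordered.
 */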
static void vfe_bus_reload_wm(struct vfe_device *vfe, u8 wm)
{
	wmb();
	writel_relaxed(VFE_0_BUS_CMD_Mx_RLD_CMD(wm), vfe->base + VFE_0_BUS_CMD);
	wmb();
}

static void vfe_wm_set_ping_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
	writel_relaxed(addr,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(wm));
}

static void vfe_wm_set_pong_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
	writel_relaxed(addr,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(wm));
}

static int vfe_wm_get_ping_pong_status(struct vfe_device *vfe, u8 wm)
{
	u32 reg;

	reg = readl_relaxed(vfe->base + VFE_0_BUS_PING_PONG_STATUS);

	return (reg >> wm) & 0x1;
}

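/* Enable or disable the VFE bus write interface (BUS_CFG) */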
static void vfe_bus_enable_wr_if(struct vfe_device *vfe, u8 enable)
{
	if (enable)
		writel_relaxed(0x10000009, vfe->base + VFE_0_BUS_CFG);
	else
		writel_relaxed(0, vfe->base + VFE_0_BUS_CFG);
}

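/*
 * Route RDI line id to write master wm: enable the MIPI interface bits and
 * frame based mode in RDI_CFG, select the stream for the line, and program
 * the bus crossbar. Odd numbered write masters use the upper half of their
 * shared XBAR_CFG register.
 */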
static void vfe_bus_connect_wm_to_rdi(struct vfe_device *vfe, u8 wm,
				      enum vfe_line_id id)
{
	u32 reg;

	reg = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	reg |= VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), reg);

	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	reg |= ((3 * id) << VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT) &
		VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id), reg);

	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_set(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}

static void vfe_wm_set_subsample(struct vfe_device *vfe, u8 wm)
{
	writel_relaxed(VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF,
		       vfe->base +
		       VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(wm));
}

static void vfe_bus_disconnect_wm_from_rdi(struct vfe_device *vfe, u8 wm,
					   enum vfe_line_id id)
{
	u32 reg;

	reg = VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(0), reg);

	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id), reg);

	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_clr(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}

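/*
 * Configure the bus crossbar for an encoder (PIX) output: the first write
 * master carries the luma stream, the second the chroma pair stream, with
 * inter/intra swap enabled for NV12/NV16 (presumably to produce the CbCr
 * ordering those formats require).
 */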
static void vfe_set_xbar_cfg(struct vfe_device *vfe, struct vfe_output *output,
			     u8 enable)
{
	struct vfe_line *line = container_of(output, struct vfe_line, output);
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	unsigned int i;

	for (i = 0; i < output->wm_num; i++) {
		if (i == 0) {
			reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA <<
				VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		} else if (i == 1) {
			reg = VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;
			if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV16)
				reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;
		} else {
			/* On current devices output->wm_num is always <= 2 */
			break;
		}

		if (output->wm_idx[i] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
				    reg);
	}
}

static void vfe_set_realign_cfg(struct vfe_device *vfe, struct vfe_line *line,
				u8 enable)
{
	/* empty */
}

static void vfe_set_rdi_cid(struct vfe_device *vfe, enum vfe_line_id id, u8 cid)
{
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id),
		    VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK);

	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id),
		    cid << VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT);
}

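/*
 * Queue a register update for the given line. The accumulated reg_update
 * mask is written under memory barriers so that earlier configuration
 * writes reach the hardware before the update command.
 */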
static void vfe_reg_update(struct vfe_device *vfe, enum vfe_line_id line_id)
{
	vfe->reg_update |= VFE_0_REG_UPDATE_line_n(line_id);
	wmb();
	writel_relaxed(vfe->reg_update, vfe->base + VFE_0_REG_UPDATE);
	wmb();
}

static inline void vfe_reg_update_clear(struct vfe_device *vfe,
					enum vfe_line_id line_id)
{
	vfe->reg_update &= ~VFE_0_REG_UPDATE_line_n(line_id);
}

static void vfe_enable_irq_wm_line(struct vfe_device *vfe, u8 wm,
				   enum vfe_line_id line_id, u8 enable)
{
	u32 irq_en0 = VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(wm) |
		      VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
	u32 irq_en1 = VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(wm) |
		      VFE_0_IRQ_MASK_1_RDIn_SOF(line_id);

	if (enable) {
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
	} else {
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
	}
}

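/*
 * Enable or disable the interrupts needed by the PIX line: SOF/EOF,
 * register update and composite done on mask 0, CAMIF error and per write
 * master bus overflow on mask 1. The composite IRQ mask groups the line's
 * write masters in an 8 bit field per composite index.
 */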
static void vfe_enable_irq_pix_line(struct vfe_device *vfe, u8 comp,
				    enum vfe_line_id line_id, u8 enable)
{
	struct vfe_output *output = &vfe->line[line_id].output;
	unsigned int i;
	u32 irq_en0;
	u32 irq_en1;
	u32 comp_mask = 0;

	irq_en0 = VFE_0_IRQ_MASK_0_CAMIF_SOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_CAMIF_EOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(comp);
	irq_en0 |= VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
	irq_en1 = VFE_0_IRQ_MASK_1_CAMIF_ERROR;
	for (i = 0; i < output->wm_num; i++) {
		irq_en1 |= VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(
							output->wm_idx[i]);
		comp_mask |= (1 << output->wm_idx[i]) << comp * 8;
	}

	if (enable) {
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	} else {
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	}
}

static void vfe_enable_irq_common(struct vfe_device *vfe)
{
	u32 irq_en0 = VFE_0_IRQ_MASK_0_RESET_ACK;
	u32 irq_en1 = VFE_0_IRQ_MASK_1_VIOLATION |
		      VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK;

	vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
	vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
}

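/*
 * Program the demux period, channel gains and even/odd sample patterns
 * according to the media bus code configured on the sink pad.
 */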
static void vfe_set_demux_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val, even_cfg, odd_cfg;

	writel_relaxed(VFE_0_DEMUX_CFG_PERIOD, vfe->base + VFE_0_DEMUX_CFG);

	val = VFE_0_DEMUX_GAIN_0_CH0_EVEN | VFE_0_DEMUX_GAIN_0_CH0_ODD;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_0);

	val = VFE_0_DEMUX_GAIN_1_CH1 | VFE_0_DEMUX_GAIN_1_CH2;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_1);

	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV;
		break;
	case MEDIA_BUS_FMT_YVYU8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU;
		break;
	case MEDIA_BUS_FMT_UYVY8_2X8:
	default:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY;
		break;
	case MEDIA_BUS_FMT_VYUY8_2X8:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY;
		break;
	}

	writel_relaxed(even_cfg, vfe->base + VFE_0_DEMUX_EVEN_CFG);
	writel_relaxed(odd_cfg, vfe->base + VFE_0_DEMUX_ODD_CFG);
}

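/*
 * Configure the encoder scaler from the sink format to the compose
 * rectangle. The phase multiplier uses 13 + interp_reso fractional bits;
 * the chroma path halves the output width, and also the output height for
 * the 4:2:0 formats (NV12/NV21).
 */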
static void vfe_set_scale_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	u16 input, output;
	u8 interp_reso;
	u32 phase_mult;

	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_Y_CFG);

	input = line->fmt[MSM_VFE_PAD_SINK].width;
	output = line->compose.width;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (13 + interp_reso)) / output;
	reg = (interp_reso << 20) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_PHASE);

	input = line->fmt[MSM_VFE_PAD_SINK].height;
	output = line->compose.height;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (13 + interp_reso)) / output;
	reg = (interp_reso << 20) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_PHASE);

	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_CBCR_CFG);

	input = line->fmt[MSM_VFE_PAD_SINK].width;
	output = line->compose.width / 2;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (13 + interp_reso)) / output;
	reg = (interp_reso << 20) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_PHASE);

	input = line->fmt[MSM_VFE_PAD_SINK].height;
	output = line->compose.height;
	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21)
		output = line->compose.height / 2;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (13 + interp_reso)) / output;
	reg = (interp_reso << 20) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_PHASE);
}

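/*
 * Program the encoder crop rectangles as first/last pixel and line pairs.
 * The chroma rectangle is half the luma width, and also half the height
 * for the 4:2:0 formats (NV12/NV21).
 */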
static void vfe_set_crop_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	u16 first, last;

	first = line->crop.left;
	last = line->crop.left + line->crop.width - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_WIDTH);

	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_HEIGHT);

	first = line->crop.left / 2;
	last = line->crop.left / 2 + line->crop.width / 2 - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_WIDTH);

	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21) {
		first = line->crop.top / 2;
		last = line->crop.top / 2 + line->crop.height / 2 - 1;
	}
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_HEIGHT);
}

static void vfe_set_clamp_cfg(struct vfe_device *vfe)
{
	u32 val = VFE_0_CLAMP_ENC_MAX_CFG_CH0 |
		VFE_0_CLAMP_ENC_MAX_CFG_CH1 |
		VFE_0_CLAMP_ENC_MAX_CFG_CH2;

	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MAX_CFG);

	val = VFE_0_CLAMP_ENC_MIN_CFG_CH0 |
		VFE_0_CLAMP_ENC_MIN_CFG_CH1 |
		VFE_0_CLAMP_ENC_MIN_CFG_CH2;

	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MIN_CFG);
}

static void vfe_set_qos(struct vfe_device *vfe)
{
	u32 val = VFE_0_BUS_BDG_QOS_CFG_0_CFG;
	u32 val7 = VFE_0_BUS_BDG_QOS_CFG_7_CFG;

	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_0);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_1);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_2);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_3);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_4);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_5);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_6);
	writel_relaxed(val7, vfe->base + VFE_0_BUS_BDG_QOS_CFG_7);
}

static void vfe_set_ds(struct vfe_device *vfe)
{
	/* empty */
}

static void vfe_set_cgc_override(struct vfe_device *vfe, u8 wm, u8 enable)
{
	u32 val = VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(wm);

	if (enable)
		vfe_reg_set(vfe, VFE_0_CGC_OVERRIDE_1, val);
	else
		vfe_reg_clr(vfe, VFE_0_CGC_OVERRIDE_1, val);

	wmb();
}

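/*
 * Configure CAMIF for the sink format: pixel pattern, frame and window
 * geometry (widths are doubled, matching the two bytes per pixel of the
 * 2X8 YUV bus formats), all-ones subsample patterns, and the VFE output
 * path enabled.
 */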
static void vfe_set_camif_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val;

	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR;
		break;
	case MEDIA_BUS_FMT_YVYU8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB;
		break;
	case MEDIA_BUS_FMT_UYVY8_2X8:
	default:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY;
		break;
	case MEDIA_BUS_FMT_VYUY8_2X8:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY;
		break;
	}

	writel_relaxed(val, vfe->base + VFE_0_CORE_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].width * 2;
	val |= line->fmt[MSM_VFE_PAD_SINK].height << 16;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_FRAME_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_WIDTH_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_HEIGHT_CFG);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_SUBSAMPLE_CFG_0);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN);

	val = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), val);

	val = VFE_0_CAMIF_CFG_VFE_OUTPUT_EN;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_CFG);
}

static void vfe_set_camif_cmd(struct vfe_device *vfe, u8 enable)
{
	u32 cmd;

	cmd = VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS | VFE_0_CAMIF_CMD_NO_CHANGE;
	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
	wmb();

	if (enable)
		cmd = VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY;
	else
		cmd = VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY;

	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
}

static void vfe_set_module_cfg(struct vfe_device *vfe, u8 enable)
{
	u32 val = VFE_0_MODULE_CFG_DEMUX |
		  VFE_0_MODULE_CFG_CHROMA_UPSAMPLE |
		  VFE_0_MODULE_CFG_SCALE_ENC |
		  VFE_0_MODULE_CFG_CROP_ENC;

	if (enable)
		writel_relaxed(val, vfe->base + VFE_0_MODULE_CFG);
	else
		writel_relaxed(0x0, vfe->base + VFE_0_MODULE_CFG);
}

static int vfe_camif_wait_for_stop(struct vfe_device *vfe, struct device *dev)
{
	u32 val;
	int ret;

	ret = readl_poll_timeout(vfe->base + VFE_0_CAMIF_STATUS,
				 val,
				 (val & VFE_0_CAMIF_STATUS_HALT),
				 CAMIF_TIMEOUT_SLEEP_US,
				 CAMIF_TIMEOUT_ALL_US);
	if (ret < 0)
		dev_err(dev, "%s: camif stop timeout\n", __func__);

	return ret;
}

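/*
 * Read and clear both IRQ status registers, then issue a global IRQ clear
 * command. The barrier orders the per-register clears before the global
 * clear command.
 */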
static void vfe_isr_read(struct vfe_device *vfe, u32 *value0, u32 *value1)
{
	*value0 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_0);
	*value1 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_1);

	writel_relaxed(*value0, vfe->base + VFE_0_IRQ_CLEAR_0);
	writel_relaxed(*value1, vfe->base + VFE_0_IRQ_CLEAR_1);

	wmb();
	writel_relaxed(VFE_0_IRQ_CMD_GLOBAL_CLEAR, vfe->base + VFE_0_IRQ_CMD);
}

static void vfe_violation_read(struct vfe_device *vfe)
{
	u32 violation = readl_relaxed(vfe->base + VFE_0_VIOLATION_STATUS);

	pr_err_ratelimited("VFE: violation = 0x%08x\n", violation);
}

/*
 * vfe_isr - VFE module interrupt handler
 * @irq: Interrupt line
 * @dev: VFE device
 *
 * Return IRQ_HANDLED on success
 */
static irqreturn_t vfe_isr(int irq, void *dev)
{
	struct vfe_device *vfe = dev;
	u32 value0, value1;
	int i, j;

	vfe->ops->isr_read(vfe, &value0, &value1);

	dev_dbg(vfe->camss->dev, "VFE: status0 = 0x%08x, status1 = 0x%08x\n",
		value0, value1);

	if (value0 & VFE_0_IRQ_STATUS_0_RESET_ACK)
		vfe->isr_ops.reset_ack(vfe);

	if (value1 & VFE_0_IRQ_STATUS_1_VIOLATION)
		vfe->ops->violation_read(vfe);

	if (value1 & VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK)
		vfe->isr_ops.halt_ack(vfe);

	for (i = VFE_LINE_RDI0; i <= VFE_LINE_PIX; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(i))
			vfe->isr_ops.reg_update(vfe, i);

	if (value0 & VFE_0_IRQ_STATUS_0_CAMIF_SOF)
		vfe->isr_ops.sof(vfe, VFE_LINE_PIX);

	for (i = VFE_LINE_RDI0; i <= VFE_LINE_RDI2; i++)
		if (value1 & VFE_0_IRQ_STATUS_1_RDIn_SOF(i))
			vfe->isr_ops.sof(vfe, i);

	for (i = 0; i < MSM_VFE_COMPOSITE_IRQ_NUM; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(i)) {
			vfe->isr_ops.comp_done(vfe, i);
			for (j = 0; j < ARRAY_SIZE(vfe->wm_output_map); j++)
				if (vfe->wm_output_map[j] == VFE_LINE_PIX)
					value0 &= ~VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(j);
		}

	for (i = 0; i < MSM_VFE_IMAGE_MASTERS_NUM; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(i))
			vfe->isr_ops.wm_done(vfe, i);

	return IRQ_HANDLED;
}

static const struct vfe_hw_ops_gen1 vfe_ops_gen1_4_1 = {
	.bus_connect_wm_to_rdi = vfe_bus_connect_wm_to_rdi,
	.bus_disconnect_wm_from_rdi = vfe_bus_disconnect_wm_from_rdi,
	.bus_enable_wr_if = vfe_bus_enable_wr_if,
	.bus_reload_wm = vfe_bus_reload_wm,
	.camif_wait_for_stop = vfe_camif_wait_for_stop,
	.enable_irq_common = vfe_enable_irq_common,
	.enable_irq_pix_line = vfe_enable_irq_pix_line,
	.enable_irq_wm_line = vfe_enable_irq_wm_line,
	.get_ub_size = vfe_get_ub_size,
	.halt_clear = vfe_halt_clear,
	.halt_request = vfe_halt_request,
	.set_camif_cfg = vfe_set_camif_cfg,
	.set_camif_cmd = vfe_set_camif_cmd,
	.set_cgc_override = vfe_set_cgc_override,
	.set_clamp_cfg = vfe_set_clamp_cfg,
	.set_crop_cfg = vfe_set_crop_cfg,
	.set_demux_cfg = vfe_set_demux_cfg,
	.set_ds = vfe_set_ds,
	.set_module_cfg = vfe_set_module_cfg,
	.set_qos = vfe_set_qos,
	.set_rdi_cid = vfe_set_rdi_cid,
	.set_realign_cfg = vfe_set_realign_cfg,
	.set_scale_cfg = vfe_set_scale_cfg,
	.set_xbar_cfg = vfe_set_xbar_cfg,
	.wm_enable = vfe_wm_enable,
	.wm_frame_based = vfe_wm_frame_based,
	.wm_get_ping_pong_status = vfe_wm_get_ping_pong_status,
	.wm_line_based = vfe_wm_line_based,
	.wm_set_framedrop_pattern = vfe_wm_set_framedrop_pattern,
	.wm_set_framedrop_period = vfe_wm_set_framedrop_period,
	.wm_set_ping_addr = vfe_wm_set_ping_addr,
	.wm_set_pong_addr = vfe_wm_set_pong_addr,
	.wm_set_subsample = vfe_wm_set_subsample,
	.wm_set_ub_cfg = vfe_wm_set_ub_cfg,
};

static void vfe_subdev_init(struct device *dev, struct vfe_device *vfe)
{
	vfe->isr_ops = vfe_isr_ops_gen1;
	vfe->ops_gen1 = &vfe_ops_gen1_4_1;
	vfe->video_ops = vfe_video_ops_gen1;

	vfe->line_num = VFE_LINE_NUM_GEN1;
}

const struct vfe_hw_ops vfe_ops_4_1 = {
	.global_reset = vfe_global_reset,
	.hw_version_read = vfe_hw_version_read,
	.isr_read = vfe_isr_read,
	.isr = vfe_isr,
	.reg_update_clear = vfe_reg_update_clear,
	.reg_update = vfe_reg_update,
	.subdev_init = vfe_subdev_init,
	.vfe_disable = vfe_gen1_disable,
	.vfe_enable = vfe_gen1_enable,
	.vfe_halt = vfe_gen1_halt,
	.violation_read = vfe_violation_read,
};