// SPDX-License-Identifier: GPL-2.0
/*
 * Rockchip VPU codec vp8 decode driver
 *
 * Copyright (C) 2014 Rockchip Electronics Co., Ltd.
 *	ZhiChao Yu <zhichao.yu@rock-chips.com>
 *
 * Copyright (C) 2014 Google LLC.
 *      Tomasz Figa <tfiga@chromium.org>
 *
 * Copyright (C) 2015 Rockchip Electronics Co., Ltd.
 *      Alpha Lin <alpha.lin@rock-chips.com>
 */

#include <media/v4l2-mem2mem.h>

#include "hantro_hw.h"
#include "hantro.h"
#include "hantro_g1_regs.h"

#define VDPU_REG_DEC_CTRL0			0x0c8
#define VDPU_REG_STREAM_LEN			0x0cc
#define VDPU_REG_DEC_FORMAT			0x0d4
#define     VDPU_REG_DEC_CTRL0_DEC_MODE(x)		(((x) & 0xf) << 0)
#define VDPU_REG_DATA_ENDIAN			0x0d8
#define     VDPU_REG_CONFIG_DEC_STRENDIAN_E		BIT(5)
#define     VDPU_REG_CONFIG_DEC_STRSWAP32_E		BIT(4)
#define     VDPU_REG_CONFIG_DEC_OUTSWAP32_E		BIT(3)
#define     VDPU_REG_CONFIG_DEC_INSWAP32_E		BIT(2)
#define     VDPU_REG_CONFIG_DEC_OUT_ENDIAN		BIT(1)
#define     VDPU_REG_CONFIG_DEC_IN_ENDIAN		BIT(0)
#define VDPU_REG_AXI_CTRL			0x0e0
#define     VDPU_REG_CONFIG_DEC_MAX_BURST(x)		(((x) & 0x1f) << 16)
#define VDPU_REG_EN_FLAGS			0x0e4
#define     VDPU_REG_DEC_CTRL0_PIC_INTER_E		BIT(14)
#define     VDPU_REG_CONFIG_DEC_TIMEOUT_E		BIT(5)
#define     VDPU_REG_CONFIG_DEC_CLK_GATE_E		BIT(4)
#define VDPU_REG_PRED_FLT			0x0ec
#define VDPU_REG_ADDR_QTABLE			0x0f4
#define VDPU_REG_ADDR_DST			0x0fc
#define VDPU_REG_ADDR_STR			0x100
#define VDPU_REG_VP8_PIC_MB_SIZE		0x1e0
#define VDPU_REG_VP8_DCT_START_BIT		0x1e4
#define     VDPU_REG_DEC_CTRL4_VC1_HEIGHT_EXT		BIT(13)
#define     VDPU_REG_DEC_CTRL4_BILIN_MC_E		BIT(12)
#define VDPU_REG_VP8_CTRL0			0x1e8
#define VDPU_REG_VP8_DATA_VAL			0x1f0
#define VDPU_REG_PRED_FLT7			0x1f4
#define VDPU_REG_PRED_FLT8			0x1f8
#define VDPU_REG_PRED_FLT9			0x1fc
#define VDPU_REG_PRED_FLT10			0x200
#define VDPU_REG_FILTER_LEVEL			0x204
#define VDPU_REG_VP8_QUANTER0			0x208
#define VDPU_REG_VP8_ADDR_REF0			0x20c
#define VDPU_REG_FILTER_MB_ADJ			0x210
#define     VDPU_REG_REF_PIC_FILT_TYPE_E		BIT(31)
#define     VDPU_REG_REF_PIC_FILT_SHARPNESS(x)		(((x) & 0x7) << 28)
#define VDPU_REG_FILTER_REF_ADJ			0x214
#define VDPU_REG_VP8_ADDR_REF2_5(i)		(0x218 + ((i) * 0x4))
#define     VDPU_REG_VP8_GREF_SIGN_BIAS			BIT(0)
#define     VDPU_REG_VP8_AREF_SIGN_BIAS			BIT(0)
#define VDPU_REG_VP8_DCT_BASE(i)		\
		(0x230 + ((((i) < 5) ? (i) : ((i) + 1)) * 0x4))
#define VDPU_REG_VP8_ADDR_CTRL_PART		0x244
#define VDPU_REG_VP8_SEGMENT_VAL		0x254
#define     VDPU_REG_FWD_PIC1_SEGMENT_BASE(x)		((x) << 0)
#define     VDPU_REG_FWD_PIC1_SEGMENT_UPD_E		BIT(1)
#define     VDPU_REG_FWD_PIC1_SEGMENT_E			BIT(0)
#define VDPU_REG_VP8_DCT_START_BIT2		0x258
#define VDPU_REG_VP8_QUANTER1			0x25c
#define VDPU_REG_VP8_QUANTER2			0x260
#define VDPU_REG_PRED_FLT1			0x264
#define VDPU_REG_PRED_FLT2			0x268
#define VDPU_REG_PRED_FLT3			0x26c
#define VDPU_REG_PRED_FLT4			0x270
#define VDPU_REG_PRED_FLT5			0x274
#define VDPU_REG_PRED_FLT6			0x278

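/*
 * The register field tables below use the positional form of struct
 * hantro_reg: { base register, shift of the field's LSB, field mask }.
 *
 * The base address of the first DCT partition is written to
 * VDPU_REG_ADDR_STR; the remaining ones go through VDPU_REG_VP8_DCT_BASE(i),
 * which skips over the control partition register at 0x244.
 */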
static const struct hantro_reg vp8_dec_dct_base[8] = {
	{ VDPU_REG_ADDR_STR, 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(0), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(1), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(2), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(3), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(4), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(5), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(6), 0, 0xffffffff },
};

static const struct hantro_reg vp8_dec_lf_level[4] = {
	{ VDPU_REG_FILTER_LEVEL, 18, 0x3f },
	{ VDPU_REG_FILTER_LEVEL, 12, 0x3f },
	{ VDPU_REG_FILTER_LEVEL, 6, 0x3f },
	{ VDPU_REG_FILTER_LEVEL, 0, 0x3f },
};

static const struct hantro_reg vp8_dec_mb_adj[4] = {
	{ VDPU_REG_FILTER_MB_ADJ, 21, 0x7f },
	{ VDPU_REG_FILTER_MB_ADJ, 14, 0x7f },
	{ VDPU_REG_FILTER_MB_ADJ, 7, 0x7f },
	{ VDPU_REG_FILTER_MB_ADJ, 0, 0x7f },
};

static const struct hantro_reg vp8_dec_ref_adj[4] = {
	{ VDPU_REG_FILTER_REF_ADJ, 21, 0x7f },
	{ VDPU_REG_FILTER_REF_ADJ, 14, 0x7f },
	{ VDPU_REG_FILTER_REF_ADJ, 7, 0x7f },
	{ VDPU_REG_FILTER_REF_ADJ, 0, 0x7f },
};

static const struct hantro_reg vp8_dec_quant[4] = {
	{ VDPU_REG_VP8_QUANTER0, 11, 0x7ff },
	{ VDPU_REG_VP8_QUANTER0, 0, 0x7ff },
	{ VDPU_REG_VP8_QUANTER1, 11, 0x7ff },
	{ VDPU_REG_VP8_QUANTER1, 0, 0x7ff },
};

static const struct hantro_reg vp8_dec_quant_delta[5] = {
	{ VDPU_REG_VP8_QUANTER0, 27, 0x1f },
	{ VDPU_REG_VP8_QUANTER0, 22, 0x1f },
	{ VDPU_REG_VP8_QUANTER1, 27, 0x1f },
	{ VDPU_REG_VP8_QUANTER1, 22, 0x1f },
	{ VDPU_REG_VP8_QUANTER2, 27, 0x1f },
};

static const struct hantro_reg vp8_dec_dct_start_bits[8] = {
	{ VDPU_REG_VP8_CTRL0, 26, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT, 26, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT, 20, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 24, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 18, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 12, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 6, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 0, 0x3f },
};

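/*
 * Prediction filter tap registers: 8 sub-pixel positions with 6 taps each.
 * Entries with a zero base register are placeholders that cfg_tap() skips;
 * the outer taps of some positions are only 2 bits wide and live in
 * VDPU_REG_PRED_FLT10.
 */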
static const struct hantro_reg vp8_dec_pred_bc_tap[8][6] = {
	{
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT1, 22, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT1, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT1, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT2, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT2, 12, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ VDPU_REG_PRED_FLT10, 10, 0x3 },
		{ VDPU_REG_PRED_FLT2, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT3, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT3, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT3, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 8, 0x3},
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT4, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT4, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT4, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT5, 22, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ VDPU_REG_PRED_FLT10, 6, 0x3 },
		{ VDPU_REG_PRED_FLT5, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT5, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT6, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT6, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 4, 0x3 },
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT6, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT7, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT7, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT7, 2, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ VDPU_REG_PRED_FLT10, 2, 0x3 },
		{ VDPU_REG_PRED_FLT8, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT8, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT8, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT9, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 0, 0x3 },
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT9, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT9, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 12, 0x3ff },
		{ 0, 0, 0},
	},
};

static const struct hantro_reg vp8_dec_mb_start_bit = {
	.base = VDPU_REG_VP8_CTRL0,
	.shift = 18,
	.mask = 0x3f
};

static const struct hantro_reg vp8_dec_mb_aligned_data_len = {
	.base = VDPU_REG_VP8_DATA_VAL,
	.shift = 0,
	.mask = 0x3fffff
};

static const struct hantro_reg vp8_dec_num_dct_partitions = {
	.base = VDPU_REG_VP8_DATA_VAL,
	.shift = 24,
	.mask = 0xf
};

static const struct hantro_reg vp8_dec_stream_len = {
	.base = VDPU_REG_STREAM_LEN,
	.shift = 0,
	.mask = 0xffffff
};

static const struct hantro_reg vp8_dec_mb_width = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 23,
	.mask = 0x1ff
};

static const struct hantro_reg vp8_dec_mb_height = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 11,
	.mask = 0xff
};

static const struct hantro_reg vp8_dec_mb_width_ext = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 3,
	.mask = 0x7
};

static const struct hantro_reg vp8_dec_mb_height_ext = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 0,
	.mask = 0x7
};

static const struct hantro_reg vp8_dec_bool_range = {
	.base = VDPU_REG_VP8_CTRL0,
	.shift = 0,
	.mask = 0xff
};

static const struct hantro_reg vp8_dec_bool_value = {
	.base = VDPU_REG_VP8_CTRL0,
	.shift = 8,
	.mask = 0xff
};

static const struct hantro_reg vp8_dec_filter_disable = {
	.base = VDPU_REG_DEC_CTRL0,
	.shift = 8,
	.mask = 1
};

static const struct hantro_reg vp8_dec_skip_mode = {
	.base = VDPU_REG_DEC_CTRL0,
	.shift = 9,
	.mask = 1
};

static const struct hantro_reg vp8_dec_start_dec = {
	.base = VDPU_REG_EN_FLAGS,
	.shift = 0,
	.mask = 1
};

static void cfg_lf(struct hantro_ctx *ctx,
		   const struct v4l2_ctrl_vp8_frame *hdr)
{
	const struct v4l2_vp8_segment *seg = &hdr->segment;
	const struct v4l2_vp8_loop_filter *lf = &hdr->lf;
	struct hantro_dev *vpu = ctx->dev;
	unsigned int i;
	u32 reg;

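	/*
	 * Loop filter level: a single global level when segmentation is
	 * disabled, the global level plus a per-segment delta (clamped to
	 * 0..63) in delta mode, or absolute per-segment values otherwise.
	 */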
	if (!(seg->flags & V4L2_VP8_SEGMENT_FLAG_ENABLED)) {
		hantro_reg_write(vpu, &vp8_dec_lf_level[0], lf->level);
	} else if (seg->flags & V4L2_VP8_SEGMENT_FLAG_DELTA_VALUE_MODE) {
		for (i = 0; i < 4; i++) {
			u32 lf_level = clamp(lf->level + seg->lf_update[i],
					     0, 63);

			hantro_reg_write(vpu, &vp8_dec_lf_level[i], lf_level);
		}
	} else {
		for (i = 0; i < 4; i++)
			hantro_reg_write(vpu, &vp8_dec_lf_level[i],
					 seg->lf_update[i]);
	}

	reg = VDPU_REG_REF_PIC_FILT_SHARPNESS(lf->sharpness_level);
	if (lf->flags & V4L2_VP8_LF_FILTER_TYPE_SIMPLE)
		reg |= VDPU_REG_REF_PIC_FILT_TYPE_E;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_FILTER_MB_ADJ);

	if (lf->flags & V4L2_VP8_LF_ADJ_ENABLE) {
		for (i = 0; i < 4; i++) {
			hantro_reg_write(vpu, &vp8_dec_mb_adj[i],
					 lf->mb_mode_delta[i]);
			hantro_reg_write(vpu, &vp8_dec_ref_adj[i],
					 lf->ref_frm_delta[i]);
		}
	}
}

static void cfg_qp(struct hantro_ctx *ctx,
		   const struct v4l2_ctrl_vp8_frame *hdr)
{
	const struct v4l2_vp8_quantization *q = &hdr->quant;
	const struct v4l2_vp8_segment *seg = &hdr->segment;
	struct hantro_dev *vpu = ctx->dev;
	unsigned int i;

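	/*
	 * Quantizer index: a single global y_ac_qi when segmentation is
	 * disabled, y_ac_qi plus a per-segment delta (clamped to 0..127) in
	 * delta mode, or absolute per-segment values otherwise.
	 */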
	if (!(seg->flags & V4L2_VP8_SEGMENT_FLAG_ENABLED)) {
		hantro_reg_write(vpu, &vp8_dec_quant[0], q->y_ac_qi);
	} else if (seg->flags & V4L2_VP8_SEGMENT_FLAG_DELTA_VALUE_MODE) {
		for (i = 0; i < 4; i++) {
			u32 quant = clamp(q->y_ac_qi + seg->quant_update[i],
					  0, 127);

			hantro_reg_write(vpu, &vp8_dec_quant[i], quant);
		}
	} else {
		for (i = 0; i < 4; i++)
			hantro_reg_write(vpu, &vp8_dec_quant[i],
					 seg->quant_update[i]);
	}

	hantro_reg_write(vpu, &vp8_dec_quant_delta[0], q->y_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[1], q->y2_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[2], q->y2_ac_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[3], q->uv_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[4], q->uv_ac_delta);
}

static void cfg_parts(struct hantro_ctx *ctx,
		      const struct v4l2_ctrl_vp8_frame *hdr)
{
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_src;
	u32 first_part_offset = V4L2_VP8_FRAME_IS_KEY_FRAME(hdr) ? 10 : 3;
	u32 mb_size, mb_offset_bytes, mb_offset_bits, mb_start_bits;
	u32 dct_size_part_size, dct_part_offset;
	dma_addr_t src_dma;
	u32 dct_part_total_len = 0;
	u32 count = 0;
	unsigned int i;

	vb2_src = hantro_get_src_buf(ctx);
	src_dma = vb2_dma_contig_plane_dma_addr(&vb2_src->vb2_buf, 0);

	/*
	 * Calculate control partition mb data info
	 * @first_part_header_bits:	bit offset of the mb data from the
	 *				start of the first partition
	 * @mb_offset_bits:		bit offset of the mb data from the
	 *				src_dma base address
	 * @mb_offset_bytes:		byte offset of the mb data from the
	 *				src_dma base address
	 * @mb_start_bits:		bit offset of the mb data from its
	 *				64-bit aligned address
	 */
	mb_offset_bits = first_part_offset * 8 +
			 hdr->first_part_header_bits + 8;
	mb_offset_bytes = mb_offset_bits / 8;
	mb_start_bits = mb_offset_bits -
			(mb_offset_bytes & (~DEC_8190_ALIGN_MASK)) * 8;
	mb_size = hdr->first_part_size -
		  (mb_offset_bytes - first_part_offset) +
		  (mb_offset_bytes & DEC_8190_ALIGN_MASK);

	/* Macroblock data aligned base addr */
	vdpu_write_relaxed(vpu, (mb_offset_bytes & (~DEC_8190_ALIGN_MASK)) +
			   src_dma, VDPU_REG_VP8_ADDR_CTRL_PART);
	hantro_reg_write(vpu, &vp8_dec_mb_start_bit, mb_start_bits);
	hantro_reg_write(vpu, &vp8_dec_mb_aligned_data_len, mb_size);

	/*
	 * Calculate DCT partition info
	 * @dct_size_part_size: size of the area holding the DCT partition
	 *			sizes; every DCT partition except the last
	 *			has a 3-byte size field
	 * @dct_part_offset:	byte offset of the DCT partitions from the
	 *			src_dma base address
	 * @dct_part_total_len: total size of all DCT partitions
	 */
	dct_size_part_size = (hdr->num_dct_parts - 1) * 3;
	dct_part_offset = first_part_offset + hdr->first_part_size;
	for (i = 0; i < hdr->num_dct_parts; i++)
		dct_part_total_len += hdr->dct_part_sizes[i];
	dct_part_total_len += dct_size_part_size;
	dct_part_total_len += (dct_part_offset & DEC_8190_ALIGN_MASK);

	/* Number of DCT partitions */
	hantro_reg_write(vpu, &vp8_dec_num_dct_partitions,
			 hdr->num_dct_parts - 1);

	/* DCT partition length */
	hantro_reg_write(vpu, &vp8_dec_stream_len, dct_part_total_len);

	/* DCT partitions base address */
	for (i = 0; i < hdr->num_dct_parts; i++) {
		u32 byte_offset = dct_part_offset + dct_size_part_size + count;
		u32 base_addr = byte_offset + src_dma;

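		/*
		 * Program the 8-byte aligned base address; the remaining
		 * byte offset is passed to the hardware as a bit offset.
		 */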
		hantro_reg_write(vpu, &vp8_dec_dct_base[i],
				 base_addr & (~DEC_8190_ALIGN_MASK));

		hantro_reg_write(vpu, &vp8_dec_dct_start_bits[i],
				 (byte_offset & DEC_8190_ALIGN_MASK) * 8);

		count += hdr->dct_part_sizes[i];
	}
}

/*
 * prediction filter taps
 * normal 6-tap filters
 */
static void cfg_tap(struct hantro_ctx *ctx,
		    const struct v4l2_ctrl_vp8_frame *hdr)
{
	struct hantro_dev *vpu = ctx->dev;
	int i, j;

	if ((hdr->version & 0x03) != 0)
		return; /* Tap filter not used. */

	for (i = 0; i < 8; i++) {
		for (j = 0; j < 6; j++) {
			if (vp8_dec_pred_bc_tap[i][j].base != 0)
				hantro_reg_write(vpu,
						 &vp8_dec_pred_bc_tap[i][j],
						 hantro_vp8_dec_mc_filter[i][j]);
		}
	}
}

static void cfg_ref(struct hantro_ctx *ctx,
		    const struct v4l2_ctrl_vp8_frame *hdr,
		    struct vb2_v4l2_buffer *vb2_dst)
{
	struct hantro_dev *vpu = ctx->dev;
	dma_addr_t ref;

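	/*
	 * If a reference frame cannot be found, fall back to the destination
	 * buffer so the hardware always has a valid address to fetch from.
	 */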
	ref = hantro_get_ref(ctx, hdr->last_frame_ts);
	if (!ref) {
		vpu_debug(0, "failed to find last frame ts=%llu\n",
			  hdr->last_frame_ts);
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	}
	vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF0);

	ref = hantro_get_ref(ctx, hdr->golden_frame_ts);
	if (!ref && hdr->golden_frame_ts)
		vpu_debug(0, "failed to find golden frame ts=%llu\n",
			  hdr->golden_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	if (hdr->flags & V4L2_VP8_FRAME_FLAG_SIGN_BIAS_GOLDEN)
		ref |= VDPU_REG_VP8_GREF_SIGN_BIAS;
	vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF2_5(2));

	ref = hantro_get_ref(ctx, hdr->alt_frame_ts);
	if (!ref && hdr->alt_frame_ts)
		vpu_debug(0, "failed to find alt frame ts=%llu\n",
			  hdr->alt_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	if (hdr->flags & V4L2_VP8_FRAME_FLAG_SIGN_BIAS_ALT)
		ref |= VDPU_REG_VP8_AREF_SIGN_BIAS;
	vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF2_5(3));
}

static void cfg_buffers(struct hantro_ctx *ctx,
			const struct v4l2_ctrl_vp8_frame *hdr,
			struct vb2_v4l2_buffer *vb2_dst)
{
	const struct v4l2_vp8_segment *seg = &hdr->segment;
	struct hantro_dev *vpu = ctx->dev;
	dma_addr_t dst_dma;
	u32 reg;

	/* Set probability table buffer address */
	vdpu_write_relaxed(vpu, ctx->vp8_dec.prob_tbl.dma,
			   VDPU_REG_ADDR_QTABLE);

	/* Set segment map address */
	reg = VDPU_REG_FWD_PIC1_SEGMENT_BASE(ctx->vp8_dec.segment_map.dma);
	if (seg->flags & V4L2_VP8_SEGMENT_FLAG_ENABLED) {
		reg |= VDPU_REG_FWD_PIC1_SEGMENT_E;
		if (seg->flags & V4L2_VP8_SEGMENT_FLAG_UPDATE_MAP)
			reg |= VDPU_REG_FWD_PIC1_SEGMENT_UPD_E;
	}
	vdpu_write_relaxed(vpu, reg, VDPU_REG_VP8_SEGMENT_VAL);

	/* set output frame buffer address */
	dst_dma = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	vdpu_write_relaxed(vpu, dst_dma, VDPU_REG_ADDR_DST);
}

int rockchip_vpu2_vp8_dec_run(struct hantro_ctx *ctx)
{
	const struct v4l2_ctrl_vp8_frame *hdr;
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_dst;
	size_t height = ctx->dst_fmt.height;
	size_t width = ctx->dst_fmt.width;
	u32 mb_width, mb_height;
	u32 reg;

	hantro_start_prepare_run(ctx);

	hdr = hantro_get_ctrl(ctx, V4L2_CID_STATELESS_VP8_FRAME);
	if (WARN_ON(!hdr))
		return -EINVAL;

	/* Reset the segment_map buffer on keyframes */
	if (V4L2_VP8_FRAME_IS_KEY_FRAME(hdr) && ctx->vp8_dec.segment_map.cpu)
		memset(ctx->vp8_dec.segment_map.cpu, 0,
		       ctx->vp8_dec.segment_map.size);

	hantro_vp8_prob_update(ctx, hdr);

	/*
	 * Extensive testing shows that the hardware does not properly
	 * clear the internal state from a previous decoding run. This
	 * causes corruption in decoded frames for multi-instance use cases.
	 * A soft reset before programming the registers has been found
	 * to resolve those problems.
	 */
	ctx->codec_ops->reset(ctx);

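	/* Enable the decoder timeout and clock gating; flag inter frames. */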
	reg = VDPU_REG_CONFIG_DEC_TIMEOUT_E
		| VDPU_REG_CONFIG_DEC_CLK_GATE_E;
	if (!V4L2_VP8_FRAME_IS_KEY_FRAME(hdr))
		reg |= VDPU_REG_DEC_CTRL0_PIC_INTER_E;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_EN_FLAGS);

	reg = VDPU_REG_CONFIG_DEC_STRENDIAN_E
		| VDPU_REG_CONFIG_DEC_INSWAP32_E
		| VDPU_REG_CONFIG_DEC_STRSWAP32_E
		| VDPU_REG_CONFIG_DEC_OUTSWAP32_E
		| VDPU_REG_CONFIG_DEC_IN_ENDIAN
		| VDPU_REG_CONFIG_DEC_OUT_ENDIAN;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_DATA_ENDIAN);

	reg = VDPU_REG_CONFIG_DEC_MAX_BURST(16);
	vdpu_write_relaxed(vpu, reg, VDPU_REG_AXI_CTRL);

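	/* Select the VP8 decoding mode. */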
	reg = VDPU_REG_DEC_CTRL0_DEC_MODE(10);
	vdpu_write_relaxed(vpu, reg, VDPU_REG_DEC_FORMAT);

	if (!(hdr->flags & V4L2_VP8_FRAME_FLAG_MB_NO_SKIP_COEFF))
		hantro_reg_write(vpu, &vp8_dec_skip_mode, 1);
	if (hdr->lf.level == 0)
		hantro_reg_write(vpu, &vp8_dec_filter_disable, 1);

	/* Frame dimensions */
	mb_width = MB_WIDTH(width);
	mb_height = MB_HEIGHT(height);

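	/*
	 * The low bits of the macroblock counts go into the base width and
	 * height fields; the remaining upper bits go into the _ext fields.
	 */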
	hantro_reg_write(vpu, &vp8_dec_mb_width, mb_width);
	hantro_reg_write(vpu, &vp8_dec_mb_height, mb_height);
	hantro_reg_write(vpu, &vp8_dec_mb_width_ext, mb_width >> 9);
	hantro_reg_write(vpu, &vp8_dec_mb_height_ext, mb_height >> 8);

	/* Boolean decoder */
	hantro_reg_write(vpu, &vp8_dec_bool_range, hdr->coder_state.range);
	hantro_reg_write(vpu, &vp8_dec_bool_value, hdr->coder_state.value);

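	/*
	 * Non-zero VP8 versions use the bilinear MC filter instead of the
	 * 6-tap filter programmed by cfg_tap().
	 */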
	reg = vdpu_read(vpu, VDPU_REG_VP8_DCT_START_BIT);
	if (hdr->version != 3)
		reg |= VDPU_REG_DEC_CTRL4_VC1_HEIGHT_EXT;
	if (hdr->version & 0x3)
		reg |= VDPU_REG_DEC_CTRL4_BILIN_MC_E;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_VP8_DCT_START_BIT);

	cfg_lf(ctx, hdr);
	cfg_qp(ctx, hdr);
	cfg_parts(ctx, hdr);
	cfg_tap(ctx, hdr);

	vb2_dst = hantro_get_dst_buf(ctx);
	cfg_ref(ctx, hdr, vb2_dst);
	cfg_buffers(ctx, hdr, vb2_dst);

	hantro_end_prepare_run(ctx);

	hantro_reg_write(vpu, &vp8_dec_start_dec, 1);

	return 0;
}