// SPDX-License-Identifier: GPL-2.0
/*
 * Rockchip Video Decoder VP9 backend
 *
 * Copyright (C) 2019 Collabora, Ltd.
 *	Boris Brezillon <boris.brezillon@collabora.com>
 * Copyright (C) 2021 Collabora, Ltd.
 *	Andrzej Pietrasiewicz <andrzej.p@collabora.com>
 *
 * Copyright (C) 2016 Rockchip Electronics Co., Ltd.
 *	Alpha Lin <Alpha.Lin@rock-chips.com>
 */

/*
 * To follow the VP9 spec while reading this driver, start from
 * rkvdec_vp9_run(), then move on to rkvdec_vp9_done().
 */

#include <linux/kernel.h>
#include <linux/vmalloc.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-vp9.h>

#include "rkvdec.h"
#include "rkvdec-regs.h"

#define RKVDEC_VP9_PROBE_SIZE		4864
#define RKVDEC_VP9_COUNT_SIZE		13232
#define RKVDEC_VP9_MAX_SEGMAP_SIZE	73728

struct rkvdec_vp9_intra_mode_probs {
	u8 y_mode[105];
	u8 uv_mode[23];
};

struct rkvdec_vp9_intra_only_frame_probs {
	u8 coef_intra[4][2][128];
	struct rkvdec_vp9_intra_mode_probs intra_mode[10];
};

struct rkvdec_vp9_inter_frame_probs {
	u8 y_mode[4][9];
	u8 comp_mode[5];
	u8 comp_ref[5];
	u8 single_ref[5][2];
	u8 inter_mode[7][3];
	u8 interp_filter[4][2];
	u8 padding0[11];
	u8 coef[2][4][2][128];
	u8 uv_mode_0_2[3][9];
	u8 padding1[5];
	u8 uv_mode_3_5[3][9];
	u8 padding2[5];
	u8 uv_mode_6_8[3][9];
	u8 padding3[5];
	u8 uv_mode_9[9];
	u8 padding4[7];
	u8 padding5[16];
	struct {
		u8 joint[3];
		u8 sign[2];
		u8 classes[2][10];
		u8 class0_bit[2];
		u8 bits[2][10];
		u8 class0_fr[2][2][3];
		u8 fr[2][3];
		u8 class0_hp[2];
		u8 hp[2];
	} mv;
};

struct rkvdec_vp9_probs {
	u8 partition[16][3];
	u8 pred[3];
	u8 tree[7];
	u8 skip[3];
	u8 tx32[2][3];
	u8 tx16[2][2];
	u8 tx8[2][1];
	u8 is_inter[4];
	/* 128 bit alignment */
	u8 padding0[3];
	union {
		struct rkvdec_vp9_inter_frame_probs inter;
		struct rkvdec_vp9_intra_only_frame_probs intra_only;
	};
	/* 128 bit alignment */
	u8 padding1[11];
};

/* Data structure describing auxiliary buffer format. */
struct rkvdec_vp9_priv_tbl {
	struct rkvdec_vp9_probs probs;
	u8 segmap[2][RKVDEC_VP9_MAX_SEGMAP_SIZE];
};

struct rkvdec_vp9_refs_counts {
	u32 eob[2];
	u32 coeff[3];
};

struct rkvdec_vp9_inter_frame_symbol_counts {
	u32 partition[16][4];
	u32 skip[3][2];
	u32 inter[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	u32 y_mode[4][10];
	u32 uv_mode[10][10];
	u32 comp[5][2];
	u32 comp_ref[5][2];
	u32 single_ref[5][2][2];
	u32 mv_mode[7][4];
	u32 filter[4][3];
	u32 mv_joint[4];
	u32 sign[2][2];
	/* one extra element for alignment */
	u32 classes[2][11 + 1];
	u32 class0[2][2];
	u32 bits[2][10][2];
	u32 class0_fp[2][2][4];
	u32 fp[2][4];
	u32 class0_hp[2][2];
	u32 hp[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};

struct rkvdec_vp9_intra_frame_symbol_counts {
	u32 partition[4][4][4];
	u32 skip[3][2];
	u32 intra[4][2];
	u32 tx32p[2][4];
	u32 tx16p[2][4];
	u32 tx8p[2][2];
	struct rkvdec_vp9_refs_counts ref_cnt[2][4][2][6][6];
};

struct rkvdec_vp9_run {
	struct rkvdec_run base;
	const struct v4l2_ctrl_vp9_frame *decode_params;
};

struct rkvdec_vp9_frame_info {
	u32 valid : 1;
	u32 segmapid : 1;
	u32 frame_context_idx : 2;
	u32 reference_mode : 2;
	u32 tx_mode : 3;
	u32 interpolation_filter : 3;
	u32 flags;
	u64 timestamp;
	struct v4l2_vp9_segmentation seg;
	struct v4l2_vp9_loop_filter lf;
};

struct rkvdec_vp9_ctx {
	struct rkvdec_aux_buf priv_tbl;
	struct rkvdec_aux_buf count_tbl;
	struct v4l2_vp9_frame_symbol_counts inter_cnts;
	struct v4l2_vp9_frame_symbol_counts intra_cnts;
	struct v4l2_vp9_frame_context probability_tables;
	struct v4l2_vp9_frame_context frame_context[4];
	struct rkvdec_vp9_frame_info cur;
	struct rkvdec_vp9_frame_info last;
};

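/*
 * Flatten one 6x6x3 coefficient probability plane into the hardware layout:
 * probabilities are written back to back, but after every 27 bytes the write
 * index skips 5 bytes, so each group of 27 probs occupies 32 bytes (two
 * 128-bit words). The 32-byte grouping is inferred from the padding used in
 * the probability tables above.
 */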
static void write_coeff_plane(const u8 coef[6][6][3], u8 *coeff_plane)
{
	unsigned int idx = 0, byte_count = 0;
	int k, m, n;
	u8 p;

	for (k = 0; k < 6; k++) {
		for (m = 0; m < 6; m++) {
			for (n = 0; n < 3; n++) {
				p = coef[k][m][n];
				coeff_plane[idx++] = p;
				byte_count++;
				if (byte_count == 27) {
					idx += 5;
					byte_count = 0;
				}
			}
		}
	}
}

static void init_intra_only_probs(struct rkvdec_ctx *ctx,
				  const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_intra_only_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.intra_only;
	probs = &vp9_ctx->probability_tables;

	/*
	 * Intra-only probs: 149 x 128 bits, aligned to 152 x 128 bits.
	 * The coefficient-related probs take 64 x 128 bits.
	 */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++)
			write_coeff_plane(probs->coef[i][j][0],
					  rkprobs->coef_intra[i][j]);
	}

	/* intra mode prob  80 x 128 bits */
	for (i = 0; i < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob); i++) {
		unsigned int byte_count = 0;
		int idx = 0;

		/* vp9_kf_y_mode_prob */
		for (j = 0; j < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(v4l2_vp9_kf_y_mode_prob[0][0]);
			     k++) {
				u8 val = v4l2_vp9_kf_y_mode_prob[i][j][k];

				rkprobs->intra_mode[i].y_mode[idx++] = val;
				byte_count++;
				if (byte_count == 27) {
					byte_count = 0;
					idx += 5;
				}
			}
		}
	}

	for (i = 0; i < sizeof(v4l2_vp9_kf_uv_mode_prob); ++i) {
		const u8 *ptr = (const u8 *)v4l2_vp9_kf_uv_mode_prob;

		rkprobs->intra_mode[i / 23].uv_mode[i % 23] = ptr[i];
	}
}

static void init_inter_probs(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_inter_frame_probs *rkprobs;
	const struct v4l2_vp9_frame_context *probs;
	unsigned int i, j, k;

	rkprobs = &tbl->probs.inter;
	probs = &vp9_ctx->probability_tables;

	/*
	 * Inter probs: 151 x 128 bits, aligned to 152 x 128 bits.
	 * Inter-only part: intra y_mode and inter block info, 6 x 128 bits.
	 */

	memcpy(rkprobs->y_mode, probs->y_mode, sizeof(rkprobs->y_mode));
	memcpy(rkprobs->comp_mode, probs->comp_mode,
	       sizeof(rkprobs->comp_mode));
	memcpy(rkprobs->comp_ref, probs->comp_ref,
	       sizeof(rkprobs->comp_ref));
	memcpy(rkprobs->single_ref, probs->single_ref,
	       sizeof(rkprobs->single_ref));
	memcpy(rkprobs->inter_mode, probs->inter_mode,
	       sizeof(rkprobs->inter_mode));
	memcpy(rkprobs->interp_filter, probs->interp_filter,
	       sizeof(rkprobs->interp_filter));

	/* 128 x 128 bits coeff related */
	for (i = 0; i < ARRAY_SIZE(probs->coef); i++) {
		for (j = 0; j < ARRAY_SIZE(probs->coef[0]); j++) {
			for (k = 0; k < ARRAY_SIZE(probs->coef[0][0]); k++)
				write_coeff_plane(probs->coef[i][j][k],
						  rkprobs->coef[k][i][j]);
		}
	}

	/* intra uv mode 6 x 128 */
	memcpy(rkprobs->uv_mode_0_2, &probs->uv_mode[0],
	       sizeof(rkprobs->uv_mode_0_2));
	memcpy(rkprobs->uv_mode_3_5, &probs->uv_mode[3],
	       sizeof(rkprobs->uv_mode_3_5));
	memcpy(rkprobs->uv_mode_6_8, &probs->uv_mode[6],
	       sizeof(rkprobs->uv_mode_6_8));
	memcpy(rkprobs->uv_mode_9, &probs->uv_mode[9],
	       sizeof(rkprobs->uv_mode_9));

	/* mv related 6 x 128 */
	memcpy(rkprobs->mv.joint, probs->mv.joint,
	       sizeof(rkprobs->mv.joint));
	memcpy(rkprobs->mv.sign, probs->mv.sign,
	       sizeof(rkprobs->mv.sign));
	memcpy(rkprobs->mv.classes, probs->mv.classes,
	       sizeof(rkprobs->mv.classes));
	memcpy(rkprobs->mv.class0_bit, probs->mv.class0_bit,
	       sizeof(rkprobs->mv.class0_bit));
	memcpy(rkprobs->mv.bits, probs->mv.bits,
	       sizeof(rkprobs->mv.bits));
	memcpy(rkprobs->mv.class0_fr, probs->mv.class0_fr,
	       sizeof(rkprobs->mv.class0_fr));
	memcpy(rkprobs->mv.fr, probs->mv.fr,
	       sizeof(rkprobs->mv.fr));
	memcpy(rkprobs->mv.class0_hp, probs->mv.class0_hp,
	       sizeof(rkprobs->mv.class0_hp));
	memcpy(rkprobs->mv.hp, probs->mv.hp,
	       sizeof(rkprobs->mv.hp));
}

static void init_probs(struct rkvdec_ctx *ctx,
		       const struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_priv_tbl *tbl = vp9_ctx->priv_tbl.cpu;
	struct rkvdec_vp9_probs *rkprobs = &tbl->probs;
	const struct v4l2_vp9_segmentation *seg;
	const struct v4l2_vp9_frame_context *probs;
	bool intra_only;

	dec_params = run->decode_params;
	probs = &vp9_ctx->probability_tables;
	seg = &dec_params->seg;

	memset(rkprobs, 0, sizeof(*rkprobs));

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	/* sb info  5 x 128 bit */
	memcpy(rkprobs->partition,
	       intra_only ? v4l2_vp9_kf_partition_probs : probs->partition,
	       sizeof(rkprobs->partition));

	memcpy(rkprobs->pred, seg->pred_probs, sizeof(rkprobs->pred));
	memcpy(rkprobs->tree, seg->tree_probs, sizeof(rkprobs->tree));
	memcpy(rkprobs->skip, probs->skip, sizeof(rkprobs->skip));
	memcpy(rkprobs->tx32, probs->tx32, sizeof(rkprobs->tx32));
	memcpy(rkprobs->tx16, probs->tx16, sizeof(rkprobs->tx16));
	memcpy(rkprobs->tx8, probs->tx8, sizeof(rkprobs->tx8));
	memcpy(rkprobs->is_inter, probs->is_inter, sizeof(rkprobs->is_inter));

	if (intra_only)
		init_intra_only_probs(ctx, run);
	else
		init_inter_probs(ctx, run);
}

struct rkvdec_vp9_ref_reg {
	u32 reg_frm_size;
	u32 reg_hor_stride;
	u32 reg_y_stride;
	u32 reg_yuv_stride;
	u32 reg_ref_base;
};

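/*
 * Register sets for the LAST, GOLDEN and ALTREF reference slots. A zero
 * reg_yuv_stride marks the slots that have no YUV stride register; see
 * config_ref_registers().
 */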
static struct rkvdec_vp9_ref_reg ref_regs[] = {
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(0),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(0),
		.reg_y_stride = RKVDEC_VP9_LAST_FRAME_YSTRIDE,
		.reg_yuv_stride = RKVDEC_VP9_LAST_FRAME_YUVSTRIDE,
		.reg_ref_base = RKVDEC_REG_VP9_LAST_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(1),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(1),
		.reg_y_stride = RKVDEC_VP9_GOLDEN_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_GOLDEN_FRAME_BASE,
	},
	{
		.reg_frm_size = RKVDEC_REG_VP9_FRAME_SIZE(2),
		.reg_hor_stride = RKVDEC_VP9_HOR_VIRSTRIDE(2),
		.reg_y_stride = RKVDEC_VP9_ALTREF_FRAME_YSTRIDE,
		.reg_yuv_stride = 0,
		.reg_ref_base = RKVDEC_REG_VP9_ALTREF_FRAME_BASE,
	}
};

static struct rkvdec_decoded_buffer *
get_ref_buf(struct rkvdec_ctx *ctx, struct vb2_v4l2_buffer *dst, u64 timestamp)
{
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
	struct vb2_queue *cap_q = &m2m_ctx->cap_q_ctx.q;
	struct vb2_buffer *buf;

	/*
	 * If a reference is unused or invalid, the current destination
	 * buffer is returned instead.
	 */
	buf = vb2_find_buffer(cap_q, timestamp);
	if (!buf)
		buf = &dst->vb2_buf;

	return vb2_to_rkvdec_decoded_buf(buf);
}

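/*
 * The hardware stores the co-located motion vectors right after the decoded
 * YUV data in the capture buffer, so the MV base address is the plane
 * address plus the aligned luma and chroma sizes.
 */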
static dma_addr_t get_mv_base_addr(struct rkvdec_decoded_buffer *buf)
{
	unsigned int aligned_pitch, aligned_height, yuv_len;

	aligned_height = round_up(buf->vp9.height, 64);
	aligned_pitch = round_up(buf->vp9.width * buf->vp9.bit_depth, 512) / 8;
	yuv_len = (aligned_height * aligned_pitch * 3) / 2;

	return vb2_dma_contig_plane_dma_addr(&buf->base.vb.vb2_buf, 0) +
	       yuv_len;
}

static void config_ref_registers(struct rkvdec_ctx *ctx,
				 const struct rkvdec_vp9_run *run,
				 struct rkvdec_decoded_buffer *ref_buf,
				 struct rkvdec_vp9_ref_reg *ref_reg)
{
	unsigned int aligned_pitch, aligned_height, y_len, yuv_len;
	struct rkvdec_dev *rkvdec = ctx->dev;

	aligned_height = round_up(ref_buf->vp9.height, 64);
	writel_relaxed(RKVDEC_VP9_FRAMEWIDTH(ref_buf->vp9.width) |
		       RKVDEC_VP9_FRAMEHEIGHT(ref_buf->vp9.height),
		       rkvdec->regs + ref_reg->reg_frm_size);

	writel_relaxed(vb2_dma_contig_plane_dma_addr(&ref_buf->base.vb.vb2_buf, 0),
		       rkvdec->regs + ref_reg->reg_ref_base);

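	/*
	 * get_ref_buf() falls back to the destination buffer for unused or
	 * invalid references; in that case skip the reference stride setup.
	 */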
	if (&ref_buf->base.vb == run->base.bufs.dst)
		return;

	aligned_pitch = round_up(ref_buf->vp9.width * ref_buf->vp9.bit_depth, 512) / 8;
	y_len = aligned_height * aligned_pitch;
	yuv_len = (y_len * 3) / 2;

	writel_relaxed(RKVDEC_HOR_Y_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_HOR_UV_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + ref_reg->reg_hor_stride);
	writel_relaxed(RKVDEC_VP9_REF_YSTRIDE(y_len / 16),
		       rkvdec->regs + ref_reg->reg_y_stride);

	if (!ref_reg->reg_yuv_stride)
		return;

	writel_relaxed(RKVDEC_VP9_REF_YUVSTRIDE(yuv_len / 16),
		       rkvdec->regs + ref_reg->reg_yuv_stride);
}

static void config_seg_registers(struct rkvdec_ctx *ctx, unsigned int segid)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	s16 feature_val;
	int feature_id;
	u32 val = 0;

	seg = vp9_ctx->last.valid ? &vp9_ctx->last.seg : &vp9_ctx->cur.seg;
	feature_id = V4L2_VP9_SEG_LVL_ALT_Q;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_QP_DELTA_EN(1) |
		       RKVDEC_SEGID_FRAME_QP_DELTA(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_ALT_L;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE_EN(1) |
		       RKVDEC_SEGID_FRAME_LOOPFILTER_VALUE(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_REF_FRAME;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid)) {
		feature_val = seg->feature_data[segid][feature_id];
		val |= RKVDEC_SEGID_REFERINFO_EN(1) |
		       RKVDEC_SEGID_REFERINFO(feature_val);
	}

	feature_id = V4L2_VP9_SEG_LVL_SKIP;
	if (v4l2_vp9_seg_feat_enabled(seg->feature_enabled, feature_id, segid))
		val |= RKVDEC_SEGID_FRAME_SKIP_EN(1);

	if (!segid &&
	    (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ABS_OR_DELTA_UPDATE))
		val |= RKVDEC_SEGID_ABS_DELTA(1);

	writel_relaxed(val, rkvdec->regs + RKVDEC_VP9_SEGID_GRP(segid));
}

static void update_dec_buf_info(struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	buf->vp9.width = dec_params->frame_width_minus_1 + 1;
	buf->vp9.height = dec_params->frame_height_minus_1 + 1;
	buf->vp9.bit_depth = dec_params->bit_depth;
}

static void update_ctx_cur_info(struct rkvdec_vp9_ctx *vp9_ctx,
				struct rkvdec_decoded_buffer *buf,
				const struct v4l2_ctrl_vp9_frame *dec_params)
{
	vp9_ctx->cur.valid = true;
	vp9_ctx->cur.reference_mode = dec_params->reference_mode;
	vp9_ctx->cur.interpolation_filter = dec_params->interpolation_filter;
	vp9_ctx->cur.flags = dec_params->flags;
	vp9_ctx->cur.timestamp = buf->base.vb.vb2_buf.timestamp;
	vp9_ctx->cur.seg = dec_params->seg;
	vp9_ctx->cur.lf = dec_params->lf;
}

static void update_ctx_last_info(struct rkvdec_vp9_ctx *vp9_ctx)
{
	vp9_ctx->last = vp9_ctx->cur;
}


static void config_registers(struct rkvdec_ctx *ctx,
			     const struct rkvdec_vp9_run *run)
{
	unsigned int y_len, uv_len, yuv_len, bit_depth, aligned_height, aligned_pitch, stream_len;
	const struct v4l2_ctrl_vp9_frame *dec_params;
	struct rkvdec_decoded_buffer *ref_bufs[3];
	struct rkvdec_decoded_buffer *dst, *last, *mv_ref;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	u32 val, last_frame_info = 0;
	const struct v4l2_vp9_segmentation *seg;
	struct rkvdec_dev *rkvdec = ctx->dev;
	dma_addr_t addr;
	bool intra_only;
	unsigned int i;

	dec_params = run->decode_params;
	dst = vb2_to_rkvdec_decoded_buf(&run->base.bufs.dst->vb2_buf);
	ref_bufs[0] = get_ref_buf(ctx, &dst->base.vb, dec_params->last_frame_ts);
	ref_bufs[1] = get_ref_buf(ctx, &dst->base.vb, dec_params->golden_frame_ts);
	ref_bufs[2] = get_ref_buf(ctx, &dst->base.vb, dec_params->alt_frame_ts);

	if (vp9_ctx->last.valid)
		last = get_ref_buf(ctx, &dst->base.vb, vp9_ctx->last.timestamp);
	else
		last = dst;

	update_dec_buf_info(dst, dec_params);
	update_ctx_cur_info(vp9_ctx, dst, dec_params);
	seg = &dec_params->seg;

	intra_only = !!(dec_params->flags &
			(V4L2_VP9_FRAME_FLAG_KEY_FRAME |
			 V4L2_VP9_FRAME_FLAG_INTRA_ONLY));

	writel_relaxed(RKVDEC_MODE(RKVDEC_MODE_VP9),
		       rkvdec->regs + RKVDEC_REG_SYSCTRL);

	bit_depth = dec_params->bit_depth;
	aligned_height = round_up(ctx->decoded_fmt.fmt.pix_mp.height, 64);

	aligned_pitch = round_up(ctx->decoded_fmt.fmt.pix_mp.width *
				 bit_depth,
				 512) / 8;
	y_len = aligned_height * aligned_pitch;
	uv_len = y_len / 2;
	yuv_len = y_len + uv_len;

	writel_relaxed(RKVDEC_Y_HOR_VIRSTRIDE(aligned_pitch / 16) |
		       RKVDEC_UV_HOR_VIRSTRIDE(aligned_pitch / 16),
		       rkvdec->regs + RKVDEC_REG_PICPAR);
	writel_relaxed(RKVDEC_Y_VIRSTRIDE(y_len / 16),
		       rkvdec->regs + RKVDEC_REG_Y_VIRSTRIDE);
	writel_relaxed(RKVDEC_YUV_VIRSTRIDE(yuv_len / 16),
		       rkvdec->regs + RKVDEC_REG_YUV_VIRSTRIDE);

	stream_len = vb2_get_plane_payload(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(RKVDEC_STRM_LEN(stream_len),
		       rkvdec->regs + RKVDEC_REG_STRM_LEN);

	/*
	 * Reset the count buffer: the decoder only outputs intra-related
	 * syntax counts when decoding an intra frame, but the entropy update
	 * needs to update all the probabilities.
	 */
	if (intra_only)
		memset(vp9_ctx->count_tbl.cpu, 0, vp9_ctx->count_tbl.size);

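	/*
	 * The private table holds two segmentation maps and segmapid (a
	 * 1-bit index) selects the current one. Flip to the other map when
	 * the current frame may rewrite it: an inter frame without error
	 * resilience that either has segmentation disabled or explicitly
	 * updates the map.
	 */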
	vp9_ctx->cur.segmapid = vp9_ctx->last.segmapid;
	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    (!(seg->flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED) ||
	     (seg->flags & V4L2_VP9_SEGMENTATION_FLAG_UPDATE_MAP)))
		vp9_ctx->cur.segmapid++;

	for (i = 0; i < ARRAY_SIZE(ref_bufs); i++)
		config_ref_registers(ctx, run, ref_bufs[i], &ref_regs[i]);

	for (i = 0; i < 8; i++)
		config_seg_registers(ctx, i);

	writel_relaxed(RKVDEC_VP9_TX_MODE(vp9_ctx->cur.tx_mode) |
		       RKVDEC_VP9_FRAME_REF_MODE(dec_params->reference_mode),
		       rkvdec->regs + RKVDEC_VP9_CPRHEADER_CONFIG);

	if (!intra_only) {
		const struct v4l2_vp9_loop_filter *lf;
		s8 delta;

		if (vp9_ctx->last.valid)
			lf = &vp9_ctx->last.lf;
		else
			lf = &vp9_ctx->cur.lf;

		val = 0;
		for (i = 0; i < ARRAY_SIZE(lf->ref_deltas); i++) {
			delta = lf->ref_deltas[i];
			val |= RKVDEC_REF_DELTAS_LASTFRAME(i, delta);
		}

		writel_relaxed(val,
			       rkvdec->regs + RKVDEC_VP9_REF_DELTAS_LASTFRAME);

		for (i = 0; i < ARRAY_SIZE(lf->mode_deltas); i++) {
			delta = lf->mode_deltas[i];
			last_frame_info |= RKVDEC_MODE_DELTAS_LASTFRAME(i,
									delta);
		}
	}

	if (vp9_ctx->last.valid && !intra_only &&
	    vp9_ctx->last.seg.flags & V4L2_VP9_SEGMENTATION_FLAG_ENABLED)
		last_frame_info |= RKVDEC_SEG_EN_LASTFRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_SHOW_FRAME)
		last_frame_info |= RKVDEC_LAST_SHOW_FRAME;

	if (vp9_ctx->last.valid &&
	    vp9_ctx->last.flags &
	    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY))
		last_frame_info |= RKVDEC_LAST_INTRA_ONLY;

	if (vp9_ctx->last.valid &&
	    last->vp9.width == dst->vp9.width &&
	    last->vp9.height == dst->vp9.height)
		last_frame_info |= RKVDEC_LAST_WIDHHEIGHT_EQCUR;

	writel_relaxed(last_frame_info,
		       rkvdec->regs + RKVDEC_VP9_INFO_LASTFRAME);

	writel_relaxed(stream_len - dec_params->compressed_header_size -
		       dec_params->uncompressed_header_size,
		       rkvdec->regs + RKVDEC_VP9_LASTTILE_SIZE);

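	/*
	 * Program the reference scaling factors as fixed-point ratios (14
	 * fractional bits) of the reference dimensions to the current frame
	 * dimensions.
	 */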
	for (i = 0; !intra_only && i < ARRAY_SIZE(ref_bufs); i++) {
		unsigned int refw = ref_bufs[i]->vp9.width;
		unsigned int refh = ref_bufs[i]->vp9.height;
		u32 hscale, vscale;

		hscale = (refw << 14) / dst->vp9.width;
		vscale = (refh << 14) / dst->vp9.height;
		writel_relaxed(RKVDEC_VP9_REF_HOR_SCALE(hscale) |
			       RKVDEC_VP9_REF_VER_SCALE(vscale),
			       rkvdec->regs + RKVDEC_VP9_REF_SCALE(i));
	}

	addr = vb2_dma_contig_plane_dma_addr(&dst->base.vb.vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_DECOUT_BASE);
	addr = vb2_dma_contig_plane_dma_addr(&run->base.bufs.src->vb2_buf, 0);
	writel_relaxed(addr, rkvdec->regs + RKVDEC_REG_STRM_RLC_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, probs),
		       rkvdec->regs + RKVDEC_REG_CABACTBL_PROB_BASE);
	writel_relaxed(vp9_ctx->count_tbl.dma,
		       rkvdec->regs + RKVDEC_REG_VP9COUNT_BASE);

	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * vp9_ctx->cur.segmapid),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDCUR_BASE);
	writel_relaxed(vp9_ctx->priv_tbl.dma +
		       offsetof(struct rkvdec_vp9_priv_tbl, segmap) +
		       (RKVDEC_VP9_MAX_SEGMAP_SIZE * (!vp9_ctx->cur.segmapid)),
		       rkvdec->regs + RKVDEC_REG_VP9_SEGIDLAST_BASE);

	if (!intra_only &&
	    !(dec_params->flags & V4L2_VP9_FRAME_FLAG_ERROR_RESILIENT) &&
	    vp9_ctx->last.valid)
		mv_ref = last;
	else
		mv_ref = dst;

	writel_relaxed(get_mv_base_addr(mv_ref),
		       rkvdec->regs + RKVDEC_VP9_REF_COLMV_BASE);

	writel_relaxed(ctx->decoded_fmt.fmt.pix_mp.width |
		       (ctx->decoded_fmt.fmt.pix_mp.height << 16),
		       rkvdec->regs + RKVDEC_REG_PERFORMANCE_CYCLE);
}

static int validate_dec_params(struct rkvdec_ctx *ctx,
			       const struct v4l2_ctrl_vp9_frame *dec_params)
{
	unsigned int aligned_width, aligned_height;

	/* We only support profile 0, i.e. 8-bit 4:2:0 streams. */
	if (dec_params->profile != 0) {
		dev_err(ctx->dev->dev, "unsupported profile %d\n",
			dec_params->profile);
		return -EINVAL;
	}

	aligned_width = round_up(dec_params->frame_width_minus_1 + 1, 64);
	aligned_height = round_up(dec_params->frame_height_minus_1 + 1, 64);

	/*
	 * Userspace should update the capture/decoded format when the
	 * resolution changes.
	 */
	if (aligned_width != ctx->decoded_fmt.fmt.pix_mp.width ||
	    aligned_height != ctx->decoded_fmt.fmt.pix_mp.height) {
		dev_err(ctx->dev->dev,
			"unexpected bitstream resolution %dx%d\n",
			dec_params->frame_width_minus_1 + 1,
			dec_params->frame_height_minus_1 + 1);
		return -EINVAL;
	}

	return 0;
}

static int rkvdec_vp9_run_preamble(struct rkvdec_ctx *ctx,
				   struct rkvdec_vp9_run *run)
{
	const struct v4l2_ctrl_vp9_frame *dec_params;
	const struct v4l2_ctrl_vp9_compressed_hdr *prob_updates;
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct v4l2_ctrl *ctrl;
	unsigned int fctx_idx;
	int ret;

	/* v4l2-specific stuff */
	rkvdec_run_preamble(ctx, &run->base);

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl,
			      V4L2_CID_STATELESS_VP9_FRAME);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	dec_params = ctrl->p_cur.p;

	ret = validate_dec_params(ctx, dec_params);
	if (ret)
		return ret;

	run->decode_params = dec_params;

	ctrl = v4l2_ctrl_find(&ctx->ctrl_hdl, V4L2_CID_STATELESS_VP9_COMPRESSED_HDR);
	if (WARN_ON(!ctrl))
		return -EINVAL;
	prob_updates = ctrl->p_cur.p;
	vp9_ctx->cur.tx_mode = prob_updates->tx_mode;

	/*
	 * vp9 stuff
	 *
	 * by this point the userspace has done all parts of 6.2 uncompressed_header()
	 * except this fragment:
	 * if ( FrameIsIntra || error_resilient_mode ) {
	 *	setup_past_independence ( )
	 *	if ( frame_type == KEY_FRAME || error_resilient_mode == 1 ||
	 *	     reset_frame_context == 3 ) {
	 *		for ( i = 0; i < 4; i ++ ) {
	 *			save_probs( i )
	 *		}
	 *	} else if ( reset_frame_context == 2 ) {
	 *		save_probs( frame_context_idx )
	 *	}
	 *	frame_context_idx = 0
	 * }
	 */
	fctx_idx = v4l2_vp9_reset_frame_ctx(dec_params, vp9_ctx->frame_context);
	vp9_ctx->cur.frame_context_idx = fctx_idx;

	/* 6.1 frame(sz): load_probs() and load_probs2() */
	vp9_ctx->probability_tables = vp9_ctx->frame_context[fctx_idx];

	/*
	 * The userspace has also performed 6.3 compressed_header(), but handling the
	 * probs in a special way. All probs which need updating, except MV-related,
	 * have been read from the bitstream and translated through inv_map_table[],
	 * but no 6.3.6 inv_recenter_nonneg(v, m) has been performed. The values passed
	 * by userspace are either translated values (there are no 0 values in
	 * inv_map_table[]), or zero to indicate no update. All MV-related probs which need
	 * updating have been read from the bitstream and (mv_prob << 1) | 1 has been
	 * performed. The values passed by userspace are either new values
	 * to replace old ones (the above mentioned shift and bitwise or never result in
	 * a zero) or zero to indicate no update.
	 * fw_update_probs() performs actual probs updates or leaves probs as-is
	 * for values for which a zero was passed from userspace.
	 */
	v4l2_vp9_fw_update_probs(&vp9_ctx->probability_tables, prob_updates, dec_params);

	return 0;
}

static int rkvdec_vp9_run(struct rkvdec_ctx *ctx)
{
	struct rkvdec_dev *rkvdec = ctx->dev;
	struct rkvdec_vp9_run run = { };
	int ret;

	ret = rkvdec_vp9_run_preamble(ctx, &run);
	if (ret) {
		rkvdec_run_postamble(ctx, &run.base);
		return ret;
	}

	/* Prepare probs. */
	init_probs(ctx, &run);

	/* Configure hardware registers. */
	config_registers(ctx, &run);

	rkvdec_run_postamble(ctx, &run.base);

	schedule_delayed_work(&rkvdec->watchdog_work, msecs_to_jiffies(2000));

	writel(1, rkvdec->regs + RKVDEC_REG_PREF_LUMA_CACHE_COMMAND);
	writel(1, rkvdec->regs + RKVDEC_REG_PREF_CHR_CACHE_COMMAND);

	writel(0xe, rkvdec->regs + RKVDEC_REG_STRMD_ERR_EN);
	/* Start decoding! */
	writel(RKVDEC_INTERRUPT_DEC_E | RKVDEC_CONFIG_DEC_CLK_GATE_E |
	       RKVDEC_TIMEOUT_E | RKVDEC_BUF_EMPTY_E,
	       rkvdec->regs + RKVDEC_REG_INTERRUPT);

	return 0;
}
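/*
 * Save/restore helper for the TX and skip probs: rkvdec_vp9_done() uses it
 * to preserve the forward-updated values across the frame context reload
 * (load_probs()/load_probs2()).
 */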
#define copy_tx_and_skip(p1, p2)				\
do {								\
	memcpy((p1)->tx8, (p2)->tx8, sizeof((p1)->tx8));	\
	memcpy((p1)->tx16, (p2)->tx16, sizeof((p1)->tx16));	\
	memcpy((p1)->tx32, (p2)->tx32, sizeof((p1)->tx32));	\
	memcpy((p1)->skip, (p2)->skip, sizeof((p1)->skip));	\
} while (0)

static void rkvdec_vp9_done(struct rkvdec_ctx *ctx,
			    struct vb2_v4l2_buffer *src_buf,
			    struct vb2_v4l2_buffer *dst_buf,
			    enum vb2_buffer_state result)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	unsigned int fctx_idx;

	/* v4l2-specific stuff */
	if (result == VB2_BUF_STATE_ERROR)
		goto out_update_last;

	/*
	 * vp9 stuff
	 *
	 * 6.1.2 refresh_probs()
	 *
	 * In the spec a complementary condition goes last in 6.1.2 refresh_probs(),
	 * but it makes no sense to perform all the activities from the first "if"
	 * there if we actually are not refreshing the frame context. On top of that,
	 * because of 6.2 uncompressed_header() whenever error_resilient_mode == 1,
	 * refresh_frame_context == 0. Consequently, if we don't jump to out_update_last
	 * it means error_resilient_mode must be 0.
	 */
	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_REFRESH_FRAME_CTX))
		goto out_update_last;

	fctx_idx = vp9_ctx->cur.frame_context_idx;

	if (!(vp9_ctx->cur.flags & V4L2_VP9_FRAME_FLAG_PARALLEL_DEC_MODE)) {
		/* error_resilient_mode == 0 && frame_parallel_decoding_mode == 0 */
		struct v4l2_vp9_frame_context *probs = &vp9_ctx->probability_tables;
		bool frame_is_intra = vp9_ctx->cur.flags &
		    (V4L2_VP9_FRAME_FLAG_KEY_FRAME | V4L2_VP9_FRAME_FLAG_INTRA_ONLY);
		struct tx_and_skip {
			u8 tx8[2][1];
			u8 tx16[2][2];
			u8 tx32[2][3];
			u8 skip[3];
		} _tx_skip, *tx_skip = &_tx_skip;
		struct v4l2_vp9_frame_symbol_counts *counts;

		/* buffer the forward-updated TX and skip probs */
		if (frame_is_intra)
			copy_tx_and_skip(tx_skip, probs);

		/* 6.1.2 refresh_probs(): load_probs() and load_probs2() */
		*probs = vp9_ctx->frame_context[fctx_idx];

		/* if FrameIsIntra then undo the effect of load_probs2() */
		if (frame_is_intra)
			copy_tx_and_skip(probs, tx_skip);

		counts = frame_is_intra ? &vp9_ctx->intra_cnts : &vp9_ctx->inter_cnts;
		v4l2_vp9_adapt_coef_probs(probs, counts,
					  !vp9_ctx->last.valid ||
					  vp9_ctx->last.flags & V4L2_VP9_FRAME_FLAG_KEY_FRAME,
					  frame_is_intra);
		if (!frame_is_intra) {
			const struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts;
			u32 classes[2][11];
			int i;

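			/*
			 * The hardware count table pads classes[] to 12
			 * entries per component (see the layout above), so
			 * repack the counts into the 11-entry layout the
			 * V4L2 VP9 library expects before adapting.
			 */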
			inter_cnts = vp9_ctx->count_tbl.cpu;
			for (i = 0; i < ARRAY_SIZE(classes); ++i)
				memcpy(classes[i], inter_cnts->classes[i], sizeof(classes[0]));
			counts->classes = &classes;

			/* load_probs2() already done */
			v4l2_vp9_adapt_noncoef_probs(&vp9_ctx->probability_tables, counts,
						     vp9_ctx->cur.reference_mode,
						     vp9_ctx->cur.interpolation_filter,
						     vp9_ctx->cur.tx_mode, vp9_ctx->cur.flags);
		}
	}

	/* 6.1.2 refresh_probs(): save_probs(fctx_idx) */
	vp9_ctx->frame_context[fctx_idx] = vp9_ctx->probability_tables;

out_update_last:
	update_ctx_last_info(vp9_ctx);
}

static void rkvdec_init_v4l2_vp9_count_tbl(struct rkvdec_ctx *ctx)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_vp9_intra_frame_symbol_counts *intra_cnts = vp9_ctx->count_tbl.cpu;
	struct rkvdec_vp9_inter_frame_symbol_counts *inter_cnts = vp9_ctx->count_tbl.cpu;
	int i, j, k, l, m;

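	/*
	 * The intra and inter count layouts alias the same hardware count
	 * buffer; which view is valid depends on the type of the frame that
	 * was just decoded, so both sets of pointers are prepared here.
	 */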
	vp9_ctx->inter_cnts.partition = &inter_cnts->partition;
	vp9_ctx->inter_cnts.skip = &inter_cnts->skip;
	vp9_ctx->inter_cnts.intra_inter = &inter_cnts->inter;
	vp9_ctx->inter_cnts.tx32p = &inter_cnts->tx32p;
	vp9_ctx->inter_cnts.tx16p = &inter_cnts->tx16p;
	vp9_ctx->inter_cnts.tx8p = &inter_cnts->tx8p;

	vp9_ctx->intra_cnts.partition = (u32 (*)[16][4])(&intra_cnts->partition);
	vp9_ctx->intra_cnts.skip = &intra_cnts->skip;
	vp9_ctx->intra_cnts.intra_inter = &intra_cnts->intra;
	vp9_ctx->intra_cnts.tx32p = &intra_cnts->tx32p;
	vp9_ctx->intra_cnts.tx16p = &intra_cnts->tx16p;
	vp9_ctx->intra_cnts.tx8p = &intra_cnts->tx8p;

	vp9_ctx->inter_cnts.y_mode = &inter_cnts->y_mode;
	vp9_ctx->inter_cnts.uv_mode = &inter_cnts->uv_mode;
	vp9_ctx->inter_cnts.comp = &inter_cnts->comp;
	vp9_ctx->inter_cnts.comp_ref = &inter_cnts->comp_ref;
	vp9_ctx->inter_cnts.single_ref = &inter_cnts->single_ref;
	vp9_ctx->inter_cnts.mv_mode = &inter_cnts->mv_mode;
	vp9_ctx->inter_cnts.filter = &inter_cnts->filter;
	vp9_ctx->inter_cnts.mv_joint = &inter_cnts->mv_joint;
	vp9_ctx->inter_cnts.sign = &inter_cnts->sign;
	/*
	 * The Rockchip hardware actually uses "u32 classes[2][11 + 1];"
	 * instead of "u32 classes[2][11];", so the counts must be explicitly
	 * repacked into a properly sized array when passing the data to the
	 * vp9 library functions (see rkvdec_vp9_done()).
	 */
	vp9_ctx->inter_cnts.class0 = &inter_cnts->class0;
	vp9_ctx->inter_cnts.bits = &inter_cnts->bits;
	vp9_ctx->inter_cnts.class0_fp = &inter_cnts->class0_fp;
	vp9_ctx->inter_cnts.fp = &inter_cnts->fp;
	vp9_ctx->inter_cnts.class0_hp = &inter_cnts->class0_hp;
	vp9_ctx->inter_cnts.hp = &inter_cnts->hp;

#define INNERMOST_LOOP \
	do {										\
		for (m = 0; m < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0][0]); ++m) {\
			vp9_ctx->inter_cnts.coeff[i][j][k][l][m] =			\
				&inter_cnts->ref_cnt[k][i][j][l][m].coeff;		\
			vp9_ctx->inter_cnts.eob[i][j][k][l][m][0] =			\
				&inter_cnts->ref_cnt[k][i][j][l][m].eob[0];		\
			vp9_ctx->inter_cnts.eob[i][j][k][l][m][1] =			\
				&inter_cnts->ref_cnt[k][i][j][l][m].eob[1];		\
											\
			vp9_ctx->intra_cnts.coeff[i][j][k][l][m] =			\
				&intra_cnts->ref_cnt[k][i][j][l][m].coeff;		\
			vp9_ctx->intra_cnts.eob[i][j][k][l][m][0] =			\
				&intra_cnts->ref_cnt[k][i][j][l][m].eob[0];		\
			vp9_ctx->intra_cnts.eob[i][j][k][l][m][1] =			\
				&intra_cnts->ref_cnt[k][i][j][l][m].eob[1];		\
		}									\
	} while (0)

	for (i = 0; i < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff); ++i)
		for (j = 0; j < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0]); ++j)
			for (k = 0; k < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0]); ++k)
				for (l = 0; l < ARRAY_SIZE(vp9_ctx->inter_cnts.coeff[0][0][0]); ++l)
					INNERMOST_LOOP;
#undef INNERMOST_LOOP
}

static int rkvdec_vp9_start(struct rkvdec_ctx *ctx)
{
	struct rkvdec_dev *rkvdec = ctx->dev;
	struct rkvdec_vp9_priv_tbl *priv_tbl;
	struct rkvdec_vp9_ctx *vp9_ctx;
	unsigned char *count_tbl;
	int ret;

	vp9_ctx = kzalloc(sizeof(*vp9_ctx), GFP_KERNEL);
	if (!vp9_ctx)
		return -ENOMEM;

	ctx->priv = vp9_ctx;

	BUILD_BUG_ON(sizeof(priv_tbl->probs) % 16); /* ensure probs size is 128-bit aligned */
	priv_tbl = dma_alloc_coherent(rkvdec->dev, sizeof(*priv_tbl),
				      &vp9_ctx->priv_tbl.dma, GFP_KERNEL);
	if (!priv_tbl) {
		ret = -ENOMEM;
		goto err_free_ctx;
	}

	vp9_ctx->priv_tbl.size = sizeof(*priv_tbl);
	vp9_ctx->priv_tbl.cpu = priv_tbl;

	count_tbl = dma_alloc_coherent(rkvdec->dev, RKVDEC_VP9_COUNT_SIZE,
				       &vp9_ctx->count_tbl.dma, GFP_KERNEL);
	if (!count_tbl) {
		ret = -ENOMEM;
		goto err_free_priv_tbl;
	}

	vp9_ctx->count_tbl.size = RKVDEC_VP9_COUNT_SIZE;
	vp9_ctx->count_tbl.cpu = count_tbl;
	rkvdec_init_v4l2_vp9_count_tbl(ctx);

	return 0;

err_free_priv_tbl:
	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);

err_free_ctx:
	kfree(vp9_ctx);
	return ret;
}

static void rkvdec_vp9_stop(struct rkvdec_ctx *ctx)
{
	struct rkvdec_vp9_ctx *vp9_ctx = ctx->priv;
	struct rkvdec_dev *rkvdec = ctx->dev;

	dma_free_coherent(rkvdec->dev, vp9_ctx->count_tbl.size,
			  vp9_ctx->count_tbl.cpu, vp9_ctx->count_tbl.dma);
	dma_free_coherent(rkvdec->dev, vp9_ctx->priv_tbl.size,
			  vp9_ctx->priv_tbl.cpu, vp9_ctx->priv_tbl.dma);
	kfree(vp9_ctx);
}

static int rkvdec_vp9_adjust_fmt(struct rkvdec_ctx *ctx,
				 struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *fmt = &f->fmt.pix_mp;

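	/*
	 * If userspace did not provide a coded buffer size, default to two
	 * bytes per pixel, a heuristic upper bound for compressed VP9
	 * frames.
	 */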
	fmt->num_planes = 1;
	if (!fmt->plane_fmt[0].sizeimage)
		fmt->plane_fmt[0].sizeimage = fmt->width * fmt->height * 2;
	return 0;
}

const struct rkvdec_coded_fmt_ops rkvdec_vp9_fmt_ops = {
	.adjust_fmt = rkvdec_vp9_adjust_fmt,
	.start = rkvdec_vp9_start,
	.stop = rkvdec_vp9_stop,
	.run = rkvdec_vp9_run,
	.done = rkvdec_vp9_done,
};