// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2018 Maxime Jourdan <mjourdan@baylibre.com>
 */

#include <media/v4l2-mem2mem.h>
#include <media/videobuf2-dma-contig.h>

#include "codec_hevc_common.h"
#include "vdec_helpers.h"
#include "hevc_regs.h"

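/*
 * Per-CAPTURE-buffer compressed frame header size and MMU page map size,
 * used by codec_hevc_alloc_mmu_headers() below.
 */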
#define MMU_COMPRESS_HEADER_SIZE 0x48000
#define MMU_MAP_SIZE 0x4800

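/* Command table programmed into the HEVC parser block by the codec drivers */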
const u16 vdec_hevc_parser_cmd[] = {
	0x0401,	0x8401,	0x0800,	0x0402,
	0x9002,	0x1423,	0x8CC3,	0x1423,
	0x8804,	0x9825,	0x0800,	0x04FE,
	0x8406,	0x8411,	0x1800,	0x8408,
	0x8409,	0x8C2A,	0x9C2B,	0x1C00,
	0x840F,	0x8407,	0x8000,	0x8408,
	0x2000,	0xA800,	0x8410,	0x04DE,
	0x840C,	0x840D,	0xAC00,	0xA000,
	0x08C0,	0x08E0,	0xA40E,	0xFC00,
	0x7C00
};

/* Configure decode head read mode */
void codec_hevc_setup_decode_head(struct amvdec_session *sess, int is_10bit)
{
	struct amvdec_core *core = sess->core;
	u32 body_size = amvdec_am21c_body_size(sess->width, sess->height);
	u32 head_size = amvdec_am21c_head_size(sess->width, sess->height);

	if (!codec_hevc_use_fbc(sess->pixfmt_cap, is_10bit)) {
		/* Enable 2-plane reference read mode */
		amvdec_write_dos(core, HEVCD_MPP_DECOMP_CTL1, BIT(31));
		return;
	}

	if (codec_hevc_use_mmu(core->platform->revision,
			       sess->pixfmt_cap, is_10bit))
		amvdec_write_dos(core, HEVCD_MPP_DECOMP_CTL1, BIT(4));
	else
		amvdec_write_dos(core, HEVCD_MPP_DECOMP_CTL1, 0);

	if (core->platform->revision < VDEC_REVISION_SM1)
		amvdec_write_dos(core, HEVCD_MPP_DECOMP_CTL2, body_size / 32);
	amvdec_write_dos(core, HEVC_CM_BODY_LENGTH, body_size);
	amvdec_write_dos(core, HEVC_CM_HEADER_OFFSET, body_size);
	amvdec_write_dos(core, HEVC_CM_HEADER_LENGTH, head_size);
}
EXPORT_SYMBOL_GPL(codec_hevc_setup_decode_head);

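/*
 * Program the HEVCD reference buffer table on GXBB: one table entry per
 * CAPTURE buffer when FBC is used, or separate Y and UV entries otherwise.
 * The remaining table slots are padded with the last buffer written.
 */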
static void codec_hevc_setup_buffers_gxbb(struct amvdec_session *sess,
					  struct codec_hevc_common *comm,
					  int is_10bit)
{
	struct amvdec_core *core = sess->core;
	struct v4l2_m2m_buffer *buf;
	u32 buf_num = v4l2_m2m_num_dst_bufs_ready(sess->m2m_ctx);
	dma_addr_t buf_y_paddr = 0;
	dma_addr_t buf_uv_paddr = 0;
	u32 idx = 0;
	u32 val;
	int i;

	amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0);

	v4l2_m2m_for_each_dst_buf(sess->m2m_ctx, buf) {
		struct vb2_buffer *vb = &buf->vb.vb2_buf;

		idx = vb->index;

		if (codec_hevc_use_downsample(sess->pixfmt_cap, is_10bit))
			buf_y_paddr = comm->fbc_buffer_paddr[idx];
		else
			buf_y_paddr = vb2_dma_contig_plane_dma_addr(vb, 0);

		if (codec_hevc_use_fbc(sess->pixfmt_cap, is_10bit)) {
			val = buf_y_paddr | (idx << 8) | 1;
			amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
					 val);
		} else {
			buf_uv_paddr = vb2_dma_contig_plane_dma_addr(vb, 1);
			val = buf_y_paddr | ((idx * 2) << 8) | 1;
			amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
					 val);
			val = buf_uv_paddr | ((idx * 2 + 1) << 8) | 1;
			amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
					 val);
		}
	}

	if (codec_hevc_use_fbc(sess->pixfmt_cap, is_10bit))
		val = buf_y_paddr | (idx << 8) | 1;
	else
		val = buf_y_paddr | ((idx * 2) << 8) | 1;

	/* Fill the remaining unused slots with the last buffer's Y addr */
	for (i = buf_num; i < MAX_REF_PIC_NUM; ++i)
		amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR, val);

	amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
	amvdec_write_dos(core, HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, 1);
	for (i = 0; i < 32; ++i)
		amvdec_write_dos(core, HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
}

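/*
 * Program the HEVCD reference buffer table on GXL and newer. Depending on
 * the decoding mode, the entries point to the MMU headers, the intermediate
 * FBC buffers or the CAPTURE buffer planes (addresses in 32-byte units).
 */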
static void codec_hevc_setup_buffers_gxl(struct amvdec_session *sess,
					 struct codec_hevc_common *comm,
					 int is_10bit)
{
	struct amvdec_core *core = sess->core;
	struct v4l2_m2m_buffer *buf;
	u32 revision = core->platform->revision;
	u32 pixfmt_cap = sess->pixfmt_cap;
	int i;

	amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
			 BIT(2) | BIT(1));

	v4l2_m2m_for_each_dst_buf(sess->m2m_ctx, buf) {
		struct vb2_buffer *vb = &buf->vb.vb2_buf;
		dma_addr_t buf_y_paddr = 0;
		dma_addr_t buf_uv_paddr = 0;
		u32 idx = vb->index;

		if (codec_hevc_use_mmu(revision, pixfmt_cap, is_10bit))
			buf_y_paddr = comm->mmu_header_paddr[idx];
		else if (codec_hevc_use_downsample(pixfmt_cap, is_10bit))
			buf_y_paddr = comm->fbc_buffer_paddr[idx];
		else
			buf_y_paddr = vb2_dma_contig_plane_dma_addr(vb, 0);

		amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_DATA,
				 buf_y_paddr >> 5);

		if (!codec_hevc_use_fbc(pixfmt_cap, is_10bit)) {
			buf_uv_paddr = vb2_dma_contig_plane_dma_addr(vb, 1);
			amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_DATA,
					 buf_uv_paddr >> 5);
		}
	}

	amvdec_write_dos(core, HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 1);
	amvdec_write_dos(core, HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, 1);
	for (i = 0; i < 32; ++i)
		amvdec_write_dos(core, HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
}

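/* Release the intermediate AM21C (compressed frame) buffers, if any */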
void codec_hevc_free_fbc_buffers(struct amvdec_session *sess,
				 struct codec_hevc_common *comm)
{
	struct device *dev = sess->core->dev;
	u32 am21_size = amvdec_am21c_size(sess->width, sess->height);
	int i;

	for (i = 0; i < MAX_REF_PIC_NUM; ++i) {
		if (comm->fbc_buffer_vaddr[i]) {
			dma_free_coherent(dev, am21_size,
					  comm->fbc_buffer_vaddr[i],
					  comm->fbc_buffer_paddr[i]);
			comm->fbc_buffer_vaddr[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(codec_hevc_free_fbc_buffers);

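/* Allocate one intermediate AM21C buffer per queued CAPTURE buffer */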
static int codec_hevc_alloc_fbc_buffers(struct amvdec_session *sess,
					struct codec_hevc_common *comm)
{
	struct device *dev = sess->core->dev;
	struct v4l2_m2m_buffer *buf;
	u32 am21_size = amvdec_am21c_size(sess->width, sess->height);

	v4l2_m2m_for_each_dst_buf(sess->m2m_ctx, buf) {
		u32 idx = buf->vb.vb2_buf.index;
		dma_addr_t paddr;
		void *vaddr = dma_alloc_coherent(dev, am21_size, &paddr,
						 GFP_KERNEL);
		if (!vaddr) {
			codec_hevc_free_fbc_buffers(sess, comm);
			return -ENOMEM;
		}

		comm->fbc_buffer_vaddr[idx] = vaddr;
		comm->fbc_buffer_paddr[idx] = paddr;
	}

	return 0;
}

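/* Release the per-buffer compressed frame headers and the MMU page map */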
void codec_hevc_free_mmu_headers(struct amvdec_session *sess,
				 struct codec_hevc_common *comm)
{
	struct device *dev = sess->core->dev;
	int i;

	for (i = 0; i < MAX_REF_PIC_NUM; ++i) {
		if (comm->mmu_header_vaddr[i]) {
			dma_free_coherent(dev, MMU_COMPRESS_HEADER_SIZE,
					  comm->mmu_header_vaddr[i],
					  comm->mmu_header_paddr[i]);
			comm->mmu_header_vaddr[i] = NULL;
		}
	}

	if (comm->mmu_map_vaddr) {
		dma_free_coherent(dev, MMU_MAP_SIZE,
				  comm->mmu_map_vaddr,
				  comm->mmu_map_paddr);
		comm->mmu_map_vaddr = NULL;
	}
}
EXPORT_SYMBOL_GPL(codec_hevc_free_mmu_headers);

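/* Allocate the MMU page map and one compressed frame header per buffer */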
static int codec_hevc_alloc_mmu_headers(struct amvdec_session *sess,
					struct codec_hevc_common *comm)
{
	struct device *dev = sess->core->dev;
	struct v4l2_m2m_buffer *buf;

	comm->mmu_map_vaddr = dma_alloc_coherent(dev, MMU_MAP_SIZE,
						 &comm->mmu_map_paddr,
						 GFP_KERNEL);
	if (!comm->mmu_map_vaddr)
		return -ENOMEM;

	v4l2_m2m_for_each_dst_buf(sess->m2m_ctx, buf) {
		u32 idx = buf->vb.vb2_buf.index;
		dma_addr_t paddr;
		void *vaddr = dma_alloc_coherent(dev, MMU_COMPRESS_HEADER_SIZE,
						 &paddr, GFP_KERNEL);
		if (!vaddr) {
			codec_hevc_free_mmu_headers(sess, comm);
			return -ENOMEM;
		}

		comm->mmu_header_vaddr[idx] = vaddr;
		comm->mmu_header_paddr[idx] = paddr;
	}

	return 0;
}

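/*
 * Allocate the auxiliary buffers required by the decoding mode (FBC and/or
 * MMU) and program the reference buffer table for the current platform.
 */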
int codec_hevc_setup_buffers(struct amvdec_session *sess,
			     struct codec_hevc_common *comm,
			     int is_10bit)
{
	struct amvdec_core *core = sess->core;
	int ret;

	if (codec_hevc_use_downsample(sess->pixfmt_cap, is_10bit)) {
		ret = codec_hevc_alloc_fbc_buffers(sess, comm);
		if (ret)
			return ret;
	}

	if (codec_hevc_use_mmu(core->platform->revision,
			       sess->pixfmt_cap, is_10bit)) {
		ret = codec_hevc_alloc_mmu_headers(sess, comm);
		if (ret) {
			codec_hevc_free_fbc_buffers(sess, comm);
			return ret;
		}
	}

	if (core->platform->revision == VDEC_REVISION_GXBB)
		codec_hevc_setup_buffers_gxbb(sess, comm, is_10bit);
	else
		codec_hevc_setup_buffers_gxl(sess, comm, is_10bit);

	return 0;
}
EXPORT_SYMBOL_GPL(codec_hevc_setup_buffers);

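/*
 * Fill the MMU page map with the consecutive page frame numbers backing the
 * destination of the current frame (the intermediate FBC buffer when the
 * CAPTURE format is NV12M, the CAPTURE buffer itself otherwise).
 */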
void codec_hevc_fill_mmu_map(struct amvdec_session *sess,
			     struct codec_hevc_common *comm,
			     struct vb2_buffer *vb)
{
	u32 size = amvdec_am21c_size(sess->width, sess->height);
	u32 nb_pages = size / PAGE_SIZE;
	u32 *mmu_map = comm->mmu_map_vaddr;
	u32 first_page;
	u32 i;

	if (sess->pixfmt_cap == V4L2_PIX_FMT_NV12M)
		first_page = comm->fbc_buffer_paddr[vb->index] >> PAGE_SHIFT;
	else
		first_page = vb2_dma_contig_plane_dma_addr(vb, 0) >> PAGE_SHIFT;

	for (i = 0; i < nb_pages; ++i)
		mmu_map[i] = first_page + i;
}
EXPORT_SYMBOL_GPL(codec_hevc_fill_mmu_map);