1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2019 MediaTek Inc.
4  * Author: Xia Jiang <xia.jiang@mediatek.com>
5  *
6  */
7 
8 #include <linux/clk.h>
9 #include <linux/interrupt.h>
10 #include <linux/irq.h>
11 #include <linux/io.h>
12 #include <linux/kernel.h>
13 #include <linux/module.h>
14 #include <linux/of.h>
15 #include <linux/of_device.h>
16 #include <linux/pm_runtime.h>
17 #include <linux/slab.h>
18 #include <media/media-device.h>
19 #include <media/videobuf2-core.h>
20 #include <media/videobuf2-dma-contig.h>
21 #include <media/videobuf2-v4l2.h>
22 #include <media/v4l2-mem2mem.h>
23 #include <media/v4l2-dev.h>
24 #include <media/v4l2-device.h>
25 #include <media/v4l2-fh.h>
26 #include <media/v4l2-event.h>
27 
28 #include "mtk_jpeg_core.h"
29 #include "mtk_jpeg_enc_hw.h"
30 
/*
 * Map from a userspace JPEG quality value to the hardware quality
 * register setting.  Entries are sorted by ascending quality_param so
 * mtk_jpeg_set_enc_params() can pick the first entry whose
 * quality_param is >= the requested quality.
 */
static const struct mtk_jpeg_enc_qlt mtk_jpeg_enc_quality[] = {
	{.quality_param = 34, .hardware_value = JPEG_ENC_QUALITY_Q34},
	{.quality_param = 39, .hardware_value = JPEG_ENC_QUALITY_Q39},
	{.quality_param = 48, .hardware_value = JPEG_ENC_QUALITY_Q48},
	{.quality_param = 60, .hardware_value = JPEG_ENC_QUALITY_Q60},
	{.quality_param = 64, .hardware_value = JPEG_ENC_QUALITY_Q64},
	{.quality_param = 68, .hardware_value = JPEG_ENC_QUALITY_Q68},
	{.quality_param = 74, .hardware_value = JPEG_ENC_QUALITY_Q74},
	{.quality_param = 80, .hardware_value = JPEG_ENC_QUALITY_Q80},
	{.quality_param = 82, .hardware_value = JPEG_ENC_QUALITY_Q82},
	{.quality_param = 84, .hardware_value = JPEG_ENC_QUALITY_Q84},
	{.quality_param = 87, .hardware_value = JPEG_ENC_QUALITY_Q87},
	{.quality_param = 90, .hardware_value = JPEG_ENC_QUALITY_Q90},
	{.quality_param = 92, .hardware_value = JPEG_ENC_QUALITY_Q92},
	{.quality_param = 95, .hardware_value = JPEG_ENC_QUALITY_Q95},
	{.quality_param = 97, .hardware_value = JPEG_ENC_QUALITY_Q97},
};
48 
#if defined(CONFIG_OF)
/* Devicetree match table: one entry per MT8195 JPEG encoder HW core. */
static const struct of_device_id mtk_jpegenc_drv_ids[] = {
	{
		.compatible = "mediatek,mt8195-jpgenc-hw",
	},
	{},
};
MODULE_DEVICE_TABLE(of, mtk_jpegenc_drv_ids);
#endif
58 
/*
 * mtk_jpeg_enc_reset - soft-reset the JPEG encoder hardware
 * @base: mapped base address of the encoder register block
 *
 * Drives the reset line low then back high, then clears the codec
 * selection register.  The write order is part of the reset sequence;
 * do not reorder.
 */
void mtk_jpeg_enc_reset(void __iomem *base)
{
	writel(0, base + JPEG_ENC_RSTB);
	writel(JPEG_ENC_RESET_BIT, base + JPEG_ENC_RSTB);
	writel(0, base + JPEG_ENC_CODEC_SEL);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_reset);
66 
/*
 * mtk_jpeg_enc_get_file_size - size in bytes of the encoded bitstream
 * @base: mapped base address of the encoder register block
 *
 * The hardware advances JPEG_ENC_DMA_ADDR0 as it writes output, so the
 * produced size is the current DMA write pointer minus the destination
 * buffer start address programmed in JPEG_ENC_DST_ADDR0.
 */
u32 mtk_jpeg_enc_get_file_size(void __iomem *base)
{
	return readl(base + JPEG_ENC_DMA_ADDR0) -
	       readl(base + JPEG_ENC_DST_ADDR0);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_get_file_size);
73 
74 void mtk_jpeg_enc_start(void __iomem *base)
75 {
76 	u32 value;
77 
78 	value = readl(base + JPEG_ENC_CTRL);
79 	value |= JPEG_ENC_CTRL_INT_EN_BIT | JPEG_ENC_CTRL_ENABLE_BIT;
80 	writel(value, base + JPEG_ENC_CTRL);
81 }
82 EXPORT_SYMBOL_GPL(mtk_jpeg_enc_start);
83 
84 void mtk_jpeg_set_enc_src(struct mtk_jpeg_ctx *ctx,  void __iomem *base,
85 			  struct vb2_buffer *src_buf)
86 {
87 	int i;
88 	dma_addr_t dma_addr;
89 
90 	for (i = 0; i < src_buf->num_planes; i++) {
91 		dma_addr = vb2_dma_contig_plane_dma_addr(src_buf, i) +
92 			   src_buf->planes[i].data_offset;
93 		if (!i)
94 			writel(dma_addr, base + JPEG_ENC_SRC_LUMA_ADDR);
95 		else
96 			writel(dma_addr, base + JPEG_ENC_SRC_CHROMA_ADDR);
97 	}
98 }
99 EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_src);
100 
101 void mtk_jpeg_set_enc_dst(struct mtk_jpeg_ctx *ctx, void __iomem *base,
102 			  struct vb2_buffer *dst_buf)
103 {
104 	dma_addr_t dma_addr;
105 	size_t size;
106 	u32 dma_addr_offset;
107 	u32 dma_addr_offsetmask;
108 
109 	dma_addr = vb2_dma_contig_plane_dma_addr(dst_buf, 0);
110 	dma_addr_offset = ctx->enable_exif ? MTK_JPEG_MAX_EXIF_SIZE : 0;
111 	dma_addr_offsetmask = dma_addr & JPEG_ENC_DST_ADDR_OFFSET_MASK;
112 	size = vb2_plane_size(dst_buf, 0);
113 
114 	writel(dma_addr_offset & ~0xf, base + JPEG_ENC_OFFSET_ADDR);
115 	writel(dma_addr_offsetmask & 0xf, base + JPEG_ENC_BYTE_OFFSET_MASK);
116 	writel(dma_addr & ~0xf, base + JPEG_ENC_DST_ADDR0);
117 	writel((dma_addr + size) & ~0xf, base + JPEG_ENC_STALL_ADDR0);
118 }
119 EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_dst);
120 
121 void mtk_jpeg_set_enc_params(struct mtk_jpeg_ctx *ctx,  void __iomem *base)
122 {
123 	u32 value;
124 	u32 width = ctx->out_q.enc_crop_rect.width;
125 	u32 height = ctx->out_q.enc_crop_rect.height;
126 	u32 enc_format = ctx->out_q.fmt->fourcc;
127 	u32 bytesperline = ctx->out_q.pix_mp.plane_fmt[0].bytesperline;
128 	u32 blk_num;
129 	u32 img_stride;
130 	u32 mem_stride;
131 	u32 i, enc_quality;
132 
133 	value = width << 16 | height;
134 	writel(value, base + JPEG_ENC_IMG_SIZE);
135 
136 	if (enc_format == V4L2_PIX_FMT_NV12M ||
137 	    enc_format == V4L2_PIX_FMT_NV21M)
138 	    /*
139 	     * Total 8 x 8 block number of luma and chroma.
140 	     * The number of blocks is counted from 0.
141 	     */
142 		blk_num = DIV_ROUND_UP(width, 16) *
143 			  DIV_ROUND_UP(height, 16) * 6 - 1;
144 	else
145 		blk_num = DIV_ROUND_UP(width, 16) *
146 			  DIV_ROUND_UP(height, 8) * 4 - 1;
147 	writel(blk_num, base + JPEG_ENC_BLK_NUM);
148 
149 	if (enc_format == V4L2_PIX_FMT_NV12M ||
150 	    enc_format == V4L2_PIX_FMT_NV21M) {
151 		/* 4:2:0 */
152 		img_stride = round_up(width, 16);
153 		mem_stride = bytesperline;
154 	} else {
155 		/* 4:2:2 */
156 		img_stride = round_up(width * 2, 32);
157 		mem_stride = img_stride;
158 	}
159 	writel(img_stride, base + JPEG_ENC_IMG_STRIDE);
160 	writel(mem_stride, base + JPEG_ENC_STRIDE);
161 
162 	enc_quality = mtk_jpeg_enc_quality[0].hardware_value;
163 	for (i = 0; i < ARRAY_SIZE(mtk_jpeg_enc_quality); i++) {
164 		if (ctx->enc_quality <= mtk_jpeg_enc_quality[i].quality_param) {
165 			enc_quality = mtk_jpeg_enc_quality[i].hardware_value;
166 			break;
167 		}
168 	}
169 	writel(enc_quality, base + JPEG_ENC_QUALITY);
170 
171 	value = readl(base + JPEG_ENC_CTRL);
172 	value &= ~JPEG_ENC_CTRL_YUV_FORMAT_MASK;
173 	value |= (ctx->out_q.fmt->hw_format & 3) << 3;
174 	if (ctx->enable_exif)
175 		value |= JPEG_ENC_CTRL_FILE_FORMAT_BIT;
176 	else
177 		value &= ~JPEG_ENC_CTRL_FILE_FORMAT_BIT;
178 	if (ctx->restart_interval)
179 		value |= JPEG_ENC_CTRL_RESTART_EN_BIT;
180 	else
181 		value &= ~JPEG_ENC_CTRL_RESTART_EN_BIT;
182 	writel(value, base + JPEG_ENC_CTRL);
183 
184 	writel(ctx->restart_interval, base + JPEG_ENC_RST_MCU_NUM);
185 }
186 EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_params);
187 
/*
 * mtk_jpegenc_put_buf - queue a finished capture buffer and complete
 * buffers strictly in frame order
 * @jpeg: per-HW-instance device whose hw_param holds the just-finished job
 *
 * Multiple encoder cores can finish out of order.  The finished
 * destination buffer is appended to the context's done queue, and then
 * buffers are handed to v4l2_m2m_buf_done() only in ascending
 * frame_num order (tracked by ctx->last_done_frame_num).  A buffer
 * that completed early stays queued until its predecessors arrive.
 * All queue manipulation happens under ctx->done_queue_lock.
 */
static void mtk_jpegenc_put_buf(struct mtk_jpegenc_comp_dev *jpeg)
{
	struct mtk_jpeg_ctx *ctx;
	struct vb2_v4l2_buffer *dst_buffer;
	struct list_head *temp_entry;
	struct list_head *pos = NULL;
	struct mtk_jpeg_src_buf *dst_done_buf, *tmp_dst_done_buf;
	unsigned long flags;

	ctx = jpeg->hw_param.curr_ctx;
	if (!ctx) {
		dev_err(jpeg->dev, "comp_jpeg ctx fail !!!\n");
		return;
	}

	dst_buffer = jpeg->hw_param.dst_buffer;
	if (!dst_buffer) {
		dev_err(jpeg->dev, "comp_jpeg dst_buffer fail !!!\n");
		return;
	}

	dst_done_buf = container_of(dst_buffer,
				    struct mtk_jpeg_src_buf, b);

	spin_lock_irqsave(&ctx->done_queue_lock, flags);
	list_add_tail(&dst_done_buf->list, &ctx->dst_done_queue);
	/*
	 * Walk the done queue from the tail; each match advances
	 * last_done_frame_num so consecutive frames drain in one pass.
	 * The outer condition re-checks pos against the list head after
	 * the inner walk finishes.
	 */
	while (!list_empty(&ctx->dst_done_queue) &&
	       (pos != &ctx->dst_done_queue)) {
		list_for_each_prev_safe(pos, temp_entry, &ctx->dst_done_queue) {
			tmp_dst_done_buf = list_entry(pos,
						      struct mtk_jpeg_src_buf,
						      list);
			if (tmp_dst_done_buf->frame_num ==
				ctx->last_done_frame_num) {
				list_del(&tmp_dst_done_buf->list);
				v4l2_m2m_buf_done(&tmp_dst_done_buf->b,
						  VB2_BUF_STATE_DONE);
				ctx->last_done_frame_num++;
			}
		}
	}
	spin_unlock_irqrestore(&ctx->done_queue_lock, flags);
}
231 
/*
 * mtk_jpegenc_timeout_work - recover a HW instance after a stuck job
 * @work: the delayed work embedded in struct mtk_jpegenc_comp_dev
 *
 * Runs when the completion IRQ did not arrive in time.  Resets the
 * encoder, releases clock and runtime-PM references, marks the HW idle
 * and wakes any waiter, then completes the source buffer with
 * VB2_BUF_STATE_ERROR and flushes the destination buffer through the
 * in-order done queue.  The teardown order (reset -> clk -> pm ->
 * state -> wake) mirrors the normal IRQ completion path.
 */
static void mtk_jpegenc_timeout_work(struct work_struct *work)
{
	struct delayed_work *dly_work = to_delayed_work(work);
	struct mtk_jpegenc_comp_dev *cjpeg =
		container_of(dly_work,
			     struct mtk_jpegenc_comp_dev,
			     job_timeout_work);
	struct mtk_jpeg_dev *master_jpeg = cjpeg->master_dev;
	enum vb2_buffer_state buf_state = VB2_BUF_STATE_ERROR;
	struct vb2_v4l2_buffer *src_buf, *dst_buf;

	src_buf = cjpeg->hw_param.src_buffer;
	dst_buf = cjpeg->hw_param.dst_buffer;
	v4l2_m2m_buf_copy_metadata(src_buf, dst_buf, true);

	mtk_jpeg_enc_reset(cjpeg->reg_base);
	clk_disable_unprepare(cjpeg->venc_clk.clks->clk);
	pm_runtime_put(cjpeg->dev);
	cjpeg->hw_state = MTK_JPEG_HW_IDLE;
	atomic_inc(&master_jpeg->enchw_rdy);
	wake_up(&master_jpeg->enc_hw_wq);
	v4l2_m2m_buf_done(src_buf, buf_state);
	mtk_jpegenc_put_buf(cjpeg);
}
256 
/*
 * mtk_jpegenc_hw_irq_handler - encode-done interrupt for one HW core
 * @irq:  interrupt number (unused)
 * @priv: the struct mtk_jpegenc_comp_dev registered with the IRQ
 *
 * Cancels the pending timeout work, acknowledges the interrupt status,
 * reads the produced bitstream size into the capture buffer payload,
 * completes the source buffer and pushes the destination buffer through
 * the in-order done queue.  Clock/PM references taken when the job was
 * scheduled are dropped here, and if the context still has queued
 * buffers the scheduling work is re-queued before the HW is marked
 * idle and waiters are woken.
 */
static irqreturn_t mtk_jpegenc_hw_irq_handler(int irq, void *priv)
{
	struct vb2_v4l2_buffer *src_buf, *dst_buf;
	enum vb2_buffer_state buf_state;
	struct mtk_jpeg_ctx *ctx;
	u32 result_size;
	u32 irq_status;

	struct mtk_jpegenc_comp_dev *jpeg = priv;
	struct mtk_jpeg_dev *master_jpeg = jpeg->master_dev;

	cancel_delayed_work(&jpeg->job_timeout_work);

	ctx = jpeg->hw_param.curr_ctx;
	src_buf = jpeg->hw_param.src_buffer;
	dst_buf = jpeg->hw_param.dst_buffer;
	v4l2_m2m_buf_copy_metadata(src_buf, dst_buf, true);

	irq_status = readl(jpeg->reg_base + JPEG_ENC_INT_STS) &
		JPEG_ENC_INT_STATUS_MASK_ALLIRQ;
	if (irq_status)
		writel(0, jpeg->reg_base + JPEG_ENC_INT_STS);
	/* Anything other than the DONE bit is unexpected; log and continue. */
	if (!(irq_status & JPEG_ENC_INT_STATUS_DONE))
		dev_warn(jpeg->dev, "Jpg Enc occurs unknown Err.");

	result_size = mtk_jpeg_enc_get_file_size(jpeg->reg_base);
	vb2_set_plane_payload(&dst_buf->vb2_buf, 0, result_size);
	buf_state = VB2_BUF_STATE_DONE;
	v4l2_m2m_buf_done(src_buf, buf_state);
	mtk_jpegenc_put_buf(jpeg);
	pm_runtime_put(ctx->jpeg->dev);
	clk_disable_unprepare(jpeg->venc_clk.clks->clk);
	if (!list_empty(&ctx->fh.m2m_ctx->out_q_ctx.rdy_queue) ||
	    !list_empty(&ctx->fh.m2m_ctx->cap_q_ctx.rdy_queue)) {
		queue_work(master_jpeg->workqueue, &ctx->jpeg_work);
	}

	jpeg->hw_state = MTK_JPEG_HW_IDLE;
	wake_up(&master_jpeg->enc_hw_wq);
	atomic_inc(&master_jpeg->enchw_rdy);

	return IRQ_HANDLED;
}
300 
301 static int mtk_jpegenc_hw_init_irq(struct mtk_jpegenc_comp_dev *dev)
302 {
303 	struct platform_device *pdev = dev->plat_dev;
304 	int ret;
305 
306 	dev->jpegenc_irq = platform_get_irq(pdev, 0);
307 	if (dev->jpegenc_irq < 0)
308 		return dev->jpegenc_irq;
309 
310 	ret = devm_request_irq(&pdev->dev,
311 			       dev->jpegenc_irq,
312 			       mtk_jpegenc_hw_irq_handler,
313 			       0,
314 			       pdev->name, dev);
315 	if (ret) {
316 		dev_err(&pdev->dev, "Failed to devm_request_irq %d (%d)",
317 			dev->jpegenc_irq, ret);
318 		return ret;
319 	}
320 
321 	return 0;
322 }
323 
/*
 * mtk_jpegenc_hw_probe - probe one JPEG encoder HW core and register it
 * with the master (parent) JPEG device
 * @pdev: platform device for this core
 *
 * The master device is the parent's drvdata; probing defers until it
 * exists.  The first core to probe initializes the shared master state
 * (encoder slot array, wait queue, ordered workqueue).  Each core then
 * sets up its clock handles, registers, IRQ and timeout work, and
 * claims the first free slot in master_dev->enc_hw_dev[].
 */
static int mtk_jpegenc_hw_probe(struct platform_device *pdev)
{
	struct mtk_jpegenc_clk *jpegenc_clk;
	struct mtk_jpeg_dev *master_dev;
	struct mtk_jpegenc_comp_dev *dev;
	int ret, i;

	struct device *decs = &pdev->dev;

	if (!decs->parent)
		return -EPROBE_DEFER;

	master_dev = dev_get_drvdata(decs->parent);
	if (!master_dev)
		return -EPROBE_DEFER;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	dev->plat_dev = pdev;
	dev->dev = &pdev->dev;

	/* First core to arrive sets up the state shared by all cores. */
	if (!master_dev->is_jpgenc_multihw) {
		master_dev->is_jpgenc_multihw = true;
		for (i = 0; i < MTK_JPEGENC_HW_MAX; i++)
			master_dev->enc_hw_dev[i] = NULL;

		init_waitqueue_head(&master_dev->enc_hw_wq);
		master_dev->workqueue = alloc_ordered_workqueue(MTK_JPEG_NAME,
								WQ_MEM_RECLAIM
								| WQ_FREEZABLE);
		if (!master_dev->workqueue)
			return -EINVAL;
	}

	atomic_set(&master_dev->enchw_rdy, MTK_JPEGENC_HW_MAX);
	spin_lock_init(&dev->hw_lock);
	dev->hw_state = MTK_JPEG_HW_IDLE;

	INIT_DELAYED_WORK(&dev->job_timeout_work,
			  mtk_jpegenc_timeout_work);

	jpegenc_clk = &dev->venc_clk;

	jpegenc_clk->clk_num = devm_clk_bulk_get_all(&pdev->dev,
						     &jpegenc_clk->clks);
	if (jpegenc_clk->clk_num < 0)
		return dev_err_probe(&pdev->dev, jpegenc_clk->clk_num,
				     "Failed to get jpegenc clock count\n");

	dev->reg_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(dev->reg_base))
		return PTR_ERR(dev->reg_base);

	ret = mtk_jpegenc_hw_init_irq(dev);
	if (ret)
		return ret;

	/* Claim every still-empty slot; loop fills from the first free one. */
	for (i = 0; i < MTK_JPEGENC_HW_MAX; i++) {
		if (master_dev->enc_hw_dev[i])
			continue;

		master_dev->enc_hw_dev[i] = dev;
		master_dev->reg_encbase[i] = dev->reg_base;
		dev->master_dev = master_dev;
	}

	platform_set_drvdata(pdev, dev);
	pm_runtime_enable(&pdev->dev);

	return 0;
}
397 
/* Platform driver for the per-core JPEG encoder hardware instances. */
static struct platform_driver mtk_jpegenc_hw_driver = {
	.probe = mtk_jpegenc_hw_probe,
	.driver = {
		.name = "mtk-jpegenc-hw",
		.of_match_table = of_match_ptr(mtk_jpegenc_drv_ids),
	},
};

module_platform_driver(mtk_jpegenc_hw_driver);

MODULE_DESCRIPTION("MediaTek JPEG encode HW driver");
MODULE_LICENSE("GPL");
410