1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * The Marvell camera core. This device appears in a number of settings,
4 * so it needs platform-specific support outside of the core.
5 *
6 * Copyright 2011 Jonathan Corbet corbet@lwn.net
7 * Copyright 2018 Lubomir Rintel <lkundrak@v3.sk>
8 */
9 #include <linux/kernel.h>
10 #include <linux/module.h>
11 #include <linux/fs.h>
12 #include <linux/mm.h>
13 #include <linux/i2c.h>
14 #include <linux/interrupt.h>
15 #include <linux/spinlock.h>
16 #include <linux/slab.h>
17 #include <linux/device.h>
18 #include <linux/wait.h>
19 #include <linux/list.h>
20 #include <linux/dma-mapping.h>
21 #include <linux/delay.h>
22 #include <linux/vmalloc.h>
23 #include <linux/io.h>
24 #include <linux/clk.h>
25 #include <linux/clk-provider.h>
26 #include <linux/videodev2.h>
27 #include <linux/pm_runtime.h>
28 #include <media/v4l2-device.h>
29 #include <media/v4l2-ioctl.h>
30 #include <media/v4l2-ctrls.h>
31 #include <media/v4l2-event.h>
32 #include <media/videobuf2-vmalloc.h>
33 #include <media/videobuf2-dma-contig.h>
34 #include <media/videobuf2-dma-sg.h>
35
36 #include "mcam-core.h"
37
38 #ifdef MCAM_MODE_VMALLOC
39 /*
40 * Internal DMA buffer management. Since the controller cannot do S/G I/O,
41 * we must have physically contiguous buffers to bring frames into.
42 * These parameters control how many buffers we use, whether we
43 * allocate them at load time (better chance of success, but nails down
44 * memory) or when somebody tries to use the camera (riskier), and,
45 * for load-time allocation, how big they should be.
46 *
47 * The controller can cycle through three buffers. We could use
48 * more by flipping pointers around, but it probably makes little
49 * sense.
50 */
51
52 static bool alloc_bufs_at_read;
53 module_param(alloc_bufs_at_read, bool, 0444);
54 MODULE_PARM_DESC(alloc_bufs_at_read,
55 "Non-zero value causes DMA buffers to be allocated when the video capture device is read, rather than at module load time. This saves memory, but decreases the chances of successfully getting those buffers. This parameter is only used in the vmalloc buffer mode");
56
57 static int n_dma_bufs = 3;
58 module_param(n_dma_bufs, uint, 0644);
59 MODULE_PARM_DESC(n_dma_bufs,
60 "The number of DMA buffers to allocate. Can be either two (saves memory, makes timing tighter) or three.");
61
62 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */
63 module_param(dma_buf_size, uint, 0444);
64 MODULE_PARM_DESC(dma_buf_size,
65 "The size of the allocated DMA buffers. If actual operating parameters require larger buffers, an attempt to reallocate will be made.");
66 #else /* MCAM_MODE_VMALLOC */
67 static const bool alloc_bufs_at_read;
68 static const int n_dma_bufs = 3; /* Used by S/G_PARM */
69 #endif /* MCAM_MODE_VMALLOC */
70
/* If set, ask the sensor to flip the captured image vertically. */
static bool flip;
module_param(flip, bool, 0444);
MODULE_PARM_DESC(flip,
		"If set, the sensor will be instructed to flip the image vertically.");

/* -1 = take the platform driver's choice; 0 = vmalloc, 1 = DMA contiguous. */
static int buffer_mode = -1;
module_param(buffer_mode, int, 0444);
MODULE_PARM_DESC(buffer_mode,
		"Set the buffer mode to be used; default is to go with what the platform driver asks for. Set to 0 for vmalloc, 1 for DMA contiguous.");
80
/*
 * Status flags. Always manipulated with bit operations in cam->flags.
 * Bits 0-2 (CF_BUFn_VALID) mark DMA buffer n as holding a complete
 * frame; CF_FRAME_SOFn marks that buffer n's start-of-frame interrupt
 * has been seen (cleared together with the valid bits in
 * mcam_reset_buffers()).
 */
#define CF_BUF0_VALID	 0	/* Buffers valid - first three */
#define CF_BUF1_VALID	 1
#define CF_BUF2_VALID	 2
#define CF_DMA_ACTIVE	 3	/* A frame is incoming */
#define CF_CONFIG_NEEDED 4	/* Must configure hardware */
#define CF_SINGLE_BUFFER 5	/* Running with a single buffer */
#define CF_SG_RESTART	 6	/* SG restart needed */
#define CF_FRAME_SOF0	 7	/* Frame 0 started */
#define CF_FRAME_SOF1	 8
#define CF_FRAME_SOF2	 9

/* Invoke an operation on the attached sensor subdevice. */
#define sensor_call(cam, o, f, args...) \
	v4l2_subdev_call(cam->sensor, o, f, ##args)

/* Map an async notifier back to its containing mcam_camera. */
#define notifier_to_mcam(notifier) \
	container_of(notifier, struct mcam_camera, notifier)
100
/*
 * The pixel formats we support, each paired with the media-bus code
 * the sensor must produce for it.
 */
static struct mcam_format_struct {
	__u32 pixelformat;	/* V4L2 fourcc */
	int bpp;		/* Bytes per pixel (Y plane for planar fmts) */
	bool planar;		/* Y/U/V in separate planes? */
	u32 mbus_code;		/* Matching media bus format */
} mcam_formats[] = {
	{
		.pixelformat = V4L2_PIX_FMT_YUYV,
		.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp = 2,
		.planar = false,
	},
	{
		.pixelformat = V4L2_PIX_FMT_YVYU,
		.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp = 2,
		.planar = false,
	},
	{
		.pixelformat = V4L2_PIX_FMT_YUV420,
		.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp = 1,
		.planar = true,
	},
	{
		.pixelformat = V4L2_PIX_FMT_YVU420,
		.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
		.bpp = 1,
		.planar = true,
	},
	{
		.pixelformat = V4L2_PIX_FMT_XRGB444,
		.mbus_code = MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE,
		.bpp = 2,
		.planar = false,
	},
	{
		.pixelformat = V4L2_PIX_FMT_RGB565,
		.mbus_code = MEDIA_BUS_FMT_RGB565_2X8_LE,
		.bpp = 2,
		.planar = false,
	},
	{
		.pixelformat = V4L2_PIX_FMT_SBGGR8,
		.mbus_code = MEDIA_BUS_FMT_SBGGR8_1X8,
		.bpp = 1,
		.planar = false,
	},
};
#define N_MCAM_FMTS ARRAY_SIZE(mcam_formats)
151
/*
 * Look up the format-table entry for a V4L2 fourcc.  Never returns
 * NULL: an unknown fourcc falls back to the table's first entry.
 */
static struct mcam_format_struct *mcam_find_format(u32 pixelformat)
{
	struct mcam_format_struct *f;
	struct mcam_format_struct *end = mcam_formats + N_MCAM_FMTS;

	for (f = mcam_formats; f < end; f++)
		if (f->pixelformat == pixelformat)
			return f;
	/* No match; hand back the default (first) format. */
	return mcam_formats;
}
162
/*
 * The default format we use until somebody says otherwise:
 * VGA-sized packed YUYV (2 bytes/pixel).
 */
static const struct v4l2_pix_format mcam_def_pix_format = {
	.width		= VGA_WIDTH,
	.height		= VGA_HEIGHT,
	.pixelformat	= V4L2_PIX_FMT_YUYV,
	.field		= V4L2_FIELD_NONE,
	.bytesperline	= VGA_WIDTH*2,
	.sizeimage	= VGA_WIDTH*VGA_HEIGHT*2,
	.colorspace	= V4L2_COLORSPACE_SRGB,
};

/* Media-bus code matching the default pixel format above. */
static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
177
178
/*
 * The two-word DMA descriptor format used by the Armada 610 and like. There
 * Is a three-word format as well (set C1_DESC_3WORD) where the third
 * word is a pointer to the next descriptor, but we don't use it. Two-word
 * descriptors have to be contiguous in memory.
 */
struct mcam_dma_desc {
	u32 dma_addr;		/* Bus address of the segment */
	u32 segment_len;	/* Length of the segment in bytes */
};
189
/*
 * Our buffer type for working with videobuf2. Note that the vb2
 * developers have decreed that struct vb2_v4l2_buffer must be at the
 * beginning of this structure.
 */
struct mcam_vb_buffer {
	struct vb2_v4l2_buffer vb_buf;	/* Must be first (vb2 requirement) */
	struct list_head queue;		/* Link on cam->buffers */
	struct mcam_dma_desc *dma_desc;	/* Descriptor virtual address */
	dma_addr_t dma_desc_pa;		/* Descriptor physical address */
};
201
/* Recover our wrapper structure from the embedded vb2 buffer. */
static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_v4l2_buffer *vb)
{
	return container_of(vb, struct mcam_vb_buffer, vb_buf);
}
206
/*
 * Hand a completed buffer back to user space: fill in the payload
 * size, sequence number and timestamp, then mark it DONE for vb2.
 */
static void mcam_buffer_done(struct mcam_camera *cam, int frame,
		struct vb2_v4l2_buffer *vbuf)
{
	vbuf->vb2_buf.planes[0].bytesused = cam->pix_format.sizeimage;
	vbuf->sequence = cam->buf_seq[frame];
	vbuf->field = V4L2_FIELD_NONE;
	vbuf->vb2_buf.timestamp = ktime_get_ns();
	vb2_set_plane_payload(&vbuf->vb2_buf, 0, cam->pix_format.sizeimage);
	vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE);
}
220
221
222
/*
 * Debugging and related.  Note: no trailing semicolons in the macro
 * bodies — the caller supplies the ';', so the macros expand safely
 * inside unbraced if/else branches.
 */
#define cam_err(cam, fmt, arg...) \
	dev_err((cam)->dev, fmt, ##arg)
#define cam_warn(cam, fmt, arg...) \
	dev_warn((cam)->dev, fmt, ##arg)
#define cam_dbg(cam, fmt, arg...) \
	dev_dbg((cam)->dev, fmt, ##arg)
232
233
/*
 * Flag manipulation helpers
 */
static void mcam_reset_buffers(struct mcam_camera *cam)
{
	int i;

	cam->next_buf = -1;
	for (i = 0; i < cam->nbufs; i++) {
		/* Buffer i no longer holds a frame, nor has its SOF fired. */
		clear_bit(i, &cam->flags);
		clear_bit(CF_FRAME_SOF0 + i, &cam->flags);
	}
}
247
/* Does the hardware need (re)configuration before the next start? */
static inline int mcam_needs_config(struct mcam_camera *cam)
{
	return test_bit(CF_CONFIG_NEEDED, &cam->flags);
}
252
/* Set or clear the "hardware must be reconfigured" flag. */
static void mcam_set_config_needed(struct mcam_camera *cam, int needed)
{
	if (needed)
		set_bit(CF_CONFIG_NEEDED, &cam->flags);
	else
		clear_bit(CF_CONFIG_NEEDED, &cam->flags);
}
260
/* ------------------------------------------------------------------- */
/*
 * Make the controller start grabbing images. Everything must
 * be set up before doing this.
 */
static void mcam_ctlr_start(struct mcam_camera *cam)
{
	/* set_bit performs a read, so no other barrier should be
	   needed here */
	mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE);
}
272
/* Tell the controller to stop grabbing frames (clears C0_ENABLE). */
static void mcam_ctlr_stop(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE);
}
277
/*
 * Bring up the MIPI CSI-2 receiver: (re)compute and program the D-PHY
 * timing registers, then — on first use only — enable the lanes.
 */
static void mcam_enable_mipi(struct mcam_camera *mcam)
{
	/* Using MIPI mode and enable MIPI */
	if (mcam->calc_dphy)
		mcam->calc_dphy(mcam);
	cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n",
			mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]);

	if (!mcam->mipi_enabled) {
		/* Sanitize a bogus lane count before programming it. */
		if (mcam->lane > 4 || mcam->lane <= 0) {
			cam_warn(mcam, "lane number error\n");
			mcam->lane = 1;	/* set the default value */
		}
		/*
		 * 0x41 actives 1 lane
		 * 0x43 actives 2 lanes
		 * 0x45 actives 3 lanes (never happen)
		 * 0x47 actives 4 lanes
		 */
		mcam_reg_write(mcam, REG_CSI2_CTRL0,
			CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane));
		mcam->mipi_enabled = true;
	}
}
305
/* Shut the CSI-2 receiver down (parallel mode, or MIPI disabled). */
static void mcam_disable_mipi(struct mcam_camera *mcam)
{
	/* Using Parallel mode or disable MIPI */
	mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0);
	mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0);
	mcam->mipi_enabled = false;
}
315
mcam_fmt_is_planar(__u32 pfmt)316 static bool mcam_fmt_is_planar(__u32 pfmt)
317 {
318 struct mcam_format_struct *f;
319
320 f = mcam_find_format(pfmt);
321 return f->planar;
322 }
323
/*
 * Program the base-address registers for one frame slot.  For the
 * planar 4:2:0 formats the U and V planes live in the same contiguous
 * buffer right after the Y plane; only their order differs between
 * YUV420 and YVU420.
 */
static void mcam_write_yuv_bases(struct mcam_camera *cam,
		unsigned frame, dma_addr_t base)
{
	struct v4l2_pix_format *fmt = &cam->pix_format;
	u32 pixel_count = fmt->width * fmt->height;
	dma_addr_t y = base, u = 0, v = 0;

	if (fmt->pixelformat == V4L2_PIX_FMT_YUV420) {
		u = y + pixel_count;
		v = u + pixel_count / 4;
	} else if (fmt->pixelformat == V4L2_PIX_FMT_YVU420) {
		v = y + pixel_count;
		u = v + pixel_count / 4;
	}

	mcam_reg_write(cam, REG_Y0BAR + frame * 4, y);
	if (mcam_fmt_is_planar(fmt->pixelformat)) {
		mcam_reg_write(cam, REG_U0BAR + frame * 4, u);
		mcam_reg_write(cam, REG_V0BAR + frame * 4, v);
	}
}
352
353 /* ------------------------------------------------------------------- */
354
355 #ifdef MCAM_MODE_VMALLOC
356 /*
357 * Code specific to the vmalloc buffer mode.
358 */
359
/*
 * Allocate in-kernel DMA buffers for vmalloc mode.
 *
 * @loadtime: nonzero at module load time (size from the dma_buf_size
 *	parameter); zero at run time (size from the current format).
 * Returns 0, or -ENOMEM if fewer than two buffers could be obtained —
 * the controller cannot stream with fewer than two.
 */
static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	int i;

	mcam_set_config_needed(cam, 1);
	if (loadtime)
		cam->dma_buf_size = dma_buf_size;
	else
		cam->dma_buf_size = cam->pix_format.sizeimage;
	if (n_dma_bufs > 3)
		n_dma_bufs = 3;

	cam->nbufs = 0;
	for (i = 0; i < n_dma_bufs; i++) {
		cam->dma_bufs[i] = dma_alloc_coherent(cam->dev,
				cam->dma_buf_size, cam->dma_handles + i,
				GFP_KERNEL);
		if (cam->dma_bufs[i] == NULL) {
			cam_warn(cam, "Failed to allocate DMA buffer\n");
			break;
		}
		(cam->nbufs)++;
	}

	switch (cam->nbufs) {
	case 1:
		/* A single buffer is useless; free it and fail below. */
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[0], cam->dma_handles[0]);
		cam->nbufs = 0;
		fallthrough;
	case 0:
		cam_err(cam, "Insufficient DMA buffers, cannot operate\n");
		return -ENOMEM;

	case 2:
		if (n_dma_bufs > 2)
			cam_warn(cam, "Will limp along with only 2 buffers\n");
		break;
	}
	return 0;
}
404
/* Release all vmalloc-mode DMA buffers and reset the buffer count. */
static void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	int i;

	for (i = 0; i < cam->nbufs; i++) {
		dma_free_coherent(cam->dev, cam->dma_buf_size,
				cam->dma_bufs[i], cam->dma_handles[i]);
		cam->dma_bufs[i] = NULL;
	}
	cam->nbufs = 0;
}
416
417
/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two YUV buffers. Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
	mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	if (cam->chip_id == MCAM_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}
438
/*
 * Copy data out to user space in the vmalloc case.  Runs in tasklet
 * (softirq) context; walks the ready DMA buffers and memcpy's each
 * into the next queued vb2 buffer.
 */
static void mcam_frame_tasklet(struct tasklet_struct *t)
{
	struct mcam_camera *cam = from_tasklet(cam, t, s_tasklet);
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;	/* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		/* Skip buffers that don't hold a complete frame yet. */
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;	/* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy.  This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf.vb2_buf, 0),
				cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
480
481
482 /*
483 * Make sure our allocated buffers are up to the task.
484 */
mcam_check_dma_buffers(struct mcam_camera * cam)485 static int mcam_check_dma_buffers(struct mcam_camera *cam)
486 {
487 if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
488 mcam_free_dma_bufs(cam);
489 if (cam->nbufs == 0)
490 return mcam_alloc_dma_bufs(cam, 0);
491 return 0;
492 }
493
/* Frame-complete hook for vmalloc mode: defer the copy to the tasklet. */
static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	tasklet_schedule(&cam->s_tasklet);
}
498
499 #else /* MCAM_MODE_VMALLOC */
500
/* No-op stubs used when vmalloc buffer mode is compiled out. */
static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	return;
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}
515
516
517
518 #endif /* MCAM_MODE_VMALLOC */
519
520
521 #ifdef MCAM_MODE_DMA_CONTIG
522 /* ---------------------------------------------------------------------- */
523 /*
524 * DMA-contiguous code.
525 */
526
/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;
	dma_addr_t dma_handle;
	struct vb2_v4l2_buffer *vb;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		/* frame ^ 0x1 is the other of the two frame slots. */
		buf = cam->vb_bufs[frame ^ 0x1];
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
	} else {
		/*
		 * OK, we have a buffer we can use.
		 */
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
	}

	cam->vb_bufs[frame] = buf;
	vb = &buf->vb_buf;

	dma_handle = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);
	mcam_write_yuv_bases(cam, frame, dma_handle);
}
564
/*
 * Initial B_DMA_contig setup: run in two-buffer mode with a buffer
 * programmed into each of the first two frame slots.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	cam->nbufs = 2;
	mcam_set_contig_buffer(cam, 0);
	mcam_set_contig_buffer(cam, 1);
}
575
/*
 * Frame completion handling for DMA-contig mode: deliver the frame
 * (unless we are recycling it in single-buffer mode) and arm the slot
 * with the next buffer.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		cam->vb_bufs[frame] = NULL;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}
590
591 #endif /* MCAM_MODE_DMA_CONTIG */
592
593 #ifdef MCAM_MODE_DMA_SG
594 /* ---------------------------------------------------------------------- */
595 /*
596 * Scatter/gather-specific code.
597 */
598
/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;
	struct sg_table *sg_table;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	sg_table = vb2_dma_sg_plane_desc(&buf->vb_buf.vb2_buf, 0);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			sg_table->nents * sizeof(struct mcam_dma_desc));
	/* No separate U/V descriptor chains in this mode. */
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	cam->vb_bufs[0] = buf;
}
624
/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	/* Two-word descriptors, as described at the top of the file. */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}
643
644
/*
 * Frame completion with S/G is trickier. We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally. So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared. So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt. That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[0];

	/*
	 * If we're no longer supposed to be streaming, don't do anything.
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * If we have another buffer available, put it in and
	 * restart the engine.
	 */
	if (!list_empty(&cam->buffers)) {
		mcam_sg_next_buffer(cam);
		mcam_ctlr_start(cam);
	/*
	 * Otherwise set CF_SG_RESTART and the controller will
	 * be restarted once another buffer shows up.
	 */
	} else {
		set_bit(CF_SG_RESTART, &cam->flags);
		cam->frame_state.singles++;
		cam->vb_bufs[0] = NULL;
	}
	/*
	 * Now we can give the completed frame back to user space.
	 */
	cam->frame_state.delivered++;
	mcam_buffer_done(cam, frame, &buf->vb_buf);
}
689
690
/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}
703
704 #else /* MCAM_MODE_DMA_SG */
705
/* Stub: nothing to restart when S/G mode is compiled out. */
static inline void mcam_sg_restart(struct mcam_camera *cam)
{
	return;
}
710
711 #endif /* MCAM_MODE_DMA_SG */
712
713 /* ---------------------------------------------------------------------- */
714 /*
715 * Buffer-mode-independent controller code.
716 */
717
/*
 * Image format setup: program the controller's pitch, image size and
 * input-format registers from cam->pix_format.
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	struct v4l2_pix_format *fmt = &cam->pix_format;
	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;

	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
			fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;

	/* Per-format Y and U/V line pitches, in bytes. */
	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
		widthy = fmt->width * 2;
		widthuv = 0;
		break;
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		widthy = fmt->width;
		widthuv = fmt->width / 2;
		break;
	default:
		widthy = fmt->bytesperline;
		widthuv = 0;
		break;
	}

	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
			IMGP_YP_MASK | IMGP_UVP_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);

	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YVYU:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_XRGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XBGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_SBGGR8:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
		break;
	default:
		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
		break;
	}

	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
}
792
793
/*
 * Configure the controller for operation; caller holds the
 * device mutex.  Always returns 0.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	/* dma_setup is the buffer-mode-specific setup routine. */
	cam->dma_setup(cam);
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
810
/* Enable frame interrupts, clearing any that are already pending. */
static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}
820
/* Mask off all frame interrupts. */
static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}
825
/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not). Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames. In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
859
/*
 * Power up and down.  Returns 0 or the error from the platform
 * power-up callback.
 */
static int mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&cam->dev_lock, flags);
	if (cam->plat_power_up) {
		ret = cam->plat_power_up(cam);
		if (ret) {
			spin_unlock_irqrestore(&cam->dev_lock, flags);
			return ret;
		}
	}
	/* Take the controller itself out of power-down. */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
880
/* Power the controller down, then let the platform do its part. */
static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	if (cam->plat_power_down)
		cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}
896
897 /* ---------------------------------------------------------------------- */
898 /*
899 * Master sensor clock.
900 */
mclk_prepare(struct clk_hw * hw)901 static int mclk_prepare(struct clk_hw *hw)
902 {
903 struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
904
905 clk_prepare(cam->clk[0]);
906 return 0;
907 }
908
/* Undo mclk_prepare(): unprepare the upstream clock. */
static void mclk_unprepare(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	clk_unprepare(cam->clk[0]);
}
915
mclk_enable(struct clk_hw * hw)916 static int mclk_enable(struct clk_hw *hw)
917 {
918 struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
919 int mclk_src;
920 int mclk_div;
921 int ret;
922
923 /*
924 * Clock the sensor appropriately. Controller clock should
925 * be 48MHz, sensor "typical" value is half that.
926 */
927 if (cam->bus_type == V4L2_MBUS_CSI2_DPHY) {
928 mclk_src = cam->mclk_src;
929 mclk_div = cam->mclk_div;
930 } else {
931 mclk_src = 3;
932 mclk_div = 2;
933 }
934
935 ret = pm_runtime_resume_and_get(cam->dev);
936 if (ret < 0)
937 return ret;
938 ret = clk_enable(cam->clk[0]);
939 if (ret) {
940 pm_runtime_put(cam->dev);
941 return ret;
942 }
943
944 mcam_reg_write(cam, REG_CLKCTRL, (mclk_src << 29) | mclk_div);
945 mcam_ctlr_power_up(cam);
946
947 return 0;
948 }
949
/* Disable the sensor master clock, reversing mclk_enable() in order. */
static void mclk_disable(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	mcam_ctlr_power_down(cam);
	clk_disable(cam->clk[0]);
	pm_runtime_put(cam->dev);
}
958
/* The controller clock is fixed at 48MHz (see comment in mclk_enable). */
static unsigned long mclk_recalc_rate(struct clk_hw *hw,
				unsigned long parent_rate)
{
	return 48000000;
}
964
/* clk_ops exposing the sensor master clock to the common clk framework. */
static const struct clk_ops mclk_ops = {
	.prepare = mclk_prepare,
	.unprepare = mclk_unprepare,
	.enable = mclk_enable,
	.disable = mclk_disable,
	.recalc_rate = mclk_recalc_rate,
};
972
/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */
static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}
982
/*
 * We have found the sensor on the i2c. Let's try to have a
 * conversation.  Resets the sensor and moves the device to S_IDLE;
 * returns the reset result.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	int ret;

	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	/* Get/set parameters? */
	cam->state = S_IDLE;
	return ret;
}
999
/*
 * Configure the sensor to match the parameters we have. Caller should
 * hold s_mutex.  Pushes the "flip" module parameter to the sensor as
 * a V4L2_CID_VFLIP control.
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
	struct v4l2_control ctrl;

	memset(&ctrl, 0, sizeof(ctrl));
	ctrl.id = V4L2_CID_VFLIP;
	ctrl.value = flip;
	return v4l2_s_ctrl(NULL, cam->sensor->ctrl_handler, &ctrl);
}
1013
1014
/*
 * Push our current pix_format out to the sensor: init it, set its
 * media-bus format, then apply the flip control (in that order — see
 * the OV7670 note below).  Returns an accumulated error indication.
 */
static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, pad, set_fmt, NULL, &format);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}
1032
1033 /*
1034 * Get everything ready, and start grabbing frames.
1035 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration. If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		/*
		 * NOTE(review): mcam_cam_configure()'s return value is
		 * ignored here; only the controller configuration result
		 * is checked - confirm this is intentional.
		 */
		mcam_cam_configure(cam);
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	/*
	 * Turn it loose.  Everything below manipulates state shared with
	 * the interrupt path, hence dev_lock.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY)
		mcam_enable_mipi(cam);
	else
		mcam_disable_mipi(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	/* In S/G restart mode, mcam_sg_restart() will start the controller */
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}
1073
1074 /* ----------------------------------------------------------------------- */
1075 /*
1076 * Videobuf2 interface code.
1077 */
1078
mcam_vb_queue_setup(struct vb2_queue * vq,unsigned int * nbufs,unsigned int * num_planes,unsigned int sizes[],struct device * alloc_devs[])1079 static int mcam_vb_queue_setup(struct vb2_queue *vq,
1080 unsigned int *nbufs,
1081 unsigned int *num_planes, unsigned int sizes[],
1082 struct device *alloc_devs[])
1083 {
1084 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1085 int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
1086 unsigned size = cam->pix_format.sizeimage;
1087
1088 if (*nbufs < minbufs)
1089 *nbufs = minbufs;
1090
1091 if (*num_planes)
1092 return sizes[0] < size ? -EINVAL : 0;
1093 sizes[0] = size;
1094 *num_planes = 1; /* Someday we have to support planar formats... */
1095 return 0;
1096 }
1097
1098
static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * NOTE(review): streaming is kicked off only when the list was
	 * already non-empty *before* this buffer is added, i.e. from the
	 * second queued buffer on while in S_BUFWAIT - confirm this is
	 * the intended start condition.
	 */
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	if (start)
		/* mcam_read_setup() takes dev_lock itself; call it unlocked */
		mcam_read_setup(cam);
}
1116
mcam_vb_requeue_bufs(struct vb2_queue * vq,enum vb2_buffer_state state)1117 static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
1118 enum vb2_buffer_state state)
1119 {
1120 struct mcam_camera *cam = vb2_get_drv_priv(vq);
1121 struct mcam_vb_buffer *buf, *node;
1122 unsigned long flags;
1123 unsigned i;
1124
1125 spin_lock_irqsave(&cam->dev_lock, flags);
1126 list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
1127 vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1128 list_del(&buf->queue);
1129 }
1130 for (i = 0; i < MAX_DMA_BUFS; i++) {
1131 buf = cam->vb_bufs[i];
1132
1133 if (buf) {
1134 vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
1135 cam->vb_bufs[i] = NULL;
1136 }
1137 }
1138 spin_unlock_irqrestore(&cam->dev_lock, flags);
1139 }
1140
1141 /*
1142 * These need to be called with the mutex held from vb2
1143 */
static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	unsigned int frame;
	int ret;

	if (cam->state != S_IDLE) {
		/* Not idle: give the buffers back to vb2 and refuse */
		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
		return -EINVAL;
	}
	/* Reset frame accounting for this streaming session */
	cam->frame_state.frames = 0;
	cam->frame_state.singles = 0;
	cam->frame_state.delivered = 0;
	cam->sequence = 0;
	/*
	 * Videobuf2 sneakily hoards all the buffers and won't
	 * give them to us until *after* streaming starts. But
	 * we can't actually start streaming until we have a
	 * destination. So go into a wait state and hope they
	 * give us buffers soon.
	 */
	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
		cam->state = S_BUFWAIT;
		return 0;
	}

	/*
	 * Ensure clear the left over frame flags
	 * before every really start streaming
	 */
	for (frame = 0; frame < cam->nbufs; frame++)
		clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);

	ret = mcam_read_setup(cam);
	if (ret)
		/* Setup failed: return the hoarded buffers to vb2 */
		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
	return ret;
}
1182
/*
 * Stop streaming: shut down DMA (if it ever started) and return all
 * buffers to vb2.  Called with s_mutex held via vb2.
 */
static void mcam_vb_stop_streaming(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	cam_dbg(cam, "stop_streaming: %d frames, %d singles, %d delivered\n",
			cam->frame_state.frames, cam->frame_state.singles,
			cam->frame_state.delivered);
	if (cam->state == S_BUFWAIT) {
		/* They never gave us buffers */
		cam->state = S_IDLE;
		return;
	}
	if (cam->state != S_STREAMING)
		return;
	mcam_ctlr_stop_dma(cam);
	/*
	 * VB2 reclaims the buffers, so we need to forget
	 * about them.
	 */
	mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_ERROR);
}
1204
1205
/* vb2 operations shared by the contiguous-DMA and vmalloc buffer modes */
static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup = mcam_vb_queue_setup,
	.buf_queue = mcam_vb_buf_queue,
	.start_streaming = mcam_vb_start_streaming,
	.stop_streaming = mcam_vb_stop_streaming,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
};
1214
1215
1216 #ifdef MCAM_MODE_DMA_SG
1217 /*
1218 * Scatter/gather mode uses all of the above functions plus a
1219 * few extras to deal with DMA mapping.
1220 */
/*
 * Allocate the coherent descriptor array used to program the
 * controller's S/G DMA for this buffer: one descriptor per page of
 * image data (the sizeimage/PAGE_SIZE + 1 computation rounds up, and
 * may over-allocate by one entry when the size is page-aligned).
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	mvb->dma_desc = dma_alloc_coherent(cam->dev,
			ndesc * sizeof(struct mcam_dma_desc),
			&mvb->dma_desc_pa, GFP_KERNEL);
	if (mvb->dma_desc == NULL) {
		cam_err(cam, "Unable to get DMA descriptor array\n");
		return -ENOMEM;
	}
	return 0;
}
1237
mcam_vb_sg_buf_prepare(struct vb2_buffer * vb)1238 static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
1239 {
1240 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1241 struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
1242 struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
1243 struct mcam_dma_desc *desc = mvb->dma_desc;
1244 struct scatterlist *sg;
1245 int i;
1246
1247 for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
1248 desc->dma_addr = sg_dma_address(sg);
1249 desc->segment_len = sg_dma_len(sg);
1250 desc++;
1251 }
1252 return 0;
1253 }
1254
/* Free the descriptor array allocated in mcam_vb_sg_buf_init(). */
static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	/* Must match the size computation used at allocation time */
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
			mvb->dma_desc, mvb->dma_desc_pa);
}
1265
1266
/* vb2 operations for scatter/gather mode: adds DMA descriptor handling */
static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup = mcam_vb_queue_setup,
	.buf_init = mcam_vb_sg_buf_init,
	.buf_prepare = mcam_vb_sg_buf_prepare,
	.buf_queue = mcam_vb_buf_queue,
	.buf_cleanup = mcam_vb_sg_buf_cleanup,
	.start_streaming = mcam_vb_start_streaming,
	.stop_streaming = mcam_vb_stop_streaming,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
};
1278
1279 #endif /* MCAM_MODE_DMA_SG */
1280
/*
 * Initialize the videobuf2 queue for this camera, selecting the ops,
 * memory backend and buffer-mode-specific callbacks according to
 * cam->buffer_mode.  Called at sensor-bind time with s_mutex held.
 */
static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	vq->lock = &cam->s_mutex;
	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
	vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
	vq->dev = cam->dev;
	INIT_LIST_HEAD(&cam->buffers);
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		/* vmalloc mode defers per-frame completion work to a tasklet */
		tasklet_setup(&cam->s_tasklet, mcam_frame_tasklet);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}
1323
1324
1325 /* ---------------------------------------------------------------------- */
1326 /*
1327 * The long list of V4L2 ioctl() operations.
1328 */
1329
mcam_vidioc_querycap(struct file * file,void * priv,struct v4l2_capability * cap)1330 static int mcam_vidioc_querycap(struct file *file, void *priv,
1331 struct v4l2_capability *cap)
1332 {
1333 struct mcam_camera *cam = video_drvdata(file);
1334
1335 strscpy(cap->driver, "marvell_ccic", sizeof(cap->driver));
1336 strscpy(cap->card, "marvell_ccic", sizeof(cap->card));
1337 strscpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
1338 return 0;
1339 }
1340
1341
mcam_vidioc_enum_fmt_vid_cap(struct file * filp,void * priv,struct v4l2_fmtdesc * fmt)1342 static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
1343 void *priv, struct v4l2_fmtdesc *fmt)
1344 {
1345 if (fmt->index >= N_MCAM_FMTS)
1346 return -EINVAL;
1347 fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
1348 return 0;
1349 }
1350
static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_subdev_pad_config pad_cfg;
	struct v4l2_subdev_state pad_state = {
		.pads = &pad_cfg,
	};
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_TRY,
	};
	int ret;

	/*
	 * NOTE(review): mcam_find_format() is dereferenced without a
	 * NULL check here and elsewhere in this file; presumably it
	 * falls back to a default entry rather than returning NULL -
	 * confirm.
	 */
	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
	/* TRY format: probe the sensor without touching the active state */
	ret = sensor_call(cam, pad, set_fmt, &pad_state, &format);
	v4l2_fill_pix_format(pix, &format.format);
	pix->bytesperline = pix->width * f->bpp;
	switch (f->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		/* Planar 4:2:0: the chroma planes add half the luma size */
		pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
		break;
	default:
		pix->sizeimage = pix->height * pix->bytesperline;
		break;
	}
	pix->colorspace = V4L2_COLORSPACE_SRGB;
	return ret;
}
1384
static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 * (try_fmt also normalizes fmt->fmt.pix in place.)
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it
	 * under lock.
	 */
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	/* A full configure pass will be needed before the next stream-on */
	mcam_set_config_needed(cam, 1);
out:
	return ret;
}
1426
1427 /*
1428 * Return our stored notion of how the camera is/should be configured.
1429 * The V4l2 spec wants us to be smarter, and actually get this from
1430 * the camera (and not mess with it at open time). Someday.
1431 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *f)
{
	struct mcam_camera *cam = video_drvdata(filp);

	/* Just report the cached format; the hardware is not queried */
	f->fmt.pix = cam->pix_format;
	return 0;
}
1440
1441 /*
1442 * We only have one input - the sensor - so minimize the nonsense here.
1443 */
mcam_vidioc_enum_input(struct file * filp,void * priv,struct v4l2_input * input)1444 static int mcam_vidioc_enum_input(struct file *filp, void *priv,
1445 struct v4l2_input *input)
1446 {
1447 if (input->index != 0)
1448 return -EINVAL;
1449
1450 input->type = V4L2_INPUT_TYPE_CAMERA;
1451 strscpy(input->name, "Camera", sizeof(input->name));
1452 return 0;
1453 }
1454
/* The only input is 0; always report it. */
static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}
1460
mcam_vidioc_s_input(struct file * filp,void * priv,unsigned int i)1461 static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
1462 {
1463 if (i != 0)
1464 return -EINVAL;
1465 return 0;
1466 }
1467
1468 /*
1469 * G/S_PARM. Most of this is done by the sensor, but we are
1470 * the level which controls the number of read buffers.
1471 */
mcam_vidioc_g_parm(struct file * filp,void * priv,struct v4l2_streamparm * a)1472 static int mcam_vidioc_g_parm(struct file *filp, void *priv,
1473 struct v4l2_streamparm *a)
1474 {
1475 struct mcam_camera *cam = video_drvdata(filp);
1476 int ret;
1477
1478 ret = v4l2_g_parm_cap(video_devdata(filp), cam->sensor, a);
1479 a->parm.capture.readbuffers = n_dma_bufs;
1480 return ret;
1481 }
1482
mcam_vidioc_s_parm(struct file * filp,void * priv,struct v4l2_streamparm * a)1483 static int mcam_vidioc_s_parm(struct file *filp, void *priv,
1484 struct v4l2_streamparm *a)
1485 {
1486 struct mcam_camera *cam = video_drvdata(filp);
1487 int ret;
1488
1489 ret = v4l2_s_parm_cap(video_devdata(filp), cam->sensor, a);
1490 a->parm.capture.readbuffers = n_dma_bufs;
1491 return ret;
1492 }
1493
static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv,
		struct v4l2_frmsizeenum *sizes)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_subdev_frame_size_enum fse = {
		.index = sizes->index,
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	/* Only pixel formats we actually support can be enumerated */
	f = mcam_find_format(sizes->pixel_format);
	if (f->pixelformat != sizes->pixel_format)
		return -EINVAL;
	fse.code = f->mbus_code;
	/* The sensor knows the supported frame sizes */
	ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse);
	if (ret)
		return ret;
	if (fse.min_width == fse.max_width &&
	    fse.min_height == fse.max_height) {
		/* The sensor reported a single fixed size */
		sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE;
		sizes->discrete.width = fse.min_width;
		sizes->discrete.height = fse.min_height;
		return 0;
	}
	/* Otherwise report the full range as continuous with step 1 */
	sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS;
	sizes->stepwise.min_width = fse.min_width;
	sizes->stepwise.max_width = fse.max_width;
	sizes->stepwise.min_height = fse.min_height;
	sizes->stepwise.max_height = fse.max_height;
	sizes->stepwise.step_width = 1;
	sizes->stepwise.step_height = 1;
	return 0;
}
1528
static int mcam_vidioc_enum_frameintervals(struct file *filp, void *priv,
		struct v4l2_frmivalenum *interval)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_subdev_frame_interval_enum fie = {
		.index = interval->index,
		.width = interval->width,
		.height = interval->height,
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	/* Only pixel formats we actually support can be enumerated */
	f = mcam_find_format(interval->pixel_format);
	if (f->pixelformat != interval->pixel_format)
		return -EINVAL;
	fie.code = f->mbus_code;
	/* The sensor knows the available frame intervals */
	ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie);
	if (ret)
		return ret;
	interval->type = V4L2_FRMIVAL_TYPE_DISCRETE;
	interval->discrete = fie.interval;
	return 0;
}
1553
1554 #ifdef CONFIG_VIDEO_ADV_DEBUG
mcam_vidioc_g_register(struct file * file,void * priv,struct v4l2_dbg_register * reg)1555 static int mcam_vidioc_g_register(struct file *file, void *priv,
1556 struct v4l2_dbg_register *reg)
1557 {
1558 struct mcam_camera *cam = video_drvdata(file);
1559
1560 if (reg->reg > cam->regs_size - 4)
1561 return -EINVAL;
1562 reg->val = mcam_reg_read(cam, reg->reg);
1563 reg->size = 4;
1564 return 0;
1565 }
1566
mcam_vidioc_s_register(struct file * file,void * priv,const struct v4l2_dbg_register * reg)1567 static int mcam_vidioc_s_register(struct file *file, void *priv,
1568 const struct v4l2_dbg_register *reg)
1569 {
1570 struct mcam_camera *cam = video_drvdata(file);
1571
1572 if (reg->reg > cam->regs_size - 4)
1573 return -EINVAL;
1574 mcam_reg_write(cam, reg->reg, reg->val);
1575 return 0;
1576 }
1577 #endif
1578
/* V4L2 ioctl dispatch table; the buffer ioctls go straight to vb2 helpers */
static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = {
	.vidioc_querycap = mcam_vidioc_querycap,
	.vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap,
	.vidioc_enum_input = mcam_vidioc_enum_input,
	.vidioc_g_input = mcam_vidioc_g_input,
	.vidioc_s_input = mcam_vidioc_s_input,
	.vidioc_reqbufs = vb2_ioctl_reqbufs,
	.vidioc_create_bufs = vb2_ioctl_create_bufs,
	.vidioc_querybuf = vb2_ioctl_querybuf,
	.vidioc_qbuf = vb2_ioctl_qbuf,
	.vidioc_dqbuf = vb2_ioctl_dqbuf,
	.vidioc_expbuf = vb2_ioctl_expbuf,
	.vidioc_streamon = vb2_ioctl_streamon,
	.vidioc_streamoff = vb2_ioctl_streamoff,
	.vidioc_g_parm = mcam_vidioc_g_parm,
	.vidioc_s_parm = mcam_vidioc_s_parm,
	.vidioc_enum_framesizes = mcam_vidioc_enum_framesizes,
	.vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals,
	.vidioc_subscribe_event = v4l2_ctrl_subscribe_event,
	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
#ifdef CONFIG_VIDEO_ADV_DEBUG
	.vidioc_g_register = mcam_vidioc_g_register,
	.vidioc_s_register = mcam_vidioc_s_register,
#endif
};
1607
1608 /* ---------------------------------------------------------------------- */
1609 /*
1610 * Our various file operations.
1611 */
mcam_v4l_open(struct file * filp)1612 static int mcam_v4l_open(struct file *filp)
1613 {
1614 struct mcam_camera *cam = video_drvdata(filp);
1615 int ret;
1616
1617 mutex_lock(&cam->s_mutex);
1618 ret = v4l2_fh_open(filp);
1619 if (ret)
1620 goto out;
1621 if (v4l2_fh_is_singular_file(filp)) {
1622 ret = sensor_call(cam, core, s_power, 1);
1623 if (ret)
1624 goto out;
1625 ret = pm_runtime_resume_and_get(cam->dev);
1626 if (ret < 0)
1627 goto out;
1628 __mcam_cam_reset(cam);
1629 mcam_set_config_needed(cam, 1);
1630 }
1631 out:
1632 mutex_unlock(&cam->s_mutex);
1633 if (ret)
1634 v4l2_fh_release(filp);
1635 return ret;
1636 }
1637
1638
static int mcam_v4l_release(struct file *filp)
{
	struct mcam_camera *cam = video_drvdata(filp);
	bool last_open;

	mutex_lock(&cam->s_mutex);
	/* Check before _vb2_fop_release() drops this file handle */
	last_open = v4l2_fh_is_singular_file(filp);
	_vb2_fop_release(filp, NULL);
	if (last_open) {
		/* Last user is gone: power everything back down */
		mcam_disable_mipi(cam);
		sensor_call(cam, core, s_power, 0);
		pm_runtime_put(cam->dev);
		/* Buffers allocated lazily at read time are freed here too */
		if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read)
			mcam_free_dma_bufs(cam);
	}

	mutex_unlock(&cam->s_mutex);
	return 0;
}
1658
/* File operations; read/poll/mmap are serviced by the vb2 helpers */
static const struct v4l2_file_operations mcam_v4l_fops = {
	.owner = THIS_MODULE,
	.open = mcam_v4l_open,
	.release = mcam_v4l_release,
	.read = vb2_fop_read,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
	.unlocked_ioctl = video_ioctl2,
};
1668
1669
1670 /*
1671 * This template device holds all of those v4l2 methods; we
1672 * clone it for specific real devices.
1673 */
static const struct video_device mcam_v4l_template = {
	.name = "mcam",
	.fops = &mcam_v4l_fops,
	.ioctl_ops = &mcam_v4l_ioctl_ops,
	/* vdev is embedded in struct mcam_camera; nothing to free here */
	.release = video_device_release_empty,
	.device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_READWRITE |
		       V4L2_CAP_STREAMING,
};
1682
1683 /* ---------------------------------------------------------------------- */
1684 /*
1685 * Interrupt handler stuff
1686 */
/*
 * A frame has finished in buffer slot "frame": do the bookkeeping
 * and, if we are streaming, hand it to the buffer-mode-specific
 * completion handler.  Called from the interrupt path (see mccic_irq).
 */
static void mcam_frame_complete(struct mcam_camera *cam, int frame)
{
	/*
	 * Basic frame housekeeping.
	 */
	set_bit(frame, &cam->flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	cam->next_buf = frame;
	cam->buf_seq[frame] = cam->sequence++;
	cam->frame_state.frames++;
	/*
	 * "This should never happen"
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * Process the frame and set up the next one.
	 * (frame_complete is chosen per buffer mode in mcam_setup_vb2().)
	 */
	cam->frame_complete(cam, frame);
}
1707
1708
1709 /*
1710 * The interrupt handler; this needs to be called from the
1711 * platform irq handler with the lock held.
1712 */
mccic_irq(struct mcam_camera * cam,unsigned int irqs)1713 int mccic_irq(struct mcam_camera *cam, unsigned int irqs)
1714 {
1715 unsigned int frame, handled = 0;
1716
1717 mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */
1718 /*
1719 * Handle any frame completions. There really should
1720 * not be more than one of these, or we have fallen
1721 * far behind.
1722 *
1723 * When running in S/G mode, the frame number lacks any
1724 * real meaning - there's only one descriptor array - but
1725 * the controller still picks a different one to signal
1726 * each time.
1727 */
1728 for (frame = 0; frame < cam->nbufs; frame++)
1729 if (irqs & (IRQ_EOF0 << frame) &&
1730 test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) {
1731 mcam_frame_complete(cam, frame);
1732 handled = 1;
1733 clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1734 if (cam->buffer_mode == B_DMA_sg)
1735 break;
1736 }
1737 /*
1738 * If a frame starts, note that we have DMA active. This
1739 * code assumes that we won't get multiple frame interrupts
1740 * at once; may want to rethink that.
1741 */
1742 for (frame = 0; frame < cam->nbufs; frame++) {
1743 if (irqs & (IRQ_SOF0 << frame)) {
1744 set_bit(CF_FRAME_SOF0 + frame, &cam->flags);
1745 handled = IRQ_HANDLED;
1746 }
1747 }
1748
1749 if (handled == IRQ_HANDLED) {
1750 set_bit(CF_DMA_ACTIVE, &cam->flags);
1751 if (cam->buffer_mode == B_DMA_sg)
1752 mcam_ctlr_stop(cam);
1753 }
1754 return handled;
1755 }
1756 EXPORT_SYMBOL_GPL(mccic_irq);
1757
1758 /* ---------------------------------------------------------------------- */
1759 /*
1760 * Registration and such.
1761 */
1762
static int mccic_notify_bound(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_connection *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->sensor) {
		/* We only support a single sensor */
		cam_err(cam, "sensor already bound\n");
		ret = -EBUSY;
		goto out;
	}

	v4l2_set_subdev_hostdata(subdev, cam);
	cam->sensor = subdev;

	/* First conversation with the sensor: reset it and go idle */
	ret = mcam_cam_init(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	ret = mcam_setup_vb2(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	/* Clone the template video device and register it */
	cam->vdev = mcam_v4l_template;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	cam->vdev.lock = &cam->s_mutex;
	cam->vdev.queue = &cam->vb_queue;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_VIDEO, -1);
	if (ret) {
		/*
		 * NOTE(review): the vb2 queue initialized above is not
		 * torn down on this failure path - verify whether a
		 * queue release is needed here.
		 */
		cam->sensor = NULL;
		goto out;
	}

	cam_dbg(cam, "sensor %s bound\n", subdev->name);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}
1807
mccic_notify_unbind(struct v4l2_async_notifier * notifier,struct v4l2_subdev * subdev,struct v4l2_async_connection * asd)1808 static void mccic_notify_unbind(struct v4l2_async_notifier *notifier,
1809 struct v4l2_subdev *subdev, struct v4l2_async_connection *asd)
1810 {
1811 struct mcam_camera *cam = notifier_to_mcam(notifier);
1812
1813 mutex_lock(&cam->s_mutex);
1814 if (cam->sensor != subdev) {
1815 cam_err(cam, "sensor %s not bound\n", subdev->name);
1816 goto out;
1817 }
1818
1819 video_unregister_device(&cam->vdev);
1820 cam->sensor = NULL;
1821 cam_dbg(cam, "sensor %s unbound\n", subdev->name);
1822
1823 out:
1824 mutex_unlock(&cam->s_mutex);
1825 }
1826
mccic_notify_complete(struct v4l2_async_notifier * notifier)1827 static int mccic_notify_complete(struct v4l2_async_notifier *notifier)
1828 {
1829 struct mcam_camera *cam = notifier_to_mcam(notifier);
1830 int ret;
1831
1832 /*
1833 * Get the v4l2 setup done.
1834 */
1835 ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
1836 if (!ret)
1837 cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;
1838
1839 return ret;
1840 }
1841
/* Async notifier callbacks used to hook up the sensor subdev */
static const struct v4l2_async_notifier_operations mccic_notify_ops = {
	.bound = mccic_notify_bound,
	.unbind = mccic_notify_unbind,
	.complete = mccic_notify_complete,
};
1847
/*
 * Core registration entry point, called by the platform glue drivers:
 * validate the buffer mode, register the async sensor notifier,
 * expose the sensor master clock and (optionally) grab DMA buffers
 * up front.
 */
int mccic_register(struct mcam_camera *cam)
{
	struct clk_init_data mclk_init = { };
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;	/* module param overrides */
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}

	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		ret = -EINVAL;
		goto out;
	}

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;

	cam->notifier.ops = &mccic_notify_ops;
	ret = v4l2_async_nf_register(&cam->notifier);
	if (ret < 0) {
		cam_warn(cam, "failed to register a sensor notifier");
		goto out;
	}

	/*
	 * Register sensor master clock.
	 */
	mclk_init.parent_names = NULL;
	mclk_init.num_parents = 0;
	mclk_init.ops = &mclk_ops;
	mclk_init.name = "mclk";

	/* DT may override the clock name; "mclk" remains the default */
	of_property_read_string(cam->dev->of_node, "clock-output-names",
				&mclk_init.name);

	cam->mclk_hw.init = &mclk_init;

	cam->mclk = devm_clk_register(cam->dev, &cam->mclk_hw);
	if (IS_ERR(cam->mclk)) {
		ret = PTR_ERR(cam->mclk);
		dev_err(cam->dev, "can't register clock\n");
		goto out;
	}

	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load will try again later.");
	}

	return 0;

out:
	/*
	 * NOTE(review): this path also runs before the notifier was
	 * registered (bad buffer mode); unregistering an unregistered
	 * notifier is assumed to be safe - confirm.
	 */
	v4l2_async_nf_unregister(&cam->notifier);
	v4l2_async_nf_cleanup(&cam->notifier);
	return ret;
}
1919 EXPORT_SYMBOL_GPL(mccic_register);
1920
void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down. Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (!list_empty(&cam->vdev.fh_list)) {
		cam_warn(cam, "Removing a device with users!\n");
		sensor_call(cam, core, s_power, 0);
	}
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	/* Undo what mccic_register()/notify_complete() set up */
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_async_nf_unregister(&cam->notifier);
	v4l2_async_nf_cleanup(&cam->notifier);
}
1939 EXPORT_SYMBOL_GPL(mccic_shutdown);
1940
1941 /*
1942 * Power management
1943 */
void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		/*
		 * Users present: stop DMA and power the sensor down,
		 * but remember the state so mccic_resume() can restart
		 * streaming.
		 */
		enum mcam_state cstate = cam->state;

		mcam_ctlr_stop_dma(cam);
		sensor_call(cam, core, s_power, 0);
		/*
		 * Restore the state - mcam_ctlr_stop_dma() presumably
		 * alters cam->state, hence this save/restore.
		 */
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}
1956 EXPORT_SYMBOL_GPL(mccic_suspend);
1957
int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		/* The device is open: power the sensor back up and reset */
		ret = sensor_call(cam, core, s_power, 1);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		/* No users; make sure the sensor stays off */
		sensor_call(cam, core, s_power, 0);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
1987 EXPORT_SYMBOL_GPL(mccic_resume);
1988
1989 MODULE_LICENSE("GPL v2");
1990 MODULE_AUTHOR("Jonathan Corbet <corbet@lwn.net>");
1991