1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * The Marvell camera core. This device appears in a number of settings, 4 * so it needs platform-specific support outside of the core. 5 * 6 * Copyright 2011 Jonathan Corbet corbet@lwn.net 7 * Copyright 2018 Lubomir Rintel <lkundrak@v3.sk> 8 */ 9 #include <linux/kernel.h> 10 #include <linux/module.h> 11 #include <linux/fs.h> 12 #include <linux/mm.h> 13 #include <linux/i2c.h> 14 #include <linux/interrupt.h> 15 #include <linux/spinlock.h> 16 #include <linux/slab.h> 17 #include <linux/device.h> 18 #include <linux/wait.h> 19 #include <linux/list.h> 20 #include <linux/dma-mapping.h> 21 #include <linux/delay.h> 22 #include <linux/vmalloc.h> 23 #include <linux/io.h> 24 #include <linux/clk.h> 25 #include <linux/clk-provider.h> 26 #include <linux/videodev2.h> 27 #include <linux/pm_runtime.h> 28 #include <media/v4l2-device.h> 29 #include <media/v4l2-ioctl.h> 30 #include <media/v4l2-ctrls.h> 31 #include <media/v4l2-event.h> 32 #include <media/videobuf2-vmalloc.h> 33 #include <media/videobuf2-dma-contig.h> 34 #include <media/videobuf2-dma-sg.h> 35 36 #include "mcam-core.h" 37 38 #ifdef MCAM_MODE_VMALLOC 39 /* 40 * Internal DMA buffer management. Since the controller cannot do S/G I/O, 41 * we must have physically contiguous buffers to bring frames into. 42 * These parameters control how many buffers we use, whether we 43 * allocate them at load time (better chance of success, but nails down 44 * memory) or when somebody tries to use the camera (riskier), and, 45 * for load-time allocation, how big they should be. 46 * 47 * The controller can cycle through three buffers. We could use 48 * more by flipping pointers around, but it probably makes little 49 * sense. 50 */ 51 52 static bool alloc_bufs_at_read; 53 module_param(alloc_bufs_at_read, bool, 0444); 54 MODULE_PARM_DESC(alloc_bufs_at_read, 55 "Non-zero value causes DMA buffers to be allocated when the video capture device is read, rather than at module load time. 
This saves memory, but decreases the chances of successfully getting those buffers. This parameter is only used in the vmalloc buffer mode"); 56 57 static int n_dma_bufs = 3; 58 module_param(n_dma_bufs, uint, 0644); 59 MODULE_PARM_DESC(n_dma_bufs, 60 "The number of DMA buffers to allocate. Can be either two (saves memory, makes timing tighter) or three."); 61 62 static int dma_buf_size = VGA_WIDTH * VGA_HEIGHT * 2; /* Worst case */ 63 module_param(dma_buf_size, uint, 0444); 64 MODULE_PARM_DESC(dma_buf_size, 65 "The size of the allocated DMA buffers. If actual operating parameters require larger buffers, an attempt to reallocate will be made."); 66 #else /* MCAM_MODE_VMALLOC */ 67 static const bool alloc_bufs_at_read; 68 static const int n_dma_bufs = 3; /* Used by S/G_PARM */ 69 #endif /* MCAM_MODE_VMALLOC */ 70 71 static bool flip; 72 module_param(flip, bool, 0444); 73 MODULE_PARM_DESC(flip, 74 "If set, the sensor will be instructed to flip the image vertically."); 75 76 static int buffer_mode = -1; 77 module_param(buffer_mode, int, 0444); 78 MODULE_PARM_DESC(buffer_mode, 79 "Set the buffer mode to be used; default is to go with what the platform driver asks for. Set to 0 for vmalloc, 1 for DMA contiguous."); 80 81 /* 82 * Status flags. Always manipulated with bit operations. 83 */ 84 #define CF_BUF0_VALID 0 /* Buffers valid - first three */ 85 #define CF_BUF1_VALID 1 86 #define CF_BUF2_VALID 2 87 #define CF_DMA_ACTIVE 3 /* A frame is incoming */ 88 #define CF_CONFIG_NEEDED 4 /* Must configure hardware */ 89 #define CF_SINGLE_BUFFER 5 /* Running with a single buffer */ 90 #define CF_SG_RESTART 6 /* SG restart needed */ 91 #define CF_FRAME_SOF0 7 /* Frame 0 started */ 92 #define CF_FRAME_SOF1 8 93 #define CF_FRAME_SOF2 9 94 95 #define sensor_call(cam, o, f, args...) 
\ 96 v4l2_subdev_call(cam->sensor, o, f, ##args) 97 98 #define notifier_to_mcam(notifier) \ 99 container_of(notifier, struct mcam_camera, notifier) 100 101 static struct mcam_format_struct { 102 __u32 pixelformat; 103 int bpp; /* Bytes per pixel */ 104 bool planar; 105 u32 mbus_code; 106 } mcam_formats[] = { 107 { 108 .pixelformat = V4L2_PIX_FMT_YUYV, 109 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8, 110 .bpp = 2, 111 .planar = false, 112 }, 113 { 114 .pixelformat = V4L2_PIX_FMT_YVYU, 115 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8, 116 .bpp = 2, 117 .planar = false, 118 }, 119 { 120 .pixelformat = V4L2_PIX_FMT_YUV420, 121 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8, 122 .bpp = 1, 123 .planar = true, 124 }, 125 { 126 .pixelformat = V4L2_PIX_FMT_YVU420, 127 .mbus_code = MEDIA_BUS_FMT_YUYV8_2X8, 128 .bpp = 1, 129 .planar = true, 130 }, 131 { 132 .pixelformat = V4L2_PIX_FMT_XRGB444, 133 .mbus_code = MEDIA_BUS_FMT_RGB444_2X8_PADHI_LE, 134 .bpp = 2, 135 .planar = false, 136 }, 137 { 138 .pixelformat = V4L2_PIX_FMT_RGB565, 139 .mbus_code = MEDIA_BUS_FMT_RGB565_2X8_LE, 140 .bpp = 2, 141 .planar = false, 142 }, 143 { 144 .pixelformat = V4L2_PIX_FMT_SBGGR8, 145 .mbus_code = MEDIA_BUS_FMT_SBGGR8_1X8, 146 .bpp = 1, 147 .planar = false, 148 }, 149 }; 150 #define N_MCAM_FMTS ARRAY_SIZE(mcam_formats) 151 152 static struct mcam_format_struct *mcam_find_format(u32 pixelformat) 153 { 154 unsigned i; 155 156 for (i = 0; i < N_MCAM_FMTS; i++) 157 if (mcam_formats[i].pixelformat == pixelformat) 158 return mcam_formats + i; 159 /* Not found? Then return the first format. */ 160 return mcam_formats; 161 } 162 163 /* 164 * The default format we use until somebody says otherwise. 
165 */ 166 static const struct v4l2_pix_format mcam_def_pix_format = { 167 .width = VGA_WIDTH, 168 .height = VGA_HEIGHT, 169 .pixelformat = V4L2_PIX_FMT_YUYV, 170 .field = V4L2_FIELD_NONE, 171 .bytesperline = VGA_WIDTH*2, 172 .sizeimage = VGA_WIDTH*VGA_HEIGHT*2, 173 .colorspace = V4L2_COLORSPACE_SRGB, 174 }; 175 176 static const u32 mcam_def_mbus_code = MEDIA_BUS_FMT_YUYV8_2X8; 177 178 179 /* 180 * The two-word DMA descriptor format used by the Armada 610 and like. There 181 * Is a three-word format as well (set C1_DESC_3WORD) where the third 182 * word is a pointer to the next descriptor, but we don't use it. Two-word 183 * descriptors have to be contiguous in memory. 184 */ 185 struct mcam_dma_desc { 186 u32 dma_addr; 187 u32 segment_len; 188 }; 189 190 /* 191 * Our buffer type for working with videobuf2. Note that the vb2 192 * developers have decreed that struct vb2_v4l2_buffer must be at the 193 * beginning of this structure. 194 */ 195 struct mcam_vb_buffer { 196 struct vb2_v4l2_buffer vb_buf; 197 struct list_head queue; 198 struct mcam_dma_desc *dma_desc; /* Descriptor virtual address */ 199 dma_addr_t dma_desc_pa; /* Descriptor physical address */ 200 }; 201 202 static inline struct mcam_vb_buffer *vb_to_mvb(struct vb2_v4l2_buffer *vb) 203 { 204 return container_of(vb, struct mcam_vb_buffer, vb_buf); 205 } 206 207 /* 208 * Hand a completed buffer back to user space. 209 */ 210 static void mcam_buffer_done(struct mcam_camera *cam, int frame, 211 struct vb2_v4l2_buffer *vbuf) 212 { 213 vbuf->vb2_buf.planes[0].bytesused = cam->pix_format.sizeimage; 214 vbuf->sequence = cam->buf_seq[frame]; 215 vbuf->field = V4L2_FIELD_NONE; 216 vbuf->vb2_buf.timestamp = ktime_get_ns(); 217 vb2_set_plane_payload(&vbuf->vb2_buf, 0, cam->pix_format.sizeimage); 218 vb2_buffer_done(&vbuf->vb2_buf, VB2_BUF_STATE_DONE); 219 } 220 221 222 223 /* 224 * Debugging and related. 225 */ 226 #define cam_err(cam, fmt, arg...) 
\ 227 dev_err((cam)->dev, fmt, ##arg); 228 #define cam_warn(cam, fmt, arg...) \ 229 dev_warn((cam)->dev, fmt, ##arg); 230 #define cam_dbg(cam, fmt, arg...) \ 231 dev_dbg((cam)->dev, fmt, ##arg); 232 233 234 /* 235 * Flag manipulation helpers 236 */ 237 static void mcam_reset_buffers(struct mcam_camera *cam) 238 { 239 int i; 240 241 cam->next_buf = -1; 242 for (i = 0; i < cam->nbufs; i++) { 243 clear_bit(i, &cam->flags); 244 clear_bit(CF_FRAME_SOF0 + i, &cam->flags); 245 } 246 } 247 248 static inline int mcam_needs_config(struct mcam_camera *cam) 249 { 250 return test_bit(CF_CONFIG_NEEDED, &cam->flags); 251 } 252 253 static void mcam_set_config_needed(struct mcam_camera *cam, int needed) 254 { 255 if (needed) 256 set_bit(CF_CONFIG_NEEDED, &cam->flags); 257 else 258 clear_bit(CF_CONFIG_NEEDED, &cam->flags); 259 } 260 261 /* ------------------------------------------------------------------- */ 262 /* 263 * Make the controller start grabbing images. Everything must 264 * be set up before doing this. 
265 */ 266 static void mcam_ctlr_start(struct mcam_camera *cam) 267 { 268 /* set_bit performs a read, so no other barrier should be 269 needed here */ 270 mcam_reg_set_bit(cam, REG_CTRL0, C0_ENABLE); 271 } 272 273 static void mcam_ctlr_stop(struct mcam_camera *cam) 274 { 275 mcam_reg_clear_bit(cam, REG_CTRL0, C0_ENABLE); 276 } 277 278 static void mcam_enable_mipi(struct mcam_camera *mcam) 279 { 280 /* Using MIPI mode and enable MIPI */ 281 if (mcam->calc_dphy) 282 mcam->calc_dphy(mcam); 283 cam_dbg(mcam, "camera: DPHY3=0x%x, DPHY5=0x%x, DPHY6=0x%x\n", 284 mcam->dphy[0], mcam->dphy[1], mcam->dphy[2]); 285 mcam_reg_write(mcam, REG_CSI2_DPHY3, mcam->dphy[0]); 286 mcam_reg_write(mcam, REG_CSI2_DPHY5, mcam->dphy[1]); 287 mcam_reg_write(mcam, REG_CSI2_DPHY6, mcam->dphy[2]); 288 289 if (!mcam->mipi_enabled) { 290 if (mcam->lane > 4 || mcam->lane <= 0) { 291 cam_warn(mcam, "lane number error\n"); 292 mcam->lane = 1; /* set the default value */ 293 } 294 /* 295 * 0x41 actives 1 lane 296 * 0x43 actives 2 lanes 297 * 0x45 actives 3 lanes (never happen) 298 * 0x47 actives 4 lanes 299 */ 300 mcam_reg_write(mcam, REG_CSI2_CTRL0, 301 CSI2_C0_MIPI_EN | CSI2_C0_ACT_LANE(mcam->lane)); 302 mcam->mipi_enabled = true; 303 } 304 } 305 306 static void mcam_disable_mipi(struct mcam_camera *mcam) 307 { 308 /* Using Parallel mode or disable MIPI */ 309 mcam_reg_write(mcam, REG_CSI2_CTRL0, 0x0); 310 mcam_reg_write(mcam, REG_CSI2_DPHY3, 0x0); 311 mcam_reg_write(mcam, REG_CSI2_DPHY5, 0x0); 312 mcam_reg_write(mcam, REG_CSI2_DPHY6, 0x0); 313 mcam->mipi_enabled = false; 314 } 315 316 static bool mcam_fmt_is_planar(__u32 pfmt) 317 { 318 struct mcam_format_struct *f; 319 320 f = mcam_find_format(pfmt); 321 return f->planar; 322 } 323 324 static void mcam_write_yuv_bases(struct mcam_camera *cam, 325 unsigned frame, dma_addr_t base) 326 { 327 struct v4l2_pix_format *fmt = &cam->pix_format; 328 u32 pixel_count = fmt->width * fmt->height; 329 dma_addr_t y, u = 0, v = 0; 330 331 y = base; 332 333 switch 
(fmt->pixelformat) { 334 case V4L2_PIX_FMT_YUV420: 335 u = y + pixel_count; 336 v = u + pixel_count / 4; 337 break; 338 case V4L2_PIX_FMT_YVU420: 339 v = y + pixel_count; 340 u = v + pixel_count / 4; 341 break; 342 default: 343 break; 344 } 345 346 mcam_reg_write(cam, REG_Y0BAR + frame * 4, y); 347 if (mcam_fmt_is_planar(fmt->pixelformat)) { 348 mcam_reg_write(cam, REG_U0BAR + frame * 4, u); 349 mcam_reg_write(cam, REG_V0BAR + frame * 4, v); 350 } 351 } 352 353 /* ------------------------------------------------------------------- */ 354 355 #ifdef MCAM_MODE_VMALLOC 356 /* 357 * Code specific to the vmalloc buffer mode. 358 */ 359 360 /* 361 * Allocate in-kernel DMA buffers for vmalloc mode. 362 */ 363 static int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime) 364 { 365 int i; 366 367 mcam_set_config_needed(cam, 1); 368 if (loadtime) 369 cam->dma_buf_size = dma_buf_size; 370 else 371 cam->dma_buf_size = cam->pix_format.sizeimage; 372 if (n_dma_bufs > 3) 373 n_dma_bufs = 3; 374 375 cam->nbufs = 0; 376 for (i = 0; i < n_dma_bufs; i++) { 377 cam->dma_bufs[i] = dma_alloc_coherent(cam->dev, 378 cam->dma_buf_size, cam->dma_handles + i, 379 GFP_KERNEL); 380 if (cam->dma_bufs[i] == NULL) { 381 cam_warn(cam, "Failed to allocate DMA buffer\n"); 382 break; 383 } 384 (cam->nbufs)++; 385 } 386 387 switch (cam->nbufs) { 388 case 1: 389 dma_free_coherent(cam->dev, cam->dma_buf_size, 390 cam->dma_bufs[0], cam->dma_handles[0]); 391 cam->nbufs = 0; 392 fallthrough; 393 case 0: 394 cam_err(cam, "Insufficient DMA buffers, cannot operate\n"); 395 return -ENOMEM; 396 397 case 2: 398 if (n_dma_bufs > 2) 399 cam_warn(cam, "Will limp along with only 2 buffers\n"); 400 break; 401 } 402 return 0; 403 } 404 405 static void mcam_free_dma_bufs(struct mcam_camera *cam) 406 { 407 int i; 408 409 for (i = 0; i < cam->nbufs; i++) { 410 dma_free_coherent(cam->dev, cam->dma_buf_size, 411 cam->dma_bufs[i], cam->dma_handles[i]); 412 cam->dma_bufs[i] = NULL; 413 } 414 cam->nbufs = 0; 415 } 416 

/*
 * Set up DMA buffers when operating in vmalloc mode
 */
static void mcam_ctlr_dma_vmalloc(struct mcam_camera *cam)
{
	/*
	 * Store the first two YUV buffers. Then either
	 * set the third if it exists, or tell the controller
	 * to just use two.
	 */
	mcam_write_yuv_bases(cam, 0, cam->dma_handles[0]);
	mcam_write_yuv_bases(cam, 1, cam->dma_handles[1]);
	if (cam->nbufs > 2) {
		mcam_write_yuv_bases(cam, 2, cam->dma_handles[2]);
		mcam_reg_clear_bit(cam, REG_CTRL1, C1_TWOBUFS);
	} else
		mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	if (cam->chip_id == MCAM_CAFE)
		mcam_reg_write(cam, REG_UBAR, 0); /* 32 bits only */
}

/*
 * Copy data out to user space in the vmalloc case.
 *
 * Runs in tasklet context; walks the internal DMA buffers, copying
 * each completed frame into the next queued vb2 buffer. The device
 * lock is dropped around the (potentially large) memcpy, so state
 * is re-checked each iteration.
 */
static void mcam_frame_tasklet(struct tasklet_struct *t)
{
	struct mcam_camera *cam = from_tasklet(cam, t, s_tasklet);
	int i;
	unsigned long flags;
	struct mcam_vb_buffer *buf;

	spin_lock_irqsave(&cam->dev_lock, flags);
	for (i = 0; i < cam->nbufs; i++) {
		int bufno = cam->next_buf;

		if (cam->state != S_STREAMING || bufno < 0)
			break;	/* I/O got stopped */
		if (++(cam->next_buf) >= cam->nbufs)
			cam->next_buf = 0;
		if (!test_bit(bufno, &cam->flags))
			continue;
		if (list_empty(&cam->buffers)) {
			cam->frame_state.singles++;
			break;	/* Leave it valid, hope for better later */
		}
		cam->frame_state.delivered++;
		clear_bit(bufno, &cam->flags);
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
				queue);
		list_del_init(&buf->queue);
		/*
		 * Drop the lock during the big copy.  This *should* be safe...
		 */
		spin_unlock_irqrestore(&cam->dev_lock, flags);
		memcpy(vb2_plane_vaddr(&buf->vb_buf.vb2_buf, 0),
				cam->dma_bufs[bufno],
				cam->pix_format.sizeimage);
		mcam_buffer_done(cam, bufno, &buf->vb_buf);
		spin_lock_irqsave(&cam->dev_lock, flags);
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}


/*
 * Make sure our allocated buffers are up to the task.
 * Frees and reallocates the DMA buffers if the current image size
 * no longer fits into them.
 */
static int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	if (cam->nbufs > 0 && cam->dma_buf_size < cam->pix_format.sizeimage)
		mcam_free_dma_bufs(cam);
	if (cam->nbufs == 0)
		return mcam_alloc_dma_bufs(cam, 0);
	return 0;
}

/* Frame-complete handler for vmalloc mode: defer the copy to the tasklet. */
static void mcam_vmalloc_done(struct mcam_camera *cam, int frame)
{
	tasklet_schedule(&cam->s_tasklet);
}

#else /* MCAM_MODE_VMALLOC */

/* Stubs so the buffer-mode-independent code links without vmalloc support. */
static inline int mcam_alloc_dma_bufs(struct mcam_camera *cam, int loadtime)
{
	return 0;
}

static inline void mcam_free_dma_bufs(struct mcam_camera *cam)
{
	return;
}

static inline int mcam_check_dma_buffers(struct mcam_camera *cam)
{
	return 0;
}



#endif /* MCAM_MODE_VMALLOC */


#ifdef MCAM_MODE_DMA_CONTIG
/* ---------------------------------------------------------------------- */
/*
 * DMA-contiguous code.
 */

/*
 * Set up a contiguous buffer for the given frame. Here also is where
 * the underrun strategy is set: if there is no buffer available, reuse
 * the buffer from the other BAR and set the CF_SINGLE_BUFFER flag to
 * keep the interrupt handler from giving that buffer back to user
 * space. In this way, we always have a buffer to DMA to and don't
 * have to try to play games stopping and restarting the controller.
 */
static void mcam_set_contig_buffer(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf;
	dma_addr_t dma_handle;
	struct vb2_v4l2_buffer *vb;

	/*
	 * If there are no available buffers, go into single mode
	 */
	if (list_empty(&cam->buffers)) {
		buf = cam->vb_bufs[frame ^ 0x1];	/* reuse the other BAR's buffer */
		set_bit(CF_SINGLE_BUFFER, &cam->flags);
		cam->frame_state.singles++;
	} else {
		/*
		 * OK, we have a buffer we can use.
		 */
		buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer,
					queue);
		list_del_init(&buf->queue);
		clear_bit(CF_SINGLE_BUFFER, &cam->flags);
	}

	cam->vb_bufs[frame] = buf;
	vb = &buf->vb_buf;

	dma_handle = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);
	mcam_write_yuv_bases(cam, frame, dma_handle);
}

/*
 * Initial B_DMA_contig setup.
 */
static void mcam_ctlr_dma_contig(struct mcam_camera *cam)
{
	mcam_reg_set_bit(cam, REG_CTRL1, C1_TWOBUFS);
	cam->nbufs = 2;
	mcam_set_contig_buffer(cam, 0);
	mcam_set_contig_buffer(cam, 1);
}

/*
 * Frame completion handling.
 */
static void mcam_dma_contig_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[frame];

	/* In single-buffer mode the frame is not handed to user space. */
	if (!test_bit(CF_SINGLE_BUFFER, &cam->flags)) {
		cam->frame_state.delivered++;
		cam->vb_bufs[frame] = NULL;
		mcam_buffer_done(cam, frame, &buf->vb_buf);
	}
	mcam_set_contig_buffer(cam, frame);
}

#endif /* MCAM_MODE_DMA_CONTIG */

#ifdef MCAM_MODE_DMA_SG
/* ---------------------------------------------------------------------- */
/*
 * Scatter/gather-specific code.
 */

/*
 * Set up the next buffer for S/G I/O; caller should be sure that
 * the controller is stopped and a buffer is available.
 */
static void mcam_sg_next_buffer(struct mcam_camera *cam)
{
	struct mcam_vb_buffer *buf;
	struct sg_table *sg_table;

	buf = list_first_entry(&cam->buffers, struct mcam_vb_buffer, queue);
	list_del_init(&buf->queue);
	sg_table = vb2_dma_sg_plane_desc(&buf->vb_buf.vb2_buf, 0);
	/*
	 * Very Bad Not Good Things happen if you don't clear
	 * C1_DESC_ENA before making any descriptor changes.
	 */
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_ENA);
	mcam_reg_write(cam, REG_DMA_DESC_Y, buf->dma_desc_pa);
	mcam_reg_write(cam, REG_DESC_LEN_Y,
			sg_table->nents * sizeof(struct mcam_dma_desc));
	mcam_reg_write(cam, REG_DESC_LEN_U, 0);
	mcam_reg_write(cam, REG_DESC_LEN_V, 0);
	mcam_reg_set_bit(cam, REG_CTRL1, C1_DESC_ENA);
	cam->vb_bufs[0] = buf;
}

/*
 * Initial B_DMA_sg setup
 */
static void mcam_ctlr_dma_sg(struct mcam_camera *cam)
{
	/*
	 * The list-empty condition can hit us at resume time
	 * if the buffer list was empty when the system was suspended.
	 */
	if (list_empty(&cam->buffers)) {
		set_bit(CF_SG_RESTART, &cam->flags);
		return;
	}

	mcam_reg_clear_bit(cam, REG_CTRL1, C1_DESC_3WORD);
	mcam_sg_next_buffer(cam);
	cam->nbufs = 3;
}


/*
 * Frame completion with S/G is trickier. We can't muck with
 * a descriptor chain on the fly, since the controller buffers it
 * internally. So we have to actually stop and restart; Marvell
 * says this is the way to do it.
 *
 * Of course, stopping is easier said than done; experience shows
 * that the controller can start a frame *after* C0_ENABLE has been
 * cleared. So when running in S/G mode, the controller is "stopped"
 * on receipt of the start-of-frame interrupt. That means we can
 * safely change the DMA descriptor array here and restart things
 * (assuming there's another buffer waiting to go).
 */
static void mcam_dma_sg_done(struct mcam_camera *cam, int frame)
{
	struct mcam_vb_buffer *buf = cam->vb_bufs[0];

	/*
	 * If we're no longer supposed to be streaming, don't do anything.
	 */
	if (cam->state != S_STREAMING)
		return;
	/*
	 * If we have another buffer available, put it in and
	 * restart the engine.
	 */
	if (!list_empty(&cam->buffers)) {
		mcam_sg_next_buffer(cam);
		mcam_ctlr_start(cam);
	/*
	 * Otherwise set CF_SG_RESTART and the controller will
	 * be restarted once another buffer shows up.
	 */
	} else {
		set_bit(CF_SG_RESTART, &cam->flags);
		cam->frame_state.singles++;
		cam->vb_bufs[0] = NULL;
	}
	/*
	 * Now we can give the completed frame back to user space.
	 */
	cam->frame_state.delivered++;
	mcam_buffer_done(cam, frame, &buf->vb_buf);
}


/*
 * Scatter/gather mode requires stopping the controller between
 * frames so we can put in a new DMA descriptor array. If no new
 * buffer exists at frame completion, the controller is left stopped;
 * this function is charged with getting things going again.
 */
static void mcam_sg_restart(struct mcam_camera *cam)
{
	mcam_ctlr_dma_sg(cam);
	mcam_ctlr_start(cam);
	clear_bit(CF_SG_RESTART, &cam->flags);
}

#else /* MCAM_MODE_DMA_SG */

/* Stub so buffer-mode-independent code compiles without S/G support. */
static inline void mcam_sg_restart(struct mcam_camera *cam)
{
	return;
}

#endif /* MCAM_MODE_DMA_SG */

/* ---------------------------------------------------------------------- */
/*
 * Buffer-mode-independent controller code.
 */

/*
 * Image format setup: program pitch, image size and the data-format
 * bits of CTRL0 to match cam->pix_format.
 */
static void mcam_ctlr_image(struct mcam_camera *cam)
{
	struct v4l2_pix_format *fmt = &cam->pix_format;
	u32 widthy = 0, widthuv = 0, imgsz_h, imgsz_w;

	cam_dbg(cam, "camera: bytesperline = %d; height = %d\n",
		fmt->bytesperline, fmt->sizeimage / fmt->bytesperline);
	imgsz_h = (fmt->height << IMGSZ_V_SHIFT) & IMGSZ_V_MASK;
	imgsz_w = (fmt->width * 2) & IMGSZ_H_MASK;

	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
		widthy = fmt->width * 2;
		widthuv = 0;
		break;
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		widthy = fmt->width;
		widthuv = fmt->width / 2;
		break;
	default:
		widthy = fmt->bytesperline;
		widthuv = 0;
		break;
	}

	mcam_reg_write_mask(cam, REG_IMGPITCH, widthuv << 16 | widthy,
			IMGP_YP_MASK | IMGP_UVP_MASK);
	mcam_reg_write(cam, REG_IMGSIZE, imgsz_h | imgsz_w);
	mcam_reg_write(cam, REG_IMGOFFSET, 0x0);

	/*
	 * Tell the controller about the image format we are using.
	 */
	switch (fmt->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_420PL | C0_YUVE_VYUY, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YUYV:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_NOSWAP, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_YVYU:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_YUV | C0_YUV_PACKED | C0_YUVE_SWAP24, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_XRGB444:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_444 | C0_RGB4_XBGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_RGB565:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGBF_565 | C0_RGB5_BGGR, C0_DF_MASK);
		break;
	case V4L2_PIX_FMT_SBGGR8:
		mcam_reg_write_mask(cam, REG_CTRL0,
			C0_DF_RGB | C0_RGB5_GRBG, C0_DF_MASK);
		break;
	default:
		cam_err(cam, "camera: unknown format: %#x\n", fmt->pixelformat);
		break;
	}

	/*
	 * Make sure it knows we want to use hsync/vsync.
	 */
	mcam_reg_write_mask(cam, REG_CTRL0, C0_SIF_HVSYNC, C0_SIFM_MASK);
}


/*
 * Configure the controller for operation; caller holds the
 * device mutex.
 */
static int mcam_ctlr_configure(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	cam->dma_setup(cam);	/* buffer-mode-specific DMA setup hook */
	mcam_ctlr_image(cam);
	mcam_set_config_needed(cam, 0);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_irq_enable(struct mcam_camera *cam)
{
	/*
	 * Clear any pending interrupts, since we do not
	 * expect to have I/O active prior to enabling.
	 */
	mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS);
	mcam_reg_set_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

static void mcam_ctlr_irq_disable(struct mcam_camera *cam)
{
	mcam_reg_clear_bit(cam, REG_IRQMASK, FRAMEIRQS);
}

/*
 * Stop the controller, and don't return until we're really sure that no
 * further DMA is going on.
 */
static void mcam_ctlr_stop_dma(struct mcam_camera *cam)
{
	unsigned long flags;

	/*
	 * Theory: stop the camera controller (whether it is operating
	 * or not).  Delay briefly just in case we race with the SOF
	 * interrupt, then wait until no DMA is active.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_SG_RESTART, &cam->flags);
	mcam_ctlr_stop(cam);
	cam->state = S_IDLE;
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	/*
	 * This is a brutally long sleep, but experience shows that
	 * it can take the controller a while to get the message that
	 * it needs to stop grabbing frames.  In particular, we can
	 * sometimes (on mmp) get a frame at the end WITHOUT the
	 * start-of-frame indication.
	 */
	msleep(150);
	if (test_bit(CF_DMA_ACTIVE, &cam->flags))
		cam_err(cam, "Timeout waiting for DMA to end\n");
		/* This would be bad news - what now? */
	spin_lock_irqsave(&cam->dev_lock, flags);
	mcam_ctlr_irq_disable(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * Power up and down.
 */
static int mcam_ctlr_power_up(struct mcam_camera *cam)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&cam->dev_lock, flags);
	if (cam->plat_power_up) {
		ret = cam->plat_power_up(cam);
		if (ret) {
			spin_unlock_irqrestore(&cam->dev_lock, flags);
			return ret;
		}
	}
	mcam_reg_clear_bit(cam, REG_CTRL1, C1_PWRDWN);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

static void mcam_ctlr_power_down(struct mcam_camera *cam)
{
	unsigned long flags;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * School of hard knocks department: be sure we do any register
	 * twiddling on the controller *before* calling the platform
	 * power down routine.
	 */
	mcam_reg_set_bit(cam, REG_CTRL1, C1_PWRDWN);
	if (cam->plat_power_down)
		cam->plat_power_down(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/* ---------------------------------------------------------------------- */
/*
 * Master sensor clock.
 *
 * These are clk_ops callbacks exposing the controller-generated sensor
 * clock (MCLK) through the common clock framework.
 */
static int mclk_prepare(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	clk_prepare(cam->clk[0]);
	return 0;
}

static void mclk_unprepare(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	clk_unprepare(cam->clk[0]);
}

static int mclk_enable(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);
	int mclk_src;
	int mclk_div;
	int ret;

	/*
	 * Clock the sensor appropriately.  Controller clock should
	 * be 48MHz, sensor "typical" value is half that.
	 */
	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY) {
		mclk_src = cam->mclk_src;
		mclk_div = cam->mclk_div;
	} else {
		mclk_src = 3;
		mclk_div = 2;
	}

	ret = pm_runtime_resume_and_get(cam->dev);
	if (ret < 0)
		return ret;
	clk_enable(cam->clk[0]);
	mcam_reg_write(cam, REG_CLKCTRL, (mclk_src << 29) | mclk_div);
	/* NOTE(review): mcam_ctlr_power_up() can fail; return value ignored here. */
	mcam_ctlr_power_up(cam);

	return 0;
}

static void mclk_disable(struct clk_hw *hw)
{
	struct mcam_camera *cam = container_of(hw, struct mcam_camera, mclk_hw);

	mcam_ctlr_power_down(cam);
	clk_disable(cam->clk[0]);
	pm_runtime_put(cam->dev);
}

/* MCLK is fixed at 48MHz regardless of the parent rate. */
static unsigned long mclk_recalc_rate(struct clk_hw *hw,
				unsigned long parent_rate)
{
	return 48000000;
}

static const struct clk_ops mclk_ops = {
	.prepare	= mclk_prepare,
	.unprepare	= mclk_unprepare,
	.enable		= mclk_enable,
	.disable	= mclk_disable,
	.recalc_rate	= mclk_recalc_rate,
};

/* -------------------------------------------------------------------- */
/*
 * Communications with the sensor.
 */

static int __mcam_cam_reset(struct mcam_camera *cam)
{
	return sensor_call(cam, core, reset, 0);
}

/*
 * We have found the sensor on the i2c.  Let's try to have a
 * conversation.
 */
static int mcam_cam_init(struct mcam_camera *cam)
{
	int ret;

	if (cam->state != S_NOTREADY)
		cam_warn(cam, "Cam init with device in funky state %d",
				cam->state);
	ret = __mcam_cam_reset(cam);
	/* Get/set parameters? */
	cam->state = S_IDLE;
	return ret;
}

/*
 * Configure the sensor to match the parameters we have.
Caller should
 * hold s_mutex
 */
static int mcam_cam_set_flip(struct mcam_camera *cam)
{
	struct v4l2_control ctrl;

	memset(&ctrl, 0, sizeof(ctrl));
	ctrl.id = V4L2_CID_VFLIP;
	ctrl.value = flip;	/* module parameter */
	return v4l2_s_ctrl(NULL, cam->sensor->ctrl_handler, &ctrl);
}


static int mcam_cam_configure(struct mcam_camera *cam)
{
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	v4l2_fill_mbus_format(&format.format, &cam->pix_format, cam->mbus_code);
	ret = sensor_call(cam, core, init, 0);
	if (ret == 0)
		ret = sensor_call(cam, pad, set_fmt, NULL, &format);
	/*
	 * OV7670 does weird things if flip is set *before* format...
	 */
	ret += mcam_cam_set_flip(cam);
	return ret;
}

/*
 * Get everything ready, and start grabbing frames.
 */
static int mcam_read_setup(struct mcam_camera *cam)
{
	int ret;
	unsigned long flags;

	/*
	 * Configuration.  If we still don't have DMA buffers,
	 * make one last, desperate attempt.
	 */
	if (cam->buffer_mode == B_vmalloc && cam->nbufs == 0 &&
			mcam_alloc_dma_bufs(cam, 0))
		return -ENOMEM;

	if (mcam_needs_config(cam)) {
		mcam_cam_configure(cam);
		ret = mcam_ctlr_configure(cam);
		if (ret)
			return ret;
	}

	/*
	 * Turn it loose.
	 */
	spin_lock_irqsave(&cam->dev_lock, flags);
	clear_bit(CF_DMA_ACTIVE, &cam->flags);
	mcam_reset_buffers(cam);
	if (cam->bus_type == V4L2_MBUS_CSI2_DPHY)
		mcam_enable_mipi(cam);
	else
		mcam_disable_mipi(cam);
	mcam_ctlr_irq_enable(cam);
	cam->state = S_STREAMING;
	if (!test_bit(CF_SG_RESTART, &cam->flags))
		mcam_ctlr_start(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	return 0;
}

/* ----------------------------------------------------------------------- */
/*
 * Videobuf2 interface code.
 */

static int mcam_vb_queue_setup(struct vb2_queue *vq,
		unsigned int *nbufs,
		unsigned int *num_planes, unsigned int sizes[],
		struct device *alloc_devs[])
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	int minbufs = (cam->buffer_mode == B_DMA_contig) ? 3 : 2;
	unsigned size = cam->pix_format.sizeimage;

	if (*nbufs < minbufs)
		*nbufs = minbufs;

	if (*num_planes)
		return sizes[0] < size ? -EINVAL : 0;
	sizes[0] = size;
	*num_planes = 1; /* Someday we have to support planar formats... */
	return 0;
}


static void mcam_vb_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;
	int start;

	spin_lock_irqsave(&cam->dev_lock, flags);
	/*
	 * NOTE(review): "start" is computed *before* this buffer is added
	 * to the list, so it is only true if other buffers were already
	 * queued while in S_BUFWAIT — confirm this is the intended
	 * condition for kicking off streaming.
	 */
	start = (cam->state == S_BUFWAIT) && !list_empty(&cam->buffers);
	list_add(&mvb->queue, &cam->buffers);
	if (cam->state == S_STREAMING && test_bit(CF_SG_RESTART, &cam->flags))
		mcam_sg_restart(cam);
	spin_unlock_irqrestore(&cam->dev_lock, flags);
	if (start)
		mcam_read_setup(cam);
}

/*
 * Give every queued (and in-flight) buffer back to vb2 with the given
 * state; used when streaming stops or fails to start.
 */
static void mcam_vb_requeue_bufs(struct vb2_queue *vq,
				 enum vb2_buffer_state state)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	struct mcam_vb_buffer *buf, *node;
	unsigned long flags;
	unsigned i;

	spin_lock_irqsave(&cam->dev_lock, flags);
	list_for_each_entry_safe(buf, node, &cam->buffers, queue) {
		vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
		list_del(&buf->queue);
	}
	for (i = 0; i < MAX_DMA_BUFS; i++) {
		buf = cam->vb_bufs[i];

		if (buf) {
			vb2_buffer_done(&buf->vb_buf.vb2_buf, state);
			cam->vb_bufs[i] = NULL;
		}
	}
	spin_unlock_irqrestore(&cam->dev_lock, flags);
}

/*
 * These need to be called with the mutex held from vb2
 */
static int mcam_vb_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);
	unsigned int frame;
	int ret;

	if (cam->state != S_IDLE) {
		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
		return -EINVAL;
	}
	cam->frame_state.frames = 0;
	cam->frame_state.singles = 0;
	cam->frame_state.delivered = 0;
	cam->sequence = 0;
	/*
	 * Videobuf2 sneakily hoards all the buffers and won't
	 * give them to us until *after* streaming starts.  But
	 * we can't actually start streaming until we have a
	 * destination.  So go into a wait state and hope they
	 * give us buffers soon.
	 */
	if (cam->buffer_mode != B_vmalloc && list_empty(&cam->buffers)) {
		cam->state = S_BUFWAIT;
		return 0;
	}

	/*
	 * Ensure clear the left over frame flags
	 * before every really start streaming
	 */
	for (frame = 0; frame < cam->nbufs; frame++)
		clear_bit(CF_FRAME_SOF0 + frame, &cam->flags);

	ret = mcam_read_setup(cam);
	if (ret)
		mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_QUEUED);
	return ret;
}

static void mcam_vb_stop_streaming(struct vb2_queue *vq)
{
	struct mcam_camera *cam = vb2_get_drv_priv(vq);

	cam_dbg(cam, "stop_streaming: %d frames, %d singles, %d delivered\n",
			cam->frame_state.frames, cam->frame_state.singles,
			cam->frame_state.delivered);
	if (cam->state == S_BUFWAIT) {
		/* They never gave us buffers */
		cam->state = S_IDLE;
		return;
	}
	if (cam->state != S_STREAMING)
		return;
	mcam_ctlr_stop_dma(cam);
	/*
	 * VB2 reclaims the buffers, so we need to forget
	 * about them.
	 */
	mcam_vb_requeue_bufs(vq, VB2_BUF_STATE_ERROR);
}


static const struct vb2_ops mcam_vb2_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_queue		= mcam_vb_buf_queue,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= vb2_ops_wait_prepare,
	.wait_finish		= vb2_ops_wait_finish,
};


#ifdef MCAM_MODE_DMA_SG
/*
 * Scatter/gather mode uses all of the above functions plus a
 * few extras to deal with DMA mapping.
 */
/*
 * Allocate the coherent array of hardware DMA descriptors this buffer
 * will use: one descriptor per page of the image, plus one for slack.
 */
static int mcam_vb_sg_buf_init(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	mvb->dma_desc = dma_alloc_coherent(cam->dev,
			ndesc * sizeof(struct mcam_dma_desc),
			&mvb->dma_desc_pa, GFP_KERNEL);
	if (mvb->dma_desc == NULL) {
		cam_err(cam, "Unable to get DMA descriptor array\n");
		return -ENOMEM;
	}
	return 0;
}

/*
 * Copy the buffer's scatterlist into the controller's descriptor
 * format (bus address + segment length per entry).
 */
static int mcam_vb_sg_buf_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	struct sg_table *sg_table = vb2_dma_sg_plane_desc(vb, 0);
	struct mcam_dma_desc *desc = mvb->dma_desc;
	struct scatterlist *sg;
	int i;

	for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
		desc->dma_addr = sg_dma_address(sg);
		desc->segment_len = sg_dma_len(sg);
		desc++;
	}
	return 0;
}

/* Free the descriptor array allocated in mcam_vb_sg_buf_init(). */
static void mcam_vb_sg_buf_cleanup(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct mcam_camera *cam = vb2_get_drv_priv(vb->vb2_queue);
	struct mcam_vb_buffer *mvb = vb_to_mvb(vbuf);
	int ndesc = cam->pix_format.sizeimage/PAGE_SIZE + 1;

	dma_free_coherent(cam->dev, ndesc * sizeof(struct mcam_dma_desc),
			mvb->dma_desc, mvb->dma_desc_pa);
}


static const struct vb2_ops mcam_vb2_sg_ops = {
	.queue_setup		= mcam_vb_queue_setup,
	.buf_init		= mcam_vb_sg_buf_init,
	.buf_prepare		= mcam_vb_sg_buf_prepare,
	.buf_queue		= mcam_vb_buf_queue,
	.buf_cleanup		= mcam_vb_sg_buf_cleanup,
	.start_streaming	= mcam_vb_start_streaming,
	.stop_streaming		= mcam_vb_stop_streaming,
	.wait_prepare		= vb2_ops_wait_prepare,
	.wait_finish		= vb2_ops_wait_finish,
};

#endif /* MCAM_MODE_DMA_SG */

/*
 * Set up the videobuf2 queue for this camera, wiring in the
 * buffer-mode-specific vb2/memory ops and the DMA handlers the
 * interrupt code will call.
 */
static int mcam_setup_vb2(struct mcam_camera *cam)
{
	struct vb2_queue *vq = &cam->vb_queue;

	memset(vq, 0, sizeof(*vq));
	vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vq->drv_priv = cam;
	vq->lock = &cam->s_mutex;
	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	vq->io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF | VB2_READ;
	vq->buf_struct_size = sizeof(struct mcam_vb_buffer);
	vq->dev = cam->dev;
	INIT_LIST_HEAD(&cam->buffers);
	switch (cam->buffer_mode) {
	case B_DMA_contig:
#ifdef MCAM_MODE_DMA_CONTIG
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_dma_contig_memops;
		cam->dma_setup = mcam_ctlr_dma_contig;
		cam->frame_complete = mcam_dma_contig_done;
#endif
		break;
	case B_DMA_sg:
#ifdef MCAM_MODE_DMA_SG
		vq->ops = &mcam_vb2_sg_ops;
		vq->mem_ops = &vb2_dma_sg_memops;
		cam->dma_setup = mcam_ctlr_dma_sg;
		cam->frame_complete = mcam_dma_sg_done;
#endif
		break;
	case B_vmalloc:
#ifdef MCAM_MODE_VMALLOC
		/* vmalloc buffers are copied out of the DMA area in a tasklet */
		tasklet_setup(&cam->s_tasklet, mcam_frame_tasklet);
		vq->ops = &mcam_vb2_ops;
		vq->mem_ops = &vb2_vmalloc_memops;
		cam->dma_setup = mcam_ctlr_dma_vmalloc;
		cam->frame_complete = mcam_vmalloc_done;
#endif
		break;
	}
	return vb2_queue_init(vq);
}


/* ---------------------------------------------------------------------- */
/*
 * The long list of V4L2 ioctl() operations.
 */

static int mcam_vidioc_querycap(struct file *file, void *priv,
		struct v4l2_capability *cap)
{
	struct mcam_camera *cam = video_drvdata(file);

	strscpy(cap->driver, "marvell_ccic", sizeof(cap->driver));
	strscpy(cap->card, "marvell_ccic", sizeof(cap->card));
	strscpy(cap->bus_info, cam->bus_info, sizeof(cap->bus_info));
	return 0;
}


static int mcam_vidioc_enum_fmt_vid_cap(struct file *filp,
		void *priv, struct v4l2_fmtdesc *fmt)
{
	if (fmt->index >= N_MCAM_FMTS)
		return -EINVAL;
	fmt->pixelformat = mcam_formats[fmt->index].pixelformat;
	return 0;
}

/*
 * Negotiate a format: map the request onto a format we support, let
 * the sensor adjust the geometry (TRY only, via a scratch pad state),
 * then fill in the derived pix fields (stride, image size, colorspace).
 */
static int mcam_vidioc_try_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	struct v4l2_pix_format *pix = &fmt->fmt.pix;
	struct v4l2_subdev_pad_config pad_cfg;
	struct v4l2_subdev_state pad_state = {
		.pads = &pad_cfg
	};
	struct v4l2_subdev_format format = {
		.which = V4L2_SUBDEV_FORMAT_TRY,
	};
	int ret;

	/* presumably mcam_find_format() falls back to a default - never NULL */
	f = mcam_find_format(pix->pixelformat);
	pix->pixelformat = f->pixelformat;
	v4l2_fill_mbus_format(&format.format, pix, f->mbus_code);
	ret = sensor_call(cam, pad, set_fmt, &pad_state, &format);
	v4l2_fill_pix_format(pix, &format.format);
	pix->bytesperline = pix->width * f->bpp;
	switch (f->pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
		/* Planar 4:2:0: two quarter-size chroma planes */
		pix->sizeimage = pix->height * pix->bytesperline * 3 / 2;
		break;
	default:
		pix->sizeimage = pix->height * pix->bytesperline;
		break;
	}
	pix->colorspace = V4L2_COLORSPACE_SRGB;
	return ret;
}

static int mcam_vidioc_s_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *fmt)
{
	struct mcam_camera *cam = video_drvdata(filp);
	struct mcam_format_struct *f;
	int ret;

	/*
	 * Can't do anything if the device is not idle
	 * Also can't if there are streaming buffers in place.
	 */
	if (cam->state != S_IDLE || vb2_is_busy(&cam->vb_queue))
		return -EBUSY;

	f = mcam_find_format(fmt->fmt.pix.pixelformat);

	/*
	 * See if the formatting works in principle.
	 */
	ret = mcam_vidioc_try_fmt_vid_cap(filp, priv, fmt);
	if (ret)
		return ret;
	/*
	 * Now we start to change things for real, so let's do it
	 * under lock.
	 */
	cam->pix_format = fmt->fmt.pix;
	cam->mbus_code = f->mbus_code;

	/*
	 * Make sure we have appropriate DMA buffers.
	 */
	if (cam->buffer_mode == B_vmalloc) {
		ret = mcam_check_dma_buffers(cam);
		if (ret)
			goto out;
	}
	mcam_set_config_needed(cam, 1);
out:
	return ret;
}

/*
 * Return our stored notion of how the camera is/should be configured.
 * The V4l2 spec wants us to be smarter, and actually get this from
 * the camera (and not mess with it at open time).  Someday.
 */
static int mcam_vidioc_g_fmt_vid_cap(struct file *filp, void *priv,
		struct v4l2_format *f)
{
	struct mcam_camera *cam = video_drvdata(filp);

	f->fmt.pix = cam->pix_format;
	return 0;
}

/*
 * We only have one input - the sensor - so minimize the nonsense here.
 */
static int mcam_vidioc_enum_input(struct file *filp, void *priv,
		struct v4l2_input *input)
{
	if (input->index != 0)
		return -EINVAL;

	input->type = V4L2_INPUT_TYPE_CAMERA;
	strscpy(input->name, "Camera", sizeof(input->name));
	return 0;
}

static int mcam_vidioc_g_input(struct file *filp, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int mcam_vidioc_s_input(struct file *filp, void *priv, unsigned int i)
{
	if (i != 0)
		return -EINVAL;
	return 0;
}

/*
 * G/S_PARM.
Most of this is done by the sensor, but we are 1465 * the level which controls the number of read buffers. 1466 */ 1467 static int mcam_vidioc_g_parm(struct file *filp, void *priv, 1468 struct v4l2_streamparm *a) 1469 { 1470 struct mcam_camera *cam = video_drvdata(filp); 1471 int ret; 1472 1473 ret = v4l2_g_parm_cap(video_devdata(filp), cam->sensor, a); 1474 a->parm.capture.readbuffers = n_dma_bufs; 1475 return ret; 1476 } 1477 1478 static int mcam_vidioc_s_parm(struct file *filp, void *priv, 1479 struct v4l2_streamparm *a) 1480 { 1481 struct mcam_camera *cam = video_drvdata(filp); 1482 int ret; 1483 1484 ret = v4l2_s_parm_cap(video_devdata(filp), cam->sensor, a); 1485 a->parm.capture.readbuffers = n_dma_bufs; 1486 return ret; 1487 } 1488 1489 static int mcam_vidioc_enum_framesizes(struct file *filp, void *priv, 1490 struct v4l2_frmsizeenum *sizes) 1491 { 1492 struct mcam_camera *cam = video_drvdata(filp); 1493 struct mcam_format_struct *f; 1494 struct v4l2_subdev_frame_size_enum fse = { 1495 .index = sizes->index, 1496 .which = V4L2_SUBDEV_FORMAT_ACTIVE, 1497 }; 1498 int ret; 1499 1500 f = mcam_find_format(sizes->pixel_format); 1501 if (f->pixelformat != sizes->pixel_format) 1502 return -EINVAL; 1503 fse.code = f->mbus_code; 1504 ret = sensor_call(cam, pad, enum_frame_size, NULL, &fse); 1505 if (ret) 1506 return ret; 1507 if (fse.min_width == fse.max_width && 1508 fse.min_height == fse.max_height) { 1509 sizes->type = V4L2_FRMSIZE_TYPE_DISCRETE; 1510 sizes->discrete.width = fse.min_width; 1511 sizes->discrete.height = fse.min_height; 1512 return 0; 1513 } 1514 sizes->type = V4L2_FRMSIZE_TYPE_CONTINUOUS; 1515 sizes->stepwise.min_width = fse.min_width; 1516 sizes->stepwise.max_width = fse.max_width; 1517 sizes->stepwise.min_height = fse.min_height; 1518 sizes->stepwise.max_height = fse.max_height; 1519 sizes->stepwise.step_width = 1; 1520 sizes->stepwise.step_height = 1; 1521 return 0; 1522 } 1523 1524 static int mcam_vidioc_enum_frameintervals(struct file *filp, 
void *priv, 1525 struct v4l2_frmivalenum *interval) 1526 { 1527 struct mcam_camera *cam = video_drvdata(filp); 1528 struct mcam_format_struct *f; 1529 struct v4l2_subdev_frame_interval_enum fie = { 1530 .index = interval->index, 1531 .width = interval->width, 1532 .height = interval->height, 1533 .which = V4L2_SUBDEV_FORMAT_ACTIVE, 1534 }; 1535 int ret; 1536 1537 f = mcam_find_format(interval->pixel_format); 1538 if (f->pixelformat != interval->pixel_format) 1539 return -EINVAL; 1540 fie.code = f->mbus_code; 1541 ret = sensor_call(cam, pad, enum_frame_interval, NULL, &fie); 1542 if (ret) 1543 return ret; 1544 interval->type = V4L2_FRMIVAL_TYPE_DISCRETE; 1545 interval->discrete = fie.interval; 1546 return 0; 1547 } 1548 1549 #ifdef CONFIG_VIDEO_ADV_DEBUG 1550 static int mcam_vidioc_g_register(struct file *file, void *priv, 1551 struct v4l2_dbg_register *reg) 1552 { 1553 struct mcam_camera *cam = video_drvdata(file); 1554 1555 if (reg->reg > cam->regs_size - 4) 1556 return -EINVAL; 1557 reg->val = mcam_reg_read(cam, reg->reg); 1558 reg->size = 4; 1559 return 0; 1560 } 1561 1562 static int mcam_vidioc_s_register(struct file *file, void *priv, 1563 const struct v4l2_dbg_register *reg) 1564 { 1565 struct mcam_camera *cam = video_drvdata(file); 1566 1567 if (reg->reg > cam->regs_size - 4) 1568 return -EINVAL; 1569 mcam_reg_write(cam, reg->reg, reg->val); 1570 return 0; 1571 } 1572 #endif 1573 1574 static const struct v4l2_ioctl_ops mcam_v4l_ioctl_ops = { 1575 .vidioc_querycap = mcam_vidioc_querycap, 1576 .vidioc_enum_fmt_vid_cap = mcam_vidioc_enum_fmt_vid_cap, 1577 .vidioc_try_fmt_vid_cap = mcam_vidioc_try_fmt_vid_cap, 1578 .vidioc_s_fmt_vid_cap = mcam_vidioc_s_fmt_vid_cap, 1579 .vidioc_g_fmt_vid_cap = mcam_vidioc_g_fmt_vid_cap, 1580 .vidioc_enum_input = mcam_vidioc_enum_input, 1581 .vidioc_g_input = mcam_vidioc_g_input, 1582 .vidioc_s_input = mcam_vidioc_s_input, 1583 .vidioc_reqbufs = vb2_ioctl_reqbufs, 1584 .vidioc_create_bufs = vb2_ioctl_create_bufs, 1585 
.vidioc_querybuf = vb2_ioctl_querybuf, 1586 .vidioc_qbuf = vb2_ioctl_qbuf, 1587 .vidioc_dqbuf = vb2_ioctl_dqbuf, 1588 .vidioc_expbuf = vb2_ioctl_expbuf, 1589 .vidioc_streamon = vb2_ioctl_streamon, 1590 .vidioc_streamoff = vb2_ioctl_streamoff, 1591 .vidioc_g_parm = mcam_vidioc_g_parm, 1592 .vidioc_s_parm = mcam_vidioc_s_parm, 1593 .vidioc_enum_framesizes = mcam_vidioc_enum_framesizes, 1594 .vidioc_enum_frameintervals = mcam_vidioc_enum_frameintervals, 1595 .vidioc_subscribe_event = v4l2_ctrl_subscribe_event, 1596 .vidioc_unsubscribe_event = v4l2_event_unsubscribe, 1597 #ifdef CONFIG_VIDEO_ADV_DEBUG 1598 .vidioc_g_register = mcam_vidioc_g_register, 1599 .vidioc_s_register = mcam_vidioc_s_register, 1600 #endif 1601 }; 1602 1603 /* ---------------------------------------------------------------------- */ 1604 /* 1605 * Our various file operations. 1606 */ 1607 static int mcam_v4l_open(struct file *filp) 1608 { 1609 struct mcam_camera *cam = video_drvdata(filp); 1610 int ret; 1611 1612 mutex_lock(&cam->s_mutex); 1613 ret = v4l2_fh_open(filp); 1614 if (ret) 1615 goto out; 1616 if (v4l2_fh_is_singular_file(filp)) { 1617 ret = sensor_call(cam, core, s_power, 1); 1618 if (ret) 1619 goto out; 1620 ret = pm_runtime_resume_and_get(cam->dev); 1621 if (ret < 0) 1622 goto out; 1623 __mcam_cam_reset(cam); 1624 mcam_set_config_needed(cam, 1); 1625 } 1626 out: 1627 mutex_unlock(&cam->s_mutex); 1628 if (ret) 1629 v4l2_fh_release(filp); 1630 return ret; 1631 } 1632 1633 1634 static int mcam_v4l_release(struct file *filp) 1635 { 1636 struct mcam_camera *cam = video_drvdata(filp); 1637 bool last_open; 1638 1639 mutex_lock(&cam->s_mutex); 1640 last_open = v4l2_fh_is_singular_file(filp); 1641 _vb2_fop_release(filp, NULL); 1642 if (last_open) { 1643 mcam_disable_mipi(cam); 1644 sensor_call(cam, core, s_power, 0); 1645 pm_runtime_put(cam->dev); 1646 if (cam->buffer_mode == B_vmalloc && alloc_bufs_at_read) 1647 mcam_free_dma_bufs(cam); 1648 } 1649 1650 mutex_unlock(&cam->s_mutex); 1651 
return 0; 1652 } 1653 1654 static const struct v4l2_file_operations mcam_v4l_fops = { 1655 .owner = THIS_MODULE, 1656 .open = mcam_v4l_open, 1657 .release = mcam_v4l_release, 1658 .read = vb2_fop_read, 1659 .poll = vb2_fop_poll, 1660 .mmap = vb2_fop_mmap, 1661 .unlocked_ioctl = video_ioctl2, 1662 }; 1663 1664 1665 /* 1666 * This template device holds all of those v4l2 methods; we 1667 * clone it for specific real devices. 1668 */ 1669 static const struct video_device mcam_v4l_template = { 1670 .name = "mcam", 1671 .fops = &mcam_v4l_fops, 1672 .ioctl_ops = &mcam_v4l_ioctl_ops, 1673 .release = video_device_release_empty, 1674 .device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_READWRITE | 1675 V4L2_CAP_STREAMING, 1676 }; 1677 1678 /* ---------------------------------------------------------------------- */ 1679 /* 1680 * Interrupt handler stuff 1681 */ 1682 static void mcam_frame_complete(struct mcam_camera *cam, int frame) 1683 { 1684 /* 1685 * Basic frame housekeeping. 1686 */ 1687 set_bit(frame, &cam->flags); 1688 clear_bit(CF_DMA_ACTIVE, &cam->flags); 1689 cam->next_buf = frame; 1690 cam->buf_seq[frame] = cam->sequence++; 1691 cam->frame_state.frames++; 1692 /* 1693 * "This should never happen" 1694 */ 1695 if (cam->state != S_STREAMING) 1696 return; 1697 /* 1698 * Process the frame and set up the next one. 1699 */ 1700 cam->frame_complete(cam, frame); 1701 } 1702 1703 1704 /* 1705 * The interrupt handler; this needs to be called from the 1706 * platform irq handler with the lock held. 1707 */ 1708 int mccic_irq(struct mcam_camera *cam, unsigned int irqs) 1709 { 1710 unsigned int frame, handled = 0; 1711 1712 mcam_reg_write(cam, REG_IRQSTAT, FRAMEIRQS); /* Clear'em all */ 1713 /* 1714 * Handle any frame completions. There really should 1715 * not be more than one of these, or we have fallen 1716 * far behind. 
1717 * 1718 * When running in S/G mode, the frame number lacks any 1719 * real meaning - there's only one descriptor array - but 1720 * the controller still picks a different one to signal 1721 * each time. 1722 */ 1723 for (frame = 0; frame < cam->nbufs; frame++) 1724 if (irqs & (IRQ_EOF0 << frame) && 1725 test_bit(CF_FRAME_SOF0 + frame, &cam->flags)) { 1726 mcam_frame_complete(cam, frame); 1727 handled = 1; 1728 clear_bit(CF_FRAME_SOF0 + frame, &cam->flags); 1729 if (cam->buffer_mode == B_DMA_sg) 1730 break; 1731 } 1732 /* 1733 * If a frame starts, note that we have DMA active. This 1734 * code assumes that we won't get multiple frame interrupts 1735 * at once; may want to rethink that. 1736 */ 1737 for (frame = 0; frame < cam->nbufs; frame++) { 1738 if (irqs & (IRQ_SOF0 << frame)) { 1739 set_bit(CF_FRAME_SOF0 + frame, &cam->flags); 1740 handled = IRQ_HANDLED; 1741 } 1742 } 1743 1744 if (handled == IRQ_HANDLED) { 1745 set_bit(CF_DMA_ACTIVE, &cam->flags); 1746 if (cam->buffer_mode == B_DMA_sg) 1747 mcam_ctlr_stop(cam); 1748 } 1749 return handled; 1750 } 1751 EXPORT_SYMBOL_GPL(mccic_irq); 1752 1753 /* ---------------------------------------------------------------------- */ 1754 /* 1755 * Registration and such. 
 */

/*
 * The async sensor subdev has shown up: initialize the camera around
 * it, set up the vb2 queue and register the video device.  Only a
 * single sensor is supported.
 */
static int mccic_notify_bound(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	mutex_lock(&cam->s_mutex);
	if (cam->sensor) {
		cam_err(cam, "sensor already bound\n");
		ret = -EBUSY;
		goto out;
	}

	v4l2_set_subdev_hostdata(subdev, cam);
	cam->sensor = subdev;

	ret = mcam_cam_init(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	ret = mcam_setup_vb2(cam);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	/* Clone the template device and hook in our queue and lock */
	cam->vdev = mcam_v4l_template;
	cam->vdev.v4l2_dev = &cam->v4l2_dev;
	cam->vdev.lock = &cam->s_mutex;
	cam->vdev.queue = &cam->vb_queue;
	video_set_drvdata(&cam->vdev, cam);
	ret = video_register_device(&cam->vdev, VFL_TYPE_VIDEO, -1);
	if (ret) {
		cam->sensor = NULL;
		goto out;
	}

	cam_dbg(cam, "sensor %s bound\n", subdev->name);
out:
	mutex_unlock(&cam->s_mutex);
	return ret;
}

/* The sensor is going away; take down the video device with it. */
static void mccic_notify_unbind(struct v4l2_async_notifier *notifier,
	struct v4l2_subdev *subdev, struct v4l2_async_subdev *asd)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);

	mutex_lock(&cam->s_mutex);
	if (cam->sensor != subdev) {
		cam_err(cam, "sensor %s not bound\n", subdev->name);
		goto out;
	}

	video_unregister_device(&cam->vdev);
	cam->sensor = NULL;
	cam_dbg(cam, "sensor %s unbound\n", subdev->name);

out:
	mutex_unlock(&cam->s_mutex);
}

static int mccic_notify_complete(struct v4l2_async_notifier *notifier)
{
	struct mcam_camera *cam = notifier_to_mcam(notifier);
	int ret;

	/*
	 * Get the v4l2 setup done.
	 */
	ret = v4l2_ctrl_handler_init(&cam->ctrl_handler, 10);
	if (!ret)
		cam->v4l2_dev.ctrl_handler = &cam->ctrl_handler;

	return ret;
}

static const struct v4l2_async_notifier_operations mccic_notify_ops = {
	.bound = mccic_notify_bound,
	.unbind = mccic_notify_unbind,
	.complete = mccic_notify_complete,
};

/*
 * Core registration entry point for the platform drivers: validate
 * the buffer mode, register with V4L2, set up the async sensor
 * notifier and the sensor master clock.
 */
int mccic_register(struct mcam_camera *cam)
{
	struct clk_init_data mclk_init = { };
	int ret;

	/*
	 * Validate the requested buffer mode.
	 */
	if (buffer_mode >= 0)
		cam->buffer_mode = buffer_mode;
	if (cam->buffer_mode == B_DMA_sg &&
			cam->chip_id == MCAM_CAFE) {
		printk(KERN_ERR "marvell-cam: Cafe can't do S/G I/O, attempting vmalloc mode instead\n");
		cam->buffer_mode = B_vmalloc;
	}

	if (!mcam_buffer_mode_supported(cam->buffer_mode)) {
		printk(KERN_ERR "marvell-cam: buffer mode %d unsupported\n",
				cam->buffer_mode);
		ret = -EINVAL;
		/*
		 * NOTE(review): this path reaches the unwind code below
		 * before the notifier or v4l2 device have been registered;
		 * confirm the unregister/cleanup calls tolerate that.
		 */
		goto out;
	}

	/*
	 * Register with V4L
	 */
	ret = v4l2_device_register(cam->dev, &cam->v4l2_dev);
	if (ret)
		goto out;

	mutex_init(&cam->s_mutex);
	cam->state = S_NOTREADY;
	mcam_set_config_needed(cam, 1);
	cam->pix_format = mcam_def_pix_format;
	cam->mbus_code = mcam_def_mbus_code;

	cam->notifier.ops = &mccic_notify_ops;
	ret = v4l2_async_nf_register(&cam->v4l2_dev, &cam->notifier);
	if (ret < 0) {
		cam_warn(cam, "failed to register a sensor notifier");
		goto out;
	}

	/*
	 * Register sensor master clock.
	 */
	mclk_init.parent_names = NULL;
	mclk_init.num_parents = 0;
	mclk_init.ops = &mclk_ops;
	mclk_init.name = "mclk";

	/* The device tree may override the default clock name */
	of_property_read_string(cam->dev->of_node, "clock-output-names",
							&mclk_init.name);

	cam->mclk_hw.init = &mclk_init;

	cam->mclk = devm_clk_register(cam->dev, &cam->mclk_hw);
	if (IS_ERR(cam->mclk)) {
		ret = PTR_ERR(cam->mclk);
		dev_err(cam->dev, "can't register clock\n");
		goto out;
	}

	/*
	 * If so requested, try to get our DMA buffers now.
	 */
	if (cam->buffer_mode == B_vmalloc && !alloc_bufs_at_read) {
		if (mcam_alloc_dma_bufs(cam, 1))
			cam_warn(cam, "Unable to alloc DMA buffers at load will try again later.");
	}

	return 0;

out:
	v4l2_async_nf_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
	v4l2_async_nf_cleanup(&cam->notifier);
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_register);

void mccic_shutdown(struct mcam_camera *cam)
{
	/*
	 * If we have no users (and we really, really should have no
	 * users) the device will already be powered down.  Trying to
	 * take it down again will wedge the machine, which is frowned
	 * upon.
	 */
	if (!list_empty(&cam->vdev.fh_list)) {
		cam_warn(cam, "Removing a device with users!\n");
		sensor_call(cam, core, s_power, 0);
	}
	if (cam->buffer_mode == B_vmalloc)
		mcam_free_dma_bufs(cam);
	v4l2_ctrl_handler_free(&cam->ctrl_handler);
	v4l2_async_nf_unregister(&cam->notifier);
	v4l2_device_unregister(&cam->v4l2_dev);
	v4l2_async_nf_cleanup(&cam->notifier);
}
EXPORT_SYMBOL_GPL(mccic_shutdown);

/*
 * Power management
 */
void mccic_suspend(struct mcam_camera *cam)
{
	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		enum mcam_state cstate = cam->state;

		/*
		 * Stop DMA and power down, but preserve cam->state
		 * (presumably mcam_ctlr_stop_dma() changes it) so that
		 * resume knows whether we were streaming.
		 */
		mcam_ctlr_stop_dma(cam);
		sensor_call(cam, core, s_power, 0);
		cam->state = cstate;
	}
	mutex_unlock(&cam->s_mutex);
}
EXPORT_SYMBOL_GPL(mccic_suspend);

int mccic_resume(struct mcam_camera *cam)
{
	int ret = 0;

	mutex_lock(&cam->s_mutex);
	if (!list_empty(&cam->vdev.fh_list)) {
		/* Device is open: power the sensor back up and reset */
		ret = sensor_call(cam, core, s_power, 1);
		if (ret) {
			mutex_unlock(&cam->s_mutex);
			return ret;
		}
		__mcam_cam_reset(cam);
	} else {
		sensor_call(cam, core, s_power, 0);
	}
	mutex_unlock(&cam->s_mutex);

	set_bit(CF_CONFIG_NEEDED, &cam->flags);
	if (cam->state == S_STREAMING) {
		/*
		 * If there was a buffer in the DMA engine at suspend
		 * time, put it back on the queue or we'll forget about it.
		 */
		if (cam->buffer_mode == B_DMA_sg && cam->vb_bufs[0])
			list_add(&cam->vb_bufs[0]->queue, &cam->buffers);
		ret = mcam_read_setup(cam);
	}
	return ret;
}
EXPORT_SYMBOL_GPL(mccic_resume);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Jonathan Corbet <corbet@lwn.net>");