/*
 * TI OMAP4 ISS V4L2 Driver - Generic video node
 *
 * Copyright (C) 2012 Texas Instruments, Inc.
 *
 * Author: Sergio Aguirre <sergio.a.aguirre@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <asm/cacheflush.h>
#include <linux/clk.h>
#include <linux/mm.h>
#include <linux/pagemap.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>
#include <linux/module.h>

#include <media/v4l2-dev.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-mc.h>

#include "iss_video.h"
#include "iss.h"

/* -----------------------------------------------------------------------------
 * Helper functions
 */

/*
 * Table of supported media bus code <-> V4L2 pixel format mappings.
 * Initializer order follows struct iss_format_info (NOTE(review): field
 * names assumed from the initializers - confirm against iss_video.h):
 * code, truncated, uncompressed, flavor, pixelformat, bpp, description.
 * The last entry (NV12) doubles as the fallback format in the lookup
 * loops below.
 */
static struct iss_format_info formats[] = {
	{ MEDIA_BUS_FMT_Y8_1X8, MEDIA_BUS_FMT_Y8_1X8,
	  MEDIA_BUS_FMT_Y8_1X8, MEDIA_BUS_FMT_Y8_1X8,
	  V4L2_PIX_FMT_GREY, 8, "Greyscale 8 bpp", },
	{ MEDIA_BUS_FMT_Y10_1X10, MEDIA_BUS_FMT_Y10_1X10,
	  MEDIA_BUS_FMT_Y10_1X10, MEDIA_BUS_FMT_Y8_1X8,
	  V4L2_PIX_FMT_Y10, 10, "Greyscale 10 bpp", },
	{ MEDIA_BUS_FMT_Y12_1X12, MEDIA_BUS_FMT_Y10_1X10,
	  MEDIA_BUS_FMT_Y12_1X12, MEDIA_BUS_FMT_Y8_1X8,
	  V4L2_PIX_FMT_Y12, 12, "Greyscale 12 bpp", },
	{ MEDIA_BUS_FMT_SBGGR8_1X8, MEDIA_BUS_FMT_SBGGR8_1X8,
	  MEDIA_BUS_FMT_SBGGR8_1X8, MEDIA_BUS_FMT_SBGGR8_1X8,
	  V4L2_PIX_FMT_SBGGR8, 8, "BGGR Bayer 8 bpp", },
	{ MEDIA_BUS_FMT_SGBRG8_1X8, MEDIA_BUS_FMT_SGBRG8_1X8,
	  MEDIA_BUS_FMT_SGBRG8_1X8, MEDIA_BUS_FMT_SGBRG8_1X8,
	  V4L2_PIX_FMT_SGBRG8, 8, "GBRG Bayer 8 bpp", },
	{ MEDIA_BUS_FMT_SGRBG8_1X8, MEDIA_BUS_FMT_SGRBG8_1X8,
	  MEDIA_BUS_FMT_SGRBG8_1X8, MEDIA_BUS_FMT_SGRBG8_1X8,
	  V4L2_PIX_FMT_SGRBG8, 8, "GRBG Bayer 8 bpp", },
	{ MEDIA_BUS_FMT_SRGGB8_1X8, MEDIA_BUS_FMT_SRGGB8_1X8,
	  MEDIA_BUS_FMT_SRGGB8_1X8, MEDIA_BUS_FMT_SRGGB8_1X8,
	  V4L2_PIX_FMT_SRGGB8, 8, "RGGB Bayer 8 bpp", },
	{ MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8, MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8,
	  MEDIA_BUS_FMT_SGRBG10_1X10, 0,
	  V4L2_PIX_FMT_SGRBG10DPCM8, 8, "GRBG Bayer 10 bpp DPCM8", },
	{ MEDIA_BUS_FMT_SBGGR10_1X10, MEDIA_BUS_FMT_SBGGR10_1X10,
	  MEDIA_BUS_FMT_SBGGR10_1X10, MEDIA_BUS_FMT_SBGGR8_1X8,
	  V4L2_PIX_FMT_SBGGR10, 10, "BGGR Bayer 10 bpp", },
	{ MEDIA_BUS_FMT_SGBRG10_1X10, MEDIA_BUS_FMT_SGBRG10_1X10,
	  MEDIA_BUS_FMT_SGBRG10_1X10, MEDIA_BUS_FMT_SGBRG8_1X8,
	  V4L2_PIX_FMT_SGBRG10, 10, "GBRG Bayer 10 bpp", },
	{ MEDIA_BUS_FMT_SGRBG10_1X10, MEDIA_BUS_FMT_SGRBG10_1X10,
	  MEDIA_BUS_FMT_SGRBG10_1X10, MEDIA_BUS_FMT_SGRBG8_1X8,
	  V4L2_PIX_FMT_SGRBG10, 10, "GRBG Bayer 10 bpp", },
	{ MEDIA_BUS_FMT_SRGGB10_1X10, MEDIA_BUS_FMT_SRGGB10_1X10,
	  MEDIA_BUS_FMT_SRGGB10_1X10, MEDIA_BUS_FMT_SRGGB8_1X8,
	  V4L2_PIX_FMT_SRGGB10, 10, "RGGB Bayer 10 bpp", },
	{ MEDIA_BUS_FMT_SBGGR12_1X12, MEDIA_BUS_FMT_SBGGR10_1X10,
	  MEDIA_BUS_FMT_SBGGR12_1X12, MEDIA_BUS_FMT_SBGGR8_1X8,
	  V4L2_PIX_FMT_SBGGR12, 12, "BGGR Bayer 12 bpp", },
	{ MEDIA_BUS_FMT_SGBRG12_1X12, MEDIA_BUS_FMT_SGBRG10_1X10,
	  MEDIA_BUS_FMT_SGBRG12_1X12, MEDIA_BUS_FMT_SGBRG8_1X8,
	  V4L2_PIX_FMT_SGBRG12, 12, "GBRG Bayer 12 bpp", },
	{ MEDIA_BUS_FMT_SGRBG12_1X12, MEDIA_BUS_FMT_SGRBG10_1X10,
	  MEDIA_BUS_FMT_SGRBG12_1X12, MEDIA_BUS_FMT_SGRBG8_1X8,
	  V4L2_PIX_FMT_SGRBG12, 12, "GRBG Bayer 12 bpp", },
	{ MEDIA_BUS_FMT_SRGGB12_1X12, MEDIA_BUS_FMT_SRGGB10_1X10,
	  MEDIA_BUS_FMT_SRGGB12_1X12, MEDIA_BUS_FMT_SRGGB8_1X8,
	  V4L2_PIX_FMT_SRGGB12, 12, "RGGB Bayer 12 bpp", },
	{ MEDIA_BUS_FMT_UYVY8_1X16, MEDIA_BUS_FMT_UYVY8_1X16,
	  MEDIA_BUS_FMT_UYVY8_1X16, 0,
	  V4L2_PIX_FMT_UYVY, 16, "YUV 4:2:2 (UYVY)", },
	{ MEDIA_BUS_FMT_YUYV8_1X16, MEDIA_BUS_FMT_YUYV8_1X16,
	  MEDIA_BUS_FMT_YUYV8_1X16, 0,
	  V4L2_PIX_FMT_YUYV, 16, "YUV 4:2:2 (YUYV)", },
	{ MEDIA_BUS_FMT_YUYV8_1_5X8, MEDIA_BUS_FMT_YUYV8_1_5X8,
	  MEDIA_BUS_FMT_YUYV8_1_5X8, 0,
	  V4L2_PIX_FMT_NV12, 8, "YUV 4:2:0 (NV12)", },
};

/*
 * omap4iss_video_format_info - Look up format information for a bus code
 * @code: media bus format code (MEDIA_BUS_FMT_*)
 *
 * Return the formats[] entry whose code matches @code, or NULL if the code
 * is not supported by the driver.
 */
const struct iss_format_info *
omap4iss_video_format_info(u32 code)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(formats); ++i) {
		if (formats[i].code == code)
			return &formats[i];
	}

	return NULL;
}

/*
 * iss_video_mbus_to_pix - Convert v4l2_mbus_framefmt to v4l2_pix_format
 * @video: ISS video instance
 * @mbus: v4l2_mbus_framefmt format (input)
 * @pix: v4l2_pix_format format (output)
 *
 * Fill the output pix structure with information from the input mbus format.
 * The bytesperline and sizeimage fields are computed from the requested bytes
 * per line value in the pix format and information from the video instance.
 *
 * Return the number of padding bytes at end of line.
 */
static unsigned int iss_video_mbus_to_pix(const struct iss_video *video,
					  const struct v4l2_mbus_framefmt *mbus,
					  struct v4l2_pix_format *pix)
{
	/* Read the caller's requested bytesperline before zeroing *pix. */
	unsigned int bpl = pix->bytesperline;
	unsigned int min_bpl;
	unsigned int i;

	memset(pix, 0, sizeof(*pix));
	pix->width = mbus->width;
	pix->height = mbus->height;

	/*
	 * Skip the last format in the loop so that it will be selected if no
	 * match is found.
	 */
	for (i = 0; i < ARRAY_SIZE(formats) - 1; ++i) {
		if (formats[i].code == mbus->code)
			break;
	}

	/* Minimum line size: width times the bpp rounded up to a byte. */
	min_bpl = pix->width * ALIGN(formats[i].bpp, 8) / 8;

	/*
	 * Clamp the requested bytes per line value. If the maximum bytes per
	 * line value is zero, the module doesn't support user configurable line
	 * sizes. Override the requested value with the minimum in that case.
	 */
	if (video->bpl_max)
		bpl = clamp(bpl, min_bpl, video->bpl_max);
	else
		bpl = min_bpl;

	/* Only an unpadded line may skip the hardware alignment constraint. */
	if (!video->bpl_zero_padding || bpl != min_bpl)
		bpl = ALIGN(bpl, video->bpl_alignment);

	pix->pixelformat = formats[i].pixelformat;
	pix->bytesperline = bpl;
	pix->sizeimage = pix->bytesperline * pix->height;
	pix->colorspace = mbus->colorspace;
	pix->field = mbus->field;

	/* FIXME: Special case for NV12! We should make this nicer... */
	if (pix->pixelformat == V4L2_PIX_FMT_NV12)
		pix->sizeimage += (pix->bytesperline * pix->height) / 2;

	return bpl - min_bpl;
}

/*
 * iss_video_pix_to_mbus - Convert v4l2_pix_format to v4l2_mbus_framefmt
 * @pix: v4l2_pix_format format (input)
 * @mbus: v4l2_mbus_framefmt format (output)
 *
 * Reverse of iss_video_mbus_to_pix(): fill @mbus from @pix, falling back to
 * the last formats[] entry when the pixel format is not recognized.
 */
static void iss_video_pix_to_mbus(const struct v4l2_pix_format *pix,
				  struct v4l2_mbus_framefmt *mbus)
{
	unsigned int i;

	memset(mbus, 0, sizeof(*mbus));
	mbus->width = pix->width;
	mbus->height = pix->height;

	/*
	 * Skip the last format in the loop so that it will be selected if no
	 * match is found.
	 */
	for (i = 0; i < ARRAY_SIZE(formats) - 1; ++i) {
		if (formats[i].pixelformat == pix->pixelformat)
			break;
	}

	mbus->code = formats[i].code;
	mbus->colorspace = pix->colorspace;
	mbus->field = pix->field;
}

/*
 * iss_video_remote_subdev - Return the subdev connected to the video node
 * @video: ISS video instance
 * @pad: if non-NULL, filled with the index of the connected remote pad
 *
 * Return the v4l2_subdev at the far end of the video node's pad link, or
 * NULL if the pad is not linked to a subdev entity.
 */
static struct v4l2_subdev *
iss_video_remote_subdev(struct iss_video *video, u32 *pad)
{
	struct media_pad *remote;

	remote = media_entity_remote_pad(&video->pad);

	if (!remote || !is_media_entity_v4l2_subdev(remote->entity))
		return NULL;

	if (pad)
		*pad = remote->index;

	return media_entity_to_v4l2_subdev(remote->entity);
}
/* Return a pointer to the ISS video instance at the far end of the pipeline. */
static struct iss_video *
iss_video_far_end(struct iss_video *video)
{
	struct media_graph graph;
	struct media_entity *entity = &video->video.entity;
	struct media_device *mdev = entity->graph_obj.mdev;
	struct iss_video *far_end = NULL;

	mutex_lock(&mdev->graph_mutex);

	if (media_graph_walk_init(&graph, mdev)) {
		mutex_unlock(&mdev->graph_mutex);
		return NULL;
	}

	media_graph_walk_start(&graph, entity);

	/*
	 * Walk the whole graph looking for the first video device node of the
	 * opposite type (capture vs. output) - that is the other memory end
	 * of the pipeline.
	 */
	while ((entity = media_graph_walk_next(&graph))) {
		if (entity == &video->video.entity)
			continue;

		if (!is_media_entity_v4l2_video_device(entity))
			continue;

		far_end = to_iss_video(media_entity_to_video_device(entity));
		if (far_end->type != video->type)
			break;

		/* Same type as us: not the far end, keep walking. */
		far_end = NULL;
	}

	mutex_unlock(&mdev->graph_mutex);

	media_graph_walk_cleanup(&graph);

	return far_end;
}

/*
 * __iss_video_get_format - Retrieve the active format on the remote subdev pad
 * @video: ISS video instance
 * @format: filled with the remote pad's active media bus format
 *
 * Return 0 on success or a negative error code if the video node is not
 * connected to a subdev or the get_fmt pad operation fails.
 */
static int
__iss_video_get_format(struct iss_video *video,
		       struct v4l2_mbus_framefmt *format)
{
	struct v4l2_subdev_format fmt;
	struct v4l2_subdev *subdev;
	u32 pad;
	int ret;

	subdev = iss_video_remote_subdev(video, &pad);
	if (!subdev)
		return -EINVAL;

	memset(&fmt, 0, sizeof(fmt));
	fmt.pad = pad;
	fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;

	mutex_lock(&video->mutex);
	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
	mutex_unlock(&video->mutex);

	if (ret)
		return ret;

	*format = fmt.format;
	return 0;
}

/*
 * iss_video_check_format - Verify the file handle format matches the subdev
 * @video: ISS video instance
 * @vfh: ISS file handle holding the format configured by userspace
 *
 * Convert the connected subdev's active format to a pixel format and compare
 * it field by field with the format stored in the file handle. Return the
 * line padding from the conversion (>= 0) on match, -EINVAL on mismatch, or
 * the error from the format query.
 */
static int
iss_video_check_format(struct iss_video *video, struct iss_video_fh *vfh)
{
	struct v4l2_mbus_framefmt format;
	struct v4l2_pix_format pixfmt;
	int ret;

	ret = __iss_video_get_format(video, &format);
	if (ret < 0)
		return ret;

	/* Request the minimum line size; mbus_to_pix reads this field. */
	pixfmt.bytesperline = 0;
	ret = iss_video_mbus_to_pix(video, &format, &pixfmt);

	if (vfh->format.fmt.pix.pixelformat != pixfmt.pixelformat ||
	    vfh->format.fmt.pix.height != pixfmt.height ||
	    vfh->format.fmt.pix.width != pixfmt.width ||
	    vfh->format.fmt.pix.bytesperline != pixfmt.bytesperline ||
	    vfh->format.fmt.pix.sizeimage != pixfmt.sizeimage)
		return -EINVAL;

	/* On success ret is the padding returned by iss_video_mbus_to_pix. */
	return ret;
}

/* -----------------------------------------------------------------------------
 * Video queue operations
 */

/*
 * iss_video_queue_setup - vb2 queue_setup: size planes and cap buffer count
 *
 * Single-plane only; the buffer count is limited by the capture memory pool
 * divided by the page-aligned image size.
 */
static int iss_video_queue_setup(struct vb2_queue *vq,
				 unsigned int *count, unsigned int *num_planes,
				 unsigned int sizes[],
				 struct device *alloc_devs[])
{
	struct iss_video_fh *vfh = vb2_get_drv_priv(vq);
	struct iss_video *video = vfh->video;

	/* Revisit multi-planar support for NV12 */
	*num_planes = 1;

	sizes[0] = vfh->format.fmt.pix.sizeimage;
	if (sizes[0] == 0)
		return -EINVAL;

	*count = min(*count, video->capture_mem / PAGE_ALIGN(sizes[0]));

	return 0;
}

/* vb2 buf_cleanup: drop the cached DMA address of the buffer. */
static void iss_video_buf_cleanup(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct iss_buffer *buffer = container_of(vbuf, struct iss_buffer, vb);

	if (buffer->iss_addr)
		buffer->iss_addr = 0;
}

/*
 * vb2 buf_prepare: validate the plane size and DMA alignment, then cache the
 * DMA address and set the payload. The hardware requires buffers aligned to
 * a 32-byte boundary.
 */
static int iss_video_buf_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue);
	struct iss_buffer *buffer = container_of(vbuf, struct iss_buffer, vb);
	struct iss_video *video = vfh->video;
	unsigned long size = vfh->format.fmt.pix.sizeimage;
	dma_addr_t addr;

	if (vb2_plane_size(vb, 0) < size)
		return -ENOBUFS;

	addr = vb2_dma_contig_plane_dma_addr(vb, 0);
	if (!IS_ALIGNED(addr, 32)) {
		dev_dbg(video->iss->dev,
			"Buffer address must be aligned to 32 bytes boundary.\n");
		return -EINVAL;
	}

	vb2_set_plane_payload(vb, 0, size);
	buffer->iss_addr = addr;
	return 0;
}

/*
 * vb2 buf_queue: add the buffer to the DMA queue and, if the queue was empty,
 * hand it to the hardware and kick the pipeline in single-shot mode when it
 * becomes ready.
 */
static void iss_video_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct iss_video_fh *vfh = vb2_get_drv_priv(vb->vb2_queue);
	struct iss_video *video = vfh->video;
	struct iss_buffer *buffer = container_of(vbuf, struct iss_buffer, vb);
	struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
	unsigned long flags;
	bool empty;

	spin_lock_irqsave(&video->qlock, flags);

	/*
	 * Mark the buffer is faulty and give it back to the queue immediately
	 * if the video node has registered an error. vb2 will perform the same
	 * check when preparing the buffer, but that is inherently racy, so we
	 * need to handle the race condition with an authoritative check here.
	 */
	if (unlikely(video->error)) {
		vb2_buffer_done(vb, VB2_BUF_STATE_ERROR);
		spin_unlock_irqrestore(&video->qlock, flags);
		return;
	}

	empty = list_empty(&video->dmaqueue);
	list_add_tail(&buffer->list, &video->dmaqueue);

	spin_unlock_irqrestore(&video->qlock, flags);

	if (empty) {
		enum iss_pipeline_state state;
		unsigned int start;

		if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
			state = ISS_PIPELINE_QUEUE_OUTPUT;
		else
			state = ISS_PIPELINE_QUEUE_INPUT;

		spin_lock_irqsave(&pipe->lock, flags);
		pipe->state |= state;
		video->ops->queue(video, buffer);
		video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_QUEUED;

		start = iss_pipeline_ready(pipe);
		if (start)
			pipe->state |= ISS_PIPELINE_STREAM;
		spin_unlock_irqrestore(&pipe->lock, flags);

		/* Start outside the spinlock; set_stream may not be atomic. */
		if (start)
			omap4iss_pipeline_set_stream(pipe,
						ISS_PIPELINE_STREAM_SINGLESHOT);
	}
}

static const struct vb2_ops iss_video_vb2ops = {
	.queue_setup	= iss_video_queue_setup,
	.buf_prepare	= iss_video_buf_prepare,
	.buf_queue	= iss_video_buf_queue,
	.buf_cleanup	= iss_video_buf_cleanup,
};
/*
 * omap4iss_video_buffer_next - Complete the current buffer and return the next
 * @video: ISS video object
 *
 * Remove the current video buffer from the DMA queue and fill its timestamp,
 * field count and state fields before waking up its completion handler.
 *
 * For capture video nodes, the buffer state is set to VB2_BUF_STATE_DONE if no
 * error has been flagged in the pipeline, or to VB2_BUF_STATE_ERROR otherwise.
 *
 * The DMA queue is expected to contain at least one buffer.
 *
 * Return a pointer to the next buffer in the DMA queue, or NULL if the queue is
 * empty.
 */
struct iss_buffer *omap4iss_video_buffer_next(struct iss_video *video)
{
	struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
	enum iss_pipeline_state state;
	struct iss_buffer *buf;
	unsigned long flags;

	spin_lock_irqsave(&video->qlock, flags);
	if (WARN_ON(list_empty(&video->dmaqueue))) {
		spin_unlock_irqrestore(&video->qlock, flags);
		return NULL;
	}

	buf = list_first_entry(&video->dmaqueue, struct iss_buffer,
			       list);
	list_del(&buf->list);
	spin_unlock_irqrestore(&video->qlock, flags);

	buf->vb.vb2_buf.timestamp = ktime_get_ns();

	/*
	 * Do frame number propagation only if this is the output video node.
	 * Frame number either comes from the CSI receivers or it gets
	 * incremented here if H3A is not active.
	 * Note: There is no guarantee that the output buffer will finish
	 * first, so the input number might lag behind by 1 in some cases.
	 */
	if (video == pipe->output && !pipe->do_propagation)
		buf->vb.sequence =
			atomic_inc_return(&pipe->frame_number);
	else
		buf->vb.sequence = atomic_read(&pipe->frame_number);

	vb2_buffer_done(&buf->vb.vb2_buf, pipe->error ?
			VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
	/* The error is consumed by the buffer just completed. */
	pipe->error = false;

	spin_lock_irqsave(&video->qlock, flags);
	if (list_empty(&video->dmaqueue)) {
		spin_unlock_irqrestore(&video->qlock, flags);
		/* No more buffers: mark the queue idle and flag underrun. */
		if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
			state = ISS_PIPELINE_QUEUE_OUTPUT
			      | ISS_PIPELINE_STREAM;
		else
			state = ISS_PIPELINE_QUEUE_INPUT
			      | ISS_PIPELINE_STREAM;

		spin_lock_irqsave(&pipe->lock, flags);
		pipe->state &= ~state;
		if (video->pipe.stream_state == ISS_PIPELINE_STREAM_CONTINUOUS)
			video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN;
		spin_unlock_irqrestore(&pipe->lock, flags);
		return NULL;
	}

	/*
	 * In memory-to-memory mode (capture node with a memory input), each
	 * frame is a single shot: clear the stream bit until re-armed.
	 */
	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE && pipe->input) {
		spin_lock(&pipe->lock);
		pipe->state &= ~ISS_PIPELINE_STREAM;
		spin_unlock(&pipe->lock);
	}

	buf = list_first_entry(&video->dmaqueue, struct iss_buffer,
			       list);
	spin_unlock_irqrestore(&video->qlock, flags);
	/* Marked active directly, without a vb2 helper. */
	buf->vb.vb2_buf.state = VB2_BUF_STATE_ACTIVE;
	return buf;
}
/*
 * omap4iss_video_cancel_stream - Cancel stream on a video node
 * @video: ISS video object
 *
 * Cancelling a stream mark all buffers on the video node as erroneous and makes
 * sure no new buffer can be queued.
 */
void omap4iss_video_cancel_stream(struct iss_video *video)
{
	unsigned long flags;

	spin_lock_irqsave(&video->qlock, flags);

	while (!list_empty(&video->dmaqueue)) {
		struct iss_buffer *buf;

		buf = list_first_entry(&video->dmaqueue, struct iss_buffer,
				       list);
		list_del(&buf->list);
		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
	}

	vb2_queue_error(video->queue);
	/* iss_video_buf_queue() checks this flag under the same lock. */
	video->error = true;

	spin_unlock_irqrestore(&video->qlock, flags);
}

/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */

/* VIDIOC_QUERYCAP: report driver name and per-node capture/output caps. */
static int
iss_video_querycap(struct file *file, void *fh, struct v4l2_capability *cap)
{
	struct iss_video *video = video_drvdata(file);

	strlcpy(cap->driver, ISS_VIDEO_DRIVER_NAME, sizeof(cap->driver));
	strlcpy(cap->card, video->video.name, sizeof(cap->card));
	strlcpy(cap->bus_info, "media", sizeof(cap->bus_info));

	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
		cap->device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING;
	else
		cap->device_caps = V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_STREAMING;

	cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
			  | V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT;

	return 0;
}

/*
 * VIDIOC_ENUM_FMT: enumerate the pixel formats compatible with the media bus
 * code currently active on the connected subdev. Several formats[] entries
 * may share one bus code, hence the index walk.
 */
static int
iss_video_enum_format(struct file *file, void *fh, struct v4l2_fmtdesc *f)
{
	struct iss_video *video = video_drvdata(file);
	struct v4l2_mbus_framefmt format;
	unsigned int index = f->index;
	unsigned int i;
	int ret;

	if (f->type != video->type)
		return -EINVAL;

	ret = __iss_video_get_format(video, &format);
	if (ret < 0)
		return ret;

	for (i = 0; i < ARRAY_SIZE(formats); ++i) {
		const struct iss_format_info *info = &formats[i];

		if (format.code != info->code)
			continue;

		if (index == 0) {
			f->pixelformat = info->pixelformat;
			strlcpy(f->description, info->description,
				sizeof(f->description));
			return 0;
		}

		index--;
	}

	return -EINVAL;
}

/* VIDIOC_G_FMT: return the format stored in the file handle. */
static int
iss_video_get_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);

	if (format->type != video->type)
		return -EINVAL;

	mutex_lock(&video->mutex);
	*format = vfh->format;
	mutex_unlock(&video->mutex);

	return 0;
}

/* VIDIOC_S_FMT: normalize the requested format and store it per-handle. */
static int
iss_video_set_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);
	struct v4l2_mbus_framefmt fmt;

	if (format->type != video->type)
		return -EINVAL;

	mutex_lock(&video->mutex);

	/*
	 * Fill the bytesperline and sizeimage fields by converting to media bus
	 * format and back to pixel format.
	 */
	iss_video_pix_to_mbus(&format->fmt.pix, &fmt);
	iss_video_mbus_to_pix(video, &fmt, &format->fmt.pix);

	vfh->format = *format;

	mutex_unlock(&video->mutex);
	return 0;
}

/*
 * VIDIOC_TRY_FMT: query the remote subdev's active format and convert it back
 * to a pixel format for the caller.
 * NOTE(review): fmt is not memset() here unlike in __iss_video_get_format();
 * reserved fields of the v4l2_subdev_format are left uninitialized - verify
 * this is harmless for the subdevs involved.
 */
static int
iss_video_try_format(struct file *file, void *fh, struct v4l2_format *format)
{
	struct iss_video *video = video_drvdata(file);
	struct v4l2_subdev_format fmt;
	struct v4l2_subdev *subdev;
	u32 pad;
	int ret;

	if (format->type != video->type)
		return -EINVAL;

	subdev = iss_video_remote_subdev(video, &pad);
	if (!subdev)
		return -EINVAL;

	iss_video_pix_to_mbus(&format->fmt.pix, &fmt.format);

	fmt.pad = pad;
	fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
	if (ret)
		return ret;

	iss_video_mbus_to_pix(video, &fmt.format, &format->fmt.pix);
	return 0;
}

/*
 * VIDIOC_G_SELECTION: forward the selection query to the remote subdev,
 * falling back to the subdev's full frame size when get_selection is not
 * implemented. Crop targets are only valid on capture nodes, compose targets
 * only on output nodes.
 */
static int
iss_video_get_selection(struct file *file, void *fh, struct v4l2_selection *sel)
{
	struct iss_video *video = video_drvdata(file);
	struct v4l2_subdev_format format;
	struct v4l2_subdev *subdev;
	struct v4l2_subdev_selection sdsel = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.target = sel->target,
	};
	u32 pad;
	int ret;

	switch (sel->target) {
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		if (video->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
			return -EINVAL;
		break;
	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
		if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
			return -EINVAL;
		break;
	default:
		return -EINVAL;
	}
	subdev = iss_video_remote_subdev(video, &pad);
	if (subdev == NULL)
		return -EINVAL;

	/*
	 * Try the get selection operation first and fallback to get format if
	 * not implemented.
	 */
	sdsel.pad = pad;
	ret = v4l2_subdev_call(subdev, pad, get_selection, NULL, &sdsel);
	if (!ret)
		sel->r = sdsel.r;
	if (ret != -ENOIOCTLCMD)
		return ret;

	format.pad = pad;
	format.which = V4L2_SUBDEV_FORMAT_ACTIVE;
	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &format);
	if (ret < 0)
		return ret == -ENOIOCTLCMD ? -ENOTTY : ret;

	sel->r.left = 0;
	sel->r.top = 0;
	sel->r.width = format.format.width;
	sel->r.height = format.format.height;

	return 0;
}

/*
 * VIDIOC_S_SELECTION: forward the selection to the remote subdev. Same
 * crop/compose vs. capture/output restrictions as iss_video_get_selection().
 */
static int
iss_video_set_selection(struct file *file, void *fh, struct v4l2_selection *sel)
{
	struct iss_video *video = video_drvdata(file);
	struct v4l2_subdev *subdev;
	struct v4l2_subdev_selection sdsel = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.target = sel->target,
		.flags = sel->flags,
		.r = sel->r,
	};
	u32 pad;
	int ret;

	switch (sel->target) {
	case V4L2_SEL_TGT_CROP:
		if (video->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
			return -EINVAL;
		break;
	case V4L2_SEL_TGT_COMPOSE:
		if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
			return -EINVAL;
		break;
	default:
		return -EINVAL;
	}
	subdev = iss_video_remote_subdev(video, &pad);
	if (subdev == NULL)
		return -EINVAL;

	sdsel.pad = pad;
	mutex_lock(&video->mutex);
	ret = v4l2_subdev_call(subdev, pad, set_selection, NULL, &sdsel);
	mutex_unlock(&video->mutex);
	if (!ret)
		sel->r = sdsel.r;

	return ret == -ENOIOCTLCMD ? -ENOTTY : ret;
}

/* VIDIOC_G_PARM: report the per-handle time-per-frame (output nodes only). */
static int
iss_video_get_param(struct file *file, void *fh, struct v4l2_streamparm *a)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);

	if (video->type != V4L2_BUF_TYPE_VIDEO_OUTPUT ||
	    video->type != a->type)
		return -EINVAL;

	memset(a, 0, sizeof(*a));
	a->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	a->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
	a->parm.output.timeperframe = vfh->timeperframe;

	return 0;
}

/* VIDIOC_S_PARM: store the time-per-frame (output nodes only). */
static int
iss_video_set_param(struct file *file, void *fh, struct v4l2_streamparm *a)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);

	if (video->type != V4L2_BUF_TYPE_VIDEO_OUTPUT ||
	    video->type != a->type)
		return -EINVAL;

	/* Avoid a zero denominator; treat it as 1. */
	if (a->parm.output.timeperframe.denominator == 0)
		a->parm.output.timeperframe.denominator = 1;

	vfh->timeperframe = a->parm.output.timeperframe;

	return 0;
}

/* The following ioctls are thin wrappers around the vb2 per-handle queue. */

static int
iss_video_reqbufs(struct file *file, void *fh, struct v4l2_requestbuffers *rb)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);

	return vb2_reqbufs(&vfh->queue, rb);
}

static int
iss_video_querybuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);

	return vb2_querybuf(&vfh->queue, b);
}

static int
iss_video_qbuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);

	return vb2_qbuf(&vfh->queue, b);
}

static int
iss_video_expbuf(struct file *file, void *fh, struct v4l2_exportbuffer *e)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);

	return vb2_expbuf(&vfh->queue, e);
}

static int
iss_video_dqbuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);

	return vb2_dqbuf(&vfh->queue, b, file->f_flags & O_NONBLOCK);
}

/*
 * Stream management
 *
 * Every ISS pipeline has a single input and a single output. The input can be
 * either a sensor or a video node. The output is always a video node.
 *
 * As every pipeline has an output video node, the ISS video objects at the
 * pipeline output stores the pipeline state. It tracks the streaming state of
 * both the input and output, as well as the availability of buffers.
 *
 * In sensor-to-memory mode, frames are always available at the pipeline input.
 * Starting the sensor usually requires I2C transfers and must be done in
 * interruptible context. The pipeline is started and stopped synchronously
 * to the stream on/off commands. All modules in the pipeline will get their
 * subdev set stream handler called. The module at the end of the pipeline must
 * delay starting the hardware until buffers are available at its output.
 *
 * In memory-to-memory mode, starting/stopping the stream requires
 * synchronization between the input and output. ISS modules can't be stopped
 * in the middle of a frame, and at least some of the modules seem to become
 * busy as soon as they're started, even if they don't receive a frame start
 * event. For that reason frames need to be processed in single-shot mode. The
 * driver needs to wait until a frame is completely processed and written to
 * memory before restarting the pipeline for the next frame. Pipelined
 * processing might be possible but requires more testing.
 *
 * Stream start must be delayed until buffers are available at both the input
 * and output. The pipeline must be started in the videobuf queue callback with
 * the buffers queue spinlock held. The modules subdev set stream operation must
 * not sleep.
 */
/*
 * VIDIOC_STREAMON: validate the pipeline, record its input/output ends and
 * start streaming. In sensor-to-memory mode the pipeline is started here; in
 * memory-to-memory mode it is started later from the buf_queue callback.
 */
static int
iss_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);
	struct media_graph graph;
	struct media_entity *entity = &video->video.entity;
	enum iss_pipeline_state state;
	struct iss_pipeline *pipe;
	struct iss_video *far_end;
	unsigned long flags;
	int ret;

	if (type != video->type)
		return -EINVAL;

	mutex_lock(&video->stream_lock);

	/*
	 * Start streaming on the pipeline. No link touching an entity in the
	 * pipeline can be activated or deactivated once streaming is started.
	 */
	pipe = entity->pipe
	     ? to_iss_pipeline(entity) : &video->pipe;
	pipe->external = NULL;
	pipe->external_rate = 0;
	pipe->external_bpp = 0;

	ret = media_entity_enum_init(&pipe->ent_enum, entity->graph_obj.mdev);
	if (ret)
		goto err_graph_walk_init;

	ret = media_graph_walk_init(&graph, entity->graph_obj.mdev);
	if (ret)
		goto err_graph_walk_init;

	if (video->iss->pdata->set_constraints)
		video->iss->pdata->set_constraints(video->iss, true);

	ret = media_pipeline_start(entity, &pipe->pipe);
	if (ret < 0)
		goto err_media_pipeline_start;

	/* Record every entity that belongs to this pipeline. */
	media_graph_walk_start(&graph, entity);
	while ((entity = media_graph_walk_next(&graph)))
		media_entity_enum_set(&pipe->ent_enum, entity);

	/*
	 * Verify that the currently configured format matches the output of
	 * the connected subdev.
	 */
	ret = iss_video_check_format(video, vfh);
	if (ret < 0)
		goto err_iss_video_check_format;

	/* On success check_format returns the end-of-line padding. */
	video->bpl_padding = ret;
	video->bpl_value = vfh->format.fmt.pix.bytesperline;

	/*
	 * Find the ISS video node connected at the far end of the pipeline and
	 * update the pipeline.
	 */
	far_end = iss_video_far_end(video);

	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		state = ISS_PIPELINE_STREAM_OUTPUT | ISS_PIPELINE_IDLE_OUTPUT;
		pipe->input = far_end;
		pipe->output = video;
	} else {
		/* An output video node requires a capture node far end. */
		if (!far_end) {
			ret = -EPIPE;
			goto err_iss_video_check_format;
		}

		state = ISS_PIPELINE_STREAM_INPUT | ISS_PIPELINE_IDLE_INPUT;
		pipe->input = video;
		pipe->output = far_end;
	}

	spin_lock_irqsave(&pipe->lock, flags);
	pipe->state &= ~ISS_PIPELINE_STREAM;
	pipe->state |= state;
	spin_unlock_irqrestore(&pipe->lock, flags);

	/*
	 * Set the maximum time per frame as the value requested by userspace.
	 * This is a soft limit that can be overridden if the hardware doesn't
	 * support the request limit.
	 */
	if (video->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
		pipe->max_timeperframe = vfh->timeperframe;

	video->queue = &vfh->queue;
	INIT_LIST_HEAD(&video->dmaqueue);
	video->error = false;
	atomic_set(&pipe->frame_number, -1);

	ret = vb2_streamon(&vfh->queue, type);
	if (ret < 0)
		goto err_iss_video_check_format;

	/*
	 * In sensor-to-memory mode, the stream can be started synchronously
	 * to the stream on command. In memory-to-memory mode, it will be
	 * started when buffers are queued on both the input and output.
	 */
	if (!pipe->input) {
		unsigned long flags;

		ret = omap4iss_pipeline_set_stream(pipe,
					ISS_PIPELINE_STREAM_CONTINUOUS);
		if (ret < 0)
			goto err_omap4iss_set_stream;
		spin_lock_irqsave(&video->qlock, flags);
		if (list_empty(&video->dmaqueue))
			video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN;
		spin_unlock_irqrestore(&video->qlock, flags);
	}

	media_graph_walk_cleanup(&graph);

	mutex_unlock(&video->stream_lock);

	return 0;

err_omap4iss_set_stream:
	vb2_streamoff(&vfh->queue, type);
err_iss_video_check_format:
	media_pipeline_stop(&video->video.entity);
err_media_pipeline_start:
	if (video->iss->pdata->set_constraints)
		video->iss->pdata->set_constraints(video->iss, false);
	video->queue = NULL;

	media_graph_walk_cleanup(&graph);

err_graph_walk_init:
	/*
	 * NOTE(review): this label is also reached when
	 * media_entity_enum_init() itself failed - confirm
	 * media_entity_enum_cleanup() is safe on a failed init.
	 */
	media_entity_enum_cleanup(&pipe->ent_enum);

	mutex_unlock(&video->stream_lock);

	return ret;
}

/*
 * VIDIOC_STREAMOFF: stop the pipeline, release the vb2 queue and undo the
 * pipeline bookkeeping done in iss_video_streamon(). A no-op if the handle
 * is not streaming.
 */
static int
iss_video_streamoff(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);
	struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
	enum iss_pipeline_state state;
	unsigned long flags;

	if (type != video->type)
		return -EINVAL;

	mutex_lock(&video->stream_lock);

	if (!vb2_is_streaming(&vfh->queue))
		goto done;

	/* Update the pipeline state. */
	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
		state = ISS_PIPELINE_STREAM_OUTPUT
		      | ISS_PIPELINE_QUEUE_OUTPUT;
	else
		state = ISS_PIPELINE_STREAM_INPUT
		      | ISS_PIPELINE_QUEUE_INPUT;

	spin_lock_irqsave(&pipe->lock, flags);
	pipe->state &= ~state;
	spin_unlock_irqrestore(&pipe->lock, flags);

	/* Stop the stream. */
	omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_STOPPED);
	vb2_streamoff(&vfh->queue, type);
	video->queue = NULL;

	media_entity_enum_cleanup(&pipe->ent_enum);

	if (video->iss->pdata->set_constraints)
		video->iss->pdata->set_constraints(video->iss, false);
	media_pipeline_stop(&video->video.entity);

done:
	mutex_unlock(&video->stream_lock);
	return 0;
}

/* VIDIOC_ENUMINPUT: a single fixed "camera" input. */
static int
iss_video_enum_input(struct file *file, void *fh, struct v4l2_input *input)
{
	if (input->index > 0)
		return -EINVAL;

	strlcpy(input->name, "camera", sizeof(input->name));
	input->type = V4L2_INPUT_TYPE_CAMERA;

	return 0;
}

/* VIDIOC_G_INPUT: only input 0 exists. */
static int
iss_video_g_input(struct file *file, void *fh, unsigned int *input)
{
	*input = 0;

	return 0;
}

/* VIDIOC_S_INPUT: only input 0 is accepted. */
static int
iss_video_s_input(struct file *file, void *fh, unsigned int input)
{
	return input == 0 ? 0 : -EINVAL;
}

static const struct v4l2_ioctl_ops iss_video_ioctl_ops = {
	.vidioc_querycap		= iss_video_querycap,
	.vidioc_enum_fmt_vid_cap	= iss_video_enum_format,
	.vidioc_g_fmt_vid_cap		= iss_video_get_format,
	.vidioc_s_fmt_vid_cap		= iss_video_set_format,
	.vidioc_try_fmt_vid_cap		= iss_video_try_format,
	.vidioc_g_fmt_vid_out		= iss_video_get_format,
	.vidioc_s_fmt_vid_out		= iss_video_set_format,
	.vidioc_try_fmt_vid_out		= iss_video_try_format,
	.vidioc_g_selection		= iss_video_get_selection,
	.vidioc_s_selection		= iss_video_set_selection,
	.vidioc_g_parm			= iss_video_get_param,
	.vidioc_s_parm			= iss_video_set_param,
	.vidioc_reqbufs			= iss_video_reqbufs,
	.vidioc_querybuf		= iss_video_querybuf,
	.vidioc_qbuf			= iss_video_qbuf,
	.vidioc_expbuf			= iss_video_expbuf,
	.vidioc_dqbuf			= iss_video_dqbuf,
	.vidioc_streamon		= iss_video_streamon,
	.vidioc_streamoff		= iss_video_streamoff,
	.vidioc_enum_input		= iss_video_enum_input,
	.vidioc_g_input			= iss_video_g_input,
	.vidioc_s_input			= iss_video_s_input,
};

/* -----------------------------------------------------------------------------
 * V4L2 file operations
 */

/*
 * Open: allocate a per-handle iss_video_fh, power up the ISS and the
 * pipeline, and initialize the per-handle vb2 queue.
 */
static int iss_video_open(struct file *file)
{
	struct iss_video *video = video_drvdata(file);
	struct iss_video_fh *handle;
	struct vb2_queue *q;
	int ret = 0;

	handle = kzalloc(sizeof(*handle), GFP_KERNEL);
	if (!handle)
		return -ENOMEM;

	v4l2_fh_init(&handle->vfh, &video->video);
	v4l2_fh_add(&handle->vfh);

	/* If this is the first user, initialise the pipeline. */
	if (!omap4iss_get(video->iss)) {
		ret = -EBUSY;
		goto done;
	}

	ret = v4l2_pipeline_pm_use(&video->video.entity, 1);
	if (ret < 0) {
		omap4iss_put(video->iss);
		goto done;
	}

	q = &handle->queue;

	q->type = video->type;
	q->io_modes = VB2_MMAP | VB2_DMABUF;
	q->drv_priv = handle;
	q->ops = &iss_video_vb2ops;
	q->mem_ops = &vb2_dma_contig_memops;
	q->buf_struct_size = sizeof(struct iss_buffer);
	q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
	q->dev = video->iss->dev;

	ret = vb2_queue_init(q);
	if (ret) {
		/*
		 * NOTE(review): on this path the v4l2_pipeline_pm_use(..., 1)
		 * reference taken above is not released - verify against the
		 * release path whether this leaks a power reference.
		 */
		omap4iss_put(video->iss);
		goto done;
	}

	memset(&handle->format, 0, sizeof(handle->format));
	handle->format.type = video->type;
	handle->timeperframe.denominator = 1;

	handle->video = video;
	file->private_data = &handle->vfh;

done:
	if (ret < 0) {
		v4l2_fh_del(&handle->vfh);
		v4l2_fh_exit(&handle->vfh);
		kfree(handle);
	}

	return ret;
}

/*
 * Release: stop streaming, drop the pipeline power reference, release the
 * vb2 queue and free the file handle.
 */
static int iss_video_release(struct file *file)
{
	struct iss_video *video = video_drvdata(file);
	struct v4l2_fh *vfh = file->private_data;
	struct iss_video_fh *handle = to_iss_video_fh(vfh);

	/* Disable streaming and free the buffers queue resources. */
	iss_video_streamoff(file, vfh, video->type);

	v4l2_pipeline_pm_use(&video->video.entity, 0);

	/* Release the videobuf2 queue */
	vb2_queue_release(&handle->queue);

	v4l2_fh_del(vfh);
	v4l2_fh_exit(vfh);
	kfree(handle);
	file->private_data = NULL;

	omap4iss_put(video->iss);

	return 0;
}

/* Poll: delegate to the per-handle vb2 queue. */
static unsigned int iss_video_poll(struct file *file, poll_table *wait)
{
	struct iss_video_fh *vfh = to_iss_video_fh(file->private_data);

	return vb2_poll(&vfh->queue, file, wait);
}

/* Mmap: delegate to the per-handle vb2 queue. */
static int iss_video_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct iss_video_fh *vfh = to_iss_video_fh(file->private_data);

	return vb2_mmap(&vfh->queue, vma);
}

static struct v4l2_file_operations iss_video_fops = {
	.owner = THIS_MODULE,
	.unlocked_ioctl = video_ioctl2,
	.open = iss_video_open,
	.release = iss_video_release,
	.poll = iss_video_poll,
	.mmap = iss_video_mmap,
};

/* -----------------------------------------------------------------------------
 * ISS video core
 */

static const struct iss_video_operations iss_video_dummy_ops = {
};

/*
 * omap4iss_video_init - Initialize an ISS video node
 * @video: ISS video instance, with @video->type set by the caller
 * @name: name suffix for the video device
 *
 * Set up the media pad direction from the buffer type, register the media
 * entity pad and initialize the locks. (Definition continues beyond this
 * source chunk.)
 */
int omap4iss_video_init(struct iss_video *video, const char *name)
{
	const char *direction;
	int ret;

	switch (video->type) {
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		direction = "output";
		video->pad.flags = MEDIA_PAD_FL_SINK;
		break;
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		direction = "input";
		video->pad.flags = MEDIA_PAD_FL_SOURCE;
		break;

	default:
		return -EINVAL;
	}

	ret = media_entity_pads_init(&video->video.entity, 1, &video->pad);
	if (ret < 0)
		return ret;

	spin_lock_init(&video->qlock);
	mutex_init(&video->mutex);
	atomic_set(&video->active, 0);

	spin_lock_init(&video->pipe.lock);
mutex_init(&video->stream_lock); 1247 1248 /* Initialize the video device. */ 1249 if (!video->ops) 1250 video->ops = &iss_video_dummy_ops; 1251 1252 video->video.fops = &iss_video_fops; 1253 snprintf(video->video.name, sizeof(video->video.name), 1254 "OMAP4 ISS %s %s", name, direction); 1255 video->video.vfl_type = VFL_TYPE_GRABBER; 1256 video->video.release = video_device_release_empty; 1257 video->video.ioctl_ops = &iss_video_ioctl_ops; 1258 video->pipe.stream_state = ISS_PIPELINE_STREAM_STOPPED; 1259 1260 video_set_drvdata(&video->video, video); 1261 1262 return 0; 1263 } 1264 1265 void omap4iss_video_cleanup(struct iss_video *video) 1266 { 1267 media_entity_cleanup(&video->video.entity); 1268 mutex_destroy(&video->stream_lock); 1269 mutex_destroy(&video->mutex); 1270 } 1271 1272 int omap4iss_video_register(struct iss_video *video, struct v4l2_device *vdev) 1273 { 1274 int ret; 1275 1276 video->video.v4l2_dev = vdev; 1277 1278 ret = video_register_device(&video->video, VFL_TYPE_GRABBER, -1); 1279 if (ret < 0) 1280 dev_err(video->iss->dev, 1281 "could not register video device (%d)\n", ret); 1282 1283 return ret; 1284 } 1285 1286 void omap4iss_video_unregister(struct iss_video *video) 1287 { 1288 video_unregister_device(&video->video); 1289 } 1290