// SPDX-License-Identifier: GPL-2.0-only
/*
 * TI Camera Access Layer (CAL) - Driver
 *
 * Copyright (c) 2015-2020 Texas Instruments Inc.
 */

#include <media/media-device.h>
#include <media/v4l2-async.h>
#include <media/v4l2-common.h>
#include <media/v4l2-device.h>
#include <media/videobuf2-core.h>
#include <media/videobuf2-dma-contig.h>

#include "cal.h"
#include "cal_regs.h"

int cal_video_nr = -1;
module_param_named(video_nr, cal_video_nr, int, 0644);
MODULE_PARM_DESC(video_nr, "videoX start number, -1 is autodetect");

/* ------------------------------------------------------------------
 *	Format Handling
 * ------------------------------------------------------------------
 */

/* ------------------------------------------------------------------
 *	Platform Data
 * ------------------------------------------------------------------
 */

/* ------------------------------------------------------------------
 *	I/O Register Accessors
 * ------------------------------------------------------------------
 */

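/*
 * cal_quickdump_regs() is a debugging aid: it hex-dumps the main CAL
 * register space and then the register space of every CAMERARX PHY
 * instance described by the platform data.
 */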
void cal_quickdump_regs(struct cal_dev *cal)
{
	unsigned int i;

	cal_info(cal, "CAL Registers @ 0x%pa:\n", &cal->res->start);
	print_hex_dump(KERN_INFO, "", DUMP_PREFIX_OFFSET, 16, 4,
		       (__force const void *)cal->base,
		       resource_size(cal->res), false);

	for (i = 0; i < cal->data->num_csi2_phy; ++i) {
		struct cal_camerarx *phy = cal->phy[i];

		cal_info(cal, "CAL CAMERARX%u Registers @ 0x%pa:\n",
			 i, &phy->res->start);
		print_hex_dump(KERN_INFO, "", DUMP_PREFIX_OFFSET, 16, 4,
			       (__force const void *)phy->base,
			       resource_size(phy->res), false);
	}
}

/* ------------------------------------------------------------------
 *	Context Management
 * ------------------------------------------------------------------
 */

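/*
 * Each capture context needs a CSI-2 processing context, a write DMA
 * context and, for non-metadata formats, one of the shared pixel
 * processing slots. The pixel processing slots are a global pool tracked
 * in cal->reserved_pix_proc_mask and handed out under the v4l2_dev.lock
 * spinlock by the two helpers below.
 */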
static int cal_reserve_pix_proc(struct cal_dev *cal)
{
	unsigned long ret;

	spin_lock(&cal->v4l2_dev.lock);

	ret = find_first_zero_bit(&cal->reserved_pix_proc_mask, CAL_MAX_PIX_PROC);
	if (ret == CAL_MAX_PIX_PROC) {
		spin_unlock(&cal->v4l2_dev.lock);
		return -ENOSPC;
	}

	cal->reserved_pix_proc_mask |= BIT(ret);

	spin_unlock(&cal->v4l2_dev.lock);

	return ret;
}

static void cal_release_pix_proc(struct cal_dev *cal, unsigned int pix_proc_num)
{
	spin_lock(&cal->v4l2_dev.lock);
	cal->reserved_pix_proc_mask &= ~BIT(pix_proc_num);
	spin_unlock(&cal->v4l2_dev.lock);
}

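/*
 * The per-context configuration helpers below follow the same pattern:
 * read the current register value with cal_read(), update individual
 * fields with cal_set_field(), write the result back with cal_write(),
 * and log the final register value at debug level 3.
 */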
static void cal_ctx_csi2_config(struct cal_ctx *ctx)
{
	u32 val;

	val = cal_read(ctx->cal, CAL_CSI2_CTX(ctx->phy->instance, ctx->csi2_ctx));
	cal_set_field(&val, ctx->cport, CAL_CSI2_CTX_CPORT_MASK);
	/*
	 * DT type: MIPI CSI-2 Specs
	 *   0x1: All - DT filter is disabled
	 */
	cal_set_field(&val, ctx->datatype, CAL_CSI2_CTX_DT_MASK);
	cal_set_field(&val, ctx->vc, CAL_CSI2_CTX_VC_MASK);
	cal_set_field(&val, ctx->v_fmt.fmt.pix.height, CAL_CSI2_CTX_LINES_MASK);
	cal_write(ctx->cal, CAL_CSI2_CTX(ctx->phy->instance, ctx->csi2_ctx), val);
	ctx_dbg(3, ctx, "CAL_CSI2_CTX(%u, %u) = 0x%08x\n",
		ctx->phy->instance, ctx->csi2_ctx,
		cal_read(ctx->cal, CAL_CSI2_CTX(ctx->phy->instance, ctx->csi2_ctx)));
}

static void cal_ctx_pix_proc_config(struct cal_ctx *ctx)
{
	u32 val;

	switch (ctx->fmtinfo->bpp) {
	/* Cases for the supported bits-per-pixel values select the
	 * extract/pack mode. */
	default:
		dev_warn_once(ctx->cal->dev,
			      "%s:%d:%s: unsupported bpp %d\n", /* format approximated */
			      __FILE__, __LINE__, __func__, ctx->fmtinfo->bpp);
		break;
	}

	val = cal_read(ctx->cal, CAL_PIX_PROC(ctx->pix_proc));
	cal_set_field(&val, ctx->cport, CAL_PIX_PROC_CPORT_MASK);
	cal_write(ctx->cal, CAL_PIX_PROC(ctx->pix_proc), val);
	ctx_dbg(3, ctx, "CAL_PIX_PROC(%u) = 0x%08x\n", ctx->pix_proc,
		cal_read(ctx->cal, CAL_PIX_PROC(ctx->pix_proc)));
}

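/*
 * The write DMA context is programmed with the frame height (CTRL), the
 * line stride (OFST) and the number of bytes to write per line (XSIZE,
 * expressed in 64-bit units); the destination address is set per buffer
 * by cal_ctx_set_dma_addr().
 */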
static void cal_ctx_wr_dma_config(struct cal_ctx *ctx)
{
	unsigned int stride = ctx->v_fmt.fmt.pix.bytesperline;
	u32 val;

	val = cal_read(ctx->cal, CAL_WR_DMA_CTRL(ctx->dma_ctx));
	cal_set_field(&val, ctx->cport, CAL_WR_DMA_CTRL_CPORT_MASK);
	cal_set_field(&val, ctx->v_fmt.fmt.pix.height,
		      CAL_WR_DMA_CTRL_YSIZE_MASK);
	cal_write(ctx->cal, CAL_WR_DMA_CTRL(ctx->dma_ctx), val);
	ctx_dbg(3, ctx, "CAL_WR_DMA_CTRL(%d) = 0x%08x\n", ctx->dma_ctx,
		cal_read(ctx->cal, CAL_WR_DMA_CTRL(ctx->dma_ctx)));

	cal_write_field(ctx->cal, CAL_WR_DMA_OFST(ctx->dma_ctx),
			stride, CAL_WR_DMA_OFST_MASK);
	ctx_dbg(3, ctx, "CAL_WR_DMA_OFST(%d) = 0x%08x\n", ctx->dma_ctx,
		cal_read(ctx->cal, CAL_WR_DMA_OFST(ctx->dma_ctx)));

	val = cal_read(ctx->cal, CAL_WR_DMA_XSIZE(ctx->dma_ctx));
	/*
	 * The XSIZE field is expressed in 64-bit units and prevents overflows
	 * in case of synchronization issues by limiting the number of bytes
	 * written per line.
	 */
	cal_set_field(&val, stride / 8, CAL_WR_DMA_XSIZE_MASK);
	cal_write(ctx->cal, CAL_WR_DMA_XSIZE(ctx->dma_ctx), val);
	ctx_dbg(3, ctx, "CAL_WR_DMA_XSIZE(%d) = 0x%08x\n", ctx->dma_ctx,
		cal_read(ctx->cal, CAL_WR_DMA_XSIZE(ctx->dma_ctx)));
}

static void cal_ctx_set_dma_addr(struct cal_ctx *ctx, dma_addr_t addr)
{
	cal_write(ctx->cal, CAL_WR_DMA_ADDR(ctx->dma_ctx), addr);
}

static void cal_ctx_wr_dma_enable(struct cal_ctx *ctx)
{
	u32 val = cal_read(ctx->cal, CAL_WR_DMA_CTRL(ctx->dma_ctx));

	cal_set_field(&val, CAL_WR_DMA_CTRL_MODE_CONST, CAL_WR_DMA_CTRL_MODE_MASK);
	cal_write(ctx->cal, CAL_WR_DMA_CTRL(ctx->dma_ctx), val);
}

static void cal_ctx_wr_dma_disable(struct cal_ctx *ctx)
{
	u32 val = cal_read(ctx->cal, CAL_WR_DMA_CTRL(ctx->dma_ctx));

	cal_set_field(&val, CAL_WR_DMA_CTRL_MODE_DIS, CAL_WR_DMA_CTRL_MODE_MASK);
	cal_write(ctx->cal, CAL_WR_DMA_CTRL(ctx->dma_ctx), val);
}

static bool cal_ctx_wr_dma_stopped(struct cal_ctx *ctx)
{
	bool stopped;

	spin_lock_irq(&ctx->dma.lock);
	stopped = ctx->dma.state == CAL_DMA_STOPPED;
	spin_unlock_irq(&ctx->dma.lock);

	return stopped;
}

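/*
 * The CSI-2 virtual channel and data type used by a context are taken from
 * the frame descriptor reported by the remote transmitter through the
 * .get_frame_desc() subdev pad operation. Transmitters that don't implement
 * it fall back to VC 0 with data-type filtering disabled (see
 * cal_ctx_prepare() below).
 */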
static int cal_get_remote_frame_desc_entry(struct cal_ctx *ctx,
					   struct v4l2_mbus_frame_desc_entry *entry)
{
	struct v4l2_mbus_frame_desc fd;
	struct media_pad *phy_source_pad;
	int ret;

	phy_source_pad = media_pad_remote_pad_first(&ctx->pad);
	if (!phy_source_pad)
		return -ENODEV;

	ret = v4l2_subdev_call(&ctx->phy->subdev, pad, get_frame_desc,
			       phy_source_pad->index, &fd);
	if (ret)
		return ret;

	if (fd.num_entries != 1)
		return -EINVAL;

	*entry = fd.entry[0];
	return 0;
}

int cal_ctx_prepare(struct cal_ctx *ctx)
{
	struct v4l2_mbus_frame_desc_entry entry;
	int ret;

	ret = cal_get_remote_frame_desc_entry(ctx, &entry);
	if (ret == -ENOIOCTLCMD) {
		ctx->vc = 0;
		ctx->datatype = CAL_CSI2_CTX_DT_ANY;
	} else if (!ret) {
		ctx->vc = entry.bus.csi2.vc;
		ctx->datatype = entry.bus.csi2.dt;
	} else {
		return ret;
	}

	ctx->use_pix_proc = !ctx->fmtinfo->meta;

	if (ctx->use_pix_proc) {
		ret = cal_reserve_pix_proc(ctx->cal);
		if (ret < 0)
			return ret;

		ctx->pix_proc = ret;
	}

	return 0;
}

void cal_ctx_unprepare(struct cal_ctx *ctx)
{
	if (ctx->use_pix_proc)
		cal_release_pix_proc(ctx->cal, ctx->pix_proc);
}

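/*
 * Several contexts may capture from the same virtual channel of one PHY.
 * The per-VC enable count, frame number and sequence number therefore live
 * in the cal_camerarx structure and are protected by its vc_lock; the
 * counters are reset only when the first user of a VC starts streaming.
 */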
void cal_ctx_start(struct cal_ctx *ctx)
{
	struct cal_camerarx *phy = ctx->phy;

	/*
	 * Reset the frame number and sequence number, but only if the
	 * virtual channel is not already in use.
	 */
	spin_lock(&phy->vc_lock);
	if (phy->vc_enable_count[ctx->vc]++ == 0) {
		phy->vc_frame_number[ctx->vc] = 0;
		phy->vc_sequence[ctx->vc] = 0;
	}
	spin_unlock(&phy->vc_lock);

	ctx->dma.state = CAL_DMA_RUNNING;

	/* Configure the CSI-2, pixel processing and write DMA contexts. */
	cal_ctx_csi2_config(ctx);
	if (ctx->use_pix_proc)
		cal_ctx_pix_proc_config(ctx);
	cal_ctx_wr_dma_config(ctx);

	/* Enable the WDMA end and start interrupts for this context. */
	cal_write(ctx->cal, CAL_HL_IRQENABLE_SET(1),
		  CAL_HL_IRQ_WDMA_END_MASK(ctx->dma_ctx));
	cal_write(ctx->cal, CAL_HL_IRQENABLE_SET(2),
		  CAL_HL_IRQ_WDMA_START_MASK(ctx->dma_ctx));
}

void cal_ctx_stop(struct cal_ctx *ctx)
{
	struct cal_camerarx *phy = ctx->phy;
	long timeout;

	WARN_ON(phy->vc_enable_count[ctx->vc] == 0);

	spin_lock(&phy->vc_lock);
	phy->vc_enable_count[ctx->vc]--;
	spin_unlock(&phy->vc_lock);

	/*
	 * Request DMA stop and wait until it completes. If completion times
	 * out, forcefully disable the DMA.
	 */
	spin_lock_irq(&ctx->dma.lock);
	ctx->dma.state = CAL_DMA_STOP_REQUESTED;
	spin_unlock_irq(&ctx->dma.lock);

	timeout = wait_event_timeout(ctx->dma.wait, cal_ctx_wr_dma_stopped(ctx),
				     msecs_to_jiffies(500));
	if (!timeout) {
		ctx_err(ctx, "failed to disable dma cleanly\n");
		cal_ctx_wr_dma_disable(ctx);
	}

	/* Disable the WDMA end and start interrupts for this context. */
	cal_write(ctx->cal, CAL_HL_IRQENABLE_CLR(1),
		  CAL_HL_IRQ_WDMA_END_MASK(ctx->dma_ctx));
	cal_write(ctx->cal, CAL_HL_IRQENABLE_CLR(2),
		  CAL_HL_IRQ_WDMA_START_MASK(ctx->dma_ctx));

	ctx->dma.state = CAL_DMA_STOPPED;

	/* Disable the CSI-2 and pixel processing contexts. */
	cal_write(ctx->cal, CAL_CSI2_CTX(ctx->phy->instance, ctx->csi2_ctx), 0);

	if (ctx->use_pix_proc)
		cal_write(ctx->cal, CAL_PIX_PROC(ctx->pix_proc), 0);
}

/* ------------------------------------------------------------------
 *	IRQ Handling
 * ------------------------------------------------------------------
 */

/*
 * Track a per-virtual-channel sequence number, shared by all contexts using
 * the same virtual channel, using the hardware-reported CSI-2 frame number
 * as a base.
 */
static void cal_update_seq_number(struct cal_ctx *ctx)
{
	struct cal_dev *cal = ctx->cal;
	struct cal_camerarx *phy = ctx->phy;
	u16 prev_frame_num, frame_num;
	u8 vc = ctx->vc;

	frame_num =
		cal_read(cal, CAL_CSI2_STATUS(phy->instance, ctx->csi2_ctx)) &
		0xffff;

	if (phy->vc_frame_number[vc] != frame_num) {
		prev_frame_num = phy->vc_frame_number[vc];

		if (prev_frame_num >= frame_num)
			phy->vc_sequence[vc] += 1;
		else
			phy->vc_sequence[vc] += frame_num - prev_frame_num;

		phy->vc_frame_number[vc] = frame_num;
	}
}

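/*
 * Buffer handoff protocol: the WDMA START interrupt programs the address of
 * the next queued buffer into the hardware and records it as dma.pending,
 * while the WDMA END interrupt promotes the pending buffer to dma.active and
 * completes the previous active buffer with vb2_buffer_done(). Stopping is a
 * two-step handshake through the CAL_DMA_STOP_REQUESTED, STOP_PENDING and
 * STOPPED states.
 */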
static void cal_irq_wdma_start(struct cal_ctx *ctx)
{
	spin_lock(&ctx->dma.lock);

	if (ctx->dma.state == CAL_DMA_STOP_REQUESTED) {
		/* Let the current frame complete, then stop the DMA. */
		cal_ctx_wr_dma_disable(ctx);
		ctx->dma.state = CAL_DMA_STOP_PENDING;
	} else if (!list_empty(&ctx->dma.queue) && !ctx->dma.pending) {
		struct cal_buffer *buf;
		dma_addr_t addr;

		/* Hand the next queued buffer to the hardware. */
		buf = list_first_entry(&ctx->dma.queue, struct cal_buffer,
				       list);
		addr = vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
		cal_ctx_set_dma_addr(ctx, addr);

		ctx->dma.pending = buf;
		list_del(&buf->list);
	}

	spin_unlock(&ctx->dma.lock);
}

static void cal_irq_wdma_end(struct cal_ctx *ctx)
{
	struct cal_buffer *buf = NULL;

	spin_lock(&ctx->dma.lock);

	/* If the DMA context was stopping, it is now stopped. */
	if (ctx->dma.state == CAL_DMA_STOP_PENDING) {
		ctx->dma.state = CAL_DMA_STOPPED;
		wake_up(&ctx->dma.wait);
	}

	/* If a new buffer was queued, complete the current buffer. */
	if (ctx->dma.pending) {
		buf = ctx->dma.active;
		ctx->dma.active = ctx->dma.pending;
		ctx->dma.pending = NULL;
	}

	spin_unlock(&ctx->dma.lock);

	if (buf) {
		buf->vb.vb2_buf.timestamp = ktime_get_ns();
		buf->vb.field = ctx->v_fmt.fmt.pix.field;
		buf->vb.sequence = ctx->phy->vc_sequence[ctx->vc];

		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_DONE);
	}
}

static void cal_irq_handle_wdma(struct cal_ctx *ctx, bool start, bool end)
{
	/*
	 * Frames less than 10 lines high (e.g. embedded data) deliver the
	 * start and end interrupts together for the same frame; taller frames
	 * get the end of the previous frame before the start of the next one.
	 */
	if (ctx->v_fmt.fmt.pix.height < 10) {
		if (start)
			cal_irq_wdma_start(ctx);
		if (end)
			cal_irq_wdma_end(ctx);
	} else {
		if (end)
			cal_irq_wdma_end(ctx);
		if (start)
			cal_irq_wdma_start(ctx);
	}
}

/*
 * cal_irq(): the top-level interrupt handler. Error conditions are reported
 * with rate-limited messages, then write DMA start/end events are dispatched
 * to the contexts that raised them.
 */

	/* An OCP output error is reported as: */
	dev_err_ratelimited(cal->dev, "OCPO ERROR\n");

	/* Per-PHY CSI-2 error conditions are reported the same way: */
	for (i = 0; i < cal->data->num_csi2_phy; ++i) {
		/* complex I/O and virtual channel error status decode,
		 * reported with dev_err_ratelimited() */
	}

	/* start/end are decoded from the WDMA bits of the IRQ status. */
	for (i = 0; i < cal->num_contexts; ++i) {
		cal_irq_handle_wdma(cal->ctx[i], start, end);
	}

/* ------------------------------------------------------------------
 *	Asynchronous V4L2 subdev binding
 * ------------------------------------------------------------------
 */

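/*
 * Source subdevs (sensors, serializers, ...) are matched asynchronously by
 * fwnode. When a subdev bound to a CAMERARX PHY shows up, its source pad is
 * linked to the PHY's sink pad; once all subdevs are bound, the notifier's
 * .complete() callback registers the user-facing video devices.
 */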
static int cal_async_notifier_bound(struct v4l2_async_notifier *notifier,
				    struct v4l2_subdev *subdev,
				    struct v4l2_async_connection *asd)
{
	struct cal_camerarx *phy = to_cal_asd(asd)->phy;
	int pad;
	int ret;

	/* Only a single source subdev is supported per CAMERARX PHY. */
	if (phy->source) {
		phy_info(phy, "Rejecting subdev %s (Already set!!)",
			 subdev->name);
		return 0;
	}

	phy->source = subdev;
	phy_dbg(1, phy, "Using source %s for capture\n", subdev->name);

	/* Link the source pad facing our endpoint to the PHY's sink pad. */
	pad = media_entity_get_fwnode_pad(&subdev->entity,
					  of_fwnode_handle(phy->source_ep_node),
					  MEDIA_PAD_FL_SOURCE);
	if (pad < 0) {
		phy_err(phy, "Source %s has no connected source pad\n",
			subdev->name);
		return pad;
	}

	ret = media_create_pad_link(&subdev->entity, pad,
				    &phy->subdev.entity, CAL_CAMERARX_PAD_SINK,
				    MEDIA_LNK_FL_IMMUTABLE |
				    MEDIA_LNK_FL_ENABLED);
	if (ret) {
		phy_err(phy, "Failed to create media link for source %s\n",
			subdev->name);
		return ret;
	}

	return 0;
}

static int cal_async_notifier_complete(struct v4l2_async_notifier *notifier)
{
	struct cal_dev *cal = container_of(notifier, struct cal_dev, notifier);
	unsigned int i;
	int ret;

	for (i = 0; i < cal->num_contexts; ++i) {
		ret = cal_ctx_v4l2_register(cal->ctx[i]);
		if (ret)
			goto err_ctx_unreg;
	}

	ret = v4l2_device_register_subdev_nodes(&cal->v4l2_dev);
	if (ret)
		goto err_ctx_unreg;

	return 0;

err_ctx_unreg:
	for (; i > 0; --i) {
		if (!cal->ctx[i - 1])
			continue;

		cal_ctx_v4l2_unregister(cal->ctx[i - 1]);
	}

	return ret;
}

static int cal_async_notifier_register(struct cal_dev *cal)
{
	unsigned int i;
	int ret;

	v4l2_async_nf_init(&cal->notifier, &cal->v4l2_dev);
	cal->notifier.ops = &cal_async_notifier_ops;

	for (i = 0; i < cal->data->num_csi2_phy; ++i) {
		struct cal_camerarx *phy = cal->phy[i];
		struct cal_v4l2_async_subdev *casd;
		struct fwnode_handle *fwnode;

		if (!phy->source_node)
			continue;

		fwnode = of_fwnode_handle(phy->source_node);
		casd = v4l2_async_nf_add_fwnode(&cal->notifier, fwnode,
						struct cal_v4l2_async_subdev);
		if (IS_ERR(casd)) {
			ret = PTR_ERR(casd);
			goto error;
		}

		casd->phy = phy;
	}

	ret = v4l2_async_nf_register(&cal->notifier);
	if (ret)
		goto error;

	return 0;

error:
	v4l2_async_nf_cleanup(&cal->notifier);
	return ret;
}

static void cal_async_notifier_unregister(struct cal_dev *cal)
{
	v4l2_async_nf_unregister(&cal->notifier);
	v4l2_async_nf_cleanup(&cal->notifier);
}

/* ------------------------------------------------------------------
 *	Media and V4L2 device handling
 * ------------------------------------------------------------------
 */

/*
 * Register user-facing devices. To be called at the end of the probe function
 * when all resources are initialized and ready.
 */
static int cal_media_register(struct cal_dev *cal)
{
	int ret;

	ret = media_device_register(&cal->mdev);
	if (ret)
		return ret;

	/* Video devices get registered when the async notifier completes. */
	ret = cal_async_notifier_register(cal);
	if (ret) {
		media_device_unregister(&cal->mdev);
		return ret;
	}

	return 0;
}

/*
 * Unregister the user-facing devices, but don't free memory yet. To be called
 * at the beginning of the remove function, to avoid racing with userspace.
 */
static void cal_media_unregister(struct cal_dev *cal)
{
	unsigned int i;

	for (i = 0; i < cal->num_contexts; i++)
		cal_ctx_v4l2_unregister(cal->ctx[i]);

	cal_async_notifier_unregister(cal);
	media_device_unregister(&cal->mdev);
}

/*
 * Initialize the in-kernel objects. To be called at the beginning of the probe
 * function, before the V4L2 device is used by the driver.
 */
static int cal_media_init(struct cal_dev *cal)
{
	struct media_device *mdev = &cal->mdev;
	int ret;

	mdev->dev = cal->dev;
	mdev->hw_revision = cal->revision;
	strscpy(mdev->model, "CAL", sizeof(mdev->model));
	media_device_init(mdev);

	cal->v4l2_dev.mdev = mdev;
	ret = v4l2_device_register(cal->dev, &cal->v4l2_dev);
	if (ret)
		return ret;

	vb2_dma_contig_set_max_seg_size(cal->dev, DMA_BIT_MASK(32));

	return 0;
}

/*
 * Cleanup the in-kernel objects, freeing memory. To be called at the very end
 * of the remove function, when nothing references the objects anymore.
 */
static void cal_media_cleanup(struct cal_dev *cal)
{
	v4l2_device_unregister(&cal->v4l2_dev);
	media_device_cleanup(&cal->mdev);

	vb2_dma_contig_clear_max_seg_size(cal->dev);
}

/* ------------------------------------------------------------------
 *	Initialization and module stuff
 * ------------------------------------------------------------------
 */

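/*
 * One capture context is created per CAMERARX PHY that has a connected
 * source endpoint. The DMA context, CSI-2 context and cport indices are
 * simply set to the PHY instance number.
 */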
static struct cal_ctx *cal_ctx_create(struct cal_dev *cal, int inst)
{
	struct cal_ctx *ctx;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return NULL;

	ctx->cal = cal;
	ctx->phy = cal->phy[inst];
	ctx->dma_ctx = inst;
	ctx->csi2_ctx = inst;
	ctx->cport = inst;

	if (cal_ctx_v4l2_init(ctx)) {
		kfree(ctx);
		return NULL;
	}

	return ctx;
}

static const struct of_device_id cal_of_match[] = {
	{ .compatible = "ti,dra72-cal", .data = &cal_data_dra72x },
	{ .compatible = "ti,dra72-pre-es2-cal", .data = &cal_data_dra72x_pre_es2 },
	{ .compatible = "ti,dra76-cal", .data = &cal_data_dra76x },
	{ .compatible = "ti,am654-cal", .data = &cal_data_am654 },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, cal_of_match);

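/*
 * Illustrative device tree usage (not taken from this file; node names, unit
 * addresses and the syscon offset are examples only, other required
 * properties omitted):
 *
 *	cal: cal@4845b000 {
 *		compatible = "ti,dra72-cal";
 *		...
 *		ti,camerrx-control = <&scm_conf 0xe94>;
 *	};
 */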
static void cal_get_hwinfo(struct cal_dev *cal)
{
	cal->revision = cal_read(cal, CAL_HL_REVISION);
	switch (FIELD_GET(CAL_HL_REVISION_SCHEME_MASK, cal->revision)) {
	case CAL_HL_REVISION_SCHEME_H08:
		cal_dbg(3, cal, "CAL HW revision %lu.%lu.%lu (0x%08x)\n",
			FIELD_GET(CAL_HL_REVISION_MAJOR_MASK, cal->revision),
			FIELD_GET(CAL_HL_REVISION_MINOR_MASK, cal->revision),
			FIELD_GET(CAL_HL_REVISION_RTL_MASK, cal->revision),
			cal->revision);
		break;

	default:
		cal_info(cal, "Unexpected CAL HW revision 0x%08x\n",
			 cal->revision);
		break;
	}
}

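/*
 * The CAMERARX control bits live in the SoC's control module. The lookup
 * below first tries the "ti,camerrx-control" syscon phandle (with the
 * register offset as its argument); if the property is absent, it warns and
 * falls back to mapping the control registers directly and wrapping them in
 * a driver-local regmap with a zero offset.
 */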
static int cal_init_camerarx_regmap(struct cal_dev *cal)
{
	struct platform_device *pdev = to_platform_device(cal->dev);
	struct device_node *np = cal->dev->of_node;
	struct regmap_config config = { };
	struct regmap *syscon;
	struct resource *res;
	unsigned int offset;
	void __iomem *base;

	syscon = syscon_regmap_lookup_by_phandle_args(np, "ti,camerrx-control",
						      1, &offset);
	if (!IS_ERR(syscon)) {
		cal->syscon_camerrx = syscon;
		cal->syscon_camerrx_offset = offset;
		return 0;
	}

	dev_warn(cal->dev, "failed to get ti,camerrx-control: %ld\n",
		 PTR_ERR(syscon));

	/* Legacy fallback: map the control registers directly. */
	res = platform_get_resource_byname(pdev, IORESOURCE_MEM,
					   "camerrx_control");
	base = devm_ioremap_resource(cal->dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	cal_dbg(1, cal, "ioresource %s at %pa - %pa\n",
		res->name, &res->start, &res->end);

	config.reg_bits = 32;
	config.reg_stride = 4;
	config.val_bits = 32;
	config.max_register = resource_size(res) - 4;

	syscon = regmap_init_mmio(NULL, base, &config);
	if (IS_ERR(syscon))
		return PTR_ERR(syscon);

	cal->syscon_camerrx = syscon;
	cal->syscon_camerrx_offset = 0;

	return 0;
}

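/*
 * Probe order: match data and "fck" clock, CAMERARX control regmap, CAL
 * register space and interrupt, then a runtime PM cycle to read the hardware
 * revision, followed by media/V4L2 device init, CAMERARX PHY and context
 * creation, and finally media device registration.
 */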
static int cal_probe(struct platform_device *pdev)
{
	struct cal_dev *cal;
	bool connected = false;
	unsigned int i;
	int ret;
	int irq;

	cal = devm_kzalloc(&pdev->dev, sizeof(*cal), GFP_KERNEL);
	if (!cal)
		return -ENOMEM;

	cal->data = of_device_get_match_data(&pdev->dev);
	if (!cal->data) {
		dev_err(&pdev->dev, "Could not get feature data based on compatible version\n");
		return -ENODEV;
	}

	cal->dev = &pdev->dev;
	platform_set_drvdata(pdev, cal);

	/* Acquire the functional clock, CAMERARX control regmap, CAL
	 * register space and interrupt. */
	cal->fclk = devm_clk_get(&pdev->dev, "fck");
	if (IS_ERR(cal->fclk)) {
		dev_err(&pdev->dev, "cannot get CAL fclk\n");
		return PTR_ERR(cal->fclk);
	}

	ret = cal_init_camerarx_regmap(cal);
	if (ret < 0)
		return ret;

	cal->res = platform_get_resource_byname(pdev, IORESOURCE_MEM,
						"cal_top");
	cal->base = devm_ioremap_resource(&pdev->dev, cal->res);
	if (IS_ERR(cal->base))
		return PTR_ERR(cal->base);

	cal_dbg(1, cal, "ioresource %s at %pa - %pa\n",
		cal->res->name, &cal->res->start, &cal->res->end);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(&pdev->dev, irq, cal_irq, 0, CAL_MODULE_NAME,
			       cal);
	if (ret)
		return ret;

	/* Read the revision and hardware info to verify hardware access. */
	pm_runtime_enable(&pdev->dev);
	ret = pm_runtime_resume_and_get(&pdev->dev);
	if (ret)
		goto error_pm_runtime;

	cal_get_hwinfo(cal);
	pm_runtime_put_sync(&pdev->dev);

	/* Initialize the media device. */
	ret = cal_media_init(cal);
	if (ret < 0)
		goto error_pm_runtime;

	/* Create the CAMERARX PHYs. */
	for (i = 0; i < cal->data->num_csi2_phy; ++i) {
		cal->phy[i] = cal_camerarx_create(cal, i);
		if (IS_ERR(cal->phy[i])) {
			ret = PTR_ERR(cal->phy[i]);
			cal->phy[i] = NULL;
			goto error_camerarx;
		}

		if (cal->phy[i]->source_node)
			connected = true;
	}

	if (!connected) {
		cal_err(cal, "Neither port is configured, no point in staying up\n");
		ret = -ENODEV;
		goto error_camerarx;
	}

	/* Create one context per connected PHY. */
	for (i = 0; i < cal->data->num_csi2_phy; ++i) {
		if (!cal->phy[i]->source_node)
			continue;

		cal->ctx[cal->num_contexts] = cal_ctx_create(cal, i);
		if (!cal->ctx[cal->num_contexts]) {
			cal_err(cal, "Failed to create context %u\n", cal->num_contexts);
			ret = -ENODEV;
			goto error_context;
		}

		cal->num_contexts++;
	}

	/* Register the media device (this also registers the async notifier). */
	ret = cal_media_register(cal);
	if (ret)
		goto error_context;

	return 0;

error_context:
	for (i = 0; i < cal->num_contexts; i++)
		cal_ctx_destroy(cal->ctx[i]);

error_camerarx:
	for (i = 0; i < cal->data->num_csi2_phy; i++)
		cal_camerarx_destroy(cal->phy[i]);

	cal_media_cleanup(cal);

error_pm_runtime:
	pm_runtime_disable(&pdev->dev);

	return ret;
}

static void cal_remove(struct platform_device *pdev)
{
	struct cal_dev *cal = platform_get_drvdata(pdev);
	unsigned int i;
	int ret;

	ret = pm_runtime_resume_and_get(&pdev->dev);

	cal_media_unregister(cal);

	for (i = 0; i < cal->data->num_csi2_phy; i++)
		cal_camerarx_disable(cal->phy[i]);

	for (i = 0; i < cal->num_contexts; i++)
		cal_ctx_destroy(cal->ctx[i]);

	for (i = 0; i < cal->data->num_csi2_phy; i++)
		cal_camerarx_destroy(cal->phy[i]);

	cal_media_cleanup(cal);

	if (ret >= 0)
		pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
}

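/*
 * DRA72x silicon before ES2.0 needs the i913 errata workaround (disabling
 * the CSI-2 PHY LDO) to be re-applied every time the module is powered back
 * up, hence the flag check in the runtime resume handler below.
 */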
static int cal_runtime_resume(struct device *dev)
{
	struct cal_dev *cal = dev_get_drvdata(dev);
	unsigned int i;

	if (cal->data->flags & DRA72_CAL_PRE_ES2_LDO_DISABLE) {
		/*
		 * Apply the errata on both ports every time we (re-)enable
		 * the module.
		 */
		for (i = 0; i < cal->data->num_csi2_phy; i++)
			cal_camerarx_i913_errata(cal->phy[i]);
	}

	return 0;
}