// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2013 Red Hat
 * Copyright (c) 2014-2018, 2020-2021 The Linux Foundation. All rights reserved.
 * Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 *
 * Author: Rob Clark <robdclark@gmail.com>
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/debugfs.h>
#include <linux/kthread.h>
#include <linux/seq_file.h>

#include <drm/drm_atomic.h>
#include <drm/drm_crtc.h>
#include <drm/drm_file.h>
#include <drm/drm_probe_helper.h>

#include "msm_drv.h"
#include "dpu_kms.h"
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_hw_dspp.h"
#include "dpu_hw_dsc.h"
#include "dpu_hw_merge3d.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_crtc.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"
#include "disp/msm_disp_snapshot.h"

#define DPU_DEBUG_ENC(e, fmt, ...) DRM_DEBUG_ATOMIC("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC(e, fmt, ...) DPU_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC_RATELIMITED(e, fmt, ...) DPU_ERROR_RATELIMITED("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

/*
 * Two to anticipate panels that can do cmd/vid dynamic switching;
 * the plan is to create all possible physical encoder types and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

#define IDLE_SHORT_TIMEOUT	1

#define MAX_HDISPLAY_SPLIT 1080

/* timeout in frames waiting for frame done */
#define DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES 5

/**
 * enum dpu_enc_rc_events - events for resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks. Regardless of the previous
 *	state, the resource should be in ON state at the end of this event.
 * @DPU_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @DPU_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, leaves the RC state
 *	machine in the PRE_OFF state. It should be followed by the STOP event as
 *	part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @DPU_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and change the resource state
 *	to IDLE.
 */
enum dpu_enc_rc_events {
	DPU_ENC_RC_EVENT_KICKOFF = 1,
	DPU_ENC_RC_EVENT_FRAME_DONE,
	DPU_ENC_RC_EVENT_PRE_STOP,
	DPU_ENC_RC_EVENT_STOP,
	DPU_ENC_RC_EVENT_ENTER_IDLE
};

/*
 * enum dpu_enc_rc_states - states that the resource control maintains
 * @DPU_ENC_RC_STATE_OFF: Resource is in OFF state
 * @DPU_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @DPU_ENC_RC_STATE_ON: Resource is in ON state
 * @DPU_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum dpu_enc_rc_states {
	DPU_ENC_RC_STATE_OFF,
	DPU_ENC_RC_STATE_PRE_OFF,
	DPU_ENC_RC_STATE_ON,
	DPU_ENC_RC_STATE_IDLE
};

/**
 * struct dpu_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @enabled:		True if the encoder is active, protected by enc_lock
 * @commit_done_timedout: True if there has been a timeout on commit after
 *			enabling the encoder.
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared at disable.
 * @cur_slave:		As above, but for the slave encoder.
 * @hw_pp:		Handle to the pingpong blocks used for the display. The
 *			number of pingpong blocks used can differ from num_phys_encs.
 * @hw_dsc:		Handle to the DSC blocks used for the display.
 * @dsc_mask:		Bitmask of used DSC blocks.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc:		Pointer to the currently assigned crtc. Normally you
 *			would use crtc->state->encoder_mask to determine the
 *			link between encoder/crtc. However in this case we need
 *			to track crtc in the disable() hook which is called
 *			_after_ encoder_mask is cleared.
 * @connector:		If a mode is set, cached pointer to the active connector
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder
 *			create/destroy/enable/disable
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @frame_done_timeout_ms:	frame done timeout in ms
 * @frame_done_timer:	watchdog timer for frame done event
 * @disp_info:		local copy of msm_display_info struct
 * @idle_pc_supported:	indicates whether idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @topology:		topology of the display
 * @idle_timeout:	idle timeout duration in milliseconds
 * @wide_bus_en:	wide bus is enabled on this interface
 * @dsc:		drm_dsc_config pointer, for DSC-enabled encoders
 */
struct dpu_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;

	bool enabled;
	bool commit_done_timedout;

	unsigned int num_phys_encs;
	struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct dpu_encoder_phys *cur_master;
	struct dpu_encoder_phys *cur_slave;
	struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];

	unsigned int dsc_mask;

	bool intfs_swapped;

	struct drm_crtc *crtc;
	struct drm_connector *connector;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout_ms;
	struct timer_list frame_done_timer;

	struct msm_display_info disp_info;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum dpu_enc_rc_states rc_state;
	struct delayed_work delayed_off_work;
	struct msm_display_topology topology;

	u32 idle_timeout;

	bool wide_bus_en;

	/* DSC configuration */
	struct drm_dsc_config *dsc;
};

#define to_dpu_encoder_virt(x) container_of(x, struct dpu_encoder_virt, base)

static u32 dither_matrix[DITHER_MATRIX_SZ] = {
	15, 7, 13, 5, 3, 11, 1, 9, 12, 4, 14, 6, 0, 8, 2, 10
};

bool dpu_encoder_is_widebus_enabled(const struct drm_encoder *drm_enc)
{
	const struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);

	return dpu_enc->wide_bus_en;
}

bool dpu_encoder_is_dsc_enabled(const struct drm_encoder *drm_enc)
{
	const struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);

	return dpu_enc->dsc ? true : false;
}

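/*
 * Number of interfaces on this virtual encoder that can report CRC (MISR)
 * values, i.e. whose hw interface implements both setup_misr and
 * collect_misr.
 */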
int dpu_encoder_get_crc_values_cnt(const struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	int i, num_intf = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->hw_intf && phys->hw_intf->ops.setup_misr
				&& phys->hw_intf->ops.collect_misr)
			num_intf++;
	}

	return num_intf;
}

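/* Arm MISR/CRC collection on every interface that supports it. */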
void dpu_encoder_setup_misr(const struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;

	int i;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys->hw_intf || !phys->hw_intf->ops.setup_misr)
			continue;

		phys->hw_intf->ops.setup_misr(phys->hw_intf);
	}
}

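/*
 * Collect the MISR (CRC) value from each capable interface into crcs[],
 * starting at index 'pos'. Returns the number of entries added, or a
 * negative error code if collection fails.
 */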
int dpu_encoder_get_crc(const struct drm_encoder *drm_enc, u32 *crcs, int pos)
{
	struct dpu_encoder_virt *dpu_enc;

	int i, rc = 0, entries_added = 0;

	if (!drm_enc->crtc) {
		DRM_ERROR("no crtc found for encoder %d\n", drm_enc->index);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys->hw_intf || !phys->hw_intf->ops.collect_misr)
			continue;

		rc = phys->hw_intf->ops.collect_misr(phys->hw_intf, &crcs[pos + entries_added]);
		if (rc)
			return rc;
		entries_added++;
	}

	return entries_added;
}

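/*
 * Program dithering on a pingpong block: a 6 bits-per-component panel gets
 * the 4x4 dither matrix, any other depth has dithering disabled.
 */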
static void _dpu_encoder_setup_dither(struct dpu_hw_pingpong *hw_pp, unsigned bpc)
{
	struct dpu_hw_dither_cfg dither_cfg = { 0 };

	if (!hw_pp->ops.setup_dither)
		return;

	switch (bpc) {
	case 6:
		dither_cfg.c0_bitdepth = 6;
		dither_cfg.c1_bitdepth = 6;
		dither_cfg.c2_bitdepth = 6;
		dither_cfg.c3_bitdepth = 6;
		dither_cfg.temporal_en = 0;
		break;
	default:
		hw_pp->ops.setup_dither(hw_pp, NULL);
		return;
	}

	memcpy(&dither_cfg.matrix, dither_matrix,
			sizeof(u32) * DITHER_MATRIX_SZ);

	hw_pp->ops.setup_dither(hw_pp, &dither_cfg);
}

static char *dpu_encoder_helper_get_intf_type(enum dpu_intf_mode intf_mode)
{
	switch (intf_mode) {
	case INTF_MODE_VIDEO:
		return "INTF_MODE_VIDEO";
	case INTF_MODE_CMD:
		return "INTF_MODE_CMD";
	case INTF_MODE_WB_BLOCK:
		return "INTF_MODE_WB_BLOCK";
	case INTF_MODE_WB_LINE:
		return "INTF_MODE_WB_LINE";
	default:
		return "INTF_MODE_UNKNOWN";
	}
}

void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	DRM_ERROR("irq timeout id=%u, intf_mode=%s intf=%d wb=%d, pp=%d, intr=%d\n",
			DRMID(phys_enc->parent),
			dpu_encoder_helper_get_intf_type(phys_enc->intf_mode),
			phys_enc->hw_intf ? phys_enc->hw_intf->idx - INTF_0 : -1,
			phys_enc->hw_wb ? phys_enc->hw_wb->idx - WB_0 : -1,
			phys_enc->hw_pp->idx - PINGPONG_0, intr_idx);

	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc,
				DPU_ENCODER_FRAME_EVENT_ERROR);
}

static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
		u32 irq_idx, struct dpu_encoder_wait_info *info);

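/*
 * Wait for an encoder interrupt with a timeout. If the wait times out but
 * the interrupt status register shows that the interrupt did fire, the
 * handler 'func' is invoked directly and the wait is treated as successful;
 * otherwise -ETIMEDOUT is returned.
 */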
int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
		int irq_idx,
		void (*func)(void *arg),
		struct dpu_encoder_wait_info *wait_info)
{
	u32 irq_status;
	int ret;

	if (!wait_info) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DRM_ERROR("encoder is disabled id=%u, callback=%ps, IRQ=[%d, %d]\n",
			  DRMID(phys_enc->parent), func,
			  DPU_IRQ_REG(irq_idx), DPU_IRQ_BIT(irq_idx));
		return -EWOULDBLOCK;
	}

	if (irq_idx < 0) {
		DRM_DEBUG_KMS("skip irq wait id=%u, callback=%ps\n",
			      DRMID(phys_enc->parent), func);
		return 0;
	}

	DRM_DEBUG_KMS("id=%u, callback=%ps, IRQ=[%d, %d], pp=%d, pending_cnt=%d\n",
		      DRMID(phys_enc->parent), func,
		      DPU_IRQ_REG(irq_idx), DPU_IRQ_BIT(irq_idx), phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(wait_info->atomic_cnt));

	ret = dpu_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq_idx,
			wait_info);

	if (ret <= 0) {
		irq_status = dpu_core_irq_read(phys_enc->dpu_kms, irq_idx);
		if (irq_status) {
			unsigned long flags;

			DRM_DEBUG_KMS("IRQ=[%d, %d] not triggered id=%u, callback=%ps, pp=%d, atomic_cnt=%d\n",
				      DPU_IRQ_REG(irq_idx), DPU_IRQ_BIT(irq_idx),
				      DRMID(phys_enc->parent), func,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
			local_irq_save(flags);
			func(phys_enc);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			DRM_DEBUG_KMS("IRQ=[%d, %d] timeout id=%u, callback=%ps, pp=%d, atomic_cnt=%d\n",
				      DPU_IRQ_REG(irq_idx), DPU_IRQ_BIT(irq_idx),
				      DRMID(phys_enc->parent), func,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
		}
	} else {
		ret = 0;
		trace_dpu_enc_irq_wait_success(DRMID(phys_enc->parent),
			func, irq_idx,
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	return ret;
}

int dpu_encoder_get_vsync_count(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	struct dpu_encoder_phys *phys = dpu_enc ? dpu_enc->cur_master : NULL;
	return phys ? atomic_read(&phys->vsync_cnt) : 0;
}

int dpu_encoder_get_linecount(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	int linecount = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	phys = dpu_enc ? dpu_enc->cur_master : NULL;

	if (phys && phys->ops.get_line_count)
		linecount = phys->ops.get_line_count(phys);

	return linecount;
}

static void dpu_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.destroy) {
			phys->ops.destroy(phys);
			--dpu_enc->num_phys_encs;
			dpu_enc->phys_encs[i] = NULL;
		}
	}

	if (dpu_enc->num_phys_encs)
		DPU_ERROR_ENC(dpu_enc, "expected 0 num_phys_encs not %d\n",
				dpu_enc->num_phys_encs);
	dpu_enc->num_phys_encs = 0;
	mutex_unlock(&dpu_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&dpu_enc->enc_lock);
}

void dpu_encoder_helper_split_config(
		struct dpu_encoder_phys *phys_enc,
		enum dpu_intf interface)
{
	struct dpu_encoder_virt *dpu_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct dpu_hw_mdp *hw_mdptop;
	struct msm_display_info *disp_info;

	if (!phys_enc->hw_mdptop || !phys_enc->parent) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &dpu_enc->disp_info;

	if (disp_info->intf_type != INTF_DSI)
		return;

	/*
	 * Disable split modes since the encoder will be operating as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		DPU_DEBUG_ENC(dpu_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	}
}

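/*
 * DSC merge is needed when more DSC blocks than interfaces are in use,
 * i.e. two DSC encoders feeding a single interface.
 */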
bool dpu_encoder_use_dsc_merge(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	int i, intf_count = 0, num_dsc = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

	/* See dpu_encoder_get_topology, we only support 2:2:1 topology */
	if (dpu_enc->dsc)
		num_dsc = 2;

	return (num_dsc > 0) && (num_dsc > intf_count);
}

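/*
 * Fetch the drm_dsc_config from the DSI host driving this encoder;
 * encoders on other interface types carry no DSC configuration.
 */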
static struct drm_dsc_config *dpu_encoder_get_dsc_config(struct drm_encoder *drm_enc)
{
	struct msm_drm_private *priv = drm_enc->dev->dev_private;
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	int index = dpu_enc->disp_info.h_tile_instance[0];

	if (dpu_enc->disp_info.intf_type == INTF_DSI)
		return msm_dsi_get_dsc_config(priv->dsi[index]);

	return NULL;
}

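/*
 * Work out the layer mixer / DSPP / DSC / interface requirements for the
 * given mode; see the topology selection comment in the function body for
 * the supported combinations.
 */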
static struct msm_display_topology dpu_encoder_get_topology(
			struct dpu_encoder_virt *dpu_enc,
			struct dpu_kms *dpu_kms,
			struct drm_display_mode *mode,
			struct drm_crtc_state *crtc_state,
			struct drm_dsc_config *dsc)
{
	struct msm_display_topology topology = {0};
	int i, intf_count = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

	/* Datapath topology selection
	 *
	 * Dual display
	 * 2 LM, 2 INTF (Split display using 2 interfaces)
	 *
	 * Single display
	 * 1 LM, 1 INTF
	 * 2 LM, 1 INTF (stream merge to support high resolution interfaces)
	 *
	 * Add dspps to the reservation requirements if ctm is requested
	 */
	if (intf_count == 2)
		topology.num_lm = 2;
	else if (!dpu_kms->catalog->caps->has_3d_merge)
		topology.num_lm = 1;
	else
		topology.num_lm = (mode->hdisplay > MAX_HDISPLAY_SPLIT) ? 2 : 1;

	if (crtc_state->ctm)
		topology.num_dspp = topology.num_lm;

	topology.num_intf = intf_count;

	if (dsc) {
		/*
		 * In case of Display Stream Compression (DSC), we would use
		 * 2 DSC encoders, 2 layer mixers and 1 interface
		 * this is power optimal and can drive up to (including) 4k
		 * screens
		 */
		topology.num_dsc = 2;
		topology.num_lm = 2;
		topology.num_intf = 1;
	}

	return topology;
}

static int dpu_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct drm_display_mode *adj_mode;
	struct msm_display_topology topology;
	struct dpu_global_state *global_state;
	struct drm_dsc_config *dsc;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		DPU_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != NULL, crtc_state != NULL, conn_state != NULL);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	adj_mode = &crtc_state->adjusted_mode;
	global_state = dpu_kms_get_global_state(crtc_state->state);
	if (IS_ERR(global_state))
		return PTR_ERR(global_state);

	trace_dpu_enc_atomic_check(DRMID(drm_enc));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		if (ret) {
			DPU_ERROR_ENC(dpu_enc,
					"mode unsupported, phys idx %d\n", i);
			return ret;
		}
	}

	dsc = dpu_encoder_get_dsc_config(drm_enc);

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode, crtc_state, dsc);

	/*
	 * Release and allocate resources on every modeset.
	 * Don't allocate when active is false.
	 */
	if (drm_atomic_crtc_needs_modeset(crtc_state)) {
		dpu_rm_release(global_state, drm_enc);

		if (!crtc_state->active_changed || crtc_state->enable)
			ret = dpu_rm_reserve(&dpu_kms->rm, global_state,
					drm_enc, crtc_state, topology);
	}

	trace_dpu_enc_atomic_check_flags(DRMID(drm_enc), adj_mode->flags);

	return ret;
}

static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
		struct msm_display_info *disp_info)
{
	struct dpu_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	struct dpu_encoder_phys *phys_enc;
	int i;

	if (!dpu_enc || !disp_info) {
		DPU_ERROR("invalid param dpu_enc:%d or disp_info:%d\n",
				dpu_enc != NULL, disp_info != NULL);
		return;
	} else if (dpu_enc->num_phys_encs > ARRAY_SIZE(dpu_enc->hw_pp)) {
		DPU_ERROR("invalid num phys enc %d/%d\n",
				dpu_enc->num_phys_encs,
				(int) ARRAY_SIZE(dpu_enc->hw_pp));
		return;
	}

	drm_enc = &dpu_enc->base;
	/* these pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	dpu_kms = to_dpu_kms(priv->kms);
	hw_mdptop = dpu_kms->hw_mdp;
	if (!hw_mdptop) {
		DPU_ERROR("invalid mdptop\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->is_cmd_mode) {
		for (i = 0; i < dpu_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = dpu_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = dpu_enc->num_phys_encs;
		vsync_cfg.frame_rate = drm_mode_vrefresh(&dpu_enc->base.crtc->state->adjusted_mode);

		if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = DPU_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = DPU_VSYNC0_SOURCE_GPIO;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);

		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			phys_enc = dpu_enc->phys_encs[i];

			if (phys_enc->has_intf_te && phys_enc->hw_intf->ops.vsync_sel)
				phys_enc->hw_intf->ops.vsync_sel(phys_enc->hw_intf,
						vsync_cfg.vsync_source);
		}
	}
}

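/* Enable or disable encoder interrupts on every physical encoder. */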
static void _dpu_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	DPU_DEBUG_ENC(dpu_enc, "enable:%d\n", enable);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.irq_control)
			phys->ops.irq_control(phys, enable);
	}

}

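/*
 * Enable or disable the DPU core clocks (via runtime PM) together with the
 * encoder interrupts.
 */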
static void _dpu_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_encoder_virt *dpu_enc;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_rc_helper(DRMID(drm_enc), enable);

	if (!dpu_enc->cur_master) {
		DPU_ERROR("encoder master not set\n");
		return;
	}

	if (enable) {
		/* enable DPU core clks */
		pm_runtime_get_sync(&dpu_kms->pdev->dev);

		/* enable all the irq */
		_dpu_encoder_irq_control(drm_enc, true);

	} else {
		/* disable all the irq */
		_dpu_encoder_irq_control(drm_enc, false);

		/* disable DPU core clks */
		pm_runtime_put_sync(&dpu_kms->pdev->dev);
	}

}

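/*
 * Resource control state machine for the virtual encoder; see
 * enum dpu_enc_rc_events for the semantics of each event.
 */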
static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = !dpu_enc->disp_info.is_cmd_mode;

	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and MODESET
	 * events and return early for other events (ie wb display).
	 */
	if (!dpu_enc->idle_pc_supported &&
			(sw_event != DPU_ENC_RC_EVENT_KICKOFF &&
			sw_event != DPU_ENC_RC_EVENT_STOP &&
			sw_event != DPU_ENC_RC_EVENT_PRE_STOP))
		return 0;

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event, dpu_enc->idle_pc_supported,
			 dpu_enc->rc_state, "begin");

	switch (sw_event) {
	case DPU_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_ATOMIC("id:%u, sw_event:%d, rc in ON state\n",
					DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state != DPU_ENC_RC_STATE_OFF &&
				dpu_enc->rc_state != DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_ATOMIC("id:%u, sw_event:%d, rc in state %d\n",
					DRMID(drm_enc), sw_event,
					dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		if (is_vid_mode && dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE)
			_dpu_encoder_irq_control(drm_enc, true);
		else
			_dpu_encoder_resource_control_helper(drm_enc, true);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_ON;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"kickoff");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * The mutex lock is not used here because this event happens
		 * in interrupt context. Locking is also not required because
		 * the other events, like KICKOFF and STOP, do a wait-for-idle
		 * before executing the resource control.
		 */
		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%d, sw_event:%d,rc:%d-unexpected\n",
					DRMID(drm_enc), sw_event,
					dpu_enc->rc_state);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (dpu_crtc_frame_pending(drm_enc->crtc) > 1) {
			DRM_DEBUG_KMS("id:%d skip schedule work\n",
					DRMID(drm_enc));
			return 0;
		}

		queue_delayed_work(priv->wq, &dpu_enc->delayed_off_work,
				   msecs_to_jiffies(dpu_enc->idle_timeout));

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"frame done");
		break;

	case DPU_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		if (is_vid_mode &&
			  dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			_dpu_encoder_irq_control(drm_enc, true);
		}
		/* skip if is already OFF or IDLE, resources are off already */
		else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF ||
				dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in %d state\n",
					DRMID(drm_enc), sw_event,
					dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		dpu_enc->rc_state = DPU_ENC_RC_STATE_PRE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"pre stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_STOP:
		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in OFF state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF) {
			DRM_DEBUG_KMS("id: %u, sw_event:%d, rc in OFF state\n",
					DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc in state %d\n",
					DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * expect to arrive here only if in either idle state or pre-off
		 * and in IDLE state the resources are already disabled
		 */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_PRE_OFF)
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc:%d !ON state\n",
					DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (dpu_enc->frame_busy_mask[0]) {
			DRM_ERROR("id:%u, sw_event:%d, rc:%d frame pending\n",
					DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		if (is_vid_mode)
			_dpu_encoder_irq_control(drm_enc, false);
		else
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_IDLE;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"idle");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	default:
		DRM_ERROR("id:%u, unexpected sw_event: %d\n", DRMID(drm_enc),
			  sw_event);
		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				"error");
		break;
	}

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
			dpu_enc->idle_pc_supported, dpu_enc->rc_state,
			"end");
	return 0;
}

1013d4e5f450SAbhinav Kumar void dpu_encoder_prepare_wb_job(struct drm_encoder *drm_enc,
1014d4e5f450SAbhinav Kumar struct drm_writeback_job *job)
1015d4e5f450SAbhinav Kumar {
1016d4e5f450SAbhinav Kumar struct dpu_encoder_virt *dpu_enc;
1017d4e5f450SAbhinav Kumar int i;
1018d4e5f450SAbhinav Kumar
1019d4e5f450SAbhinav Kumar dpu_enc = to_dpu_encoder_virt(drm_enc);
1020d4e5f450SAbhinav Kumar
1021d4e5f450SAbhinav Kumar for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1022d4e5f450SAbhinav Kumar struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1023d4e5f450SAbhinav Kumar
1024d4e5f450SAbhinav Kumar if (phys->ops.prepare_wb_job)
1025d4e5f450SAbhinav Kumar phys->ops.prepare_wb_job(phys, job);
1026d4e5f450SAbhinav Kumar
1027d4e5f450SAbhinav Kumar }
1028d4e5f450SAbhinav Kumar }
1029d4e5f450SAbhinav Kumar
1030d4e5f450SAbhinav Kumar void dpu_encoder_cleanup_wb_job(struct drm_encoder *drm_enc,
1031d4e5f450SAbhinav Kumar struct drm_writeback_job *job)
1032d4e5f450SAbhinav Kumar {
1033d4e5f450SAbhinav Kumar struct dpu_encoder_virt *dpu_enc;
1034d4e5f450SAbhinav Kumar int i;
1035d4e5f450SAbhinav Kumar
1036d4e5f450SAbhinav Kumar dpu_enc = to_dpu_encoder_virt(drm_enc);
1037d4e5f450SAbhinav Kumar
1038d4e5f450SAbhinav Kumar for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1039d4e5f450SAbhinav Kumar struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1040d4e5f450SAbhinav Kumar
1041d4e5f450SAbhinav Kumar if (phys->ops.cleanup_wb_job)
1042d4e5f450SAbhinav Kumar phys->ops.cleanup_wb_job(phys, job);
1043d4e5f450SAbhinav Kumar
1044d4e5f450SAbhinav Kumar }
1045d4e5f450SAbhinav Kumar }
1046d4e5f450SAbhinav Kumar
1047764332bfSDmitry Baryshkov static void dpu_encoder_virt_atomic_mode_set(struct drm_encoder *drm_enc,
1048764332bfSDmitry Baryshkov struct drm_crtc_state *crtc_state,
1049764332bfSDmitry Baryshkov struct drm_connector_state *conn_state)
105025fdd593SJeykumar Sankaran {
105125fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc;
105225fdd593SJeykumar Sankaran struct msm_drm_private *priv;
105325fdd593SJeykumar Sankaran struct dpu_kms *dpu_kms;
1054b107603bSJeykumar Sankaran struct dpu_crtc_state *cstate;
1055de3916c7SDrew Davenport struct dpu_global_state *global_state;
1056b954fa6bSDrew Davenport struct dpu_hw_blk *hw_pp[MAX_CHANNELS_PER_ENC];
1057b954fa6bSDrew Davenport struct dpu_hw_blk *hw_ctl[MAX_CHANNELS_PER_ENC];
1058b954fa6bSDrew Davenport struct dpu_hw_blk *hw_lm[MAX_CHANNELS_PER_ENC];
1059e47616dfSKalyan Thota struct dpu_hw_blk *hw_dspp[MAX_CHANNELS_PER_ENC] = { NULL };
106058dca981SVinod Koul struct dpu_hw_blk *hw_dsc[MAX_CHANNELS_PER_ENC];
106158dca981SVinod Koul int num_lm, num_ctl, num_pp, num_dsc;
106258dca981SVinod Koul unsigned int dsc_mask = 0;
1063ef58e0adSDmitry Baryshkov int i;
106425fdd593SJeykumar Sankaran
106525fdd593SJeykumar Sankaran if (!drm_enc) {
106625fdd593SJeykumar Sankaran DPU_ERROR("invalid encoder\n");
106725fdd593SJeykumar Sankaran return;
106825fdd593SJeykumar Sankaran }
106925fdd593SJeykumar Sankaran
107025fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
107125fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc, "\n");
107225fdd593SJeykumar Sankaran
107325fdd593SJeykumar Sankaran priv = drm_enc->dev->dev_private;
107425fdd593SJeykumar Sankaran dpu_kms = to_dpu_kms(priv->kms);
107525fdd593SJeykumar Sankaran
1076de3916c7SDrew Davenport global_state = dpu_kms_get_existing_global_state(dpu_kms);
1077de3916c7SDrew Davenport if (IS_ERR_OR_NULL(global_state)) {
1078de3916c7SDrew Davenport DPU_ERROR("Failed to get global state");
1079de3916c7SDrew Davenport return;
1080de3916c7SDrew Davenport }
1081de3916c7SDrew Davenport
108225fdd593SJeykumar Sankaran trace_dpu_enc_mode_set(DRMID(drm_enc));
108325fdd593SJeykumar Sankaran
1084de3916c7SDrew Davenport /* Query resource that have been reserved in atomic check step. */
1085de3916c7SDrew Davenport num_pp = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
1086de3916c7SDrew Davenport drm_enc->base.id, DPU_HW_BLK_PINGPONG, hw_pp,
1087de3916c7SDrew Davenport ARRAY_SIZE(hw_pp));
1088de3916c7SDrew Davenport num_ctl = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
1089de3916c7SDrew Davenport drm_enc->base.id, DPU_HW_BLK_CTL, hw_ctl, ARRAY_SIZE(hw_ctl));
1090de3916c7SDrew Davenport num_lm = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
1091de3916c7SDrew Davenport drm_enc->base.id, DPU_HW_BLK_LM, hw_lm, ARRAY_SIZE(hw_lm));
1092ff8b941aSLee Jones dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
1093e47616dfSKalyan Thota drm_enc->base.id, DPU_HW_BLK_DSPP, hw_dspp,
1094e47616dfSKalyan Thota ARRAY_SIZE(hw_dspp));
109525fdd593SJeykumar Sankaran
1096b954fa6bSDrew Davenport for (i = 0; i < MAX_CHANNELS_PER_ENC; i++)
1097b954fa6bSDrew Davenport dpu_enc->hw_pp[i] = i < num_pp ? to_dpu_hw_pingpong(hw_pp[i])
1098b954fa6bSDrew Davenport : NULL;
1099b107603bSJeykumar Sankaran
110058dca981SVinod Koul num_dsc = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
110158dca981SVinod Koul drm_enc->base.id, DPU_HW_BLK_DSC,
110258dca981SVinod Koul hw_dsc, ARRAY_SIZE(hw_dsc));
110358dca981SVinod Koul for (i = 0; i < num_dsc; i++) {
110458dca981SVinod Koul dpu_enc->hw_dsc[i] = to_dpu_hw_dsc(hw_dsc[i]);
110558dca981SVinod Koul dsc_mask |= BIT(dpu_enc->hw_dsc[i]->idx - DSC_0);
110658dca981SVinod Koul }
110758dca981SVinod Koul
110858dca981SVinod Koul dpu_enc->dsc_mask = dsc_mask;
110958dca981SVinod Koul
1110764332bfSDmitry Baryshkov cstate = to_dpu_crtc_state(crtc_state);
1111b107603bSJeykumar Sankaran
1112b107603bSJeykumar Sankaran for (i = 0; i < num_lm; i++) {
1113b107603bSJeykumar Sankaran int ctl_idx = (i < num_ctl) ? i : (num_ctl-1);
1114b107603bSJeykumar Sankaran
1115b954fa6bSDrew Davenport cstate->mixers[i].hw_lm = to_dpu_hw_mixer(hw_lm[i]);
1116b954fa6bSDrew Davenport cstate->mixers[i].lm_ctl = to_dpu_hw_ctl(hw_ctl[ctl_idx]);
1117e47616dfSKalyan Thota cstate->mixers[i].hw_dspp = to_dpu_hw_dspp(hw_dspp[i]);
1118b107603bSJeykumar Sankaran }
1119b107603bSJeykumar Sankaran
1120b107603bSJeykumar Sankaran cstate->num_mixers = num_lm;
1121b107603bSJeykumar Sankaran
112225fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
112325fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
112425fdd593SJeykumar Sankaran
1125adde6c68SDmitry Baryshkov phys->hw_pp = dpu_enc->hw_pp[i];
1126adde6c68SDmitry Baryshkov if (!phys->hw_pp) {
1127b6fadcadSDrew Davenport DPU_ERROR_ENC(dpu_enc,
1128b6fadcadSDrew Davenport "no pp block assigned at idx: %d\n", i);
1129de3916c7SDrew Davenport return;
113025fdd593SJeykumar Sankaran }
11313f4db2e2SJeykumar Sankaran
1132adde6c68SDmitry Baryshkov phys->hw_ctl = i < num_ctl ? to_dpu_hw_ctl(hw_ctl[i]) : NULL;
1133adde6c68SDmitry Baryshkov if (!phys->hw_ctl) {
1134b6fadcadSDrew Davenport DPU_ERROR_ENC(dpu_enc,
1135b6fadcadSDrew Davenport "no ctl block assigned at idx: %d\n", i);
1136de3916c7SDrew Davenport return;
11373f4db2e2SJeykumar Sankaran }
11383f4db2e2SJeykumar Sankaran
1139764332bfSDmitry Baryshkov phys->cached_mode = crtc_state->adjusted_mode;
1140764332bfSDmitry Baryshkov if (phys->ops.atomic_mode_set)
1141764332bfSDmitry Baryshkov phys->ops.atomic_mode_set(phys, crtc_state, conn_state);
114225fdd593SJeykumar Sankaran }
114325fdd593SJeykumar Sankaran }
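/*
 * For example, under a hypothetical reservation of two mixers and a single
 * CTL (num_lm = 2, num_ctl = 1), the mixer loop above pairs both mixers
 * with the same CTL: ctl_idx evaluates to 0 for i = 0 and, because i is
 * then >= num_ctl, is clamped to num_ctl - 1 = 0 for i = 1 as well.
 */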
114425fdd593SJeykumar Sankaran
114525fdd593SJeykumar Sankaran static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
114625fdd593SJeykumar Sankaran {
114725fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = NULL;
11483c128638SKalyan Thota int i;
114925fdd593SJeykumar Sankaran
1150422ed755SDrew Davenport if (!drm_enc || !drm_enc->dev) {
115125fdd593SJeykumar Sankaran DPU_ERROR("invalid parameters\n");
115225fdd593SJeykumar Sankaran return;
115325fdd593SJeykumar Sankaran }
115425fdd593SJeykumar Sankaran
115525fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
115625fdd593SJeykumar Sankaran if (!dpu_enc || !dpu_enc->cur_master) {
115725fdd593SJeykumar Sankaran DPU_ERROR("invalid dpu encoder/master\n");
115825fdd593SJeykumar Sankaran return;
115925fdd593SJeykumar Sankaran }
116025fdd593SJeykumar Sankaran
1161d13e36d7SAbhinav Kumar
11625a7a86bfSDmitry Baryshkov if (dpu_enc->disp_info.intf_type == INTF_DP &&
1163d13e36d7SAbhinav Kumar dpu_enc->cur_master->hw_mdptop &&
1164d13e36d7SAbhinav Kumar dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select)
1165d13e36d7SAbhinav Kumar dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select(
1166d13e36d7SAbhinav Kumar dpu_enc->cur_master->hw_mdptop);
1167d13e36d7SAbhinav Kumar
116825fdd593SJeykumar Sankaran _dpu_encoder_update_vsync_source(dpu_enc, &dpu_enc->disp_info);
11693c128638SKalyan Thota
11705a7a86bfSDmitry Baryshkov if (dpu_enc->disp_info.intf_type == INTF_DSI &&
1171b8afe9f8SRob Clark !WARN_ON(dpu_enc->num_phys_encs == 0)) {
11726b6921e5SDmitry Baryshkov unsigned bpc = dpu_enc->connector->display_info.bpc;
1173b8afe9f8SRob Clark for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
1174b8afe9f8SRob Clark if (!dpu_enc->hw_pp[i])
1175b8afe9f8SRob Clark continue;
1176b8afe9f8SRob Clark _dpu_encoder_setup_dither(dpu_enc->hw_pp[i], bpc);
11773c128638SKalyan Thota }
11783c128638SKalyan Thota }
117925fdd593SJeykumar Sankaran }
118025fdd593SJeykumar Sankaran
118118a63b3cSSean Paul void dpu_encoder_virt_runtime_resume(struct drm_encoder *drm_enc)
118225fdd593SJeykumar Sankaran {
118318a63b3cSSean Paul struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
118425fdd593SJeykumar Sankaran
118518a63b3cSSean Paul mutex_lock(&dpu_enc->enc_lock);
118625fdd593SJeykumar Sankaran
118718a63b3cSSean Paul if (!dpu_enc->enabled)
118818a63b3cSSean Paul goto out;
118925fdd593SJeykumar Sankaran
119018a63b3cSSean Paul if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.restore)
119118a63b3cSSean Paul dpu_enc->cur_slave->ops.restore(dpu_enc->cur_slave);
119225fdd593SJeykumar Sankaran if (dpu_enc->cur_master && dpu_enc->cur_master->ops.restore)
119325fdd593SJeykumar Sankaran dpu_enc->cur_master->ops.restore(dpu_enc->cur_master);
119425fdd593SJeykumar Sankaran
119525fdd593SJeykumar Sankaran _dpu_encoder_virt_enable_helper(drm_enc);
119618a63b3cSSean Paul
119718a63b3cSSean Paul out:
119818a63b3cSSean Paul mutex_unlock(&dpu_enc->enc_lock);
119925fdd593SJeykumar Sankaran }
120025fdd593SJeykumar Sankaran
1201c0cd12a5SVinod Polimera static void dpu_encoder_virt_atomic_enable(struct drm_encoder *drm_enc,
1202c0cd12a5SVinod Polimera struct drm_atomic_state *state)
120325fdd593SJeykumar Sankaran {
120425fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = NULL;
120586b89080SJeykumar Sankaran int ret = 0;
120625fdd593SJeykumar Sankaran struct drm_display_mode *cur_mode = NULL;
120725fdd593SJeykumar Sankaran
120825fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
1209fba7427eSSean Paul
12109ed6141bSKuogee Hsieh dpu_enc->dsc = dpu_encoder_get_dsc_config(drm_enc);
12119ed6141bSKuogee Hsieh
1212fba7427eSSean Paul mutex_lock(&dpu_enc->enc_lock);
12138e7ef27eSDmitry Baryshkov
12148e7ef27eSDmitry Baryshkov dpu_enc->commit_done_timedout = false;
12158e7ef27eSDmitry Baryshkov
12163fb61718SAbhinav Kumar dpu_enc->connector = drm_atomic_get_new_connector_for_encoder(state, drm_enc);
12173fb61718SAbhinav Kumar
121825fdd593SJeykumar Sankaran cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;
121925fdd593SJeykumar Sankaran
122025fdd593SJeykumar Sankaran trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
122125fdd593SJeykumar Sankaran cur_mode->vdisplay);
122225fdd593SJeykumar Sankaran
122386b89080SJeykumar Sankaran /* always enable slave encoder before master */
122486b89080SJeykumar Sankaran if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.enable)
122586b89080SJeykumar Sankaran dpu_enc->cur_slave->ops.enable(dpu_enc->cur_slave);
122625fdd593SJeykumar Sankaran
122786b89080SJeykumar Sankaran if (dpu_enc->cur_master && dpu_enc->cur_master->ops.enable)
122886b89080SJeykumar Sankaran dpu_enc->cur_master->ops.enable(dpu_enc->cur_master);
122925fdd593SJeykumar Sankaran
123025fdd593SJeykumar Sankaran ret = dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
123125fdd593SJeykumar Sankaran if (ret) {
123225fdd593SJeykumar Sankaran DPU_ERROR_ENC(dpu_enc, "dpu resource control failed: %d\n",
123325fdd593SJeykumar Sankaran ret);
1234fba7427eSSean Paul goto out;
123525fdd593SJeykumar Sankaran }
123625fdd593SJeykumar Sankaran
123725fdd593SJeykumar Sankaran _dpu_encoder_virt_enable_helper(drm_enc);
1238fba7427eSSean Paul
1239fba7427eSSean Paul dpu_enc->enabled = true;
1240fba7427eSSean Paul
1241fba7427eSSean Paul out:
1242fba7427eSSean Paul mutex_unlock(&dpu_enc->enc_lock);
124325fdd593SJeykumar Sankaran }
124425fdd593SJeykumar Sankaran
1245c0cd12a5SVinod Polimera static void dpu_encoder_virt_atomic_disable(struct drm_encoder *drm_enc,
1246c0cd12a5SVinod Polimera struct drm_atomic_state *state)
124725fdd593SJeykumar Sankaran {
124825fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = NULL;
124911226978SVinod Polimera struct drm_crtc *crtc;
125011226978SVinod Polimera struct drm_crtc_state *old_state = NULL;
125125fdd593SJeykumar Sankaran int i = 0;
125225fdd593SJeykumar Sankaran
125325fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
125425fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc, "\n");
125525fdd593SJeykumar Sankaran
125611226978SVinod Polimera crtc = drm_atomic_get_old_crtc_for_encoder(state, drm_enc);
125711226978SVinod Polimera if (crtc)
125811226978SVinod Polimera old_state = drm_atomic_get_old_crtc_state(state, crtc);
125911226978SVinod Polimera
126011226978SVinod Polimera /*
126111226978SVinod Polimera * The encoder is already disabled if self refresh mode was set earlier,
126211226978SVinod Polimera * in the old_state for the corresponding crtc.
126311226978SVinod Polimera */
126411226978SVinod Polimera if (old_state && old_state->self_refresh_active)
126511226978SVinod Polimera return;
126611226978SVinod Polimera
1267fba7427eSSean Paul mutex_lock(&dpu_enc->enc_lock);
1268fba7427eSSean Paul dpu_enc->enabled = false;
1269fba7427eSSean Paul
127025fdd593SJeykumar Sankaran trace_dpu_enc_disable(DRMID(drm_enc));
127125fdd593SJeykumar Sankaran
127225fdd593SJeykumar Sankaran /* wait for idle */
1273801f49c8SDmitry Baryshkov dpu_encoder_wait_for_tx_complete(drm_enc);
127425fdd593SJeykumar Sankaran
127525fdd593SJeykumar Sankaran dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);
127625fdd593SJeykumar Sankaran
127725fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
127825fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
127925fdd593SJeykumar Sankaran
1280b6fadcadSDrew Davenport if (phys->ops.disable)
128125fdd593SJeykumar Sankaran phys->ops.disable(phys);
128225fdd593SJeykumar Sankaran }
128325fdd593SJeykumar Sankaran
12848ede2eccSKuogee Hsieh
128525fdd593SJeykumar Sankaran /* after phys waits for frame-done, should be no more frames pending */
128670df9610SSean Paul if (atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
128725fdd593SJeykumar Sankaran DPU_ERROR("enc%d timeout pending\n", drm_enc->base.id);
128825fdd593SJeykumar Sankaran del_timer_sync(&dpu_enc->frame_done_timer);
128925fdd593SJeykumar Sankaran }
129025fdd593SJeykumar Sankaran
129125fdd593SJeykumar Sankaran dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_STOP);
129225fdd593SJeykumar Sankaran
12936b6921e5SDmitry Baryshkov dpu_enc->connector = NULL;
129425fdd593SJeykumar Sankaran
129525fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n");
129625fdd593SJeykumar Sankaran
1297fba7427eSSean Paul mutex_unlock(&dpu_enc->enc_lock);
129825fdd593SJeykumar Sankaran }
129925fdd593SJeykumar Sankaran
1300004be386SDmitry Baryshkov static struct dpu_hw_intf *dpu_encoder_get_intf(const struct dpu_mdss_cfg *catalog,
1301004be386SDmitry Baryshkov struct dpu_rm *dpu_rm,
130225fdd593SJeykumar Sankaran enum dpu_intf_type type, u32 controller_id)
130325fdd593SJeykumar Sankaran {
130425fdd593SJeykumar Sankaran int i = 0;
130525fdd593SJeykumar Sankaran
13062709935bSDmitry Baryshkov if (type == INTF_WB)
1307004be386SDmitry Baryshkov return NULL;
13082709935bSDmitry Baryshkov
130925fdd593SJeykumar Sankaran for (i = 0; i < catalog->intf_count; i++) {
131025fdd593SJeykumar Sankaran if (catalog->intf[i].type == type
131125fdd593SJeykumar Sankaran && catalog->intf[i].controller_id == controller_id) {
1312004be386SDmitry Baryshkov return dpu_rm_get_intf(dpu_rm, catalog->intf[i].id);
131325fdd593SJeykumar Sankaran }
131425fdd593SJeykumar Sankaran }
131525fdd593SJeykumar Sankaran
1316004be386SDmitry Baryshkov return NULL;
1317e02a559aSAbhinav Kumar }
1318e02a559aSAbhinav Kumar
131959f0182aSDmitry Baryshkov void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
132025fdd593SJeykumar Sankaran struct dpu_encoder_phys *phy_enc)
132125fdd593SJeykumar Sankaran {
132225fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = NULL;
132325fdd593SJeykumar Sankaran unsigned long lock_flags;
132425fdd593SJeykumar Sankaran
132525fdd593SJeykumar Sankaran if (!drm_enc || !phy_enc)
132625fdd593SJeykumar Sankaran return;
132725fdd593SJeykumar Sankaran
132825fdd593SJeykumar Sankaran DPU_ATRACE_BEGIN("encoder_vblank_callback");
132925fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
133025fdd593SJeykumar Sankaran
1331c28d76d3SStephen Boyd atomic_inc(&phy_enc->vsync_cnt);
1332c28d76d3SStephen Boyd
133325fdd593SJeykumar Sankaran spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
1334e4914867SSean Paul if (dpu_enc->crtc)
1335e4914867SSean Paul dpu_crtc_vblank_callback(dpu_enc->crtc);
133625fdd593SJeykumar Sankaran spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
133725fdd593SJeykumar Sankaran
133825fdd593SJeykumar Sankaran DPU_ATRACE_END("encoder_vblank_callback");
133925fdd593SJeykumar Sankaran }
134025fdd593SJeykumar Sankaran
134159f0182aSDmitry Baryshkov void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
134225fdd593SJeykumar Sankaran struct dpu_encoder_phys *phy_enc)
134325fdd593SJeykumar Sankaran {
134425fdd593SJeykumar Sankaran if (!phy_enc)
134525fdd593SJeykumar Sankaran return;
134625fdd593SJeykumar Sankaran
134725fdd593SJeykumar Sankaran DPU_ATRACE_BEGIN("encoder_underrun_callback");
134825fdd593SJeykumar Sankaran atomic_inc(&phy_enc->underrun_cnt);
13492ec5b3dcSAbhinav Kumar
13502ec5b3dcSAbhinav Kumar /* trigger dump only on the first underrun */
13512ec5b3dcSAbhinav Kumar if (atomic_read(&phy_enc->underrun_cnt) == 1)
13522ec5b3dcSAbhinav Kumar msm_disp_snapshot_state(drm_enc->dev);
13532ec5b3dcSAbhinav Kumar
135425fdd593SJeykumar Sankaran trace_dpu_enc_underrun_cb(DRMID(drm_enc),
135525fdd593SJeykumar Sankaran atomic_read(&phy_enc->underrun_cnt));
135625fdd593SJeykumar Sankaran DPU_ATRACE_END("encoder_underrun_callback");
135725fdd593SJeykumar Sankaran }
135825fdd593SJeykumar Sankaran
1359e4914867SSean Paul void dpu_encoder_assign_crtc(struct drm_encoder *drm_enc, struct drm_crtc *crtc)
136025fdd593SJeykumar Sankaran {
136125fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
136225fdd593SJeykumar Sankaran unsigned long lock_flags;
136325fdd593SJeykumar Sankaran
136425fdd593SJeykumar Sankaran spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
1365e4914867SSean Paul /* crtc should always be cleared before re-assigning */
1366e4914867SSean Paul WARN_ON(crtc && dpu_enc->crtc);
1367e4914867SSean Paul dpu_enc->crtc = crtc;
136825fdd593SJeykumar Sankaran spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
1369a796ba2cSSean Paul }
1370a796ba2cSSean Paul
1371a796ba2cSSean Paul void dpu_encoder_toggle_vblank_for_crtc(struct drm_encoder *drm_enc,
1372a796ba2cSSean Paul struct drm_crtc *crtc, bool enable)
1373a796ba2cSSean Paul {
1374a796ba2cSSean Paul struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
1375a796ba2cSSean Paul unsigned long lock_flags;
137625fdd593SJeykumar Sankaran int i;
137725fdd593SJeykumar Sankaran
137825fdd593SJeykumar Sankaran trace_dpu_enc_vblank_cb(DRMID(drm_enc), enable);
137925fdd593SJeykumar Sankaran
138025fdd593SJeykumar Sankaran spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
1381a796ba2cSSean Paul if (dpu_enc->crtc != crtc) {
1382a796ba2cSSean Paul spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
1383a796ba2cSSean Paul return;
1384a796ba2cSSean Paul }
138525fdd593SJeykumar Sankaran spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
138625fdd593SJeykumar Sankaran
138725fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
138825fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
138925fdd593SJeykumar Sankaran
1390b6fadcadSDrew Davenport if (phys->ops.control_vblank_irq)
139125fdd593SJeykumar Sankaran phys->ops.control_vblank_irq(phys, enable);
139225fdd593SJeykumar Sankaran }
139325fdd593SJeykumar Sankaran }
139425fdd593SJeykumar Sankaran
139525fdd593SJeykumar Sankaran void dpu_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
139625fdd593SJeykumar Sankaran void (*frame_event_cb)(void *, u32 event),
139725fdd593SJeykumar Sankaran void *frame_event_cb_data)
139825fdd593SJeykumar Sankaran {
139925fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
140025fdd593SJeykumar Sankaran unsigned long lock_flags;
140125fdd593SJeykumar Sankaran bool enable;
140225fdd593SJeykumar Sankaran
140325fdd593SJeykumar Sankaran enable = frame_event_cb ? true : false;
140425fdd593SJeykumar Sankaran
140525fdd593SJeykumar Sankaran if (!drm_enc) {
140625fdd593SJeykumar Sankaran DPU_ERROR("invalid encoder\n");
140725fdd593SJeykumar Sankaran return;
140825fdd593SJeykumar Sankaran }
140925fdd593SJeykumar Sankaran trace_dpu_enc_frame_event_cb(DRMID(drm_enc), enable);
141025fdd593SJeykumar Sankaran
141125fdd593SJeykumar Sankaran spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
141225fdd593SJeykumar Sankaran dpu_enc->crtc_frame_event_cb = frame_event_cb;
141325fdd593SJeykumar Sankaran dpu_enc->crtc_frame_event_cb_data = frame_event_cb_data;
141425fdd593SJeykumar Sankaran spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
141525fdd593SJeykumar Sankaran }
141625fdd593SJeykumar Sankaran
141759f0182aSDmitry Baryshkov void dpu_encoder_frame_done_callback(
141825fdd593SJeykumar Sankaran struct drm_encoder *drm_enc,
141925fdd593SJeykumar Sankaran struct dpu_encoder_phys *ready_phys, u32 event)
142025fdd593SJeykumar Sankaran {
142125fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
142225fdd593SJeykumar Sankaran unsigned int i;
142325fdd593SJeykumar Sankaran
142425fdd593SJeykumar Sankaran if (event & (DPU_ENCODER_FRAME_EVENT_DONE
142525fdd593SJeykumar Sankaran | DPU_ENCODER_FRAME_EVENT_ERROR
142625fdd593SJeykumar Sankaran | DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
142725fdd593SJeykumar Sankaran
142825fdd593SJeykumar Sankaran if (!dpu_enc->frame_busy_mask[0]) {
142925fdd593SJeykumar Sankaran /*
143025fdd593SJeykumar Sankaran * suppress frame_done without waiter,
143125fdd593SJeykumar Sankaran * likely autorefresh
143225fdd593SJeykumar Sankaran */
1433da10e280SAbhinav Kumar trace_dpu_enc_frame_done_cb_not_busy(DRMID(drm_enc), event,
1434da10e280SAbhinav Kumar dpu_encoder_helper_get_intf_type(ready_phys->intf_mode),
14358ea432b8SDmitry Baryshkov ready_phys->hw_intf ? ready_phys->hw_intf->idx : -1,
14368ea432b8SDmitry Baryshkov ready_phys->hw_wb ? ready_phys->hw_wb->idx : -1);
143725fdd593SJeykumar Sankaran return;
143825fdd593SJeykumar Sankaran }
143925fdd593SJeykumar Sankaran
144025fdd593SJeykumar Sankaran /* One of the physical encoders has become idle */
144125fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
144225fdd593SJeykumar Sankaran if (dpu_enc->phys_encs[i] == ready_phys) {
144325fdd593SJeykumar Sankaran trace_dpu_enc_frame_done_cb(DRMID(drm_enc), i,
144425fdd593SJeykumar Sankaran dpu_enc->frame_busy_mask[0]);
1445b65bd045SSean Paul clear_bit(i, dpu_enc->frame_busy_mask);
144625fdd593SJeykumar Sankaran }
144725fdd593SJeykumar Sankaran }
144825fdd593SJeykumar Sankaran
144925fdd593SJeykumar Sankaran if (!dpu_enc->frame_busy_mask[0]) {
145070df9610SSean Paul atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
145125fdd593SJeykumar Sankaran del_timer(&dpu_enc->frame_done_timer);
145225fdd593SJeykumar Sankaran
145325fdd593SJeykumar Sankaran dpu_encoder_resource_control(drm_enc,
145425fdd593SJeykumar Sankaran DPU_ENC_RC_EVENT_FRAME_DONE);
145525fdd593SJeykumar Sankaran
145625fdd593SJeykumar Sankaran if (dpu_enc->crtc_frame_event_cb)
145725fdd593SJeykumar Sankaran dpu_enc->crtc_frame_event_cb(
145825fdd593SJeykumar Sankaran dpu_enc->crtc_frame_event_cb_data,
145925fdd593SJeykumar Sankaran event);
146025fdd593SJeykumar Sankaran }
146125fdd593SJeykumar Sankaran } else {
146225fdd593SJeykumar Sankaran if (dpu_enc->crtc_frame_event_cb)
146325fdd593SJeykumar Sankaran dpu_enc->crtc_frame_event_cb(
146425fdd593SJeykumar Sankaran dpu_enc->crtc_frame_event_cb_data, event);
146525fdd593SJeykumar Sankaran }
146625fdd593SJeykumar Sankaran }
146725fdd593SJeykumar Sankaran
1468e077fe75SJeykumar Sankaran static void dpu_encoder_off_work(struct work_struct *work)
146925fdd593SJeykumar Sankaran {
147025fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = container_of(work,
147125fdd593SJeykumar Sankaran struct dpu_encoder_virt, delayed_off_work.work);
147225fdd593SJeykumar Sankaran
147325fdd593SJeykumar Sankaran dpu_encoder_resource_control(&dpu_enc->base,
147425fdd593SJeykumar Sankaran DPU_ENC_RC_EVENT_ENTER_IDLE);
147525fdd593SJeykumar Sankaran
147625fdd593SJeykumar Sankaran dpu_encoder_frame_done_callback(&dpu_enc->base, NULL,
147725fdd593SJeykumar Sankaran DPU_ENCODER_FRAME_EVENT_IDLE);
147825fdd593SJeykumar Sankaran }
147925fdd593SJeykumar Sankaran
148025fdd593SJeykumar Sankaran /**
148125fdd593SJeykumar Sankaran * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
1482cca5ff94SLee Jones * @drm_enc: Pointer to drm encoder structure
1483cca5ff94SLee Jones * @phys: Pointer to physical encoder structure
1484cca5ff94SLee Jones * @extra_flush_bits: Additional bit mask to include in flush trigger
148525fdd593SJeykumar Sankaran */
148658fba464SSean Paul static void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
1487b4bb9f15SRob Clark struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
148825fdd593SJeykumar Sankaran {
148925fdd593SJeykumar Sankaran struct dpu_hw_ctl *ctl;
149025fdd593SJeykumar Sankaran int pending_kickoff_cnt;
149125fdd593SJeykumar Sankaran u32 ret = UINT_MAX;
149225fdd593SJeykumar Sankaran
149325fdd593SJeykumar Sankaran if (!phys->hw_pp) {
149425fdd593SJeykumar Sankaran DPU_ERROR("invalid pingpong hw\n");
149525fdd593SJeykumar Sankaran return;
149625fdd593SJeykumar Sankaran }
149725fdd593SJeykumar Sankaran
149825fdd593SJeykumar Sankaran ctl = phys->hw_ctl;
149999beed68SDrew Davenport if (!ctl->ops.trigger_flush) {
150025fdd593SJeykumar Sankaran DPU_ERROR("missing trigger cb\n");
150125fdd593SJeykumar Sankaran return;
150225fdd593SJeykumar Sankaran }
150325fdd593SJeykumar Sankaran
150425fdd593SJeykumar Sankaran pending_kickoff_cnt = dpu_encoder_phys_inc_pending(phys);
150525fdd593SJeykumar Sankaran
150625fdd593SJeykumar Sankaran if (extra_flush_bits && ctl->ops.update_pending_flush)
150725fdd593SJeykumar Sankaran ctl->ops.update_pending_flush(ctl, extra_flush_bits);
150825fdd593SJeykumar Sankaran
150925fdd593SJeykumar Sankaran ctl->ops.trigger_flush(ctl);
151025fdd593SJeykumar Sankaran
151125fdd593SJeykumar Sankaran if (ctl->ops.get_pending_flush)
151225fdd593SJeykumar Sankaran ret = ctl->ops.get_pending_flush(ctl);
151325fdd593SJeykumar Sankaran
1514da10e280SAbhinav Kumar trace_dpu_enc_trigger_flush(DRMID(drm_enc),
1515da10e280SAbhinav Kumar dpu_encoder_helper_get_intf_type(phys->intf_mode),
15168ea432b8SDmitry Baryshkov phys->hw_intf ? phys->hw_intf->idx : -1,
15178ea432b8SDmitry Baryshkov phys->hw_wb ? phys->hw_wb->idx : -1,
15181bb4e701SSean Paul pending_kickoff_cnt, ctl->idx,
15191bb4e701SSean Paul extra_flush_bits, ret);
152025fdd593SJeykumar Sankaran }
152125fdd593SJeykumar Sankaran
152225fdd593SJeykumar Sankaran /**
152325fdd593SJeykumar Sankaran * _dpu_encoder_trigger_start - trigger start for a physical encoder
1524cca5ff94SLee Jones * @phys: Pointer to physical encoder structure
152525fdd593SJeykumar Sankaran */
152658fba464SSean Paul static void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
152725fdd593SJeykumar Sankaran {
152825fdd593SJeykumar Sankaran if (!phys) {
152925fdd593SJeykumar Sankaran DPU_ERROR("invalid argument(s)\n");
153025fdd593SJeykumar Sankaran return;
153125fdd593SJeykumar Sankaran }
153225fdd593SJeykumar Sankaran
153325fdd593SJeykumar Sankaran if (!phys->hw_pp) {
153425fdd593SJeykumar Sankaran DPU_ERROR("invalid pingpong hw\n");
153525fdd593SJeykumar Sankaran return;
153625fdd593SJeykumar Sankaran }
153725fdd593SJeykumar Sankaran
153825fdd593SJeykumar Sankaran if (phys->ops.trigger_start && phys->enable_state != DPU_ENC_DISABLED)
153925fdd593SJeykumar Sankaran phys->ops.trigger_start(phys);
154025fdd593SJeykumar Sankaran }
154125fdd593SJeykumar Sankaran
154225fdd593SJeykumar Sankaran void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
154325fdd593SJeykumar Sankaran {
154425fdd593SJeykumar Sankaran struct dpu_hw_ctl *ctl;
154525fdd593SJeykumar Sankaran
154625fdd593SJeykumar Sankaran ctl = phys_enc->hw_ctl;
154799beed68SDrew Davenport if (ctl->ops.trigger_start) {
154825fdd593SJeykumar Sankaran ctl->ops.trigger_start(ctl);
154925fdd593SJeykumar Sankaran trace_dpu_enc_trigger_start(DRMID(phys_enc->parent), ctl->idx);
155025fdd593SJeykumar Sankaran }
155125fdd593SJeykumar Sankaran }
155225fdd593SJeykumar Sankaran
1553fba33caeSJordan Crouse static int dpu_encoder_helper_wait_event_timeout(
155425fdd593SJeykumar Sankaran int32_t drm_id,
1555667e9985SDmitry Baryshkov u32 irq_idx,
155625fdd593SJeykumar Sankaran struct dpu_encoder_wait_info *info)
155725fdd593SJeykumar Sankaran {
155825fdd593SJeykumar Sankaran int rc = 0;
155925fdd593SJeykumar Sankaran s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
156025fdd593SJeykumar Sankaran s64 jiffies = msecs_to_jiffies(info->timeout_ms);
156125fdd593SJeykumar Sankaran s64 time;
156225fdd593SJeykumar Sankaran
156325fdd593SJeykumar Sankaran do {
156425fdd593SJeykumar Sankaran rc = wait_event_timeout(*(info->wq),
156525fdd593SJeykumar Sankaran atomic_read(info->atomic_cnt) == 0, jiffies);
156625fdd593SJeykumar Sankaran time = ktime_to_ms(ktime_get());
156725fdd593SJeykumar Sankaran
1568667e9985SDmitry Baryshkov trace_dpu_enc_wait_event_timeout(drm_id, irq_idx, rc, time,
156925fdd593SJeykumar Sankaran expected_time,
157025fdd593SJeykumar Sankaran atomic_read(info->atomic_cnt));
157125fdd593SJeykumar Sankaran /* If we timed out, counter is valid and time is less, wait again */
157225fdd593SJeykumar Sankaran } while (atomic_read(info->atomic_cnt) && (rc == 0) &&
157325fdd593SJeykumar Sankaran (time < expected_time));
157425fdd593SJeykumar Sankaran
157525fdd593SJeykumar Sankaran return rc;
157625fdd593SJeykumar Sankaran }
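/*
 * Illustrative usage sketch; the caller-side names below are hypothetical,
 * only the dpu_encoder_wait_info fields come from this helper:
 *
 *	struct dpu_encoder_wait_info info = {
 *		.wq = &pending_wq,
 *		.atomic_cnt = &pending_cnt,
 *		.timeout_ms = 100,
 *	};
 *	rc = dpu_encoder_helper_wait_event_timeout(DRMID(drm_enc), irq_idx, &info);
 *
 * The interrupt path is expected to decrement pending_cnt and wake pending_wq;
 * a return value of 0 after the loop means the counter never reached zero
 * within the accumulated timeout.
 */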
157725fdd593SJeykumar Sankaran
157801b09d53SSean Paul static void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
157925fdd593SJeykumar Sankaran {
158025fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc;
158125fdd593SJeykumar Sankaran struct dpu_hw_ctl *ctl;
158225fdd593SJeykumar Sankaran int rc;
15832ec5b3dcSAbhinav Kumar struct drm_encoder *drm_enc;
158425fdd593SJeykumar Sankaran
158525fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
158625fdd593SJeykumar Sankaran ctl = phys_enc->hw_ctl;
15872ec5b3dcSAbhinav Kumar drm_enc = phys_enc->parent;
158825fdd593SJeykumar Sankaran
158999beed68SDrew Davenport if (!ctl->ops.reset)
159025fdd593SJeykumar Sankaran return;
159125fdd593SJeykumar Sankaran
15922ec5b3dcSAbhinav Kumar DRM_DEBUG_KMS("id:%u ctl %d reset\n", DRMID(drm_enc),
159325fdd593SJeykumar Sankaran ctl->idx);
159425fdd593SJeykumar Sankaran
159525fdd593SJeykumar Sankaran rc = ctl->ops.reset(ctl);
15962ec5b3dcSAbhinav Kumar if (rc) {
159725fdd593SJeykumar Sankaran DPU_ERROR_ENC(dpu_enc, "ctl %d reset failure\n", ctl->idx);
15982ec5b3dcSAbhinav Kumar msm_disp_snapshot_state(drm_enc->dev);
15992ec5b3dcSAbhinav Kumar }
160025fdd593SJeykumar Sankaran
160125fdd593SJeykumar Sankaran phys_enc->enable_state = DPU_ENC_ENABLED;
160225fdd593SJeykumar Sankaran }
160325fdd593SJeykumar Sankaran
160425fdd593SJeykumar Sankaran /**
160525fdd593SJeykumar Sankaran * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
160625fdd593SJeykumar Sankaran * Iterate through the physical encoders and perform consolidated flush
160725fdd593SJeykumar Sankaran * and/or control start triggering as needed. This is done in the virtual
160825fdd593SJeykumar Sankaran * encoder rather than the individual physical ones in order to handle
160925fdd593SJeykumar Sankaran * use cases that require visibility into multiple physical encoders at
161025fdd593SJeykumar Sankaran * a time.
1611cca5ff94SLee Jones * @dpu_enc: Pointer to virtual encoder structure
161225fdd593SJeykumar Sankaran */
1613b4bb9f15SRob Clark static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
161425fdd593SJeykumar Sankaran {
161525fdd593SJeykumar Sankaran struct dpu_hw_ctl *ctl;
161625fdd593SJeykumar Sankaran uint32_t i, pending_flush;
161725fdd593SJeykumar Sankaran unsigned long lock_flags;
161825fdd593SJeykumar Sankaran
161925fdd593SJeykumar Sankaran pending_flush = 0x0;
162025fdd593SJeykumar Sankaran
162125fdd593SJeykumar Sankaran /* update pending counts and trigger kickoff ctl flush atomically */
162225fdd593SJeykumar Sankaran spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
162325fdd593SJeykumar Sankaran
162425fdd593SJeykumar Sankaran /* don't perform flush/start operations for slave encoders */
162525fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
162625fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
162725fdd593SJeykumar Sankaran
1628b6fadcadSDrew Davenport if (phys->enable_state == DPU_ENC_DISABLED)
162925fdd593SJeykumar Sankaran continue;
163025fdd593SJeykumar Sankaran
163125fdd593SJeykumar Sankaran ctl = phys->hw_ctl;
163225fdd593SJeykumar Sankaran
1633f98baa31SSean Paul /*
1634f98baa31SSean Paul * This is cleared in frame_done worker, which isn't invoked
1635f98baa31SSean Paul * for async commits. So don't set this for async, since it'll
1636f98baa31SSean Paul * roll over to the next commit.
1637f98baa31SSean Paul */
1638b4bb9f15SRob Clark if (phys->split_role != ENC_ROLE_SLAVE)
163925fdd593SJeykumar Sankaran set_bit(i, dpu_enc->frame_busy_mask);
1640f98baa31SSean Paul
164125fdd593SJeykumar Sankaran if (!phys->ops.needs_single_flush ||
164225fdd593SJeykumar Sankaran !phys->ops.needs_single_flush(phys))
1643b4bb9f15SRob Clark _dpu_encoder_trigger_flush(&dpu_enc->base, phys, 0x0);
164425fdd593SJeykumar Sankaran else if (ctl->ops.get_pending_flush)
164525fdd593SJeykumar Sankaran pending_flush |= ctl->ops.get_pending_flush(ctl);
164625fdd593SJeykumar Sankaran }
164725fdd593SJeykumar Sankaran
164825fdd593SJeykumar Sankaran /* for split flush, combine pending flush masks and send to master */
164925fdd593SJeykumar Sankaran if (pending_flush && dpu_enc->cur_master) {
165025fdd593SJeykumar Sankaran _dpu_encoder_trigger_flush(
165125fdd593SJeykumar Sankaran &dpu_enc->base,
165225fdd593SJeykumar Sankaran dpu_enc->cur_master,
1653b4bb9f15SRob Clark pending_flush);
165425fdd593SJeykumar Sankaran }
165525fdd593SJeykumar Sankaran
165625fdd593SJeykumar Sankaran _dpu_encoder_trigger_start(dpu_enc->cur_master);
165725fdd593SJeykumar Sankaran
165825fdd593SJeykumar Sankaran spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
165925fdd593SJeykumar Sankaran }
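/*
 * For example, in a hypothetical split-display configuration where the
 * slave physical encoder reports needs_single_flush(), its pending flush
 * bits are collected into pending_flush above and programmed through the
 * master's CTL with a single _dpu_encoder_trigger_flush() call, so both
 * halves of the display are flushed together.
 */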
166025fdd593SJeykumar Sankaran
166125fdd593SJeykumar Sankaran void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
166225fdd593SJeykumar Sankaran {
166325fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc;
166425fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys;
166525fdd593SJeykumar Sankaran unsigned int i;
166625fdd593SJeykumar Sankaran struct dpu_hw_ctl *ctl;
166725fdd593SJeykumar Sankaran struct msm_display_info *disp_info;
166825fdd593SJeykumar Sankaran
166925fdd593SJeykumar Sankaran if (!drm_enc) {
167025fdd593SJeykumar Sankaran DPU_ERROR("invalid encoder\n");
167125fdd593SJeykumar Sankaran return;
167225fdd593SJeykumar Sankaran }
167325fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
167425fdd593SJeykumar Sankaran disp_info = &dpu_enc->disp_info;
167525fdd593SJeykumar Sankaran
167625fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
167725fdd593SJeykumar Sankaran phys = dpu_enc->phys_encs[i];
167825fdd593SJeykumar Sankaran
167925fdd593SJeykumar Sankaran ctl = phys->hw_ctl;
168025fdd593SJeykumar Sankaran ctl->ops.clear_pending_flush(ctl);
168125fdd593SJeykumar Sankaran
168225fdd593SJeykumar Sankaran /* update only for command mode primary ctl */
168325fdd593SJeykumar Sankaran if ((phys == dpu_enc->cur_master) &&
1684b6529e33SDmitry Baryshkov disp_info->is_cmd_mode
168525fdd593SJeykumar Sankaran && ctl->ops.trigger_pending)
168625fdd593SJeykumar Sankaran ctl->ops.trigger_pending(ctl);
168725fdd593SJeykumar Sankaran }
168825fdd593SJeykumar Sankaran }
168925fdd593SJeykumar Sankaran
169025fdd593SJeykumar Sankaran static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
169125fdd593SJeykumar Sankaran struct drm_display_mode *mode)
169225fdd593SJeykumar Sankaran {
169325fdd593SJeykumar Sankaran u64 pclk_rate;
169425fdd593SJeykumar Sankaran u32 pclk_period;
169525fdd593SJeykumar Sankaran u32 line_time;
169625fdd593SJeykumar Sankaran
169725fdd593SJeykumar Sankaran /*
169825fdd593SJeykumar Sankaran * For linetime calculation, only operate on master encoder.
169925fdd593SJeykumar Sankaran */
170025fdd593SJeykumar Sankaran if (!dpu_enc->cur_master)
170125fdd593SJeykumar Sankaran return 0;
170225fdd593SJeykumar Sankaran
170325fdd593SJeykumar Sankaran if (!dpu_enc->cur_master->ops.get_line_count) {
170425fdd593SJeykumar Sankaran DPU_ERROR("get_line_count function not defined\n");
170525fdd593SJeykumar Sankaran return 0;
170625fdd593SJeykumar Sankaran }
170725fdd593SJeykumar Sankaran
170825fdd593SJeykumar Sankaran pclk_rate = mode->clock; /* pixel clock in kHz */
170925fdd593SJeykumar Sankaran if (pclk_rate == 0) {
171025fdd593SJeykumar Sankaran DPU_ERROR("pclk is 0, cannot calculate line time\n");
171125fdd593SJeykumar Sankaran return 0;
171225fdd593SJeykumar Sankaran }
171325fdd593SJeykumar Sankaran
171425fdd593SJeykumar Sankaran pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
171525fdd593SJeykumar Sankaran if (pclk_period == 0) {
171625fdd593SJeykumar Sankaran DPU_ERROR("pclk period is 0\n");
171725fdd593SJeykumar Sankaran return 0;
171825fdd593SJeykumar Sankaran }
171925fdd593SJeykumar Sankaran
172025fdd593SJeykumar Sankaran /*
172125fdd593SJeykumar Sankaran * Line time calculation based on Pixel clock and HTOTAL.
172225fdd593SJeykumar Sankaran * Final unit is in ns.
172325fdd593SJeykumar Sankaran */
172425fdd593SJeykumar Sankaran line_time = (pclk_period * mode->htotal) / 1000;
172525fdd593SJeykumar Sankaran if (line_time == 0) {
172625fdd593SJeykumar Sankaran DPU_ERROR("line time calculation is 0\n");
172725fdd593SJeykumar Sankaran return 0;
172825fdd593SJeykumar Sankaran }
172925fdd593SJeykumar Sankaran
173025fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc,
173125fdd593SJeykumar Sankaran "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
173225fdd593SJeykumar Sankaran pclk_rate, pclk_period, line_time);
173325fdd593SJeykumar Sankaran
173425fdd593SJeykumar Sankaran return line_time;
173525fdd593SJeykumar Sankaran }
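/*
 * For example, with a hypothetical 1080p60 timing (mode->clock = 148500
 * kHz, htotal = 2200): pclk_period = DIV_ROUND_UP(1000000000, 148500) =
 * 6735 ps and line_time = (6735 * 2200) / 1000 = 14817 ns, i.e. roughly
 * 14.8 us per line.
 */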
173625fdd593SJeykumar Sankaran
1737cd6d9231SRob Clark int dpu_encoder_vsync_time(struct drm_encoder *drm_enc, ktime_t *wakeup_time)
173825fdd593SJeykumar Sankaran {
173925fdd593SJeykumar Sankaran struct drm_display_mode *mode;
174025fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc;
174125fdd593SJeykumar Sankaran u32 cur_line;
174225fdd593SJeykumar Sankaran u32 line_time;
174325fdd593SJeykumar Sankaran u32 vtotal, time_to_vsync;
174425fdd593SJeykumar Sankaran ktime_t cur_time;
174525fdd593SJeykumar Sankaran
174625fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
174725fdd593SJeykumar Sankaran
174825fdd593SJeykumar Sankaran if (!drm_enc->crtc || !drm_enc->crtc->state) {
174925fdd593SJeykumar Sankaran DPU_ERROR("crtc/crtc state object is NULL\n");
175025fdd593SJeykumar Sankaran return -EINVAL;
175125fdd593SJeykumar Sankaran }
175225fdd593SJeykumar Sankaran mode = &drm_enc->crtc->state->adjusted_mode;
175325fdd593SJeykumar Sankaran
175425fdd593SJeykumar Sankaran line_time = _dpu_encoder_calculate_linetime(dpu_enc, mode);
175525fdd593SJeykumar Sankaran if (!line_time)
175625fdd593SJeykumar Sankaran return -EINVAL;
175725fdd593SJeykumar Sankaran
175825fdd593SJeykumar Sankaran cur_line = dpu_enc->cur_master->ops.get_line_count(dpu_enc->cur_master);
175925fdd593SJeykumar Sankaran
176025fdd593SJeykumar Sankaran vtotal = mode->vtotal;
176125fdd593SJeykumar Sankaran if (cur_line >= vtotal)
176225fdd593SJeykumar Sankaran time_to_vsync = line_time * vtotal;
176325fdd593SJeykumar Sankaran else
176425fdd593SJeykumar Sankaran time_to_vsync = line_time * (vtotal - cur_line);
176525fdd593SJeykumar Sankaran
176625fdd593SJeykumar Sankaran if (time_to_vsync == 0) {
176725fdd593SJeykumar Sankaran DPU_ERROR("time to vsync should not be zero, vtotal=%d\n",
176825fdd593SJeykumar Sankaran vtotal);
176925fdd593SJeykumar Sankaran return -EINVAL;
177025fdd593SJeykumar Sankaran }
177125fdd593SJeykumar Sankaran
177225fdd593SJeykumar Sankaran cur_time = ktime_get();
177325fdd593SJeykumar Sankaran *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
177425fdd593SJeykumar Sankaran
177525fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc,
177625fdd593SJeykumar Sankaran "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
177725fdd593SJeykumar Sankaran cur_line, vtotal, time_to_vsync,
177825fdd593SJeykumar Sankaran ktime_to_ms(cur_time),
177925fdd593SJeykumar Sankaran ktime_to_ms(*wakeup_time));
178025fdd593SJeykumar Sankaran return 0;
178125fdd593SJeykumar Sankaran }
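/*
 * Continuing the hypothetical 1080p60 example (line_time = 14817 ns,
 * vtotal = 1125): with a current line count of 200, time_to_vsync =
 * 14817 * (1125 - 200) = 13705725 ns, so the computed wakeup time lands
 * about 13.7 ms after the current ktime.
 */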
178225fdd593SJeykumar Sankaran
178358dca981SVinod Koul static u32
178446dd0c06SDmitry Baryshkov dpu_encoder_dsc_initial_line_calc(struct drm_dsc_config *dsc,
178558dca981SVinod Koul u32 enc_ip_width)
178658dca981SVinod Koul {
178758dca981SVinod Koul int ssm_delay, total_pixels, soft_slice_per_enc;
178858dca981SVinod Koul
178946dd0c06SDmitry Baryshkov soft_slice_per_enc = enc_ip_width / dsc->slice_width;
179058dca981SVinod Koul
179158dca981SVinod Koul /*
179258dca981SVinod Koul * minimum number of initial line pixels is a sum of:
179358dca981SVinod Koul * 1. sub-stream multiplexer delay (83 groups for 8bpc,
179458dca981SVinod Koul * 91 for 10 bpc) * 3
179558dca981SVinod Koul * 2. for two soft slice cases, add extra sub-stream multiplexer * 3
179658dca981SVinod Koul * 3. the initial xmit delay
179758dca981SVinod Koul * 4. total pipeline delay through the "lock step" of encoder (47)
179858dca981SVinod Koul * 5. 6 additional pixels as the output of the rate buffer is
179958dca981SVinod Koul * 48 bits wide
180058dca981SVinod Koul */
180146dd0c06SDmitry Baryshkov ssm_delay = ((dsc->bits_per_component < 10) ? 84 : 92);
180246dd0c06SDmitry Baryshkov total_pixels = ssm_delay * 3 + dsc->initial_xmit_delay + 47;
180358dca981SVinod Koul if (soft_slice_per_enc > 1)
180458dca981SVinod Koul total_pixels += (ssm_delay * 3);
180546dd0c06SDmitry Baryshkov return DIV_ROUND_UP(total_pixels, dsc->slice_width);
180658dca981SVinod Koul }
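/*
 * For example, with hypothetical DSC parameters of 8 bits per component
 * (ssm_delay = 84), slice_width = 540, initial_xmit_delay = 512 and
 * enc_ip_width = 540 (a single soft slice per encoder): total_pixels =
 * 84 * 3 + 512 + 47 = 811 and the function returns
 * DIV_ROUND_UP(811, 540) = 2 initial lines.
 */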
180758dca981SVinod Koul
1808761c629dSKuogee Hsieh static void dpu_encoder_dsc_pipe_cfg(struct dpu_hw_ctl *ctl,
1809761c629dSKuogee Hsieh struct dpu_hw_dsc *hw_dsc,
181058dca981SVinod Koul struct dpu_hw_pingpong *hw_pp,
181146dd0c06SDmitry Baryshkov struct drm_dsc_config *dsc,
181258dca981SVinod Koul u32 common_mode,
181358dca981SVinod Koul u32 initial_lines)
181458dca981SVinod Koul {
181558dca981SVinod Koul if (hw_dsc->ops.dsc_config)
181658dca981SVinod Koul hw_dsc->ops.dsc_config(hw_dsc, dsc, common_mode, initial_lines);
181758dca981SVinod Koul
181858dca981SVinod Koul if (hw_dsc->ops.dsc_config_thresh)
181958dca981SVinod Koul hw_dsc->ops.dsc_config_thresh(hw_dsc, dsc);
182058dca981SVinod Koul
182158dca981SVinod Koul if (hw_pp->ops.setup_dsc)
182258dca981SVinod Koul hw_pp->ops.setup_dsc(hw_pp);
182358dca981SVinod Koul
18247aa6f1a1SMarijn Suijten if (hw_dsc->ops.dsc_bind_pingpong_blk)
1825d45be1ccSKuogee Hsieh hw_dsc->ops.dsc_bind_pingpong_blk(hw_dsc, hw_pp->idx);
18267aa6f1a1SMarijn Suijten
182758dca981SVinod Koul if (hw_pp->ops.enable_dsc)
182858dca981SVinod Koul hw_pp->ops.enable_dsc(hw_pp);
1829761c629dSKuogee Hsieh
1830761c629dSKuogee Hsieh if (ctl->ops.update_pending_flush_dsc)
1831761c629dSKuogee Hsieh ctl->ops.update_pending_flush_dsc(ctl, hw_dsc->idx);
183258dca981SVinod Koul }
183358dca981SVinod Koul
183458dca981SVinod Koul static void dpu_encoder_prep_dsc(struct dpu_encoder_virt *dpu_enc,
183546dd0c06SDmitry Baryshkov struct drm_dsc_config *dsc)
183658dca981SVinod Koul {
183758dca981SVinod Koul /* coding only for 2LM, 2enc, 1 dsc config */
183858dca981SVinod Koul struct dpu_encoder_phys *enc_master = dpu_enc->cur_master;
1839761c629dSKuogee Hsieh struct dpu_hw_ctl *ctl = enc_master->hw_ctl;
184058dca981SVinod Koul struct dpu_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
184158dca981SVinod Koul struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
184258dca981SVinod Koul int this_frame_slices;
184358dca981SVinod Koul int intf_ip_w, enc_ip_w;
184458dca981SVinod Koul int dsc_common_mode;
184558dca981SVinod Koul int pic_width;
184658dca981SVinod Koul u32 initial_lines;
184758dca981SVinod Koul int i;
184858dca981SVinod Koul
184958dca981SVinod Koul for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
185058dca981SVinod Koul hw_pp[i] = dpu_enc->hw_pp[i];
185158dca981SVinod Koul hw_dsc[i] = dpu_enc->hw_dsc[i];
185258dca981SVinod Koul
185358dca981SVinod Koul if (!hw_pp[i] || !hw_dsc[i]) {
185458dca981SVinod Koul DPU_ERROR_ENC(dpu_enc, "invalid params for DSC\n");
185558dca981SVinod Koul return;
185658dca981SVinod Koul }
185758dca981SVinod Koul }
185858dca981SVinod Koul
185946dd0c06SDmitry Baryshkov dsc_common_mode = 0;
186046dd0c06SDmitry Baryshkov pic_width = dsc->pic_width;
186158dca981SVinod Koul
1862fd0ce43cSMarijn Suijten dsc_common_mode = DSC_MODE_SPLIT_PANEL;
1863fd0ce43cSMarijn Suijten if (dpu_encoder_use_dsc_merge(enc_master->parent))
1864fd0ce43cSMarijn Suijten dsc_common_mode |= DSC_MODE_MULTIPLEX;
186558dca981SVinod Koul if (enc_master->intf_mode == INTF_MODE_VIDEO)
186658dca981SVinod Koul dsc_common_mode |= DSC_MODE_VIDEO;
186758dca981SVinod Koul
186846dd0c06SDmitry Baryshkov this_frame_slices = pic_width / dsc->slice_width;
186946dd0c06SDmitry Baryshkov intf_ip_w = this_frame_slices * dsc->slice_width;
187058dca981SVinod Koul
187158dca981SVinod Koul /*
187258dca981SVinod Koul * dsc merge case: when using 2 encoders for the same stream,
187358dca981SVinod Koul * no. of slices need to be same on both the encoders.
187458dca981SVinod Koul */
187558dca981SVinod Koul enc_ip_w = intf_ip_w / 2;
187658dca981SVinod Koul initial_lines = dpu_encoder_dsc_initial_line_calc(dsc, enc_ip_w);
187758dca981SVinod Koul
187858dca981SVinod Koul for (i = 0; i < MAX_CHANNELS_PER_ENC; i++)
1879761c629dSKuogee Hsieh dpu_encoder_dsc_pipe_cfg(ctl, hw_dsc[i], hw_pp[i],
1880761c629dSKuogee Hsieh dsc, dsc_common_mode, initial_lines);
188158dca981SVinod Koul }
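/*
 * For example, for a hypothetical 1080-pixel-wide picture with a DSC
 * slice_width of 540: this_frame_slices = 2, intf_ip_w = 1080, and each
 * of the two encoders in the merge pair compresses enc_ip_w = 540 pixels
 * per line, which is the width fed to the initial line calculation above.
 */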
188258dca981SVinod Koul
18830c91ed51SRob Clark void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc)
188425fdd593SJeykumar Sankaran {
188525fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc;
188625fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys;
188725fdd593SJeykumar Sankaran bool needs_hw_reset = false;
188825fdd593SJeykumar Sankaran unsigned int i;
188925fdd593SJeykumar Sankaran
189025fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
189125fdd593SJeykumar Sankaran
189225fdd593SJeykumar Sankaran trace_dpu_enc_prepare_kickoff(DRMID(drm_enc));
189325fdd593SJeykumar Sankaran
189425fdd593SJeykumar Sankaran /* prepare for next kickoff, may include waiting on previous kickoff */
189525fdd593SJeykumar Sankaran DPU_ATRACE_BEGIN("enc_prepare_for_kickoff");
189625fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
189725fdd593SJeykumar Sankaran phys = dpu_enc->phys_encs[i];
189825fdd593SJeykumar Sankaran if (phys->ops.prepare_for_kickoff)
1899d3db61caSBruce Wang phys->ops.prepare_for_kickoff(phys);
190025fdd593SJeykumar Sankaran if (phys->enable_state == DPU_ENC_ERR_NEEDS_HW_RESET)
190125fdd593SJeykumar Sankaran needs_hw_reset = true;
190225fdd593SJeykumar Sankaran }
190325fdd593SJeykumar Sankaran DPU_ATRACE_END("enc_prepare_for_kickoff");
190425fdd593SJeykumar Sankaran
190525fdd593SJeykumar Sankaran dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
190625fdd593SJeykumar Sankaran
190725fdd593SJeykumar Sankaran /* if any phys needs reset, reset all phys, in-order */
190825fdd593SJeykumar Sankaran if (needs_hw_reset) {
190925fdd593SJeykumar Sankaran trace_dpu_enc_prepare_kickoff_reset(DRMID(drm_enc));
191025fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
191101b09d53SSean Paul dpu_encoder_helper_hw_reset(dpu_enc->phys_encs[i]);
191225fdd593SJeykumar Sankaran }
191325fdd593SJeykumar Sankaran }
191458dca981SVinod Koul
191558dca981SVinod Koul if (dpu_enc->dsc)
191658dca981SVinod Koul dpu_encoder_prep_dsc(dpu_enc, dpu_enc->dsc);
191725fdd593SJeykumar Sankaran }
191825fdd593SJeykumar Sankaran
1919f2969c49SAbhinav Kumar bool dpu_encoder_is_valid_for_commit(struct drm_encoder *drm_enc)
1920f2969c49SAbhinav Kumar {
1921f2969c49SAbhinav Kumar struct dpu_encoder_virt *dpu_enc;
1922f2969c49SAbhinav Kumar unsigned int i;
1923f2969c49SAbhinav Kumar struct dpu_encoder_phys *phys;
1924f2969c49SAbhinav Kumar
1925f2969c49SAbhinav Kumar dpu_enc = to_dpu_encoder_virt(drm_enc);
1926f2969c49SAbhinav Kumar
1927f2969c49SAbhinav Kumar if (drm_enc->encoder_type == DRM_MODE_ENCODER_VIRTUAL) {
1928f2969c49SAbhinav Kumar for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1929f2969c49SAbhinav Kumar phys = dpu_enc->phys_encs[i];
1930f2969c49SAbhinav Kumar if (phys->ops.is_valid_for_commit && !phys->ops.is_valid_for_commit(phys)) {
1931f2969c49SAbhinav Kumar DPU_DEBUG("invalid FB not kicking off\n");
1932f2969c49SAbhinav Kumar return false;
1933f2969c49SAbhinav Kumar }
1934f2969c49SAbhinav Kumar }
1935f2969c49SAbhinav Kumar }
1936f2969c49SAbhinav Kumar
1937f2969c49SAbhinav Kumar return true;
1938f2969c49SAbhinav Kumar }
1939f2969c49SAbhinav Kumar
1940b4bb9f15SRob Clark void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
194125fdd593SJeykumar Sankaran {
194225fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc;
194325fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys;
1944b4bb9f15SRob Clark unsigned long timeout_ms;
194525fdd593SJeykumar Sankaran unsigned int i;
194625fdd593SJeykumar Sankaran
194725fdd593SJeykumar Sankaran DPU_ATRACE_BEGIN("encoder_kickoff");
194825fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
194925fdd593SJeykumar Sankaran
195025fdd593SJeykumar Sankaran trace_dpu_enc_kickoff(DRMID(drm_enc));
195125fdd593SJeykumar Sankaran
195270df9610SSean Paul timeout_ms = DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES * 1000 /
19532e039186SSean Paul drm_mode_vrefresh(&drm_enc->crtc->state->adjusted_mode);
19542e039186SSean Paul
195570df9610SSean Paul atomic_set(&dpu_enc->frame_done_timeout_ms, timeout_ms);
19562e039186SSean Paul mod_timer(&dpu_enc->frame_done_timer,
19572e039186SSean Paul jiffies + msecs_to_jiffies(timeout_ms));
195825fdd593SJeykumar Sankaran
195925fdd593SJeykumar Sankaran /* All phys encs are ready to go, trigger the kickoff */
1960b4bb9f15SRob Clark _dpu_encoder_kickoff_phys(dpu_enc);
196125fdd593SJeykumar Sankaran
196225fdd593SJeykumar Sankaran /* allow phys encs to handle any post-kickoff business */
196325fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
196425fdd593SJeykumar Sankaran phys = dpu_enc->phys_encs[i];
1965b6fadcadSDrew Davenport if (phys->ops.handle_post_kickoff)
196625fdd593SJeykumar Sankaran phys->ops.handle_post_kickoff(phys);
196725fdd593SJeykumar Sankaran }
196825fdd593SJeykumar Sankaran
196925fdd593SJeykumar Sankaran DPU_ATRACE_END("encoder_kickoff");
197025fdd593SJeykumar Sankaran }
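/*
 * For example, at a hypothetical 60 Hz refresh rate the timeout programmed
 * above works out to 5 * 1000 / 60 = 83 ms, so frame_done_timer only fires
 * if roughly five frame periods pass without a frame-done event clearing
 * the pending timeout.
 */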
197125fdd593SJeykumar Sankaran
1972ae4d721cSAbhinav Kumar static void dpu_encoder_helper_reset_mixers(struct dpu_encoder_phys *phys_enc)
1973ae4d721cSAbhinav Kumar {
1974ae4d721cSAbhinav Kumar struct dpu_hw_mixer_cfg mixer;
1975ae4d721cSAbhinav Kumar int i, num_lm;
1976ae4d721cSAbhinav Kumar struct dpu_global_state *global_state;
1977ae4d721cSAbhinav Kumar struct dpu_hw_blk *hw_lm[2];
1978ae4d721cSAbhinav Kumar struct dpu_hw_mixer *hw_mixer[2];
1979ae4d721cSAbhinav Kumar struct dpu_hw_ctl *ctl = phys_enc->hw_ctl;
1980ae4d721cSAbhinav Kumar
1981ae4d721cSAbhinav Kumar memset(&mixer, 0, sizeof(mixer));
1982ae4d721cSAbhinav Kumar
1983ae4d721cSAbhinav Kumar /* reset all mixers for this encoder */
1984ae4d721cSAbhinav Kumar if (phys_enc->hw_ctl->ops.clear_all_blendstages)
1985ae4d721cSAbhinav Kumar phys_enc->hw_ctl->ops.clear_all_blendstages(phys_enc->hw_ctl);
1986ae4d721cSAbhinav Kumar
1987ae4d721cSAbhinav Kumar global_state = dpu_kms_get_existing_global_state(phys_enc->dpu_kms);
1988ae4d721cSAbhinav Kumar
1989ae4d721cSAbhinav Kumar num_lm = dpu_rm_get_assigned_resources(&phys_enc->dpu_kms->rm, global_state,
1990ae4d721cSAbhinav Kumar phys_enc->parent->base.id, DPU_HW_BLK_LM, hw_lm, ARRAY_SIZE(hw_lm));
1991ae4d721cSAbhinav Kumar
1992ae4d721cSAbhinav Kumar for (i = 0; i < num_lm; i++) {
1993ae4d721cSAbhinav Kumar hw_mixer[i] = to_dpu_hw_mixer(hw_lm[i]);
19943cde792aSDmitry Baryshkov if (phys_enc->hw_ctl->ops.update_pending_flush_mixer)
19953cde792aSDmitry Baryshkov phys_enc->hw_ctl->ops.update_pending_flush_mixer(ctl, hw_mixer[i]->idx);
1996ae4d721cSAbhinav Kumar
1997ae4d721cSAbhinav Kumar /* clear all blendstages */
1998ae4d721cSAbhinav Kumar if (phys_enc->hw_ctl->ops.setup_blendstage)
1999ae4d721cSAbhinav Kumar phys_enc->hw_ctl->ops.setup_blendstage(ctl, hw_mixer[i]->idx, NULL);
2000ae4d721cSAbhinav Kumar }
2001ae4d721cSAbhinav Kumar }
2002ae4d721cSAbhinav Kumar
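/*
 * Tear down a single DSC/pingpong pair: disable the DSC block, disable DSC in
 * the pingpong, unbind them and mark the DSC flush as pending.
 */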
2003997ed53dSKuogee Hsieh static void dpu_encoder_dsc_pipe_clr(struct dpu_hw_ctl *ctl,
2004997ed53dSKuogee Hsieh struct dpu_hw_dsc *hw_dsc,
2005997ed53dSKuogee Hsieh struct dpu_hw_pingpong *hw_pp)
2006997ed53dSKuogee Hsieh {
2007997ed53dSKuogee Hsieh if (hw_dsc->ops.dsc_disable)
2008997ed53dSKuogee Hsieh hw_dsc->ops.dsc_disable(hw_dsc);
2009997ed53dSKuogee Hsieh
2010997ed53dSKuogee Hsieh if (hw_pp->ops.disable_dsc)
2011997ed53dSKuogee Hsieh hw_pp->ops.disable_dsc(hw_pp);
2012997ed53dSKuogee Hsieh
2013997ed53dSKuogee Hsieh if (hw_dsc->ops.dsc_bind_pingpong_blk)
2014997ed53dSKuogee Hsieh hw_dsc->ops.dsc_bind_pingpong_blk(hw_dsc, PINGPONG_NONE);
2015997ed53dSKuogee Hsieh
2016997ed53dSKuogee Hsieh if (ctl->ops.update_pending_flush_dsc)
2017997ed53dSKuogee Hsieh ctl->ops.update_pending_flush_dsc(ctl, hw_dsc->idx);
2018997ed53dSKuogee Hsieh }
2019997ed53dSKuogee Hsieh
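/*
 * Undo the DSC setup of this virtual encoder by clearing every assigned
 * pingpong/DSC pair.
 */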
2020997ed53dSKuogee Hsieh static void dpu_encoder_unprep_dsc(struct dpu_encoder_virt *dpu_enc)
2021997ed53dSKuogee Hsieh {
2022997ed53dSKuogee Hsieh /* currently only the 2 LM, 2 encoder, 1 DSC configuration is handled */
2023997ed53dSKuogee Hsieh struct dpu_encoder_phys *enc_master = dpu_enc->cur_master;
2024997ed53dSKuogee Hsieh struct dpu_hw_ctl *ctl = enc_master->hw_ctl;
2025997ed53dSKuogee Hsieh struct dpu_hw_dsc *hw_dsc[MAX_CHANNELS_PER_ENC];
2026997ed53dSKuogee Hsieh struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];
2027997ed53dSKuogee Hsieh int i;
2028997ed53dSKuogee Hsieh
2029997ed53dSKuogee Hsieh for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
2030997ed53dSKuogee Hsieh hw_pp[i] = dpu_enc->hw_pp[i];
2031997ed53dSKuogee Hsieh hw_dsc[i] = dpu_enc->hw_dsc[i];
2032997ed53dSKuogee Hsieh
2033997ed53dSKuogee Hsieh if (hw_pp[i] && hw_dsc[i])
2034997ed53dSKuogee Hsieh dpu_encoder_dsc_pipe_clr(ctl, hw_dsc[i], hw_pp[i]);
2035997ed53dSKuogee Hsieh }
2036997ed53dSKuogee Hsieh }
2037997ed53dSKuogee Hsieh
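/**
 * dpu_encoder_helper_phys_cleanup() - Reset the HW blocks driven by a phys encoder
 * @phys_enc: physical encoder to clean up
 *
 * Reset the CTL path and mixers, unbind the WB or INTF pingpong blocks, clear
 * dither, merge-3D and DSC state, then issue a final CTL flush/start so the
 * cleared configuration is latched by the hardware.
 */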
2038ae4d721cSAbhinav Kumar void dpu_encoder_helper_phys_cleanup(struct dpu_encoder_phys *phys_enc)
2039ae4d721cSAbhinav Kumar {
2040ae4d721cSAbhinav Kumar struct dpu_hw_ctl *ctl = phys_enc->hw_ctl;
2041ae4d721cSAbhinav Kumar struct dpu_hw_intf_cfg intf_cfg = { 0 };
2042ae4d721cSAbhinav Kumar int i;
2043ae4d721cSAbhinav Kumar struct dpu_encoder_virt *dpu_enc;
2044ae4d721cSAbhinav Kumar
2045ae4d721cSAbhinav Kumar dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
2046ae4d721cSAbhinav Kumar
2047ae4d721cSAbhinav Kumar phys_enc->hw_ctl->ops.reset(ctl);
2048ae4d721cSAbhinav Kumar
2049ae4d721cSAbhinav Kumar dpu_encoder_helper_reset_mixers(phys_enc);
2050ae4d721cSAbhinav Kumar
2051e02a559aSAbhinav Kumar /*
2052e02a559aSAbhinav Kumar * TODO: move once-only operations such as the CTL flush/trigger
2053e02a559aSAbhinav Kumar * into dpu_encoder_virt_disable(), and move all operations which need
2054e02a559aSAbhinav Kumar * to be done per phys encoder into the phys_disable() op.
2055e02a559aSAbhinav Kumar */
2056e02a559aSAbhinav Kumar if (phys_enc->hw_wb) {
2057e02a559aSAbhinav Kumar /* disable the PP block */
2058e02a559aSAbhinav Kumar if (phys_enc->hw_wb->ops.bind_pingpong_blk)
20590f86d9c9SDmitry Baryshkov phys_enc->hw_wb->ops.bind_pingpong_blk(phys_enc->hw_wb, PINGPONG_NONE);
2060e02a559aSAbhinav Kumar
2061e02a559aSAbhinav Kumar /* mark WB flush as pending */
2062e02a559aSAbhinav Kumar if (phys_enc->hw_ctl->ops.update_pending_flush_wb)
2063e02a559aSAbhinav Kumar phys_enc->hw_ctl->ops.update_pending_flush_wb(ctl, phys_enc->hw_wb->idx);
2064e02a559aSAbhinav Kumar } else {
2065ae4d721cSAbhinav Kumar for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2066ae4d721cSAbhinav Kumar if (dpu_enc->phys_encs[i] && phys_enc->hw_intf->ops.bind_pingpong_blk)
2067ae4d721cSAbhinav Kumar phys_enc->hw_intf->ops.bind_pingpong_blk(
2068a03b7c46SDmitry Baryshkov dpu_enc->phys_encs[i]->hw_intf,
2069a03b7c46SDmitry Baryshkov PINGPONG_NONE);
2070ae4d721cSAbhinav Kumar
2071ae4d721cSAbhinav Kumar /* mark INTF flush as pending */
2072ae4d721cSAbhinav Kumar if (phys_enc->hw_ctl->ops.update_pending_flush_intf)
2073ae4d721cSAbhinav Kumar phys_enc->hw_ctl->ops.update_pending_flush_intf(phys_enc->hw_ctl,
2074ae4d721cSAbhinav Kumar dpu_enc->phys_encs[i]->hw_intf->idx);
2075ae4d721cSAbhinav Kumar }
2076e02a559aSAbhinav Kumar }
2077ae4d721cSAbhinav Kumar
2078*9546aaf0SJessica Zhang if (phys_enc->hw_pp && phys_enc->hw_pp->ops.setup_dither)
2079*9546aaf0SJessica Zhang phys_enc->hw_pp->ops.setup_dither(phys_enc->hw_pp, NULL);
2080*9546aaf0SJessica Zhang
2081ae4d721cSAbhinav Kumar /* reset the merge 3D HW block */
208279592a6eSAbhinav Kumar if (phys_enc->hw_pp && phys_enc->hw_pp->merge_3d) {
2083ae4d721cSAbhinav Kumar phys_enc->hw_pp->merge_3d->ops.setup_3d_mode(phys_enc->hw_pp->merge_3d,
2084ae4d721cSAbhinav Kumar BLEND_3D_NONE);
2085ae4d721cSAbhinav Kumar if (phys_enc->hw_ctl->ops.update_pending_flush_merge_3d)
2086ae4d721cSAbhinav Kumar phys_enc->hw_ctl->ops.update_pending_flush_merge_3d(ctl,
2087ae4d721cSAbhinav Kumar phys_enc->hw_pp->merge_3d->idx);
2088ae4d721cSAbhinav Kumar }
2089ae4d721cSAbhinav Kumar
20909ed6141bSKuogee Hsieh if (dpu_enc->dsc) {
2091997ed53dSKuogee Hsieh dpu_encoder_unprep_dsc(dpu_enc);
20929ed6141bSKuogee Hsieh dpu_enc->dsc = NULL;
20939ed6141bSKuogee Hsieh }
2094997ed53dSKuogee Hsieh
2095ae4d721cSAbhinav Kumar intf_cfg.stream_sel = 0; /* Don't care value for video mode */
2096ae4d721cSAbhinav Kumar intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
2097997ed53dSKuogee Hsieh intf_cfg.dsc = dpu_encoder_helper_get_dsc(phys_enc);
20989811913aSAbhinav Kumar
20999811913aSAbhinav Kumar if (phys_enc->hw_intf)
21009811913aSAbhinav Kumar intf_cfg.intf = phys_enc->hw_intf->idx;
21019811913aSAbhinav Kumar if (phys_enc->hw_wb)
21029811913aSAbhinav Kumar intf_cfg.wb = phys_enc->hw_wb->idx;
21039811913aSAbhinav Kumar
210479592a6eSAbhinav Kumar if (phys_enc->hw_pp && phys_enc->hw_pp->merge_3d)
2105ae4d721cSAbhinav Kumar intf_cfg.merge_3d = phys_enc->hw_pp->merge_3d->idx;
2106ae4d721cSAbhinav Kumar
2107ae4d721cSAbhinav Kumar if (ctl->ops.reset_intf_cfg)
2108ae4d721cSAbhinav Kumar ctl->ops.reset_intf_cfg(ctl, &intf_cfg);
2109ae4d721cSAbhinav Kumar
2110ae4d721cSAbhinav Kumar ctl->ops.trigger_flush(ctl);
2111ae4d721cSAbhinav Kumar ctl->ops.trigger_start(ctl);
2112ae4d721cSAbhinav Kumar ctl->ops.clear_pending_flush(ctl);
2113ae4d721cSAbhinav Kumar }
2114ae4d721cSAbhinav Kumar
211525fdd593SJeykumar Sankaran #ifdef CONFIG_DEBUG_FS
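/*
 * debugfs "status" file: print one line per physical encoder with its INTF/WB
 * index, vsync and underrun counters, followed by its interface mode.
 */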
211625fdd593SJeykumar Sankaran static int _dpu_encoder_status_show(struct seq_file *s, void *data)
211725fdd593SJeykumar Sankaran {
21183d688410SJordan Crouse struct dpu_encoder_virt *dpu_enc = s->private;
211925fdd593SJeykumar Sankaran int i;
212025fdd593SJeykumar Sankaran
212125fdd593SJeykumar Sankaran mutex_lock(&dpu_enc->enc_lock);
212225fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
212325fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
212425fdd593SJeykumar Sankaran
2125750e78a1SAbhinav Kumar seq_printf(s, "intf:%d wb:%d vsync:%8d underrun:%8d ",
21268ea432b8SDmitry Baryshkov phys->hw_intf ? phys->hw_intf->idx - INTF_0 : -1,
21278ea432b8SDmitry Baryshkov phys->hw_wb ? phys->hw_wb->idx - WB_0 : -1,
212825fdd593SJeykumar Sankaran atomic_read(&phys->vsync_cnt),
212925fdd593SJeykumar Sankaran atomic_read(&phys->underrun_cnt));
213025fdd593SJeykumar Sankaran
2131750e78a1SAbhinav Kumar seq_printf(s, "mode: %s\n", dpu_encoder_helper_get_intf_type(phys->intf_mode));
213225fdd593SJeykumar Sankaran }
213325fdd593SJeykumar Sankaran mutex_unlock(&dpu_enc->enc_lock);
213425fdd593SJeykumar Sankaran
213525fdd593SJeykumar Sankaran return 0;
213625fdd593SJeykumar Sankaran }
213725fdd593SJeykumar Sankaran
2138341a361cSQinglang Miao DEFINE_SHOW_ATTRIBUTE(_dpu_encoder_status);
213925fdd593SJeykumar Sankaran
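/*
 * Create the per-encoder debugfs directory ("encoder<id>") and the "status"
 * entry underneath it.
 */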
214025fdd593SJeykumar Sankaran static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
214125fdd593SJeykumar Sankaran {
21423d688410SJordan Crouse struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
214325fdd593SJeykumar Sankaran
2144a659098dSDmitry Baryshkov char name[12];
214525fdd593SJeykumar Sankaran
2146422ed755SDrew Davenport if (!drm_enc->dev) {
214725fdd593SJeykumar Sankaran DPU_ERROR("invalid encoder or kms\n");
214825fdd593SJeykumar Sankaran return -EINVAL;
214925fdd593SJeykumar Sankaran }
215025fdd593SJeykumar Sankaran
2151a659098dSDmitry Baryshkov snprintf(name, sizeof(name), "encoder%u", drm_enc->base.id);
215225fdd593SJeykumar Sankaran
215325fdd593SJeykumar Sankaran /* create overall sub-directory for the encoder */
215425fdd593SJeykumar Sankaran dpu_enc->debugfs_root = debugfs_create_dir(name,
215525fdd593SJeykumar Sankaran drm_enc->dev->primary->debugfs_root);
215625fdd593SJeykumar Sankaran
215725fdd593SJeykumar Sankaran /* don't error check these */
215825fdd593SJeykumar Sankaran debugfs_create_file("status", 0600,
2159341a361cSQinglang Miao dpu_enc->debugfs_root, dpu_enc, &_dpu_encoder_status_fops);
216025fdd593SJeykumar Sankaran
216125fdd593SJeykumar Sankaran return 0;
216225fdd593SJeykumar Sankaran }
216325fdd593SJeykumar Sankaran #else
216425fdd593SJeykumar Sankaran static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
216525fdd593SJeykumar Sankaran {
216625fdd593SJeykumar Sankaran return 0;
216725fdd593SJeykumar Sankaran }
216825fdd593SJeykumar Sankaran #endif
216925fdd593SJeykumar Sankaran
217025fdd593SJeykumar Sankaran static int dpu_encoder_late_register(struct drm_encoder *encoder)
217125fdd593SJeykumar Sankaran {
217225fdd593SJeykumar Sankaran return _dpu_encoder_init_debugfs(encoder);
217325fdd593SJeykumar Sankaran }
217425fdd593SJeykumar Sankaran
217525fdd593SJeykumar Sankaran static void dpu_encoder_early_unregister(struct drm_encoder *encoder)
217625fdd593SJeykumar Sankaran {
21773d688410SJordan Crouse struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(encoder);
21783d688410SJordan Crouse
21793d688410SJordan Crouse debugfs_remove_recursive(dpu_enc->debugfs_root);
218025fdd593SJeykumar Sankaran }
218125fdd593SJeykumar Sankaran
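/*
 * Create the physical encoder backing one hardware tile (writeback, command
 * mode or video mode, depending on the display info) and record it as the
 * current master or slave according to the split role.
 */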
218225fdd593SJeykumar Sankaran static int dpu_encoder_virt_add_phys_encs(
2183e86721b0SDmitry Baryshkov struct drm_device *dev,
21845cf5afcdSAbhinav Kumar struct msm_display_info *disp_info,
218525fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc,
218625fdd593SJeykumar Sankaran struct dpu_enc_phys_init_params *params)
218725fdd593SJeykumar Sankaran {
218825fdd593SJeykumar Sankaran struct dpu_encoder_phys *enc = NULL;
218925fdd593SJeykumar Sankaran
219025fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc, "\n");
219125fdd593SJeykumar Sankaran
219225fdd593SJeykumar Sankaran /*
219325fdd593SJeykumar Sankaran * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
219425fdd593SJeykumar Sankaran * in this function, check up-front.
219525fdd593SJeykumar Sankaran */
219625fdd593SJeykumar Sankaran if (dpu_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
219725fdd593SJeykumar Sankaran ARRAY_SIZE(dpu_enc->phys_encs)) {
219825fdd593SJeykumar Sankaran DPU_ERROR_ENC(dpu_enc, "too many physical encoders %d\n",
219925fdd593SJeykumar Sankaran dpu_enc->num_phys_encs);
220025fdd593SJeykumar Sankaran return -EINVAL;
220125fdd593SJeykumar Sankaran }
220225fdd593SJeykumar Sankaran
2203b6529e33SDmitry Baryshkov
22045a7a86bfSDmitry Baryshkov if (disp_info->intf_type == INTF_WB) {
2205e86721b0SDmitry Baryshkov enc = dpu_encoder_phys_wb_init(dev, params);
220625fdd593SJeykumar Sankaran
2207b1ed585aSDmitry Baryshkov if (IS_ERR(enc)) {
2208b6529e33SDmitry Baryshkov DPU_ERROR_ENC(dpu_enc, "failed to init wb enc: %ld\n",
220925fdd593SJeykumar Sankaran PTR_ERR(enc));
2210b1ed585aSDmitry Baryshkov return PTR_ERR(enc);
221125fdd593SJeykumar Sankaran }
221225fdd593SJeykumar Sankaran
221325fdd593SJeykumar Sankaran dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
221425fdd593SJeykumar Sankaran ++dpu_enc->num_phys_encs;
2215b6529e33SDmitry Baryshkov } else if (disp_info->is_cmd_mode) {
2216e86721b0SDmitry Baryshkov enc = dpu_encoder_phys_cmd_init(dev, params);
221725fdd593SJeykumar Sankaran
2218b1ed585aSDmitry Baryshkov if (IS_ERR(enc)) {
221925fdd593SJeykumar Sankaran DPU_ERROR_ENC(dpu_enc, "failed to init cmd enc: %ld\n",
222025fdd593SJeykumar Sankaran PTR_ERR(enc));
2221b1ed585aSDmitry Baryshkov return PTR_ERR(enc);
222225fdd593SJeykumar Sankaran }
222325fdd593SJeykumar Sankaran
222425fdd593SJeykumar Sankaran dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
222525fdd593SJeykumar Sankaran ++dpu_enc->num_phys_encs;
2226b6529e33SDmitry Baryshkov } else {
2227e86721b0SDmitry Baryshkov enc = dpu_encoder_phys_vid_init(dev, params);
22285cf5afcdSAbhinav Kumar
2229b1ed585aSDmitry Baryshkov if (IS_ERR(enc)) {
2230b6529e33SDmitry Baryshkov DPU_ERROR_ENC(dpu_enc, "failed to init vid enc: %ld\n",
22315cf5afcdSAbhinav Kumar PTR_ERR(enc));
2232b1ed585aSDmitry Baryshkov return PTR_ERR(enc);
22335cf5afcdSAbhinav Kumar }
22345cf5afcdSAbhinav Kumar
22355cf5afcdSAbhinav Kumar dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
22365cf5afcdSAbhinav Kumar ++dpu_enc->num_phys_encs;
22375cf5afcdSAbhinav Kumar }
22385cf5afcdSAbhinav Kumar
223986b89080SJeykumar Sankaran if (params->split_role == ENC_ROLE_SLAVE)
224086b89080SJeykumar Sankaran dpu_enc->cur_slave = enc;
224186b89080SJeykumar Sankaran else
224286b89080SJeykumar Sankaran dpu_enc->cur_master = enc;
224386b89080SJeykumar Sankaran
224425fdd593SJeykumar Sankaran return 0;
224525fdd593SJeykumar Sankaran }
224625fdd593SJeykumar Sankaran
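/*
 * Set up the virtual encoder for a display: look up the INTF or WB block for
 * each horizontal tile, create a matching physical encoder for it and assign
 * master/slave/solo split roles.
 */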
224725fdd593SJeykumar Sankaran static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
224825fdd593SJeykumar Sankaran struct dpu_kms *dpu_kms,
224948a8ef72SJeykumar Sankaran struct msm_display_info *disp_info)
225025fdd593SJeykumar Sankaran {
225125fdd593SJeykumar Sankaran int ret = 0;
225225fdd593SJeykumar Sankaran int i = 0;
225325fdd593SJeykumar Sankaran struct dpu_enc_phys_init_params phys_params;
225425fdd593SJeykumar Sankaran
225596630140SDrew Davenport if (!dpu_enc) {
2256e6790f72SZheng Bin DPU_ERROR("invalid arg(s), enc %d\n", dpu_enc != NULL);
225725fdd593SJeykumar Sankaran return -EINVAL;
225825fdd593SJeykumar Sankaran }
225925fdd593SJeykumar Sankaran
22609027b871SSean Paul dpu_enc->cur_master = NULL;
22619027b871SSean Paul
226225fdd593SJeykumar Sankaran memset(&phys_params, 0, sizeof(phys_params));
226325fdd593SJeykumar Sankaran phys_params.dpu_kms = dpu_kms;
226425fdd593SJeykumar Sankaran phys_params.parent = &dpu_enc->base;
226525fdd593SJeykumar Sankaran phys_params.enc_spinlock = &dpu_enc->enc_spinlock;
226625fdd593SJeykumar Sankaran
226725fdd593SJeykumar Sankaran WARN_ON(disp_info->num_of_h_tiles < 1);
226825fdd593SJeykumar Sankaran
226925fdd593SJeykumar Sankaran DPU_DEBUG("disp_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
227025fdd593SJeykumar Sankaran
22715a7a86bfSDmitry Baryshkov if (disp_info->intf_type != INTF_WB)
227225fdd593SJeykumar Sankaran dpu_enc->idle_pc_supported =
227325fdd593SJeykumar Sankaran dpu_kms->catalog->caps->has_idle_pc;
227425fdd593SJeykumar Sankaran
227525fdd593SJeykumar Sankaran mutex_lock(&dpu_enc->enc_lock);
227625fdd593SJeykumar Sankaran for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
227725fdd593SJeykumar Sankaran /*
227825fdd593SJeykumar Sankaran * Left-most tile is at index 0, content is controller id
227925fdd593SJeykumar Sankaran * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
228025fdd593SJeykumar Sankaran * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
228125fdd593SJeykumar Sankaran */
228225fdd593SJeykumar Sankaran u32 controller_id = disp_info->h_tile_instance[i];
228325fdd593SJeykumar Sankaran
228425fdd593SJeykumar Sankaran if (disp_info->num_of_h_tiles > 1) {
228525fdd593SJeykumar Sankaran if (i == 0)
228625fdd593SJeykumar Sankaran phys_params.split_role = ENC_ROLE_MASTER;
228725fdd593SJeykumar Sankaran else
228825fdd593SJeykumar Sankaran phys_params.split_role = ENC_ROLE_SLAVE;
228925fdd593SJeykumar Sankaran } else {
229025fdd593SJeykumar Sankaran phys_params.split_role = ENC_ROLE_SOLO;
229125fdd593SJeykumar Sankaran }
229225fdd593SJeykumar Sankaran
229325fdd593SJeykumar Sankaran DPU_DEBUG("h_tile_instance %d = %d, split_role %d\n",
229425fdd593SJeykumar Sankaran i, controller_id, phys_params.split_role);
229525fdd593SJeykumar Sankaran
2296004be386SDmitry Baryshkov phys_params.hw_intf = dpu_encoder_get_intf(dpu_kms->catalog, &dpu_kms->rm,
22975a7a86bfSDmitry Baryshkov disp_info->intf_type,
229825fdd593SJeykumar Sankaran controller_id);
2299e02a559aSAbhinav Kumar
230004ada3d8SDmitry Baryshkov if (disp_info->intf_type == INTF_WB && controller_id < WB_MAX)
230104ada3d8SDmitry Baryshkov phys_params.hw_wb = dpu_rm_get_wb(&dpu_kms->rm, controller_id);
2302e02a559aSAbhinav Kumar
23038ea432b8SDmitry Baryshkov if (!phys_params.hw_intf && !phys_params.hw_wb) {
2304b4a624acSAbhinav Kumar DPU_ERROR_ENC(dpu_enc, "no intf or wb block assigned at idx: %d\n", i);
2305b4a624acSAbhinav Kumar ret = -EINVAL;
23068ea432b8SDmitry Baryshkov break;
230725fdd593SJeykumar Sankaran }
2308b4a624acSAbhinav Kumar
23098ea432b8SDmitry Baryshkov if (phys_params.hw_intf && phys_params.hw_wb) {
2310b4a624acSAbhinav Kumar DPU_ERROR_ENC(dpu_enc,
2311b4a624acSAbhinav Kumar "invalid phys both intf and wb block at idx: %d\n", i);
2312b4a624acSAbhinav Kumar ret = -EINVAL;
23138ea432b8SDmitry Baryshkov break;
2314b4a624acSAbhinav Kumar }
23158ea432b8SDmitry Baryshkov
2316e86721b0SDmitry Baryshkov ret = dpu_encoder_virt_add_phys_encs(dpu_kms->dev, disp_info,
23178ea432b8SDmitry Baryshkov dpu_enc, &phys_params);
23188ea432b8SDmitry Baryshkov if (ret) {
23198ea432b8SDmitry Baryshkov DPU_ERROR_ENC(dpu_enc, "failed to add phys encs\n");
23208ea432b8SDmitry Baryshkov break;
2321b4a624acSAbhinav Kumar }
2322b4a624acSAbhinav Kumar }
2323b4a624acSAbhinav Kumar
232425fdd593SJeykumar Sankaran mutex_unlock(&dpu_enc->enc_lock);
232525fdd593SJeykumar Sankaran
232625fdd593SJeykumar Sankaran return ret;
232725fdd593SJeykumar Sankaran }
232825fdd593SJeykumar Sankaran
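/*
 * Frame-done timeout handler: if a frame is still marked busy and the timeout
 * was not rearmed in the meantime, report DPU_ENCODER_FRAME_EVENT_ERROR to
 * the CRTC frame-event callback.
 */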
232925fdd593SJeykumar Sankaran static void dpu_encoder_frame_done_timeout(struct timer_list *t)
233025fdd593SJeykumar Sankaran {
233125fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
233225fdd593SJeykumar Sankaran frame_done_timer);
233325fdd593SJeykumar Sankaran struct drm_encoder *drm_enc = &dpu_enc->base;
233425fdd593SJeykumar Sankaran u32 event;
233525fdd593SJeykumar Sankaran
2336422ed755SDrew Davenport if (!drm_enc->dev) {
233725fdd593SJeykumar Sankaran DPU_ERROR("invalid parameters\n");
233825fdd593SJeykumar Sankaran return;
233925fdd593SJeykumar Sankaran }
234025fdd593SJeykumar Sankaran
234125fdd593SJeykumar Sankaran if (!dpu_enc->frame_busy_mask[0] || !dpu_enc->crtc_frame_event_cb) {
234225fdd593SJeykumar Sankaran DRM_DEBUG_KMS("id:%u invalid timeout frame_busy_mask=%lu\n",
234325fdd593SJeykumar Sankaran DRMID(drm_enc), dpu_enc->frame_busy_mask[0]);
234425fdd593SJeykumar Sankaran return;
234570df9610SSean Paul } else if (!atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
234625fdd593SJeykumar Sankaran DRM_DEBUG_KMS("id:%u invalid timeout\n", DRMID(drm_enc));
234725fdd593SJeykumar Sankaran return;
234825fdd593SJeykumar Sankaran }
234925fdd593SJeykumar Sankaran
235082828282SRob Clark DPU_ERROR_ENC_RATELIMITED(dpu_enc, "frame done timeout\n");
235125fdd593SJeykumar Sankaran
235225fdd593SJeykumar Sankaran event = DPU_ENCODER_FRAME_EVENT_ERROR;
235325fdd593SJeykumar Sankaran trace_dpu_enc_frame_done_timeout(DRMID(drm_enc), event);
235425fdd593SJeykumar Sankaran dpu_enc->crtc_frame_event_cb(dpu_enc->crtc_frame_event_cb_data, event);
235525fdd593SJeykumar Sankaran }
235625fdd593SJeykumar Sankaran
235725fdd593SJeykumar Sankaran static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs = {
2358764332bfSDmitry Baryshkov .atomic_mode_set = dpu_encoder_virt_atomic_mode_set,
2359c0cd12a5SVinod Polimera .atomic_disable = dpu_encoder_virt_atomic_disable,
2360c0cd12a5SVinod Polimera .atomic_enable = dpu_encoder_virt_atomic_enable,
236125fdd593SJeykumar Sankaran .atomic_check = dpu_encoder_virt_atomic_check,
236225fdd593SJeykumar Sankaran };
236325fdd593SJeykumar Sankaran
236425fdd593SJeykumar Sankaran static const struct drm_encoder_funcs dpu_encoder_funcs = {
236525fdd593SJeykumar Sankaran .destroy = dpu_encoder_destroy,
236625fdd593SJeykumar Sankaran .late_register = dpu_encoder_late_register,
236725fdd593SJeykumar Sankaran .early_unregister = dpu_encoder_early_unregister,
236825fdd593SJeykumar Sankaran };
236925fdd593SJeykumar Sankaran
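/**
 * dpu_encoder_init() - Allocate and initialize a virtual encoder
 * @dev: drm device
 * @drm_enc_mode: DRM_MODE_ENCODER_* type of the encoder to create
 * @disp_info: display information for the encoder
 *
 * Register the drm encoder, create its physical encoders from @disp_info and
 * initialize the frame-done timer and delayed off work.
 *
 * Return: encoder pointer on success, ERR_PTR() on failure
 */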
23709b930f14SDmitry Baryshkov struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
23719b930f14SDmitry Baryshkov int drm_enc_mode,
237225fdd593SJeykumar Sankaran struct msm_display_info *disp_info)
237325fdd593SJeykumar Sankaran {
237425fdd593SJeykumar Sankaran struct msm_drm_private *priv = dev->dev_private;
237525fdd593SJeykumar Sankaran struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
237625fdd593SJeykumar Sankaran struct drm_encoder *drm_enc = NULL;
237725fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = NULL;
237825fdd593SJeykumar Sankaran int ret = 0;
237925fdd593SJeykumar Sankaran
23809b930f14SDmitry Baryshkov dpu_enc = devm_kzalloc(dev->dev, sizeof(*dpu_enc), GFP_KERNEL);
23819b930f14SDmitry Baryshkov if (!dpu_enc)
23829b930f14SDmitry Baryshkov return ERR_PTR(-ENOMEM);
23839b930f14SDmitry Baryshkov
23849b930f14SDmitry Baryshkov ret = drm_encoder_init(dev, &dpu_enc->base, &dpu_encoder_funcs,
23859b930f14SDmitry Baryshkov drm_enc_mode, NULL);
23869b930f14SDmitry Baryshkov if (ret) {
23879b930f14SDmitry Baryshkov devm_kfree(dev->dev, dpu_enc);
23889b930f14SDmitry Baryshkov return ERR_PTR(ret);
23899b930f14SDmitry Baryshkov }
23909b930f14SDmitry Baryshkov
23919b930f14SDmitry Baryshkov drm_encoder_helper_add(&dpu_enc->base, &dpu_encoder_helper_funcs);
23929b930f14SDmitry Baryshkov
23939b930f14SDmitry Baryshkov spin_lock_init(&dpu_enc->enc_spinlock);
23949b930f14SDmitry Baryshkov dpu_enc->enabled = false;
23959b930f14SDmitry Baryshkov mutex_init(&dpu_enc->enc_lock);
23969b930f14SDmitry Baryshkov mutex_init(&dpu_enc->rc_lock);
239725fdd593SJeykumar Sankaran
239848a8ef72SJeykumar Sankaran ret = dpu_encoder_setup_display(dpu_enc, dpu_kms, disp_info);
239925fdd593SJeykumar Sankaran if (ret)
240025fdd593SJeykumar Sankaran goto fail;
240125fdd593SJeykumar Sankaran
240270df9610SSean Paul atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
240325fdd593SJeykumar Sankaran timer_setup(&dpu_enc->frame_done_timer,
240425fdd593SJeykumar Sankaran dpu_encoder_frame_done_timeout, 0);
240525fdd593SJeykumar Sankaran
2406fdcb8fe0SJessica Zhang if (disp_info->intf_type == INTF_DP)
2407757a2f36SKuogee Hsieh dpu_enc->wide_bus_en = msm_dp_wide_bus_available(
2408757a2f36SKuogee Hsieh priv->dp[disp_info->h_tile_instance[0]]);
240925fdd593SJeykumar Sankaran
2410e077fe75SJeykumar Sankaran INIT_DELAYED_WORK(&dpu_enc->delayed_off_work,
241125fdd593SJeykumar Sankaran dpu_encoder_off_work);
241225fdd593SJeykumar Sankaran dpu_enc->idle_timeout = IDLE_TIMEOUT;
241325fdd593SJeykumar Sankaran
241425fdd593SJeykumar Sankaran memcpy(&dpu_enc->disp_info, disp_info, sizeof(*disp_info));
241525fdd593SJeykumar Sankaran
241625fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc, "created\n");
241725fdd593SJeykumar Sankaran
24189b930f14SDmitry Baryshkov return &dpu_enc->base;
241925fdd593SJeykumar Sankaran
242025fdd593SJeykumar Sankaran fail:
242125fdd593SJeykumar Sankaran DPU_ERROR("failed to create encoder\n");
242225fdd593SJeykumar Sankaran if (drm_enc)
242325fdd593SJeykumar Sankaran dpu_encoder_destroy(drm_enc);
242425fdd593SJeykumar Sankaran
24259b930f14SDmitry Baryshkov return ERR_PTR(ret);
242625fdd593SJeykumar Sankaran }
242725fdd593SJeykumar Sankaran
2428801f49c8SDmitry Baryshkov /**
2429801f49c8SDmitry Baryshkov * dpu_encoder_wait_for_commit_done() - Wait for encoder to flush pending state
2430801f49c8SDmitry Baryshkov * @drm_enc: encoder pointer
2431801f49c8SDmitry Baryshkov *
2432801f49c8SDmitry Baryshkov * Wait for hardware to have flushed the current pending changes to hardware at
2433801f49c8SDmitry Baryshkov * a vblank or CTL_START. Physical encoders will map this differently depending
2434801f49c8SDmitry Baryshkov * on the type: vid mode -> vsync_irq, cmd mode -> CTL_START.
2435801f49c8SDmitry Baryshkov *
2436801f49c8SDmitry Baryshkov * Return: 0 on success, -EWOULDBLOCK if already signaled, error otherwise
2437801f49c8SDmitry Baryshkov */
2438801f49c8SDmitry Baryshkov int dpu_encoder_wait_for_commit_done(struct drm_encoder *drm_enc)
243925fdd593SJeykumar Sankaran {
244025fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = NULL;
244125fdd593SJeykumar Sankaran int i, ret = 0;
244225fdd593SJeykumar Sankaran
244325fdd593SJeykumar Sankaran if (!drm_enc) {
244425fdd593SJeykumar Sankaran DPU_ERROR("invalid encoder\n");
244525fdd593SJeykumar Sankaran return -EINVAL;
244625fdd593SJeykumar Sankaran }
244725fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(drm_enc);
244825fdd593SJeykumar Sankaran DPU_DEBUG_ENC(dpu_enc, "\n");
244925fdd593SJeykumar Sankaran
245025fdd593SJeykumar Sankaran for (i = 0; i < dpu_enc->num_phys_encs; i++) {
245125fdd593SJeykumar Sankaran struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
245225fdd593SJeykumar Sankaran
2453801f49c8SDmitry Baryshkov if (phys->ops.wait_for_commit_done) {
2454801f49c8SDmitry Baryshkov DPU_ATRACE_BEGIN("wait_for_commit_done");
2455801f49c8SDmitry Baryshkov ret = phys->ops.wait_for_commit_done(phys);
2456801f49c8SDmitry Baryshkov DPU_ATRACE_END("wait_for_commit_done");
24578e7ef27eSDmitry Baryshkov if (ret == -ETIMEDOUT && !dpu_enc->commit_done_timedout) {
24588e7ef27eSDmitry Baryshkov dpu_enc->commit_done_timedout = true;
24598e7ef27eSDmitry Baryshkov msm_disp_snapshot_state(drm_enc->dev);
24608e7ef27eSDmitry Baryshkov }
2461801f49c8SDmitry Baryshkov if (ret)
2462801f49c8SDmitry Baryshkov return ret;
2463801f49c8SDmitry Baryshkov }
246499c85c32Szhengbin }
246525fdd593SJeykumar Sankaran
2466801f49c8SDmitry Baryshkov return ret;
2467801f49c8SDmitry Baryshkov }
2468801f49c8SDmitry Baryshkov
2469801f49c8SDmitry Baryshkov /**
2470801f49c8SDmitry Baryshkov * dpu_encoder_wait_for_tx_complete() - Wait for encoder to transfer pixels to panel
2471801f49c8SDmitry Baryshkov * @drm_enc: encoder pointer
2472801f49c8SDmitry Baryshkov *
2473801f49c8SDmitry Baryshkov * Wait for the hardware to transfer all the pixels to the panel. Physical
2474801f49c8SDmitry Baryshkov * encoders will map this differently depending on the type: vid mode -> vsync_irq,
2475801f49c8SDmitry Baryshkov * cmd mode -> pp_done.
2476801f49c8SDmitry Baryshkov *
2477801f49c8SDmitry Baryshkov * Return: 0 on success, -EWOULDBLOCK if already signaled, error otherwise
2478801f49c8SDmitry Baryshkov */
2479801f49c8SDmitry Baryshkov int dpu_encoder_wait_for_tx_complete(struct drm_encoder *drm_enc)
2480801f49c8SDmitry Baryshkov {
2481801f49c8SDmitry Baryshkov struct dpu_encoder_virt *dpu_enc = NULL;
2482801f49c8SDmitry Baryshkov int i, ret = 0;
2483801f49c8SDmitry Baryshkov
2484801f49c8SDmitry Baryshkov if (!drm_enc) {
2485801f49c8SDmitry Baryshkov DPU_ERROR("invalid encoder\n");
2486801f49c8SDmitry Baryshkov return -EINVAL;
2487801f49c8SDmitry Baryshkov }
2488801f49c8SDmitry Baryshkov dpu_enc = to_dpu_encoder_virt(drm_enc);
2489801f49c8SDmitry Baryshkov DPU_DEBUG_ENC(dpu_enc, "\n");
2490801f49c8SDmitry Baryshkov
2491801f49c8SDmitry Baryshkov for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2492801f49c8SDmitry Baryshkov struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2493801f49c8SDmitry Baryshkov
2494801f49c8SDmitry Baryshkov if (phys->ops.wait_for_tx_complete) {
2495801f49c8SDmitry Baryshkov DPU_ATRACE_BEGIN("wait_for_tx_complete");
2496801f49c8SDmitry Baryshkov ret = phys->ops.wait_for_tx_complete(phys);
2497801f49c8SDmitry Baryshkov DPU_ATRACE_END("wait_for_tx_complete");
249825fdd593SJeykumar Sankaran if (ret)
249925fdd593SJeykumar Sankaran return ret;
250025fdd593SJeykumar Sankaran }
250125fdd593SJeykumar Sankaran }
250225fdd593SJeykumar Sankaran
250325fdd593SJeykumar Sankaran return ret;
250425fdd593SJeykumar Sankaran }
250525fdd593SJeykumar Sankaran
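/*
 * Return the interface mode of the current master physical encoder, falling
 * back to the first physical encoder when no master is set, or INTF_MODE_NONE.
 */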
250625fdd593SJeykumar Sankaran enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
250725fdd593SJeykumar Sankaran {
250825fdd593SJeykumar Sankaran struct dpu_encoder_virt *dpu_enc = NULL;
250925fdd593SJeykumar Sankaran
251025fdd593SJeykumar Sankaran if (!encoder) {
251125fdd593SJeykumar Sankaran DPU_ERROR("invalid encoder\n");
251225fdd593SJeykumar Sankaran return INTF_MODE_NONE;
251325fdd593SJeykumar Sankaran }
251425fdd593SJeykumar Sankaran dpu_enc = to_dpu_encoder_virt(encoder);
251525fdd593SJeykumar Sankaran
251625fdd593SJeykumar Sankaran if (dpu_enc->cur_master)
251725fdd593SJeykumar Sankaran return dpu_enc->cur_master->intf_mode;
251825fdd593SJeykumar Sankaran
2519b6fadcadSDrew Davenport if (dpu_enc->num_phys_encs)
2520b6fadcadSDrew Davenport return dpu_enc->phys_encs[0]->intf_mode;
252125fdd593SJeykumar Sankaran
252225fdd593SJeykumar Sankaran return INTF_MODE_NONE;
252325fdd593SJeykumar Sankaran }
252458dca981SVinod Koul
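/*
 * Return the mask of DSC blocks in use by the parent virtual encoder.
 */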
252558dca981SVinod Koul unsigned int dpu_encoder_helper_get_dsc(struct dpu_encoder_phys *phys_enc)
252658dca981SVinod Koul {
252758dca981SVinod Koul struct drm_encoder *encoder = phys_enc->parent;
252858dca981SVinod Koul struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(encoder);
252958dca981SVinod Koul
253058dca981SVinod Koul return dpu_enc->dsc_mask;
253158dca981SVinod Koul }
25327731ee3bSDmitry Baryshkov
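/*
 * Common initialization shared by all physical encoder types: copy the HW
 * block pointers and split role from the init params, mark every IRQ as
 * unassigned and zero the kickoff, vblank, vsync and underrun counters.
 */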
25337731ee3bSDmitry Baryshkov void dpu_encoder_phys_init(struct dpu_encoder_phys *phys_enc,
25347731ee3bSDmitry Baryshkov struct dpu_enc_phys_init_params *p)
25357731ee3bSDmitry Baryshkov {
25367731ee3bSDmitry Baryshkov int i;
25377731ee3bSDmitry Baryshkov
25387731ee3bSDmitry Baryshkov phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
25398ea432b8SDmitry Baryshkov phys_enc->hw_intf = p->hw_intf;
25408ea432b8SDmitry Baryshkov phys_enc->hw_wb = p->hw_wb;
25417731ee3bSDmitry Baryshkov phys_enc->parent = p->parent;
25427731ee3bSDmitry Baryshkov phys_enc->dpu_kms = p->dpu_kms;
25437731ee3bSDmitry Baryshkov phys_enc->split_role = p->split_role;
25447731ee3bSDmitry Baryshkov phys_enc->enc_spinlock = p->enc_spinlock;
25457731ee3bSDmitry Baryshkov phys_enc->enable_state = DPU_ENC_DISABLED;
25467731ee3bSDmitry Baryshkov
25477731ee3bSDmitry Baryshkov for (i = 0; i < ARRAY_SIZE(phys_enc->irq); i++)
25487731ee3bSDmitry Baryshkov phys_enc->irq[i] = -EINVAL;
25497731ee3bSDmitry Baryshkov
25507731ee3bSDmitry Baryshkov atomic_set(&phys_enc->vblank_refcount, 0);
25517731ee3bSDmitry Baryshkov atomic_set(&phys_enc->pending_kickoff_cnt, 0);
25527731ee3bSDmitry Baryshkov atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
25537731ee3bSDmitry Baryshkov
25547731ee3bSDmitry Baryshkov atomic_set(&phys_enc->vsync_cnt, 0);
25557731ee3bSDmitry Baryshkov atomic_set(&phys_enc->underrun_cnt, 0);
25567731ee3bSDmitry Baryshkov
25577731ee3bSDmitry Baryshkov init_waitqueue_head(&phys_enc->pending_kickoff_wq);
25587731ee3bSDmitry Baryshkov }