1 // SPDX-License-Identifier: GPL-2.0-only 2 /* 3 * Copyright (c) 2022 Qualcomm Innovation Center, Inc. All rights reserved. 4 */ 5 6 #define pr_fmt(fmt) "[drm:%s:%d] " fmt, __func__, __LINE__ 7 8 #include <linux/debugfs.h> 9 10 #include "dpu_encoder_phys.h" 11 #include "dpu_formats.h" 12 #include "dpu_hw_top.h" 13 #include "dpu_hw_wb.h" 14 #include "dpu_hw_lm.h" 15 #include "dpu_hw_blk.h" 16 #include "dpu_hw_merge3d.h" 17 #include "dpu_hw_interrupts.h" 18 #include "dpu_core_irq.h" 19 #include "dpu_vbif.h" 20 #include "dpu_crtc.h" 21 #include "disp/msm_disp_snapshot.h" 22 23 #define DEFAULT_MAX_WRITEBACK_WIDTH 2048 24 25 #define to_dpu_encoder_phys_wb(x) \ 26 container_of(x, struct dpu_encoder_phys_wb, base) 27 28 /** 29 * dpu_encoder_phys_wb_is_master - report wb always as master encoder 30 */ 31 static bool dpu_encoder_phys_wb_is_master(struct dpu_encoder_phys *phys_enc) 32 { 33 /* there is only one physical enc for dpu_writeback */ 34 return true; 35 } 36 37 /** 38 * dpu_encoder_phys_wb_set_ot_limit - set OT limit for writeback interface 39 * @phys_enc: Pointer to physical encoder 40 */ 41 static void dpu_encoder_phys_wb_set_ot_limit( 42 struct dpu_encoder_phys *phys_enc) 43 { 44 struct dpu_hw_wb *hw_wb = phys_enc->hw_wb; 45 struct dpu_vbif_set_ot_params ot_params; 46 47 memset(&ot_params, 0, sizeof(ot_params)); 48 ot_params.xin_id = hw_wb->caps->xin_id; 49 ot_params.num = hw_wb->idx - WB_0; 50 ot_params.width = phys_enc->cached_mode.hdisplay; 51 ot_params.height = phys_enc->cached_mode.vdisplay; 52 ot_params.is_wfd = true; 53 ot_params.frame_rate = drm_mode_vrefresh(&phys_enc->cached_mode); 54 ot_params.vbif_idx = hw_wb->caps->vbif_idx; 55 ot_params.clk_ctrl = hw_wb->caps->clk_ctrl; 56 ot_params.rd = false; 57 58 dpu_vbif_set_ot_limit(phys_enc->dpu_kms, &ot_params); 59 } 60 61 /** 62 * dpu_encoder_phys_wb_set_qos_remap - set QoS remapper for writeback 63 * @phys_enc: Pointer to physical encoder 64 */ 65 static void dpu_encoder_phys_wb_set_qos_remap( 66 struct 
dpu_encoder_phys *phys_enc) 67 { 68 struct dpu_hw_wb *hw_wb; 69 struct dpu_vbif_set_qos_params qos_params; 70 71 if (!phys_enc || !phys_enc->parent || !phys_enc->parent->crtc) { 72 DPU_ERROR("invalid arguments\n"); 73 return; 74 } 75 76 if (!phys_enc->hw_wb || !phys_enc->hw_wb->caps) { 77 DPU_ERROR("invalid writeback hardware\n"); 78 return; 79 } 80 81 hw_wb = phys_enc->hw_wb; 82 83 memset(&qos_params, 0, sizeof(qos_params)); 84 qos_params.vbif_idx = hw_wb->caps->vbif_idx; 85 qos_params.xin_id = hw_wb->caps->xin_id; 86 qos_params.clk_ctrl = hw_wb->caps->clk_ctrl; 87 qos_params.num = hw_wb->idx - WB_0; 88 qos_params.is_rt = false; 89 90 DPU_DEBUG("[qos_remap] wb:%d vbif:%d xin:%d is_rt:%d\n", 91 qos_params.num, 92 qos_params.vbif_idx, 93 qos_params.xin_id, qos_params.is_rt); 94 95 dpu_vbif_set_qos_remap(phys_enc->dpu_kms, &qos_params); 96 } 97 98 /** 99 * dpu_encoder_phys_wb_set_qos - set QoS/danger/safe LUTs for writeback 100 * @phys_enc: Pointer to physical encoder 101 */ 102 static void dpu_encoder_phys_wb_set_qos(struct dpu_encoder_phys *phys_enc) 103 { 104 struct dpu_hw_wb *hw_wb; 105 struct dpu_hw_wb_qos_cfg qos_cfg; 106 struct dpu_mdss_cfg *catalog; 107 struct dpu_qos_lut_tbl *qos_lut_tb; 108 109 if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) { 110 DPU_ERROR("invalid parameter(s)\n"); 111 return; 112 } 113 114 catalog = phys_enc->dpu_kms->catalog; 115 116 hw_wb = phys_enc->hw_wb; 117 118 memset(&qos_cfg, 0, sizeof(struct dpu_hw_wb_qos_cfg)); 119 qos_cfg.danger_safe_en = true; 120 qos_cfg.danger_lut = 121 catalog->perf.danger_lut_tbl[DPU_QOS_LUT_USAGE_NRT]; 122 123 qos_cfg.safe_lut = catalog->perf.safe_lut_tbl[DPU_QOS_LUT_USAGE_NRT]; 124 125 qos_lut_tb = &catalog->perf.qos_lut_tbl[DPU_QOS_LUT_USAGE_NRT]; 126 qos_cfg.creq_lut = _dpu_hw_get_qos_lut(qos_lut_tb, 0); 127 128 if (hw_wb->ops.setup_qos_lut) 129 hw_wb->ops.setup_qos_lut(hw_wb, &qos_cfg); 130 } 131 132 /** 133 * dpu_encoder_phys_wb_setup_fb - setup output framebuffer 134 * 
@phys_enc: Pointer to physical encoder 135 * @fb: Pointer to output framebuffer 136 * @wb_roi: Pointer to output region of interest 137 */ 138 static void dpu_encoder_phys_wb_setup_fb(struct dpu_encoder_phys *phys_enc, 139 struct drm_framebuffer *fb) 140 { 141 struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc); 142 struct dpu_hw_wb *hw_wb; 143 struct dpu_hw_wb_cfg *wb_cfg; 144 struct dpu_hw_cdp_cfg cdp_cfg; 145 146 if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) { 147 DPU_ERROR("invalid encoder\n"); 148 return; 149 } 150 151 hw_wb = phys_enc->hw_wb; 152 wb_cfg = &wb_enc->wb_cfg; 153 154 wb_cfg->intf_mode = phys_enc->intf_mode; 155 wb_cfg->roi.x1 = 0; 156 wb_cfg->roi.x2 = phys_enc->cached_mode.hdisplay; 157 wb_cfg->roi.y1 = 0; 158 wb_cfg->roi.y2 = phys_enc->cached_mode.vdisplay; 159 160 if (hw_wb->ops.setup_roi) 161 hw_wb->ops.setup_roi(hw_wb, wb_cfg); 162 163 if (hw_wb->ops.setup_outformat) 164 hw_wb->ops.setup_outformat(hw_wb, wb_cfg); 165 166 if (hw_wb->ops.setup_cdp) { 167 memset(&cdp_cfg, 0, sizeof(struct dpu_hw_cdp_cfg)); 168 169 cdp_cfg.enable = phys_enc->dpu_kms->catalog->perf.cdp_cfg 170 [DPU_PERF_CDP_USAGE_NRT].wr_enable; 171 cdp_cfg.ubwc_meta_enable = 172 DPU_FORMAT_IS_UBWC(wb_cfg->dest.format); 173 cdp_cfg.tile_amortize_enable = 174 DPU_FORMAT_IS_UBWC(wb_cfg->dest.format) || 175 DPU_FORMAT_IS_TILE(wb_cfg->dest.format); 176 cdp_cfg.preload_ahead = DPU_WB_CDP_PRELOAD_AHEAD_64; 177 178 hw_wb->ops.setup_cdp(hw_wb, &cdp_cfg); 179 } 180 181 if (hw_wb->ops.setup_outaddress) 182 hw_wb->ops.setup_outaddress(hw_wb, wb_cfg); 183 } 184 185 /** 186 * dpu_encoder_phys_wb_setup_cdp - setup chroma down prefetch block 187 * @phys_enc:Pointer to physical encoder 188 */ 189 static void dpu_encoder_phys_wb_setup_cdp(struct dpu_encoder_phys *phys_enc) 190 { 191 struct dpu_hw_wb *hw_wb; 192 struct dpu_hw_ctl *ctl; 193 194 if (!phys_enc) { 195 DPU_ERROR("invalid encoder\n"); 196 return; 197 } 198 199 hw_wb = phys_enc->hw_wb; 200 ctl = 
phys_enc->hw_ctl; 201 202 if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) && 203 (phys_enc->hw_ctl && 204 phys_enc->hw_ctl->ops.setup_intf_cfg)) { 205 struct dpu_hw_intf_cfg intf_cfg = {0}; 206 struct dpu_hw_pingpong *hw_pp = phys_enc->hw_pp; 207 enum dpu_3d_blend_mode mode_3d; 208 209 mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc); 210 211 intf_cfg.intf = DPU_NONE; 212 intf_cfg.wb = hw_wb->idx; 213 214 if (mode_3d && hw_pp && hw_pp->merge_3d) 215 intf_cfg.merge_3d = hw_pp->merge_3d->idx; 216 217 if (phys_enc->hw_pp->merge_3d && phys_enc->hw_pp->merge_3d->ops.setup_3d_mode) 218 phys_enc->hw_pp->merge_3d->ops.setup_3d_mode(phys_enc->hw_pp->merge_3d, 219 mode_3d); 220 221 /* setup which pp blk will connect to this wb */ 222 if (hw_pp && phys_enc->hw_wb->ops.bind_pingpong_blk) 223 phys_enc->hw_wb->ops.bind_pingpong_blk(phys_enc->hw_wb, true, 224 phys_enc->hw_pp->idx); 225 226 phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg); 227 } else if (phys_enc->hw_ctl && phys_enc->hw_ctl->ops.setup_intf_cfg) { 228 struct dpu_hw_intf_cfg intf_cfg = {0}; 229 230 intf_cfg.intf = DPU_NONE; 231 intf_cfg.wb = hw_wb->idx; 232 intf_cfg.mode_3d = 233 dpu_encoder_helper_get_3d_blend_mode(phys_enc); 234 phys_enc->hw_ctl->ops.setup_intf_cfg(phys_enc->hw_ctl, &intf_cfg); 235 } 236 } 237 238 /** 239 * dpu_encoder_phys_wb_atomic_check - verify and fixup given atomic states 240 * @phys_enc: Pointer to physical encoder 241 * @crtc_state: Pointer to CRTC atomic state 242 * @conn_state: Pointer to connector atomic state 243 */ 244 static int dpu_encoder_phys_wb_atomic_check( 245 struct dpu_encoder_phys *phys_enc, 246 struct drm_crtc_state *crtc_state, 247 struct drm_connector_state *conn_state) 248 { 249 struct drm_framebuffer *fb; 250 const struct drm_display_mode *mode; 251 252 DPU_DEBUG("[atomic_check:%d, \"%s\",%d,%d]\n", 253 phys_enc->wb_idx, mode->name, mode->hdisplay, mode->vdisplay); 254 255 if (!conn_state->writeback_job || !conn_state->writeback_job->fb) 
256 return 0; 257 258 fb = conn_state->writeback_job->fb; 259 mode = &crtc_state->mode; 260 261 if (!conn_state || !conn_state->connector) { 262 DPU_ERROR("invalid connector state\n"); 263 return -EINVAL; 264 } else if (conn_state->connector->status != 265 connector_status_connected) { 266 DPU_ERROR("connector not connected %d\n", 267 conn_state->connector->status); 268 return -EINVAL; 269 } 270 271 DPU_DEBUG("[fb_id:%u][fb:%u,%u]\n", fb->base.id, 272 fb->width, fb->height); 273 274 if (fb->width != mode->hdisplay) { 275 DPU_ERROR("invalid fb w=%d, mode w=%d\n", fb->width, 276 mode->hdisplay); 277 return -EINVAL; 278 } else if (fb->height != mode->vdisplay) { 279 DPU_ERROR("invalid fb h=%d, mode h=%d\n", fb->height, 280 mode->vdisplay); 281 return -EINVAL; 282 } else if (fb->width > DEFAULT_MAX_WRITEBACK_WIDTH) { 283 DPU_ERROR("invalid fb w=%d, maxlinewidth=%u\n", 284 fb->width, DEFAULT_MAX_WRITEBACK_WIDTH); 285 return -EINVAL; 286 } 287 288 return 0; 289 } 290 291 292 /** 293 * _dpu_encoder_phys_wb_update_flush - flush hardware update 294 * @phys_enc: Pointer to physical encoder 295 */ 296 static void _dpu_encoder_phys_wb_update_flush(struct dpu_encoder_phys *phys_enc) 297 { 298 struct dpu_hw_wb *hw_wb; 299 struct dpu_hw_ctl *hw_ctl; 300 struct dpu_hw_pingpong *hw_pp; 301 u32 pending_flush = 0; 302 303 if (!phys_enc) 304 return; 305 306 hw_wb = phys_enc->hw_wb; 307 hw_pp = phys_enc->hw_pp; 308 hw_ctl = phys_enc->hw_ctl; 309 310 DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0); 311 312 if (!hw_ctl) { 313 DPU_DEBUG("[wb:%d] no ctl assigned\n", hw_wb->idx - WB_0); 314 return; 315 } 316 317 if (hw_ctl->ops.update_pending_flush_wb) 318 hw_ctl->ops.update_pending_flush_wb(hw_ctl, hw_wb->idx); 319 320 if (hw_ctl->ops.update_pending_flush_merge_3d && hw_pp && hw_pp->merge_3d) 321 hw_ctl->ops.update_pending_flush_merge_3d(hw_ctl, 322 hw_pp->merge_3d->idx); 323 324 if (hw_ctl->ops.get_pending_flush) 325 pending_flush = hw_ctl->ops.get_pending_flush(hw_ctl); 326 327 
	DPU_DEBUG("Pending flush mask for CTL_%d is 0x%x, WB %d\n",
			hw_ctl->idx - CTL_0, pending_flush,
			hw_wb->idx - WB_0);
}

/**
 * dpu_encoder_phys_wb_setup - setup writeback encoder
 * @phys_enc: Pointer to physical encoder
 *
 * Programs OT limit, QoS remapper/LUTs, output framebuffer config and
 * the CDP/CTL interface path for the cached display mode.
 */
static void dpu_encoder_phys_wb_setup(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
	struct drm_display_mode mode = phys_enc->cached_mode;
	struct drm_framebuffer *fb = NULL;

	DPU_DEBUG("[mode_set:%d, \"%s\",%d,%d]\n",
			hw_wb->idx - WB_0, mode.name,
			mode.hdisplay, mode.vdisplay);

	dpu_encoder_phys_wb_set_ot_limit(phys_enc);

	dpu_encoder_phys_wb_set_qos_remap(phys_enc);

	dpu_encoder_phys_wb_set_qos(phys_enc);

	/* fb is unused here; the wb config comes from wb_enc->wb_cfg */
	dpu_encoder_phys_wb_setup_fb(phys_enc, fb);

	dpu_encoder_phys_wb_setup_cdp(phys_enc);

}

/*
 * _dpu_encoder_phys_wb_frame_done_helper - handle WB-done completion
 * @arg: opaque pointer to the dpu_encoder_phys (IRQ callback argument)
 *
 * Notifies the parent encoder of the frame-done and vblank events, drops
 * one pending kickoff, signals the writeback job completion and wakes
 * any thread waiting in wait_for_commit_done.
 */
static void _dpu_encoder_phys_wb_frame_done_helper(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);

	struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
	unsigned long lock_flags;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(phys_enc->parent,
				phys_enc, event);

	if (phys_enc->parent_ops->handle_vblank_virt)
		phys_enc->parent_ops->handle_vblank_virt(phys_enc->parent,
				phys_enc);

	/* decrement under the encoder spinlock; never goes below zero */
	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	if (wb_enc->wb_conn)
		drm_writeback_signal_completion(wb_enc->wb_conn, 0);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
}

/**
 * dpu_encoder_phys_wb_done_irq - writeback interrupt handler
 * @arg: Pointer to writeback encoder
 * @irq_idx: interrupt index
 */
static void dpu_encoder_phys_wb_done_irq(void *arg, int irq_idx)
{
	_dpu_encoder_phys_wb_frame_done_helper(arg);
}

/**
 * dpu_encoder_phys_wb_irq_ctrl - irq control of WB
 * @phys: Pointer to physical encoder
 * @enable: indicates enable or disable interrupts
 *
 * Refcounted: the callback is registered on the first enable and
 * unregistered on the last disable.
 */
static void dpu_encoder_phys_wb_irq_ctrl(
		struct dpu_encoder_phys *phys, bool enable)
{

	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys);

	if (enable && atomic_inc_return(&wb_enc->wbirq_refcount) == 1)
		dpu_core_irq_register_callback(phys->dpu_kms,
				phys->irq[INTR_IDX_WB_DONE], dpu_encoder_phys_wb_done_irq, phys);
	else if (!enable &&
			atomic_dec_return(&wb_enc->wbirq_refcount) == 0)
		dpu_core_irq_unregister_callback(phys->dpu_kms, phys->irq[INTR_IDX_WB_DONE]);
}

/*
 * dpu_encoder_phys_wb_atomic_mode_set - latch the WB-done IRQ index for
 * the mode being set; crtc_state/conn_state are unused here.
 */
static void dpu_encoder_phys_wb_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{

	phys_enc->irq[INTR_IDX_WB_DONE] = phys_enc->hw_wb->caps->intr_wb_done;
}

/*
 * _dpu_encoder_phys_wb_handle_wbdone_timeout - recover from a missed
 * WB-done interrupt: snapshot state on first occurrence, drop the pending
 * kickoff, request a CTL reset and report a frame error upward.
 */
static void _dpu_encoder_phys_wb_handle_wbdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;

	wb_enc->wb_done_timeout_cnt++;

	/* capture a device snapshot only on the first timeout */
	if (wb_enc->wb_done_timeout_cnt == 1)
		msm_disp_snapshot_state(phys_enc->parent->dev);

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	if (wb_enc->wb_conn)
		drm_writeback_signal_completion(wb_enc->wb_conn, 0);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc, frame_event);
}

/**
dpu_encoder_phys_wb_wait_for_commit_done - wait until request is committed 453 * @phys_enc: Pointer to physical encoder 454 */ 455 static int dpu_encoder_phys_wb_wait_for_commit_done( 456 struct dpu_encoder_phys *phys_enc) 457 { 458 unsigned long ret; 459 struct dpu_encoder_wait_info wait_info; 460 struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc); 461 462 wait_info.wq = &phys_enc->pending_kickoff_wq; 463 wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt; 464 wait_info.timeout_ms = KICKOFF_TIMEOUT_MS; 465 466 ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_WB_DONE, 467 dpu_encoder_phys_wb_done_irq, &wait_info); 468 if (ret == -ETIMEDOUT) 469 _dpu_encoder_phys_wb_handle_wbdone_timeout(phys_enc); 470 else if (!ret) 471 wb_enc->wb_done_timeout_cnt = 0; 472 473 return ret; 474 } 475 476 /** 477 * dpu_encoder_phys_wb_prepare_for_kickoff - pre-kickoff processing 478 * @phys_enc: Pointer to physical encoder 479 * Returns: Zero on success 480 */ 481 static void dpu_encoder_phys_wb_prepare_for_kickoff( 482 struct dpu_encoder_phys *phys_enc) 483 { 484 struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc); 485 struct drm_connector *drm_conn; 486 struct drm_connector_state *state; 487 488 DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0); 489 490 if (!wb_enc->wb_conn || !wb_enc->wb_job) { 491 DPU_ERROR("invalid wb_conn or wb_job\n"); 492 return; 493 } 494 495 drm_conn = &wb_enc->wb_conn->base; 496 state = drm_conn->state; 497 498 if (wb_enc->wb_conn && wb_enc->wb_job) 499 drm_writeback_queue_job(wb_enc->wb_conn, state); 500 501 dpu_encoder_phys_wb_setup(phys_enc); 502 503 _dpu_encoder_phys_wb_update_flush(phys_enc); 504 } 505 506 /** 507 * dpu_encoder_phys_wb_needs_single_flush - trigger flush processing 508 * @phys_enc: Pointer to physical encoder 509 */ 510 static bool dpu_encoder_phys_wb_needs_single_flush(struct dpu_encoder_phys *phys_enc) 511 { 512 DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0); 513 return false; 514 
} 515 516 /** 517 * dpu_encoder_phys_wb_handle_post_kickoff - post-kickoff processing 518 * @phys_enc: Pointer to physical encoder 519 */ 520 static void dpu_encoder_phys_wb_handle_post_kickoff( 521 struct dpu_encoder_phys *phys_enc) 522 { 523 DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0); 524 525 } 526 527 /** 528 * dpu_encoder_phys_wb_enable - enable writeback encoder 529 * @phys_enc: Pointer to physical encoder 530 */ 531 static void dpu_encoder_phys_wb_enable(struct dpu_encoder_phys *phys_enc) 532 { 533 DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0); 534 phys_enc->enable_state = DPU_ENC_ENABLED; 535 } 536 /** 537 * dpu_encoder_phys_wb_disable - disable writeback encoder 538 * @phys_enc: Pointer to physical encoder 539 */ 540 static void dpu_encoder_phys_wb_disable(struct dpu_encoder_phys *phys_enc) 541 { 542 struct dpu_hw_wb *hw_wb = phys_enc->hw_wb; 543 struct dpu_hw_ctl *hw_ctl = phys_enc->hw_ctl; 544 545 DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0); 546 547 if (phys_enc->enable_state == DPU_ENC_DISABLED) { 548 DPU_ERROR("encoder is already disabled\n"); 549 return; 550 } 551 552 /* reset h/w before final flush */ 553 if (phys_enc->hw_ctl->ops.clear_pending_flush) 554 phys_enc->hw_ctl->ops.clear_pending_flush(phys_enc->hw_ctl); 555 556 /* 557 * New CTL reset sequence from 5.0 MDP onwards. 558 * If has_3d_merge_reset is not set, legacy reset 559 * sequence is executed. 560 * 561 * Legacy reset sequence has not been implemented yet. 562 * Any target earlier than SM8150 will need it and when 563 * WB support is added to those targets will need to add 564 * the legacy teardown sequence as well. 
565 */ 566 if (hw_ctl->caps->features & BIT(DPU_CTL_ACTIVE_CFG)) 567 dpu_encoder_helper_phys_cleanup(phys_enc); 568 569 phys_enc->enable_state = DPU_ENC_DISABLED; 570 } 571 572 /** 573 * dpu_encoder_phys_wb_destroy - destroy writeback encoder 574 * @phys_enc: Pointer to physical encoder 575 */ 576 static void dpu_encoder_phys_wb_destroy(struct dpu_encoder_phys *phys_enc) 577 { 578 DPU_DEBUG("[wb:%d]\n", phys_enc->wb_idx - WB_0); 579 580 if (!phys_enc) 581 return; 582 583 kfree(phys_enc); 584 } 585 586 static void dpu_encoder_phys_wb_prepare_wb_job(struct dpu_encoder_phys *phys_enc, 587 struct drm_writeback_job *job) 588 { 589 const struct msm_format *format; 590 struct msm_gem_address_space *aspace; 591 struct dpu_hw_wb_cfg *wb_cfg; 592 int ret; 593 struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc); 594 595 if (!job->fb) 596 return; 597 598 wb_enc->wb_job = job; 599 wb_enc->wb_conn = job->connector; 600 aspace = phys_enc->dpu_kms->base.aspace; 601 602 wb_cfg = &wb_enc->wb_cfg; 603 604 memset(wb_cfg, 0, sizeof(struct dpu_hw_wb_cfg)); 605 606 ret = msm_framebuffer_prepare(job->fb, aspace, false); 607 if (ret) { 608 DPU_ERROR("prep fb failed, %d\n", ret); 609 return; 610 } 611 612 format = msm_framebuffer_format(job->fb); 613 614 wb_cfg->dest.format = dpu_get_dpu_format_ext( 615 format->pixel_format, job->fb->modifier); 616 if (!wb_cfg->dest.format) { 617 /* this error should be detected during atomic_check */ 618 DPU_ERROR("failed to get format %x\n", format->pixel_format); 619 return; 620 } 621 622 ret = dpu_format_populate_layout(aspace, job->fb, &wb_cfg->dest); 623 if (ret) { 624 DPU_DEBUG("failed to populate layout %d\n", ret); 625 return; 626 } 627 628 wb_cfg->dest.width = job->fb->width; 629 wb_cfg->dest.height = job->fb->height; 630 wb_cfg->dest.num_planes = wb_cfg->dest.format->num_planes; 631 632 if ((wb_cfg->dest.format->fetch_planes == DPU_PLANE_PLANAR) && 633 (wb_cfg->dest.format->element[0] == C1_B_Cb)) 634 
swap(wb_cfg->dest.plane_addr[1], wb_cfg->dest.plane_addr[2]); 635 636 DPU_DEBUG("[fb_offset:%8.8x,%8.8x,%8.8x,%8.8x]\n", 637 wb_cfg->dest.plane_addr[0], wb_cfg->dest.plane_addr[1], 638 wb_cfg->dest.plane_addr[2], wb_cfg->dest.plane_addr[3]); 639 640 DPU_DEBUG("[fb_stride:%8.8x,%8.8x,%8.8x,%8.8x]\n", 641 wb_cfg->dest.plane_pitch[0], wb_cfg->dest.plane_pitch[1], 642 wb_cfg->dest.plane_pitch[2], wb_cfg->dest.plane_pitch[3]); 643 } 644 645 static void dpu_encoder_phys_wb_cleanup_wb_job(struct dpu_encoder_phys *phys_enc, 646 struct drm_writeback_job *job) 647 { 648 struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc); 649 struct msm_gem_address_space *aspace; 650 651 if (!job->fb) 652 return; 653 654 aspace = phys_enc->dpu_kms->base.aspace; 655 656 msm_framebuffer_cleanup(job->fb, aspace, false); 657 wb_enc->wb_job = NULL; 658 wb_enc->wb_conn = NULL; 659 } 660 661 static bool dpu_encoder_phys_wb_is_valid_for_commit(struct dpu_encoder_phys *phys_enc) 662 { 663 struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc); 664 665 if (wb_enc->wb_job) 666 return true; 667 else 668 return false; 669 } 670 671 /** 672 * dpu_encoder_phys_wb_init_ops - initialize writeback operations 673 * @ops: Pointer to encoder operation table 674 */ 675 static void dpu_encoder_phys_wb_init_ops(struct dpu_encoder_phys_ops *ops) 676 { 677 ops->is_master = dpu_encoder_phys_wb_is_master; 678 ops->atomic_mode_set = dpu_encoder_phys_wb_atomic_mode_set; 679 ops->enable = dpu_encoder_phys_wb_enable; 680 ops->disable = dpu_encoder_phys_wb_disable; 681 ops->destroy = dpu_encoder_phys_wb_destroy; 682 ops->atomic_check = dpu_encoder_phys_wb_atomic_check; 683 ops->wait_for_commit_done = dpu_encoder_phys_wb_wait_for_commit_done; 684 ops->prepare_for_kickoff = dpu_encoder_phys_wb_prepare_for_kickoff; 685 ops->handle_post_kickoff = dpu_encoder_phys_wb_handle_post_kickoff; 686 ops->needs_single_flush = dpu_encoder_phys_wb_needs_single_flush; 687 ops->trigger_start = 
dpu_encoder_helper_trigger_start; 688 ops->prepare_wb_job = dpu_encoder_phys_wb_prepare_wb_job; 689 ops->cleanup_wb_job = dpu_encoder_phys_wb_cleanup_wb_job; 690 ops->irq_control = dpu_encoder_phys_wb_irq_ctrl; 691 ops->is_valid_for_commit = dpu_encoder_phys_wb_is_valid_for_commit; 692 693 } 694 695 /** 696 * dpu_encoder_phys_wb_init - initialize writeback encoder 697 * @init: Pointer to init info structure with initialization params 698 */ 699 struct dpu_encoder_phys *dpu_encoder_phys_wb_init( 700 struct dpu_enc_phys_init_params *p) 701 { 702 struct dpu_encoder_phys *phys_enc = NULL; 703 struct dpu_encoder_phys_wb *wb_enc = NULL; 704 int ret = 0; 705 int i; 706 707 DPU_DEBUG("\n"); 708 709 if (!p || !p->parent) { 710 DPU_ERROR("invalid params\n"); 711 ret = -EINVAL; 712 goto fail_alloc; 713 } 714 715 wb_enc = kzalloc(sizeof(*wb_enc), GFP_KERNEL); 716 if (!wb_enc) { 717 DPU_ERROR("failed to allocate wb phys_enc enc\n"); 718 ret = -ENOMEM; 719 goto fail_alloc; 720 } 721 722 phys_enc = &wb_enc->base; 723 phys_enc->hw_mdptop = p->dpu_kms->hw_mdp; 724 phys_enc->wb_idx = p->wb_idx; 725 726 dpu_encoder_phys_wb_init_ops(&phys_enc->ops); 727 phys_enc->parent = p->parent; 728 phys_enc->parent_ops = p->parent_ops; 729 phys_enc->dpu_kms = p->dpu_kms; 730 phys_enc->split_role = p->split_role; 731 phys_enc->intf_mode = INTF_MODE_WB_LINE; 732 phys_enc->wb_idx = p->wb_idx; 733 phys_enc->enc_spinlock = p->enc_spinlock; 734 735 atomic_set(&wb_enc->wbirq_refcount, 0); 736 737 for (i = 0; i < ARRAY_SIZE(phys_enc->irq); i++) 738 phys_enc->irq[i] = -EINVAL; 739 740 atomic_set(&phys_enc->pending_kickoff_cnt, 0); 741 atomic_set(&phys_enc->vblank_refcount, 0); 742 wb_enc->wb_done_timeout_cnt = 0; 743 744 init_waitqueue_head(&phys_enc->pending_kickoff_wq); 745 phys_enc->enable_state = DPU_ENC_DISABLED; 746 747 DPU_DEBUG("Created dpu_encoder_phys for wb %d\n", 748 phys_enc->wb_idx); 749 750 return phys_enc; 751 752 fail_alloc: 753 return ERR_PTR(ret); 754 } 755