// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015 MediaTek Inc.
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/iopoll.h>
#include <linux/irq.h>
#include <linux/of.h>
#include <linux/of_platform.h>
#include <linux/phy/phy.h>
#include <linux/platform_device.h>

#include <video/mipi_display.h>
#include <video/videomode.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_of.h>
#include <drm/drm_panel.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>

#include "mtk_drm_ddp_comp.h"

#define DSI_START		0x00

#define DSI_INTEN		0x08

#define DSI_INTSTA		0x0c
#define LPRX_RD_RDY_INT_FLAG		BIT(0)
#define CMD_DONE_INT_FLAG		BIT(1)
#define TE_RDY_INT_FLAG			BIT(2)
#define VM_DONE_INT_FLAG		BIT(3)
#define EXT_TE_RDY_INT_FLAG		BIT(4)
#define DSI_BUSY			BIT(31)

#define DSI_CON_CTRL		0x10
#define DSI_RESET			BIT(0)
#define DSI_EN				BIT(1)
#define DPHY_RESET			BIT(2)

#define DSI_MODE_CTRL		0x14
#define MODE				(3)
#define CMD_MODE			0
#define SYNC_PULSE_MODE			1
#define SYNC_EVENT_MODE			2
#define BURST_MODE			3
#define FRM_MODE			BIT(16)
#define MIX_MODE			BIT(17)

#define DSI_TXRX_CTRL		0x18
#define VC_NUM				BIT(1)
#define LANE_NUM			(0xf << 2)
#define DIS_EOT				BIT(6)
#define NULL_EN				BIT(7)
#define TE_FREERUN			BIT(8)
#define EXT_TE_EN			BIT(9)
#define EXT_TE_EDGE			BIT(10)
#define MAX_RTN_SIZE			(0xf << 12)
#define HSTX_CKLP_EN			BIT(16)

#define DSI_PSCTRL		0x1c
#define DSI_PS_WC			0x3fff
#define DSI_PS_SEL			(3 << 16)
#define PACKED_PS_16BIT_RGB565		(0 << 16)
#define LOOSELY_PS_18BIT_RGB666		(1 << 16)
#define PACKED_PS_18BIT_RGB666		(2 << 16)
#define PACKED_PS_24BIT_RGB888		(3 << 16)

#define DSI_VSA_NL		0x20
#define DSI_VBP_NL		0x24
#define DSI_VFP_NL		0x28
#define DSI_VACT_NL		0x2C
#define DSI_SIZE_CON		0x38
#define DSI_HSA_WC		0x50
#define DSI_HBP_WC		0x54
#define DSI_HFP_WC		0x58

#define DSI_CMDQ_SIZE		0x60
#define CMDQ_SIZE			0x3f

#define DSI_HSTX_CKL_WC		0x64

#define DSI_RX_DATA0		0x74
#define DSI_RX_DATA1		0x78
#define DSI_RX_DATA2		0x7c
#define DSI_RX_DATA3		0x80

#define DSI_RACK		0x84
#define RACK				BIT(0)

#define DSI_PHY_LCCON		0x104
#define LC_HS_TX_EN			BIT(0)
#define LC_ULPM_EN			BIT(1)
#define LC_WAKEUP_EN			BIT(2)

#define DSI_PHY_LD0CON		0x108
#define LD0_HS_TX_EN			BIT(0)
#define LD0_ULPM_EN			BIT(1)
#define LD0_WAKEUP_EN			BIT(2)

#define DSI_PHY_TIMECON0	0x110
#define LPX				(0xff << 0)
#define HS_PREP				(0xff << 8)
#define HS_ZERO				(0xff << 16)
#define HS_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON1	0x114
#define TA_GO				(0xff << 0)
#define TA_SURE				(0xff << 8)
#define TA_GET				(0xff << 16)
#define DA_HS_EXIT			(0xff << 24)

#define DSI_PHY_TIMECON2	0x118
#define CONT_DET			(0xff << 0)
#define CLK_ZERO			(0xff << 16)
#define CLK_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON3	0x11c
#define CLK_HS_PREP			(0xff << 0)
#define CLK_HS_POST			(0xff << 8)
#define CLK_HS_EXIT			(0xff << 16)

#define DSI_VM_CMD_CON		0x130
#define VM_CMD_EN			BIT(0)
#define TS_VFP_EN			BIT(5)

#define DSI_SHADOW_DEBUG	0x190U
#define FORCE_COMMIT			BIT(0)
#define BYPASS_SHADOW			BIT(1)
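
/*
 * Layout of one 32-bit slot in the generic command queue that
 * mtk_dsi_cmdq() fills at driver_data->reg_cmdq_off: a config byte
 * (packet type / BTA request), the DSI data ID, and up to two payload
 * bytes.
 */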
#define CONFIG				(0xff << 0)
#define SHORT_PACKET			0
#define LONG_PACKET			2
#define BTA				BIT(2)
#define DATA_ID				(0xff << 8)
#define DATA_0				(0xff << 16)
#define DATA_1				(0xff << 24)

#define NS_TO_CYCLE(n, c)    ((n) / (c) + (((n) % (c)) ? 1 : 0))

#define MTK_DSI_HOST_IS_READ(type) \
	((type == MIPI_DSI_GENERIC_READ_REQUEST_0_PARAM) || \
	(type == MIPI_DSI_GENERIC_READ_REQUEST_1_PARAM) || \
	(type == MIPI_DSI_GENERIC_READ_REQUEST_2_PARAM) || \
	(type == MIPI_DSI_DCS_READ))

struct mtk_phy_timing {
	u32 lpx;
	u32 da_hs_prepare;
	u32 da_hs_zero;
	u32 da_hs_trail;

	u32 ta_go;
	u32 ta_sure;
	u32 ta_get;
	u32 da_hs_exit;

	u32 clk_hs_zero;
	u32 clk_hs_trail;

	u32 clk_hs_prepare;
	u32 clk_hs_post;
	u32 clk_hs_exit;
};

struct phy;

struct mtk_dsi_driver_data {
	const u32 reg_cmdq_off;
	bool has_shadow_ctl;
	bool has_size_ctl;
};

struct mtk_dsi {
	struct mtk_ddp_comp ddp_comp;
	struct device *dev;
	struct mipi_dsi_host host;
	struct drm_encoder encoder;
	struct drm_connector conn;
	struct drm_panel *panel;
	struct drm_bridge *bridge;
	struct phy *phy;

	void __iomem *regs;

	struct clk *engine_clk;
	struct clk *digital_clk;
	struct clk *hs_clk;

	u32 data_rate;

	unsigned long mode_flags;
	enum mipi_dsi_pixel_format format;
	unsigned int lanes;
	struct videomode vm;
	struct mtk_phy_timing phy_timing;
	int refcount;
	bool enabled;
	u32 irq_data;
	wait_queue_head_t irq_wait_queue;
	const struct mtk_dsi_driver_data *driver_data;
};

static inline struct mtk_dsi *encoder_to_dsi(struct drm_encoder *e)
{
	return container_of(e, struct mtk_dsi, encoder);
}

static inline struct mtk_dsi *connector_to_dsi(struct drm_connector *c)
{
	return container_of(c, struct mtk_dsi, conn);
}

static inline struct mtk_dsi *host_to_dsi(struct mipi_dsi_host *h)
{
	return container_of(h, struct mtk_dsi, host);
}

static void mtk_dsi_mask(struct mtk_dsi *dsi, u32 offset, u32 mask, u32 data)
{
	u32 temp = readl(dsi->regs + offset);

	writel((temp & ~mask) | (data & mask), dsi->regs + offset);
}
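
/*
 * Derive the D-PHY timing parameters from the HS bit rate and program
 * DSI_PHY_TIMECON0..3.  All values are expressed in byte-clock cycles
 * (data rate / 8); data_rate_mhz is rounded up so the computed counts
 * err on the longer, safer side of the D-PHY timing minimums.
 */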
static void mtk_dsi_phy_timconfig(struct mtk_dsi *dsi)
{
	u32 timcon0, timcon1, timcon2, timcon3;
	u32 data_rate_mhz = DIV_ROUND_UP(dsi->data_rate, 1000000);
	struct mtk_phy_timing *timing = &dsi->phy_timing;

	timing->lpx = (60 * data_rate_mhz / (8 * 1000)) + 1;
	timing->da_hs_prepare = (80 * data_rate_mhz + 4 * 1000) / 8000;
	timing->da_hs_zero = (170 * data_rate_mhz + 10 * 1000) / 8000 + 1 -
			     timing->da_hs_prepare;
	timing->da_hs_trail = timing->da_hs_prepare + 1;

	timing->ta_go = 4 * timing->lpx - 2;
	timing->ta_sure = timing->lpx + 2;
	timing->ta_get = 4 * timing->lpx;
	timing->da_hs_exit = 2 * timing->lpx + 1;

	timing->clk_hs_prepare = 70 * data_rate_mhz / (8 * 1000);
	timing->clk_hs_post = timing->clk_hs_prepare + 8;
	timing->clk_hs_trail = timing->clk_hs_prepare;
	timing->clk_hs_zero = timing->clk_hs_trail * 4;
	timing->clk_hs_exit = 2 * timing->clk_hs_trail;

	timcon0 = timing->lpx | timing->da_hs_prepare << 8 |
		  timing->da_hs_zero << 16 | timing->da_hs_trail << 24;
	timcon1 = timing->ta_go | timing->ta_sure << 8 |
		  timing->ta_get << 16 | timing->da_hs_exit << 24;
	timcon2 = 1 << 8 | timing->clk_hs_zero << 16 |
		  timing->clk_hs_trail << 24;
	timcon3 = timing->clk_hs_prepare | timing->clk_hs_post << 8 |
		  timing->clk_hs_exit << 16;

	writel(timcon0, dsi->regs + DSI_PHY_TIMECON0);
	writel(timcon1, dsi->regs + DSI_PHY_TIMECON1);
	writel(timcon2, dsi->regs + DSI_PHY_TIMECON2);
	writel(timcon3, dsi->regs + DSI_PHY_TIMECON3);
}

static void mtk_dsi_enable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, DSI_EN);
}

static void mtk_dsi_disable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, 0);
}

static void mtk_dsi_reset_engine(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, DSI_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, 0);
}

static void mtk_dsi_reset_dphy(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DPHY_RESET, DPHY_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DPHY_RESET, 0);
}

static void mtk_dsi_clk_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
}

static void mtk_dsi_clk_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, LC_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, LD0_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, 0);
}
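
/*
 * The two helpers below query and toggle the LC_HS_TX_EN bit, i.e. whether
 * the clock lane is driven in continuous high-speed mode or left in LP
 * state between transfers.
 */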
static bool mtk_dsi_clk_hs_state(struct mtk_dsi *dsi)
{
	u32 tmp_reg1;

	tmp_reg1 = readl(dsi->regs + DSI_PHY_LCCON);
	return ((tmp_reg1 & LC_HS_TX_EN) == 1) ? true : false;
}

static void mtk_dsi_clk_hs_mode(struct mtk_dsi *dsi, bool enter)
{
	if (enter && !mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, LC_HS_TX_EN);
	else if (!enter && mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
}

static void mtk_dsi_set_mode(struct mtk_dsi *dsi)
{
	u32 vid_mode = CMD_MODE;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
			vid_mode = BURST_MODE;
		else if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
			vid_mode = SYNC_PULSE_MODE;
		else
			vid_mode = SYNC_EVENT_MODE;
	}

	writel(vid_mode, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_vm_cmd(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, VM_CMD_EN, VM_CMD_EN);
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, TS_VFP_EN, TS_VFP_EN);
}

static void mtk_dsi_ps_control_vact(struct mtk_dsi *dsi)
{
	struct videomode *vm = &dsi->vm;
	u32 dsi_buf_bpp, ps_wc;
	u32 ps_bpp_mode;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_buf_bpp = 2;
	else
		dsi_buf_bpp = 3;

	ps_wc = vm->hactive * dsi_buf_bpp;
	ps_bpp_mode = ps_wc;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		ps_bpp_mode |= PACKED_PS_24BIT_RGB888;
		break;
	case MIPI_DSI_FMT_RGB666:
		ps_bpp_mode |= PACKED_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		ps_bpp_mode |= LOOSELY_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB565:
		ps_bpp_mode |= PACKED_PS_16BIT_RGB565;
		break;
	}

	writel(vm->vactive, dsi->regs + DSI_VACT_NL);
	writel(ps_bpp_mode, dsi->regs + DSI_PSCTRL);
	writel(ps_wc, dsi->regs + DSI_HSTX_CKL_WC);
}

static void mtk_dsi_rxtx_control(struct mtk_dsi *dsi)
{
	u32 tmp_reg;

	switch (dsi->lanes) {
	case 1:
		tmp_reg = 1 << 2;
		break;
	case 2:
		tmp_reg = 3 << 2;
		break;
	case 3:
		tmp_reg = 7 << 2;
		break;
	case 4:
		tmp_reg = 0xf << 2;
		break;
	default:
		tmp_reg = 0xf << 2;
		break;
	}

	tmp_reg |= (dsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) << 6;
	tmp_reg |= (dsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET) >> 3;

	writel(tmp_reg, dsi->regs + DSI_TXRX_CTRL);
}

static void mtk_dsi_ps_control(struct mtk_dsi *dsi)
{
	u32 dsi_tmp_buf_bpp;
	u32 tmp_reg;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666:
		tmp_reg = LOOSELY_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		tmp_reg = PACKED_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB565:
		tmp_reg = PACKED_PS_16BIT_RGB565;
		dsi_tmp_buf_bpp = 2;
		break;
	default:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	}

	tmp_reg += dsi->vm.hactive * dsi_tmp_buf_bpp & DSI_PS_WC;
	writel(tmp_reg, dsi->regs + DSI_PSCTRL);
}
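
/*
 * Program the video-mode timing registers.  Vertical values are written in
 * lines; HSA/HBP/HFP are written as byte counts on the DSI link, so the
 * pixel durations are scaled by bytes-per-pixel and reduced by the packet
 * overhead and by the D-PHY LP<->HS transition cycles that eat into the
 * front/back porch.
 */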
static void mtk_dsi_config_vdo_timing(struct mtk_dsi *dsi)
{
	u32 horizontal_sync_active_byte;
	u32 horizontal_backporch_byte;
	u32 horizontal_frontporch_byte;
	u32 dsi_tmp_buf_bpp, data_phy_cycles;
	struct mtk_phy_timing *timing = &dsi->phy_timing;

	struct videomode *vm = &dsi->vm;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_tmp_buf_bpp = 2;
	else
		dsi_tmp_buf_bpp = 3;

	writel(vm->vsync_len, dsi->regs + DSI_VSA_NL);
	writel(vm->vback_porch, dsi->regs + DSI_VBP_NL);
	writel(vm->vfront_porch, dsi->regs + DSI_VFP_NL);
	writel(vm->vactive, dsi->regs + DSI_VACT_NL);

	if (dsi->driver_data->has_size_ctl)
		writel(vm->vactive << 16 | vm->hactive,
		       dsi->regs + DSI_SIZE_CON);

	horizontal_sync_active_byte = (vm->hsync_len * dsi_tmp_buf_bpp - 10);

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		horizontal_backporch_byte =
			(vm->hback_porch * dsi_tmp_buf_bpp - 10);
	else
		horizontal_backporch_byte = ((vm->hback_porch + vm->hsync_len) *
			dsi_tmp_buf_bpp - 10);

	data_phy_cycles = timing->lpx + timing->da_hs_prepare +
			  timing->da_hs_zero + timing->da_hs_exit + 3;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST) {
		if ((vm->hfront_porch + vm->hback_porch) * dsi_tmp_buf_bpp >
		    data_phy_cycles * dsi->lanes + 18) {
			horizontal_frontporch_byte =
				vm->hfront_porch * dsi_tmp_buf_bpp -
				(data_phy_cycles * dsi->lanes + 18) *
				vm->hfront_porch /
				(vm->hfront_porch + vm->hback_porch);

			horizontal_backporch_byte =
				horizontal_backporch_byte -
				(data_phy_cycles * dsi->lanes + 18) *
				vm->hback_porch /
				(vm->hfront_porch + vm->hback_porch);
		} else {
			DRM_WARN("HFP less than d-phy, FPS will be under 60Hz\n");
			horizontal_frontporch_byte = vm->hfront_porch *
						     dsi_tmp_buf_bpp;
		}
	} else {
		if ((vm->hfront_porch + vm->hback_porch) * dsi_tmp_buf_bpp >
		    data_phy_cycles * dsi->lanes + 12) {
			horizontal_frontporch_byte =
				vm->hfront_porch * dsi_tmp_buf_bpp -
				(data_phy_cycles * dsi->lanes + 12) *
				vm->hfront_porch /
				(vm->hfront_porch + vm->hback_porch);
			horizontal_backporch_byte = horizontal_backporch_byte -
				(data_phy_cycles * dsi->lanes + 12) *
				vm->hback_porch /
				(vm->hfront_porch + vm->hback_porch);
		} else {
			DRM_WARN("HFP less than d-phy, FPS will be under 60Hz\n");
			horizontal_frontporch_byte = vm->hfront_porch *
						     dsi_tmp_buf_bpp;
		}
	}

	writel(horizontal_sync_active_byte, dsi->regs + DSI_HSA_WC);
	writel(horizontal_backporch_byte, dsi->regs + DSI_HBP_WC);
	writel(horizontal_frontporch_byte, dsi->regs + DSI_HFP_WC);

	mtk_dsi_ps_control(dsi);
}

static void mtk_dsi_start(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
	writel(1, dsi->regs + DSI_START);
}

static void mtk_dsi_stop(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
}

static void mtk_dsi_set_cmd_mode(struct mtk_dsi *dsi)
{
	writel(CMD_MODE, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_interrupt_enable(struct mtk_dsi *dsi)
{
	u32 inten = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	writel(inten, dsi->regs + DSI_INTEN);
}

static void mtk_dsi_irq_data_set(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data |= irq_bit;
}

static void mtk_dsi_irq_data_clear(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data &= ~irq_bit;
}
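
/*
 * Sleep until the interrupt handler posts one of the requested flags in
 * dsi->irq_data, or until the timeout (in ms) expires.  On timeout the
 * engine is re-enabled and reset so a stuck transfer does not wedge
 * subsequent commands.
 */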
static s32 mtk_dsi_wait_for_irq_done(struct mtk_dsi *dsi, u32 irq_flag,
				     unsigned int timeout)
{
	s32 ret = 0;
	unsigned long jiffies = msecs_to_jiffies(timeout);

	ret = wait_event_interruptible_timeout(dsi->irq_wait_queue,
					       dsi->irq_data & irq_flag,
					       jiffies);
	if (ret == 0) {
		DRM_WARN("Wait DSI IRQ(0x%08x) Timeout\n", irq_flag);

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}

	return ret;
}

static irqreturn_t mtk_dsi_irq(int irq, void *dev_id)
{
	struct mtk_dsi *dsi = dev_id;
	u32 status, tmp;
	u32 flag = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	status = readl(dsi->regs + DSI_INTSTA) & flag;

	if (status) {
		do {
			mtk_dsi_mask(dsi, DSI_RACK, RACK, RACK);
			tmp = readl(dsi->regs + DSI_INTSTA);
		} while (tmp & DSI_BUSY);

		mtk_dsi_mask(dsi, DSI_INTSTA, status, 0);
		mtk_dsi_irq_data_set(dsi, status);
		wake_up_interruptible(&dsi->irq_wait_queue);
	}

	return IRQ_HANDLED;
}

static s32 mtk_dsi_switch_to_cmd_mode(struct mtk_dsi *dsi, u8 irq_flag, u32 t)
{
	mtk_dsi_irq_data_clear(dsi, irq_flag);
	mtk_dsi_set_cmd_mode(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, irq_flag, t)) {
		DRM_ERROR("failed to switch to cmd mode\n");
		return -ETIME;
	} else {
		return 0;
	}
}

static int mtk_dsi_poweron(struct mtk_dsi *dsi)
{
	struct device *dev = dsi->host.dev;
	int ret;
	u32 bit_per_pixel;

	if (++dsi->refcount != 1)
		return 0;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB565:
		bit_per_pixel = 16;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		bit_per_pixel = 18;
		break;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB888:
	default:
		bit_per_pixel = 24;
		break;
	}

	dsi->data_rate = DIV_ROUND_UP_ULL(dsi->vm.pixelclock * bit_per_pixel,
					  dsi->lanes);

	ret = clk_set_rate(dsi->hs_clk, dsi->data_rate);
	if (ret < 0) {
		dev_err(dev, "Failed to set data rate: %d\n", ret);
		goto err_refcount;
	}

	phy_power_on(dsi->phy);

	ret = clk_prepare_enable(dsi->engine_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable engine clock: %d\n", ret);
		goto err_phy_power_off;
	}

	ret = clk_prepare_enable(dsi->digital_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable digital clock: %d\n", ret);
		goto err_disable_engine_clk;
	}

	mtk_dsi_enable(dsi);

	if (dsi->driver_data->has_shadow_ctl)
		writel(FORCE_COMMIT | BYPASS_SHADOW,
		       dsi->regs + DSI_SHADOW_DEBUG);

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_phy_timconfig(dsi);

	mtk_dsi_rxtx_control(dsi);
	usleep_range(30, 100);
	mtk_dsi_reset_dphy(dsi);
	mtk_dsi_ps_control_vact(dsi);
	mtk_dsi_set_vm_cmd(dsi);
	mtk_dsi_config_vdo_timing(dsi);
	mtk_dsi_set_interrupt_enable(dsi);

	mtk_dsi_clk_ulp_mode_leave(dsi);
	mtk_dsi_lane0_ulp_mode_leave(dsi);
	mtk_dsi_clk_hs_mode(dsi, 0);

	if (dsi->panel) {
		if (drm_panel_prepare(dsi->panel)) {
			DRM_ERROR("failed to prepare the panel\n");
			goto err_disable_digital_clk;
		}
	}

	return 0;
err_disable_digital_clk:
	clk_disable_unprepare(dsi->digital_clk);
err_disable_engine_clk:
	clk_disable_unprepare(dsi->engine_clk);
err_phy_power_off:
	phy_power_off(dsi->phy);
err_refcount:
	dsi->refcount--;
	return ret;
}

static void mtk_dsi_poweroff(struct mtk_dsi *dsi)
{
	if (WARN_ON(dsi->refcount == 0))
		return;

	if (--dsi->refcount != 0)
		return;

	/*
	 * mtk_dsi_stop() and mtk_dsi_start() are asymmetric, since
	 * mtk_dsi_stop() should be called after mtk_drm_crtc_atomic_disable(),
	 * which needs irq for vblank, and mtk_dsi_stop() will disable irq.
	 * mtk_dsi_start() needs to be called in mtk_output_dsi_enable(),
	 * after dsi is fully set up.
	 */
	mtk_dsi_stop(dsi);

	if (!mtk_dsi_switch_to_cmd_mode(dsi, VM_DONE_INT_FLAG, 500)) {
		if (dsi->panel) {
			if (drm_panel_unprepare(dsi->panel)) {
				DRM_ERROR("failed to unprepare the panel\n");
				return;
			}
		}
	}

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_lane0_ulp_mode_enter(dsi);
	mtk_dsi_clk_ulp_mode_enter(dsi);

	mtk_dsi_disable(dsi);

	clk_disable_unprepare(dsi->engine_clk);
	clk_disable_unprepare(dsi->digital_clk);

	phy_power_off(dsi->phy);
}

static void mtk_output_dsi_enable(struct mtk_dsi *dsi)
{
	int ret;

	if (dsi->enabled)
		return;

	ret = mtk_dsi_poweron(dsi);
	if (ret < 0) {
		DRM_ERROR("failed to power on dsi\n");
		return;
	}

	mtk_dsi_set_mode(dsi);
	mtk_dsi_clk_hs_mode(dsi, 1);

	mtk_dsi_start(dsi);

	if (dsi->panel) {
		if (drm_panel_enable(dsi->panel)) {
			DRM_ERROR("failed to enable the panel\n");
			goto err_dsi_power_off;
		}
	}

	dsi->enabled = true;

	return;
err_dsi_power_off:
	mtk_dsi_stop(dsi);
	mtk_dsi_poweroff(dsi);
}

static void mtk_output_dsi_disable(struct mtk_dsi *dsi)
{
	if (!dsi->enabled)
		return;

	if (dsi->panel) {
		if (drm_panel_disable(dsi->panel)) {
			DRM_ERROR("failed to disable the panel\n");
			return;
		}
	}

	mtk_dsi_poweroff(dsi);

	dsi->enabled = false;
}

static void mtk_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
}

static const struct drm_encoder_funcs mtk_dsi_encoder_funcs = {
	.destroy = mtk_dsi_encoder_destroy,
};

static bool mtk_dsi_encoder_mode_fixup(struct drm_encoder *encoder,
				       const struct drm_display_mode *mode,
				       struct drm_display_mode *adjusted_mode)
{
	return true;
}

static void mtk_dsi_encoder_mode_set(struct drm_encoder *encoder,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adjusted, &dsi->vm);
}

static void mtk_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_disable(dsi);
}

static void mtk_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_enable(dsi);
}

static int mtk_dsi_connector_get_modes(struct drm_connector *connector)
{
	struct mtk_dsi *dsi = connector_to_dsi(connector);

	return drm_panel_get_modes(dsi->panel, connector);
}

static const struct drm_encoder_helper_funcs mtk_dsi_encoder_helper_funcs = {
	.mode_fixup = mtk_dsi_encoder_mode_fixup,
	.mode_set = mtk_dsi_encoder_mode_set,
	.disable = mtk_dsi_encoder_disable,
	.enable = mtk_dsi_encoder_enable,
};

static const struct drm_connector_funcs mtk_dsi_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_connector_helper_funcs
	mtk_dsi_connector_helper_funcs = {
	.get_modes = mtk_dsi_connector_get_modes,
};
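
/*
 * Fallback connector path: when no bridge is supplied in the device tree,
 * register a DSI connector of our own and back it with the attached panel.
 */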
static int mtk_dsi_create_connector(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_connector_init(drm, &dsi->conn, &mtk_dsi_connector_funcs,
				 DRM_MODE_CONNECTOR_DSI);
	if (ret) {
		DRM_ERROR("Failed to init connector to drm\n");
		return ret;
	}

	drm_connector_helper_add(&dsi->conn, &mtk_dsi_connector_helper_funcs);

	dsi->conn.dpms = DRM_MODE_DPMS_OFF;
	drm_connector_attach_encoder(&dsi->conn, &dsi->encoder);

	if (dsi->panel) {
		ret = drm_panel_attach(dsi->panel, &dsi->conn);
		if (ret) {
			DRM_ERROR("Failed to attach panel to drm\n");
			goto err_connector_cleanup;
		}
	}

	return 0;

err_connector_cleanup:
	drm_connector_cleanup(&dsi->conn);
	return ret;
}

static int mtk_dsi_create_conn_enc(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_encoder_init(drm, &dsi->encoder, &mtk_dsi_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		DRM_ERROR("Failed to init encoder to drm\n");
		return ret;
	}
	drm_encoder_helper_add(&dsi->encoder, &mtk_dsi_encoder_helper_funcs);

	/*
	 * Currently display data paths are statically assigned to a crtc each.
	 * crtc 0 is OVL0 -> COLOR0 -> AAL -> OD -> RDMA0 -> UFOE -> DSI0
	 */
	dsi->encoder.possible_crtcs = 1;

	/* If there's a bridge, attach to it and let it create the connector */
	if (dsi->bridge) {
		ret = drm_bridge_attach(&dsi->encoder, dsi->bridge, NULL, 0);
		if (ret) {
			DRM_ERROR("Failed to attach bridge to drm\n");
			goto err_encoder_cleanup;
		}
	} else {
		/* Otherwise create our own connector and attach to a panel */
		ret = mtk_dsi_create_connector(drm, dsi);
		if (ret)
			goto err_encoder_cleanup;
	}

	return 0;

err_encoder_cleanup:
	drm_encoder_cleanup(&dsi->encoder);
	return ret;
}

static void mtk_dsi_destroy_conn_enc(struct mtk_dsi *dsi)
{
	drm_encoder_cleanup(&dsi->encoder);
	/* Skip connector cleanup if creation was delegated to the bridge */
	if (dsi->conn.dev)
		drm_connector_cleanup(&dsi->conn);
	if (dsi->panel)
		drm_panel_detach(dsi->panel);
}

static void mtk_dsi_ddp_start(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweron(dsi);
}

static void mtk_dsi_ddp_stop(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweroff(dsi);
}

static const struct mtk_ddp_comp_funcs mtk_dsi_funcs = {
	.start = mtk_dsi_ddp_start,
	.stop = mtk_dsi_ddp_stop,
};

static int mtk_dsi_host_attach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	dsi->lanes = device->lanes;
	dsi->format = device->format;
	dsi->mode_flags = device->mode_flags;

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}

static int mtk_dsi_host_detach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}
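
/*
 * Command transfer path used by mtk_dsi_host_transfer(): wait for the
 * engine to go idle, pack the message into the command queue, kick
 * DSI_START, then (for reads) pull the response out of DSI_RX_DATA0..3.
 */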
static void mtk_dsi_wait_for_idle(struct mtk_dsi *dsi)
{
	int ret;
	u32 val;

	ret = readl_poll_timeout(dsi->regs + DSI_INTSTA, val, !(val & DSI_BUSY),
				 4, 2000000);
	if (ret) {
		DRM_WARN("polling dsi wait not busy timed out!\n");

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}
}

static u32 mtk_dsi_recv_cnt(u8 type, u8 *read_data)
{
	switch (type) {
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_1BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_1BYTE:
		return 1;
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_2BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_2BYTE:
		return 2;
	case MIPI_DSI_RX_GENERIC_LONG_READ_RESPONSE:
	case MIPI_DSI_RX_DCS_LONG_READ_RESPONSE:
		return read_data[1] + read_data[2] * 16;
	case MIPI_DSI_RX_ACKNOWLEDGE_AND_ERROR_REPORT:
		DRM_INFO("type is 0x02, try again\n");
		break;
	default:
		DRM_INFO("type(0x%x) not recognized\n", type);
		break;
	}

	return 0;
}

static void mtk_dsi_cmdq(struct mtk_dsi *dsi, const struct mipi_dsi_msg *msg)
{
	const char *tx_buf = msg->tx_buf;
	u8 config, cmdq_size, cmdq_off, type = msg->type;
	u32 reg_val, cmdq_mask, i;
	u32 reg_cmdq_off = dsi->driver_data->reg_cmdq_off;

	if (MTK_DSI_HOST_IS_READ(type))
		config = BTA;
	else
		config = (msg->tx_len > 2) ? LONG_PACKET : SHORT_PACKET;

	if (msg->tx_len > 2) {
		cmdq_size = 1 + (msg->tx_len + 3) / 4;
		cmdq_off = 4;
		cmdq_mask = CONFIG | DATA_ID | DATA_0 | DATA_1;
		reg_val = (msg->tx_len << 16) | (type << 8) | config;
	} else {
		cmdq_size = 1;
		cmdq_off = 2;
		cmdq_mask = CONFIG | DATA_ID;
		reg_val = (type << 8) | config;
	}

	for (i = 0; i < msg->tx_len; i++)
		mtk_dsi_mask(dsi, (reg_cmdq_off + cmdq_off + i) & (~0x3U),
			     (0xffUL << (((i + cmdq_off) & 3U) * 8U)),
			     tx_buf[i] << (((i + cmdq_off) & 3U) * 8U));

	mtk_dsi_mask(dsi, reg_cmdq_off, cmdq_mask, reg_val);
	mtk_dsi_mask(dsi, DSI_CMDQ_SIZE, CMDQ_SIZE, cmdq_size);
}

static ssize_t mtk_dsi_host_send_cmd(struct mtk_dsi *dsi,
				     const struct mipi_dsi_msg *msg, u8 flag)
{
	mtk_dsi_wait_for_idle(dsi);
	mtk_dsi_irq_data_clear(dsi, flag);
	mtk_dsi_cmdq(dsi, msg);
	mtk_dsi_start(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, flag, 2000))
		return -ETIME;
	else
		return 0;
}

static ssize_t mtk_dsi_host_transfer(struct mipi_dsi_host *host,
				     const struct mipi_dsi_msg *msg)
{
	struct mtk_dsi *dsi = host_to_dsi(host);
	u32 recv_cnt, i;
	u8 read_data[16];
	void *src_addr;
	u8 irq_flag = CMD_DONE_INT_FLAG;

	if (readl(dsi->regs + DSI_MODE_CTRL) & MODE) {
		DRM_ERROR("dsi engine is not in command mode\n");
		return -EINVAL;
	}

	if (MTK_DSI_HOST_IS_READ(msg->type))
		irq_flag |= LPRX_RD_RDY_INT_FLAG;

	if (mtk_dsi_host_send_cmd(dsi, msg, irq_flag) < 0)
		return -ETIME;

	if (!MTK_DSI_HOST_IS_READ(msg->type))
		return 0;

	if (!msg->rx_buf) {
		DRM_ERROR("dsi receive buffer may be NULL\n");
		return -EINVAL;
	}

	for (i = 0; i < 16; i++)
		*(read_data + i) = readb(dsi->regs + DSI_RX_DATA0 + i);

	recv_cnt = mtk_dsi_recv_cnt(read_data[0], read_data);

	if (recv_cnt > 2)
		src_addr = &read_data[4];
	else
		src_addr = &read_data[1];

	if (recv_cnt > 10)
		recv_cnt = 10;

	if (recv_cnt > msg->rx_len)
		recv_cnt = msg->rx_len;

	if (recv_cnt)
		memcpy(msg->rx_buf, src_addr, recv_cnt);

	DRM_INFO("dsi got %d bytes of data from the panel, address(0x%x)\n",
		 recv_cnt, *((u8 *)(msg->tx_buf)));

	return recv_cnt;
}
static const struct mipi_dsi_host_ops mtk_dsi_ops = {
	.attach = mtk_dsi_host_attach,
	.detach = mtk_dsi_host_detach,
	.transfer = mtk_dsi_host_transfer,
};

static int mtk_dsi_bind(struct device *dev, struct device *master, void *data)
{
	int ret;
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	ret = mtk_ddp_comp_register(drm, &dsi->ddp_comp);
	if (ret < 0) {
		dev_err(dev, "Failed to register component %pOF: %d\n",
			dev->of_node, ret);
		return ret;
	}

	ret = mtk_dsi_create_conn_enc(drm, dsi);
	if (ret) {
		DRM_ERROR("Encoder create failed with %d\n", ret);
		goto err_unregister;
	}

	return 0;

err_unregister:
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
	return ret;
}

static void mtk_dsi_unbind(struct device *dev, struct device *master,
			   void *data)
{
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	mtk_dsi_destroy_conn_enc(dsi);
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
}

static const struct component_ops mtk_dsi_component_ops = {
	.bind = mtk_dsi_bind,
	.unbind = mtk_dsi_unbind,
};

static int mtk_dsi_probe(struct platform_device *pdev)
{
	struct mtk_dsi *dsi;
	struct device *dev = &pdev->dev;
	struct resource *regs;
	int irq_num;
	int comp_id;
	int ret;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dsi->host.ops = &mtk_dsi_ops;
	dsi->host.dev = dev;
	ret = mipi_dsi_host_register(&dsi->host);
	if (ret < 0) {
		dev_err(dev, "failed to register DSI host: %d\n", ret);
		return ret;
	}

	ret = drm_of_find_panel_or_bridge(dev->of_node, 0, 0,
					  &dsi->panel, &dsi->bridge);
	if (ret)
		goto err_unregister_host;

	dsi->driver_data = of_device_get_match_data(dev);

	dsi->engine_clk = devm_clk_get(dev, "engine");
	if (IS_ERR(dsi->engine_clk)) {
		ret = PTR_ERR(dsi->engine_clk);
		dev_err(dev, "Failed to get engine clock: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->digital_clk = devm_clk_get(dev, "digital");
	if (IS_ERR(dsi->digital_clk)) {
		ret = PTR_ERR(dsi->digital_clk);
		dev_err(dev, "Failed to get digital clock: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->hs_clk = devm_clk_get(dev, "hs");
	if (IS_ERR(dsi->hs_clk)) {
		ret = PTR_ERR(dsi->hs_clk);
		dev_err(dev, "Failed to get hs clock: %d\n", ret);
		goto err_unregister_host;
	}

	regs = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dsi->regs = devm_ioremap_resource(dev, regs);
	if (IS_ERR(dsi->regs)) {
		ret = PTR_ERR(dsi->regs);
		dev_err(dev, "Failed to ioremap memory: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->phy = devm_phy_get(dev, "dphy");
	if (IS_ERR(dsi->phy)) {
		ret = PTR_ERR(dsi->phy);
		dev_err(dev, "Failed to get MIPI-DPHY: %d\n", ret);
		goto err_unregister_host;
	}

	comp_id = mtk_ddp_comp_get_id(dev->of_node, MTK_DSI);
	if (comp_id < 0) {
		dev_err(dev, "Failed to identify by alias: %d\n", comp_id);
		ret = comp_id;
		goto err_unregister_host;
	}

	ret = mtk_ddp_comp_init(dev, dev->of_node, &dsi->ddp_comp, comp_id,
				&mtk_dsi_funcs);
	if (ret) {
		dev_err(dev, "Failed to initialize component: %d\n", ret);
		goto err_unregister_host;
	}
	irq_num = platform_get_irq(pdev, 0);
	if (irq_num < 0) {
		dev_err(&pdev->dev, "failed to get dsi irq_num: %d\n", irq_num);
		ret = irq_num;
		goto err_unregister_host;
	}

	irq_set_status_flags(irq_num, IRQ_TYPE_LEVEL_LOW);
	ret = devm_request_irq(&pdev->dev, irq_num, mtk_dsi_irq,
			       IRQF_TRIGGER_LOW, dev_name(&pdev->dev), dsi);
	if (ret) {
		dev_err(&pdev->dev, "failed to request mediatek dsi irq\n");
		goto err_unregister_host;
	}

	init_waitqueue_head(&dsi->irq_wait_queue);

	platform_set_drvdata(pdev, dsi);

	ret = component_add(&pdev->dev, &mtk_dsi_component_ops);
	if (ret) {
		dev_err(&pdev->dev, "failed to add component: %d\n", ret);
		goto err_unregister_host;
	}

	return 0;

err_unregister_host:
	mipi_dsi_host_unregister(&dsi->host);
	return ret;
}

static int mtk_dsi_remove(struct platform_device *pdev)
{
	struct mtk_dsi *dsi = platform_get_drvdata(pdev);

	mtk_output_dsi_disable(dsi);
	component_del(&pdev->dev, &mtk_dsi_component_ops);
	mipi_dsi_host_unregister(&dsi->host);

	return 0;
}

static const struct mtk_dsi_driver_data mt8173_dsi_driver_data = {
	.reg_cmdq_off = 0x200,
};

static const struct mtk_dsi_driver_data mt2701_dsi_driver_data = {
	.reg_cmdq_off = 0x180,
};

static const struct mtk_dsi_driver_data mt8183_dsi_driver_data = {
	.reg_cmdq_off = 0x200,
	.has_shadow_ctl = true,
	.has_size_ctl = true,
};

static const struct of_device_id mtk_dsi_of_match[] = {
	{ .compatible = "mediatek,mt2701-dsi",
	  .data = &mt2701_dsi_driver_data },
	{ .compatible = "mediatek,mt8173-dsi",
	  .data = &mt8173_dsi_driver_data },
	{ .compatible = "mediatek,mt8183-dsi",
	  .data = &mt8183_dsi_driver_data },
	{ },
};

struct platform_driver mtk_dsi_driver = {
	.probe = mtk_dsi_probe,
	.remove = mtk_dsi_remove,
	.driver = {
		.name = "mtk-dsi",
		.of_match_table = mtk_dsi_of_match,
	},
};