// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015 MediaTek Inc.
 */

#include <linux/clk.h>
#include <linux/component.h>
#include <linux/iopoll.h>
#include <linux/irq.h>
#include <linux/of.h>
#include <linux/of_platform.h>
#include <linux/phy/phy.h>
#include <linux/platform_device.h>

#include <video/mipi_display.h>
#include <video/videomode.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_of.h>
#include <drm/drm_panel.h>
#include <drm/drm_print.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_simple_kms_helper.h>

#include "mtk_drm_ddp_comp.h"

#define DSI_START		0x00

#define DSI_INTEN		0x08

#define DSI_INTSTA		0x0c
#define LPRX_RD_RDY_INT_FLAG		BIT(0)
#define CMD_DONE_INT_FLAG		BIT(1)
#define TE_RDY_INT_FLAG			BIT(2)
#define VM_DONE_INT_FLAG		BIT(3)
#define EXT_TE_RDY_INT_FLAG		BIT(4)
#define DSI_BUSY			BIT(31)

#define DSI_CON_CTRL		0x10
#define DSI_RESET			BIT(0)
#define DSI_EN				BIT(1)
#define DPHY_RESET			BIT(2)

#define DSI_MODE_CTRL		0x14
#define MODE				(3)
#define CMD_MODE			0
#define SYNC_PULSE_MODE			1
#define SYNC_EVENT_MODE			2
#define BURST_MODE			3
#define FRM_MODE			BIT(16)
#define MIX_MODE			BIT(17)

#define DSI_TXRX_CTRL		0x18
#define VC_NUM				BIT(1)
#define LANE_NUM			(0xf << 2)
#define DIS_EOT				BIT(6)
#define NULL_EN				BIT(7)
#define TE_FREERUN			BIT(8)
#define EXT_TE_EN			BIT(9)
#define EXT_TE_EDGE			BIT(10)
#define MAX_RTN_SIZE			(0xf << 12)
#define HSTX_CKLP_EN			BIT(16)

#define DSI_PSCTRL		0x1c
#define DSI_PS_WC			0x3fff
#define DSI_PS_SEL			(3 << 16)
#define PACKED_PS_16BIT_RGB565		(0 << 16)
#define LOOSELY_PS_18BIT_RGB666		(1 << 16)
#define PACKED_PS_18BIT_RGB666		(2 << 16)
#define PACKED_PS_24BIT_RGB888		(3 << 16)

#define DSI_VSA_NL		0x20
#define DSI_VBP_NL		0x24
#define DSI_VFP_NL		0x28
#define DSI_VACT_NL		0x2C
#define DSI_SIZE_CON		0x38
#define DSI_HSA_WC		0x50
#define DSI_HBP_WC		0x54
#define DSI_HFP_WC		0x58

#define DSI_CMDQ_SIZE		0x60
#define CMDQ_SIZE			0x3f

#define DSI_HSTX_CKL_WC		0x64

#define DSI_RX_DATA0		0x74
#define DSI_RX_DATA1		0x78
#define DSI_RX_DATA2		0x7c
#define DSI_RX_DATA3		0x80

#define DSI_RACK		0x84
#define RACK				BIT(0)

#define DSI_PHY_LCCON		0x104
#define LC_HS_TX_EN			BIT(0)
#define LC_ULPM_EN			BIT(1)
#define LC_WAKEUP_EN			BIT(2)

#define DSI_PHY_LD0CON		0x108
#define LD0_HS_TX_EN			BIT(0)
#define LD0_ULPM_EN			BIT(1)
#define LD0_WAKEUP_EN			BIT(2)

#define DSI_PHY_TIMECON0	0x110
#define LPX				(0xff << 0)
#define HS_PREP				(0xff << 8)
#define HS_ZERO				(0xff << 16)
#define HS_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON1	0x114
#define TA_GO				(0xff << 0)
#define TA_SURE				(0xff << 8)
#define TA_GET				(0xff << 16)
#define DA_HS_EXIT			(0xff << 24)

#define DSI_PHY_TIMECON2	0x118
#define CONT_DET			(0xff << 0)
#define CLK_ZERO			(0xff << 16)
#define CLK_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON3	0x11c
#define CLK_HS_PREP			(0xff << 0)
#define CLK_HS_POST			(0xff << 8)
#define CLK_HS_EXIT			(0xff << 16)

#define DSI_VM_CMD_CON		0x130
#define VM_CMD_EN			BIT(0)
#define TS_VFP_EN			BIT(5)

#define DSI_SHADOW_DEBUG	0x190U
#define FORCE_COMMIT			BIT(0)
#define BYPASS_SHADOW			BIT(1)

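/*
 * Layout of the first command queue word, as assembled by mtk_dsi_cmdq():
 * byte 0 carries the CONFIG flags, byte 1 the DSI data type, and bytes
 * 2-3 hold either the first two parameters (short packets) or the 16-bit
 * payload word count (long packets).
 */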
#define CONFIG				(0xff << 0)
#define SHORT_PACKET			0
#define LONG_PACKET			2
#define BTA				BIT(2)
#define DATA_ID				(0xff << 8)
#define DATA_0				(0xff << 16)
#define DATA_1				(0xff << 24)

#define NS_TO_CYCLE(n, c)	((n) / (c) + (((n) % (c)) ? 1 : 0))

#define MTK_DSI_HOST_IS_READ(type) \
	((type == MIPI_DSI_GENERIC_READ_REQUEST_0_PARAM) || \
	 (type == MIPI_DSI_GENERIC_READ_REQUEST_1_PARAM) || \
	 (type == MIPI_DSI_GENERIC_READ_REQUEST_2_PARAM) || \
	 (type == MIPI_DSI_DCS_READ))

struct mtk_phy_timing {
	u32 lpx;
	u32 da_hs_prepare;
	u32 da_hs_zero;
	u32 da_hs_trail;

	u32 ta_go;
	u32 ta_sure;
	u32 ta_get;
	u32 da_hs_exit;

	u32 clk_hs_zero;
	u32 clk_hs_trail;

	u32 clk_hs_prepare;
	u32 clk_hs_post;
	u32 clk_hs_exit;
};

struct phy;

struct mtk_dsi_driver_data {
	const u32 reg_cmdq_off;
	bool has_shadow_ctl;
	bool has_size_ctl;
};

struct mtk_dsi {
	struct mtk_ddp_comp ddp_comp;
	struct device *dev;
	struct mipi_dsi_host host;
	struct drm_encoder encoder;
	struct drm_connector conn;
	struct drm_panel *panel;
	struct drm_bridge *bridge;
	struct phy *phy;

	void __iomem *regs;

	struct clk *engine_clk;
	struct clk *digital_clk;
	struct clk *hs_clk;

	u32 data_rate;

	unsigned long mode_flags;
	enum mipi_dsi_pixel_format format;
	unsigned int lanes;
	struct videomode vm;
	struct mtk_phy_timing phy_timing;
	int refcount;
	bool enabled;
	u32 irq_data;
	wait_queue_head_t irq_wait_queue;
	const struct mtk_dsi_driver_data *driver_data;
};

static inline struct mtk_dsi *encoder_to_dsi(struct drm_encoder *e)
{
	return container_of(e, struct mtk_dsi, encoder);
}

static inline struct mtk_dsi *connector_to_dsi(struct drm_connector *c)
{
	return container_of(c, struct mtk_dsi, conn);
}

static inline struct mtk_dsi *host_to_dsi(struct mipi_dsi_host *h)
{
	return container_of(h, struct mtk_dsi, host);
}

static void mtk_dsi_mask(struct mtk_dsi *dsi, u32 offset, u32 mask, u32 data)
{
	u32 temp = readl(dsi->regs + offset);

	writel((temp & ~mask) | (data & mask), dsi->regs + offset);
}

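/*
 * Derive the D-PHY timing parameters, in byte-clock cycles, from the
 * per-lane data rate. The constants below are presumably chosen to meet
 * the MIPI D-PHY timing limits for LPX, HS-PREPARE, HS-ZERO, HS-TRAIL,
 * the turnaround times and the clock lane timings.
 */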
static void mtk_dsi_phy_timconfig(struct mtk_dsi *dsi)
{
	u32 timcon0, timcon1, timcon2, timcon3;
	u32 data_rate_mhz = DIV_ROUND_UP(dsi->data_rate, 1000000);
	struct mtk_phy_timing *timing = &dsi->phy_timing;

	timing->lpx = (60 * data_rate_mhz / (8 * 1000)) + 1;
	timing->da_hs_prepare = (80 * data_rate_mhz + 4 * 1000) / 8000;
	timing->da_hs_zero = (170 * data_rate_mhz + 10 * 1000) / 8000 + 1 -
			     timing->da_hs_prepare;
	timing->da_hs_trail = timing->da_hs_prepare + 1;

	timing->ta_go = 4 * timing->lpx - 2;
	timing->ta_sure = timing->lpx + 2;
	timing->ta_get = 4 * timing->lpx;
	timing->da_hs_exit = 2 * timing->lpx + 1;

	timing->clk_hs_prepare = 70 * data_rate_mhz / (8 * 1000);
	timing->clk_hs_post = timing->clk_hs_prepare + 8;
	timing->clk_hs_trail = timing->clk_hs_prepare;
	timing->clk_hs_zero = timing->clk_hs_trail * 4;
	timing->clk_hs_exit = 2 * timing->clk_hs_trail;

	timcon0 = timing->lpx | timing->da_hs_prepare << 8 |
		  timing->da_hs_zero << 16 | timing->da_hs_trail << 24;
	timcon1 = timing->ta_go | timing->ta_sure << 8 |
		  timing->ta_get << 16 | timing->da_hs_exit << 24;
	timcon2 = 1 << 8 | timing->clk_hs_zero << 16 |
		  timing->clk_hs_trail << 24;
	timcon3 = timing->clk_hs_prepare | timing->clk_hs_post << 8 |
		  timing->clk_hs_exit << 16;

	writel(timcon0, dsi->regs + DSI_PHY_TIMECON0);
	writel(timcon1, dsi->regs + DSI_PHY_TIMECON1);
	writel(timcon2, dsi->regs + DSI_PHY_TIMECON2);
	writel(timcon3, dsi->regs + DSI_PHY_TIMECON3);
}

static void mtk_dsi_enable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, DSI_EN);
}

static void mtk_dsi_disable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, 0);
}

static void mtk_dsi_reset_engine(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, DSI_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, 0);
}

static void mtk_dsi_reset_dphy(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DPHY_RESET, DPHY_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DPHY_RESET, 0);
}

static void mtk_dsi_clk_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
}

static void mtk_dsi_clk_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, LC_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, LD0_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, 0);
}

static bool mtk_dsi_clk_hs_state(struct mtk_dsi *dsi)
{
	return readl(dsi->regs + DSI_PHY_LCCON) & LC_HS_TX_EN;
}

static void mtk_dsi_clk_hs_mode(struct mtk_dsi *dsi, bool enter)
{
	if (enter && !mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, LC_HS_TX_EN);
	else if (!enter && mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
}

static void mtk_dsi_set_mode(struct mtk_dsi *dsi)
{
	u32 vid_mode = CMD_MODE;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
			vid_mode = BURST_MODE;
		else if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
			vid_mode = SYNC_PULSE_MODE;
		else
			vid_mode = SYNC_EVENT_MODE;
	}

	writel(vid_mode, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_vm_cmd(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, VM_CMD_EN, VM_CMD_EN);
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, TS_VFP_EN, TS_VFP_EN);
}

static void mtk_dsi_ps_control_vact(struct mtk_dsi *dsi)
{
	struct videomode *vm = &dsi->vm;
	u32 dsi_buf_bpp, ps_wc;
	u32 ps_bpp_mode;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_buf_bpp = 2;
	else
		dsi_buf_bpp = 3;

	ps_wc = vm->hactive * dsi_buf_bpp;
	ps_bpp_mode = ps_wc;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		ps_bpp_mode |= PACKED_PS_24BIT_RGB888;
		break;
	case MIPI_DSI_FMT_RGB666:
		ps_bpp_mode |= PACKED_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		ps_bpp_mode |= LOOSELY_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB565:
		ps_bpp_mode |= PACKED_PS_16BIT_RGB565;
		break;
	}

	writel(vm->vactive, dsi->regs + DSI_VACT_NL);
	writel(ps_bpp_mode, dsi->regs + DSI_PSCTRL);
	writel(ps_wc, dsi->regs + DSI_HSTX_CKL_WC);
}

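/*
 * Program the lane count and TX options. The two shifts below map DRM
 * mode flag bits onto register bits: MIPI_DSI_CLOCK_NON_CONTINUOUS
 * (BIT(10)) shifted left by 6 lands on HSTX_CKLP_EN (BIT(16)), and
 * MIPI_DSI_MODE_EOT_PACKET (BIT(9)) shifted right by 3 lands on DIS_EOT
 * (BIT(6)).
 */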
static void mtk_dsi_rxtx_control(struct mtk_dsi *dsi)
{
	u32 tmp_reg;

	switch (dsi->lanes) {
	case 1:
		tmp_reg = 1 << 2;
		break;
	case 2:
		tmp_reg = 3 << 2;
		break;
	case 3:
		tmp_reg = 7 << 2;
		break;
	case 4:
		tmp_reg = 0xf << 2;
		break;
	default:
		tmp_reg = 0xf << 2;
		break;
	}

	tmp_reg |= (dsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) << 6;
	tmp_reg |= (dsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET) >> 3;

	writel(tmp_reg, dsi->regs + DSI_TXRX_CTRL);
}

static void mtk_dsi_ps_control(struct mtk_dsi *dsi)
{
	u32 dsi_tmp_buf_bpp;
	u32 tmp_reg;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666:
		tmp_reg = LOOSELY_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		tmp_reg = PACKED_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB565:
		tmp_reg = PACKED_PS_16BIT_RGB565;
		dsi_tmp_buf_bpp = 2;
		break;
	default:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	}

	tmp_reg += dsi->vm.hactive * dsi_tmp_buf_bpp & DSI_PS_WC;
	writel(tmp_reg, dsi->regs + DSI_PSCTRL);
}

static void mtk_dsi_config_vdo_timing(struct mtk_dsi *dsi)
{
	u32 horizontal_sync_active_byte;
	u32 horizontal_backporch_byte;
	u32 horizontal_frontporch_byte;
	u32 dsi_tmp_buf_bpp, data_phy_cycles;
	struct mtk_phy_timing *timing = &dsi->phy_timing;

	struct videomode *vm = &dsi->vm;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_tmp_buf_bpp = 2;
	else
		dsi_tmp_buf_bpp = 3;

	writel(vm->vsync_len, dsi->regs + DSI_VSA_NL);
	writel(vm->vback_porch, dsi->regs + DSI_VBP_NL);
	writel(vm->vfront_porch, dsi->regs + DSI_VFP_NL);
	writel(vm->vactive, dsi->regs + DSI_VACT_NL);

	if (dsi->driver_data->has_size_ctl)
		writel(vm->vactive << 16 | vm->hactive,
		       dsi->regs + DSI_SIZE_CON);

	horizontal_sync_active_byte = (vm->hsync_len * dsi_tmp_buf_bpp - 10);

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		horizontal_backporch_byte =
			(vm->hback_porch * dsi_tmp_buf_bpp - 10);
	else
		horizontal_backporch_byte = ((vm->hback_porch + vm->hsync_len) *
			dsi_tmp_buf_bpp - 10);

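	/*
	 * Shrink the horizontal front/back porch byte counts to leave room
	 * for the D-PHY LP<->HS transition time (data_phy_cycles per lane
	 * plus a fixed packet overhead), split between HFP and HBP in
	 * proportion to their sizes. If the porches cannot absorb it, the
	 * front porch is programmed uncompensated and the frame rate will
	 * drop, as the warnings below indicate.
	 */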
	data_phy_cycles = timing->lpx + timing->da_hs_prepare +
			  timing->da_hs_zero + timing->da_hs_exit + 3;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST) {
		if ((vm->hfront_porch + vm->hback_porch) * dsi_tmp_buf_bpp >
		    data_phy_cycles * dsi->lanes + 18) {
			horizontal_frontporch_byte =
				vm->hfront_porch * dsi_tmp_buf_bpp -
				(data_phy_cycles * dsi->lanes + 18) *
				vm->hfront_porch /
				(vm->hfront_porch + vm->hback_porch);

			horizontal_backporch_byte =
				horizontal_backporch_byte -
				(data_phy_cycles * dsi->lanes + 18) *
				vm->hback_porch /
				(vm->hfront_porch + vm->hback_porch);
		} else {
			DRM_WARN("HFP is less than the D-PHY overhead, FPS will be under 60Hz\n");
			horizontal_frontporch_byte = vm->hfront_porch *
						     dsi_tmp_buf_bpp;
		}
	} else {
		if ((vm->hfront_porch + vm->hback_porch) * dsi_tmp_buf_bpp >
		    data_phy_cycles * dsi->lanes + 12) {
			horizontal_frontporch_byte =
				vm->hfront_porch * dsi_tmp_buf_bpp -
				(data_phy_cycles * dsi->lanes + 12) *
				vm->hfront_porch /
				(vm->hfront_porch + vm->hback_porch);
			horizontal_backporch_byte = horizontal_backporch_byte -
				(data_phy_cycles * dsi->lanes + 12) *
				vm->hback_porch /
				(vm->hfront_porch + vm->hback_porch);
		} else {
			DRM_WARN("HFP is less than the D-PHY overhead, FPS will be under 60Hz\n");
			horizontal_frontporch_byte = vm->hfront_porch *
						     dsi_tmp_buf_bpp;
		}
	}

	writel(horizontal_sync_active_byte, dsi->regs + DSI_HSA_WC);
	writel(horizontal_backporch_byte, dsi->regs + DSI_HBP_WC);
	writel(horizontal_frontporch_byte, dsi->regs + DSI_HFP_WC);

	mtk_dsi_ps_control(dsi);
}

static void mtk_dsi_start(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
	writel(1, dsi->regs + DSI_START);
}

static void mtk_dsi_stop(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
}

static void mtk_dsi_set_cmd_mode(struct mtk_dsi *dsi)
{
	writel(CMD_MODE, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_interrupt_enable(struct mtk_dsi *dsi)
{
	u32 inten = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	writel(inten, dsi->regs + DSI_INTEN);
}

static void mtk_dsi_irq_data_set(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data |= irq_bit;
}

static void mtk_dsi_irq_data_clear(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data &= ~irq_bit;
}

static s32 mtk_dsi_wait_for_irq_done(struct mtk_dsi *dsi, u32 irq_flag,
				     unsigned int timeout)
{
	s32 ret = 0;
	unsigned long jiffies = msecs_to_jiffies(timeout);

	ret = wait_event_interruptible_timeout(dsi->irq_wait_queue,
					       dsi->irq_data & irq_flag,
					       jiffies);
	if (ret == 0) {
		DRM_WARN("Wait DSI IRQ(0x%08x) Timeout\n", irq_flag);

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}

	return ret;
}

static irqreturn_t mtk_dsi_irq(int irq, void *dev_id)
{
	struct mtk_dsi *dsi = dev_id;
	u32 status, tmp;
	u32 flag = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	status = readl(dsi->regs + DSI_INTSTA) & flag;

	if (status) {
		do {
			mtk_dsi_mask(dsi, DSI_RACK, RACK, RACK);
			tmp = readl(dsi->regs + DSI_INTSTA);
		} while (tmp & DSI_BUSY);

		mtk_dsi_mask(dsi, DSI_INTSTA, status, 0);
		mtk_dsi_irq_data_set(dsi, status);
		wake_up_interruptible(&dsi->irq_wait_queue);
	}

	return IRQ_HANDLED;
}

static s32 mtk_dsi_switch_to_cmd_mode(struct mtk_dsi *dsi, u8 irq_flag, u32 t)
{
	mtk_dsi_irq_data_clear(dsi, irq_flag);
	mtk_dsi_set_cmd_mode(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, irq_flag, t)) {
		DRM_ERROR("failed to switch cmd mode\n");
		return -ETIME;
	} else {
		return 0;
	}
}

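/*
 * Refcounted power-up: both the DDP component start hook and the encoder
 * enable path end up here. The per-lane HS bit rate is
 * pixelclock * bits_per_pixel / lanes and is handed to the MIPI TX PHY
 * through clk_set_rate() on the "hs" clock.
 */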
static int mtk_dsi_poweron(struct mtk_dsi *dsi)
{
	struct device *dev = dsi->host.dev;
	int ret;
	u32 bit_per_pixel;

	if (++dsi->refcount != 1)
		return 0;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB565:
		bit_per_pixel = 16;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		bit_per_pixel = 18;
		break;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB888:
	default:
		bit_per_pixel = 24;
		break;
	}

	dsi->data_rate = DIV_ROUND_UP_ULL(dsi->vm.pixelclock * bit_per_pixel,
					  dsi->lanes);

	ret = clk_set_rate(dsi->hs_clk, dsi->data_rate);
	if (ret < 0) {
		dev_err(dev, "Failed to set data rate: %d\n", ret);
		goto err_refcount;
	}

	phy_power_on(dsi->phy);

	ret = clk_prepare_enable(dsi->engine_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable engine clock: %d\n", ret);
		goto err_phy_power_off;
	}

	ret = clk_prepare_enable(dsi->digital_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable digital clock: %d\n", ret);
		goto err_disable_engine_clk;
	}

	mtk_dsi_enable(dsi);

	if (dsi->driver_data->has_shadow_ctl)
		writel(FORCE_COMMIT | BYPASS_SHADOW,
		       dsi->regs + DSI_SHADOW_DEBUG);

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_phy_timconfig(dsi);

	mtk_dsi_rxtx_control(dsi);
	usleep_range(30, 100);
	mtk_dsi_reset_dphy(dsi);
	mtk_dsi_ps_control_vact(dsi);
	mtk_dsi_set_vm_cmd(dsi);
	mtk_dsi_config_vdo_timing(dsi);
	mtk_dsi_set_interrupt_enable(dsi);

	mtk_dsi_clk_ulp_mode_leave(dsi);
	mtk_dsi_lane0_ulp_mode_leave(dsi);
	mtk_dsi_clk_hs_mode(dsi, 0);

	if (dsi->panel) {
		if (drm_panel_prepare(dsi->panel)) {
			DRM_ERROR("failed to prepare the panel\n");
			goto err_disable_digital_clk;
		}
	}

	return 0;
err_disable_digital_clk:
	clk_disable_unprepare(dsi->digital_clk);
err_disable_engine_clk:
	clk_disable_unprepare(dsi->engine_clk);
err_phy_power_off:
	phy_power_off(dsi->phy);
err_refcount:
	dsi->refcount--;
	return ret;
}

static void mtk_dsi_poweroff(struct mtk_dsi *dsi)
{
	if (WARN_ON(dsi->refcount == 0))
		return;

	if (--dsi->refcount != 0)
		return;

	/*
	 * mtk_dsi_stop() and mtk_dsi_start() are asymmetric, since
	 * mtk_dsi_stop() should be called after mtk_drm_crtc_atomic_disable(),
	 * which needs the irq for vblank, and mtk_dsi_stop() will disable the
	 * irq. mtk_dsi_start() needs to be called in mtk_output_dsi_enable(),
	 * after the dsi is fully set up.
	 */
	mtk_dsi_stop(dsi);

	if (!mtk_dsi_switch_to_cmd_mode(dsi, VM_DONE_INT_FLAG, 500)) {
		if (dsi->panel) {
			if (drm_panel_unprepare(dsi->panel)) {
				DRM_ERROR("failed to unprepare the panel\n");
				return;
			}
		}
	}

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_lane0_ulp_mode_enter(dsi);
	mtk_dsi_clk_ulp_mode_enter(dsi);

	mtk_dsi_disable(dsi);

	clk_disable_unprepare(dsi->engine_clk);
	clk_disable_unprepare(dsi->digital_clk);

	phy_power_off(dsi->phy);
}

static void mtk_output_dsi_enable(struct mtk_dsi *dsi)
{
	int ret;

	if (dsi->enabled)
		return;

	ret = mtk_dsi_poweron(dsi);
	if (ret < 0) {
		DRM_ERROR("failed to power on dsi\n");
		return;
	}

	mtk_dsi_set_mode(dsi);
	mtk_dsi_clk_hs_mode(dsi, 1);

	mtk_dsi_start(dsi);

	if (dsi->panel) {
		if (drm_panel_enable(dsi->panel)) {
			DRM_ERROR("failed to enable the panel\n");
			goto err_dsi_power_off;
		}
	}

	dsi->enabled = true;

	return;
err_dsi_power_off:
	mtk_dsi_stop(dsi);
	mtk_dsi_poweroff(dsi);
}

static void mtk_output_dsi_disable(struct mtk_dsi *dsi)
{
	if (!dsi->enabled)
		return;

	if (dsi->panel) {
		if (drm_panel_disable(dsi->panel)) {
			DRM_ERROR("failed to disable the panel\n");
			return;
		}
	}

	mtk_dsi_poweroff(dsi);

	dsi->enabled = false;
}

static bool mtk_dsi_encoder_mode_fixup(struct drm_encoder *encoder,
				       const struct drm_display_mode *mode,
				       struct drm_display_mode *adjusted_mode)
{
	return true;
}

static void mtk_dsi_encoder_mode_set(struct drm_encoder *encoder,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adjusted, &dsi->vm);
}

static void mtk_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_disable(dsi);
}

static void mtk_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_enable(dsi);
}

static int mtk_dsi_connector_get_modes(struct drm_connector *connector)
{
	struct mtk_dsi *dsi = connector_to_dsi(connector);

	return drm_panel_get_modes(dsi->panel, connector);
}

static const struct drm_encoder_helper_funcs mtk_dsi_encoder_helper_funcs = {
	.mode_fixup = mtk_dsi_encoder_mode_fixup,
	.mode_set = mtk_dsi_encoder_mode_set,
	.disable = mtk_dsi_encoder_disable,
	.enable = mtk_dsi_encoder_enable,
};

static const struct drm_connector_funcs mtk_dsi_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_connector_helper_funcs
	mtk_dsi_connector_helper_funcs = {
	.get_modes = mtk_dsi_connector_get_modes,
};

static int mtk_dsi_create_connector(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_connector_init(drm, &dsi->conn, &mtk_dsi_connector_funcs,
				 DRM_MODE_CONNECTOR_DSI);
	if (ret) {
		DRM_ERROR("Failed to initialize connector\n");
		return ret;
	}

	drm_connector_helper_add(&dsi->conn, &mtk_dsi_connector_helper_funcs);

	dsi->conn.dpms = DRM_MODE_DPMS_OFF;
	drm_connector_attach_encoder(&dsi->conn, &dsi->encoder);

	if (dsi->panel) {
		ret = drm_panel_attach(dsi->panel, &dsi->conn);
		if (ret) {
			DRM_ERROR("Failed to attach panel to drm\n");
			goto err_connector_cleanup;
		}
	}

	return 0;

err_connector_cleanup:
	drm_connector_cleanup(&dsi->conn);
	return ret;
}

static int mtk_dsi_create_conn_enc(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_simple_encoder_init(drm, &dsi->encoder,
				      DRM_MODE_ENCODER_DSI);
	if (ret) {
		DRM_ERROR("Failed to initialize encoder\n");
		return ret;
	}
	drm_encoder_helper_add(&dsi->encoder, &mtk_dsi_encoder_helper_funcs);

	/*
	 * Currently display data paths are statically assigned to a crtc each.
	 * crtc 0 is OVL0 -> COLOR0 -> AAL -> OD -> RDMA0 -> UFOE -> DSI0
	 */
	dsi->encoder.possible_crtcs = 1;

	/* If there's a bridge, attach to it and let it create the connector */
	if (dsi->bridge) {
		ret = drm_bridge_attach(&dsi->encoder, dsi->bridge, NULL, 0);
		if (ret) {
			DRM_ERROR("Failed to attach bridge to drm\n");
			goto err_encoder_cleanup;
		}
	} else {
		/* Otherwise create our own connector and attach to a panel */
		ret = mtk_dsi_create_connector(drm, dsi);
		if (ret)
			goto err_encoder_cleanup;
	}

	return 0;

err_encoder_cleanup:
	drm_encoder_cleanup(&dsi->encoder);
	return ret;
}

static void mtk_dsi_destroy_conn_enc(struct mtk_dsi *dsi)
{
	drm_encoder_cleanup(&dsi->encoder);
	/* Skip connector cleanup if creation was delegated to the bridge */
	if (dsi->conn.dev)
		drm_connector_cleanup(&dsi->conn);
	if (dsi->panel)
		drm_panel_detach(dsi->panel);
}

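/*
 * DDP component start/stop hooks, called from the CRTC side of the
 * display path. They share the refcounted power handling with the
 * encoder enable/disable path above.
 */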
static void mtk_dsi_ddp_start(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweron(dsi);
}

static void mtk_dsi_ddp_stop(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweroff(dsi);
}

static const struct mtk_ddp_comp_funcs mtk_dsi_funcs = {
	.start = mtk_dsi_ddp_start,
	.stop = mtk_dsi_ddp_stop,
};

static int mtk_dsi_host_attach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	dsi->lanes = device->lanes;
	dsi->format = device->format;
	dsi->mode_flags = device->mode_flags;

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}

static int mtk_dsi_host_detach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}

static void mtk_dsi_wait_for_idle(struct mtk_dsi *dsi)
{
	int ret;
	u32 val;

	ret = readl_poll_timeout(dsi->regs + DSI_INTSTA, val, !(val & DSI_BUSY),
				 4, 2000000);
	if (ret) {
		DRM_WARN("timed out waiting for DSI to become idle!\n");

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}
}

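/*
 * Return the number of payload bytes in a read response, based on the
 * data type found in the first byte of the RX FIFO (DSI_RX_DATA0..3):
 * short read responses carry one or two bytes, long read responses carry
 * the word count taken from the packet header.
 */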
static u32 mtk_dsi_recv_cnt(u8 type, u8 *read_data)
{
	switch (type) {
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_1BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_1BYTE:
		return 1;
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_2BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_2BYTE:
		return 2;
	case MIPI_DSI_RX_GENERIC_LONG_READ_RESPONSE:
	case MIPI_DSI_RX_DCS_LONG_READ_RESPONSE:
		return read_data[1] + read_data[2] * 16;
	case MIPI_DSI_RX_ACKNOWLEDGE_AND_ERROR_REPORT:
		DRM_INFO("type is 0x02, try again\n");
		break;
	default:
		DRM_INFO("type(0x%x) not recognized\n", type);
		break;
	}

	return 0;
}

static void mtk_dsi_cmdq(struct mtk_dsi *dsi, const struct mipi_dsi_msg *msg)
{
	const char *tx_buf = msg->tx_buf;
	u8 config, cmdq_size, cmdq_off, type = msg->type;
	u32 reg_val, cmdq_mask, i;
	u32 reg_cmdq_off = dsi->driver_data->reg_cmdq_off;

	if (MTK_DSI_HOST_IS_READ(type))
		config = BTA;
	else
		config = (msg->tx_len > 2) ? LONG_PACKET : SHORT_PACKET;

	if (msg->tx_len > 2) {
		cmdq_size = 1 + (msg->tx_len + 3) / 4;
		cmdq_off = 4;
		cmdq_mask = CONFIG | DATA_ID | DATA_0 | DATA_1;
		reg_val = (msg->tx_len << 16) | (type << 8) | config;
	} else {
		cmdq_size = 1;
		cmdq_off = 2;
		cmdq_mask = CONFIG | DATA_ID;
		reg_val = (type << 8) | config;
	}

	for (i = 0; i < msg->tx_len; i++)
		mtk_dsi_mask(dsi, (reg_cmdq_off + cmdq_off + i) & (~0x3U),
			     (0xffUL << (((i + cmdq_off) & 3U) * 8U)),
			     tx_buf[i] << (((i + cmdq_off) & 3U) * 8U));

	mtk_dsi_mask(dsi, reg_cmdq_off, cmdq_mask, reg_val);
	mtk_dsi_mask(dsi, DSI_CMDQ_SIZE, CMDQ_SIZE, cmdq_size);
}

static ssize_t mtk_dsi_host_send_cmd(struct mtk_dsi *dsi,
				     const struct mipi_dsi_msg *msg, u8 flag)
{
	mtk_dsi_wait_for_idle(dsi);
	mtk_dsi_irq_data_clear(dsi, flag);
	mtk_dsi_cmdq(dsi, msg);
	mtk_dsi_start(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, flag, 2000))
		return -ETIME;
	else
		return 0;
}

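/*
 * mipi_dsi_host_ops .transfer hook, reached for example when a panel
 * driver calls mipi_dsi_dcs_write() or mipi_dsi_dcs_read() on the
 * attached device. Transfers are only accepted while the controller is
 * in command mode; reads copy back at most 10 bytes from the
 * DSI_RX_DATA0..3 FIFO, skipping the response header.
 */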
static ssize_t mtk_dsi_host_transfer(struct mipi_dsi_host *host,
				     const struct mipi_dsi_msg *msg)
{
	struct mtk_dsi *dsi = host_to_dsi(host);
	u32 recv_cnt, i;
	u8 read_data[16];
	void *src_addr;
	u8 irq_flag = CMD_DONE_INT_FLAG;

	if (readl(dsi->regs + DSI_MODE_CTRL) & MODE) {
		DRM_ERROR("dsi engine is not in command mode\n");
		return -EINVAL;
	}

	if (MTK_DSI_HOST_IS_READ(msg->type))
		irq_flag |= LPRX_RD_RDY_INT_FLAG;

	if (mtk_dsi_host_send_cmd(dsi, msg, irq_flag) < 0)
		return -ETIME;

	if (!MTK_DSI_HOST_IS_READ(msg->type))
		return 0;

	if (!msg->rx_buf) {
		DRM_ERROR("dsi receive buffer is NULL\n");
		return -EINVAL;
	}

	for (i = 0; i < 16; i++)
		*(read_data + i) = readb(dsi->regs + DSI_RX_DATA0 + i);

	recv_cnt = mtk_dsi_recv_cnt(read_data[0], read_data);

	if (recv_cnt > 2)
		src_addr = &read_data[4];
	else
		src_addr = &read_data[1];

	if (recv_cnt > 10)
		recv_cnt = 10;

	if (recv_cnt > msg->rx_len)
		recv_cnt = msg->rx_len;

	if (recv_cnt)
		memcpy(msg->rx_buf, src_addr, recv_cnt);

	DRM_INFO("dsi got %d bytes of data from the panel, address(0x%x)\n",
		 recv_cnt, *((u8 *)(msg->tx_buf)));

	return recv_cnt;
}

static const struct mipi_dsi_host_ops mtk_dsi_ops = {
	.attach = mtk_dsi_host_attach,
	.detach = mtk_dsi_host_detach,
	.transfer = mtk_dsi_host_transfer,
};

static int mtk_dsi_bind(struct device *dev, struct device *master, void *data)
{
	int ret;
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	ret = mtk_ddp_comp_register(drm, &dsi->ddp_comp);
	if (ret < 0) {
		dev_err(dev, "Failed to register component %pOF: %d\n",
			dev->of_node, ret);
		return ret;
	}

	ret = mtk_dsi_create_conn_enc(drm, dsi);
	if (ret) {
		DRM_ERROR("Encoder create failed with %d\n", ret);
		goto err_unregister;
	}

	return 0;

err_unregister:
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
	return ret;
}

static void mtk_dsi_unbind(struct device *dev, struct device *master,
			   void *data)
{
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	mtk_dsi_destroy_conn_enc(dsi);
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
}

static const struct component_ops mtk_dsi_component_ops = {
	.bind = mtk_dsi_bind,
	.unbind = mtk_dsi_unbind,
};

static int mtk_dsi_probe(struct platform_device *pdev)
{
	struct mtk_dsi *dsi;
	struct device *dev = &pdev->dev;
	struct resource *regs;
	int irq_num;
	int comp_id;
	int ret;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dsi->host.ops = &mtk_dsi_ops;
	dsi->host.dev = dev;
	ret = mipi_dsi_host_register(&dsi->host);
	if (ret < 0) {
		dev_err(dev, "failed to register DSI host: %d\n", ret);
		return ret;
	}

	ret = drm_of_find_panel_or_bridge(dev->of_node, 0, 0,
					  &dsi->panel, &dsi->bridge);
	if (ret)
		goto err_unregister_host;

	dsi->driver_data = of_device_get_match_data(dev);

	dsi->engine_clk = devm_clk_get(dev, "engine");
	if (IS_ERR(dsi->engine_clk)) {
		ret = PTR_ERR(dsi->engine_clk);

		if (ret != -EPROBE_DEFER)
			dev_err(dev, "Failed to get engine clock: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->digital_clk = devm_clk_get(dev, "digital");
	if (IS_ERR(dsi->digital_clk)) {
		ret = PTR_ERR(dsi->digital_clk);

		if (ret != -EPROBE_DEFER)
			dev_err(dev, "Failed to get digital clock: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->hs_clk = devm_clk_get(dev, "hs");
	if (IS_ERR(dsi->hs_clk)) {
		ret = PTR_ERR(dsi->hs_clk);
		dev_err(dev, "Failed to get hs clock: %d\n", ret);
		goto err_unregister_host;
	}

	regs = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dsi->regs = devm_ioremap_resource(dev, regs);
	if (IS_ERR(dsi->regs)) {
		ret = PTR_ERR(dsi->regs);
		dev_err(dev, "Failed to ioremap memory: %d\n", ret);
		goto err_unregister_host;
	}

	dsi->phy = devm_phy_get(dev, "dphy");
	if (IS_ERR(dsi->phy)) {
		ret = PTR_ERR(dsi->phy);
		dev_err(dev, "Failed to get MIPI-DPHY: %d\n", ret);
		goto err_unregister_host;
	}

	comp_id = mtk_ddp_comp_get_id(dev->of_node, MTK_DSI);
	if (comp_id < 0) {
		dev_err(dev, "Failed to identify by alias: %d\n", comp_id);
		ret = comp_id;
		goto err_unregister_host;
	}

	ret = mtk_ddp_comp_init(dev, dev->of_node, &dsi->ddp_comp, comp_id,
				&mtk_dsi_funcs);
	if (ret) {
		dev_err(dev, "Failed to initialize component: %d\n", ret);
		goto err_unregister_host;
	}

	irq_num = platform_get_irq(pdev, 0);
	if (irq_num < 0) {
		dev_err(&pdev->dev, "failed to get dsi irq_num: %d\n", irq_num);
		ret = irq_num;
		goto err_unregister_host;
	}

	irq_set_status_flags(irq_num, IRQ_TYPE_LEVEL_LOW);
	ret = devm_request_irq(&pdev->dev, irq_num, mtk_dsi_irq,
			       IRQF_TRIGGER_LOW, dev_name(&pdev->dev), dsi);
	if (ret) {
		dev_err(&pdev->dev, "failed to request mediatek dsi irq\n");
		goto err_unregister_host;
	}

	init_waitqueue_head(&dsi->irq_wait_queue);

	platform_set_drvdata(pdev, dsi);

	ret = component_add(&pdev->dev, &mtk_dsi_component_ops);
	if (ret) {
		dev_err(&pdev->dev, "failed to add component: %d\n", ret);
		goto err_unregister_host;
	}

	return 0;

err_unregister_host:
	mipi_dsi_host_unregister(&dsi->host);
	return ret;
}

static int mtk_dsi_remove(struct platform_device *pdev)
{
	struct mtk_dsi *dsi = platform_get_drvdata(pdev);

	mtk_output_dsi_disable(dsi);
	component_del(&pdev->dev, &mtk_dsi_component_ops);
	mipi_dsi_host_unregister(&dsi->host);

	return 0;
}

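/*
 * Per-SoC configuration: the command queue registers sit at a different
 * offset on MT2701, and MT8183 additionally has the shadow register
 * control (DSI_SHADOW_DEBUG) and the DSI_SIZE_CON register.
 */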
static const struct mtk_dsi_driver_data mt8173_dsi_driver_data = {
	.reg_cmdq_off = 0x200,
};

static const struct mtk_dsi_driver_data mt2701_dsi_driver_data = {
	.reg_cmdq_off = 0x180,
};

static const struct mtk_dsi_driver_data mt8183_dsi_driver_data = {
	.reg_cmdq_off = 0x200,
	.has_shadow_ctl = true,
	.has_size_ctl = true,
};

static const struct of_device_id mtk_dsi_of_match[] = {
	{ .compatible = "mediatek,mt2701-dsi",
	  .data = &mt2701_dsi_driver_data },
	{ .compatible = "mediatek,mt8173-dsi",
	  .data = &mt8173_dsi_driver_data },
	{ .compatible = "mediatek,mt8183-dsi",
	  .data = &mt8183_dsi_driver_data },
	{ },
};

struct platform_driver mtk_dsi_driver = {
	.probe = mtk_dsi_probe,
	.remove = mtk_dsi_remove,
	.driver = {
		.name = "mtk-dsi",
		.of_match_table = mtk_dsi_of_match,
	},
};