// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015 MediaTek Inc.
 */

#include <drm/drmP.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_panel.h>
#include <drm/drm_of.h>
#include <drm/drm_probe_helper.h>
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/iopoll.h>
#include <linux/irq.h>
#include <linux/of.h>
#include <linux/of_platform.h>
#include <linux/phy/phy.h>
#include <linux/platform_device.h>
#include <video/mipi_display.h>
#include <video/videomode.h>

#include "mtk_drm_ddp_comp.h"

#define DSI_START		0x00

#define DSI_INTEN		0x08

#define DSI_INTSTA		0x0c
#define LPRX_RD_RDY_INT_FLAG		BIT(0)
#define CMD_DONE_INT_FLAG		BIT(1)
#define TE_RDY_INT_FLAG			BIT(2)
#define VM_DONE_INT_FLAG		BIT(3)
#define EXT_TE_RDY_INT_FLAG		BIT(4)
#define DSI_BUSY			BIT(31)

#define DSI_CON_CTRL		0x10
#define DSI_RESET			BIT(0)
#define DSI_EN				BIT(1)

#define DSI_MODE_CTRL		0x14
#define MODE				(3)
#define CMD_MODE			0
#define SYNC_PULSE_MODE			1
#define SYNC_EVENT_MODE			2
#define BURST_MODE			3
#define FRM_MODE			BIT(16)
#define MIX_MODE			BIT(17)

#define DSI_TXRX_CTRL		0x18
#define VC_NUM				BIT(1)
#define LANE_NUM			(0xf << 2)
#define DIS_EOT				BIT(6)
#define NULL_EN				BIT(7)
#define TE_FREERUN			BIT(8)
#define EXT_TE_EN			BIT(9)
#define EXT_TE_EDGE			BIT(10)
#define MAX_RTN_SIZE			(0xf << 12)
#define HSTX_CKLP_EN			BIT(16)

#define DSI_PSCTRL		0x1c
#define DSI_PS_WC			0x3fff
#define DSI_PS_SEL			(3 << 16)
#define PACKED_PS_16BIT_RGB565		(0 << 16)
#define LOOSELY_PS_18BIT_RGB666		(1 << 16)
#define PACKED_PS_18BIT_RGB666		(2 << 16)
#define PACKED_PS_24BIT_RGB888		(3 << 16)

#define DSI_VSA_NL		0x20
#define DSI_VBP_NL		0x24
#define DSI_VFP_NL		0x28
#define DSI_VACT_NL		0x2C
#define DSI_HSA_WC		0x50
#define DSI_HBP_WC		0x54
#define DSI_HFP_WC		0x58

#define DSI_CMDQ_SIZE		0x60
#define CMDQ_SIZE			0x3f

#define DSI_HSTX_CKL_WC		0x64

#define DSI_RX_DATA0		0x74
#define DSI_RX_DATA1		0x78
#define DSI_RX_DATA2		0x7c
#define DSI_RX_DATA3		0x80

#define DSI_RACK		0x84
#define RACK				BIT(0)

#define DSI_PHY_LCCON		0x104
#define LC_HS_TX_EN			BIT(0)
#define LC_ULPM_EN			BIT(1)
#define LC_WAKEUP_EN			BIT(2)

#define DSI_PHY_LD0CON		0x108
#define LD0_HS_TX_EN			BIT(0)
#define LD0_ULPM_EN			BIT(1)
#define LD0_WAKEUP_EN			BIT(2)

#define DSI_PHY_TIMECON0	0x110
#define LPX				(0xff << 0)
#define HS_PREP				(0xff << 8)
#define HS_ZERO				(0xff << 16)
#define HS_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON1	0x114
#define TA_GO				(0xff << 0)
#define TA_SURE				(0xff << 8)
#define TA_GET				(0xff << 16)
#define DA_HS_EXIT			(0xff << 24)

#define DSI_PHY_TIMECON2	0x118
#define CONT_DET			(0xff << 0)
#define CLK_ZERO			(0xff << 16)
#define CLK_TRAIL			(0xff << 24)

#define DSI_PHY_TIMECON3	0x11c
#define CLK_HS_PREP			(0xff << 0)
#define CLK_HS_POST			(0xff << 8)
#define CLK_HS_EXIT			(0xff << 16)

#define DSI_VM_CMD_CON		0x130
#define VM_CMD_EN			BIT(0)
#define TS_VFP_EN			BIT(5)

#define DSI_CMDQ0		0x180
#define CONFIG				(0xff << 0)
#define SHORT_PACKET			0
#define LONG_PACKET			2
#define BTA				BIT(2)
#define DATA_ID				(0xff << 8)
#define DATA_0				(0xff << 16)
#define DATA_1				(0xff << 24)
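
/*
 * Fixed D-PHY high-speed timing parameters used by mtk_dsi_phy_timconfig().
 * They are assumed to be expressed in byte-clock cycles (data_rate / 8),
 * the same unit NS_TO_CYCLE() converts nanoseconds into below.
 */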
#define T_LPX		5
#define T_HS_PREP	6
#define T_HS_TRAIL	8
#define T_HS_EXIT	7
#define T_HS_ZERO	10

#define NS_TO_CYCLE(n, c)    ((n) / (c) + (((n) % (c)) ? 1 : 0))

#define MTK_DSI_HOST_IS_READ(type) \
	((type == MIPI_DSI_GENERIC_READ_REQUEST_0_PARAM) || \
	(type == MIPI_DSI_GENERIC_READ_REQUEST_1_PARAM) || \
	(type == MIPI_DSI_GENERIC_READ_REQUEST_2_PARAM) || \
	(type == MIPI_DSI_DCS_READ))

struct phy;

struct mtk_dsi {
	struct mtk_ddp_comp ddp_comp;
	struct device *dev;
	struct mipi_dsi_host host;
	struct drm_encoder encoder;
	struct drm_connector conn;
	struct drm_panel *panel;
	struct drm_bridge *bridge;
	struct phy *phy;

	void __iomem *regs;

	struct clk *engine_clk;
	struct clk *digital_clk;
	struct clk *hs_clk;

	u32 data_rate;

	unsigned long mode_flags;
	enum mipi_dsi_pixel_format format;
	unsigned int lanes;
	struct videomode vm;
	int refcount;
	bool enabled;
	u32 irq_data;
	wait_queue_head_t irq_wait_queue;
};

static inline struct mtk_dsi *encoder_to_dsi(struct drm_encoder *e)
{
	return container_of(e, struct mtk_dsi, encoder);
}

static inline struct mtk_dsi *connector_to_dsi(struct drm_connector *c)
{
	return container_of(c, struct mtk_dsi, conn);
}

static inline struct mtk_dsi *host_to_dsi(struct mipi_dsi_host *h)
{
	return container_of(h, struct mtk_dsi, host);
}

static void mtk_dsi_mask(struct mtk_dsi *dsi, u32 offset, u32 mask, u32 data)
{
	u32 temp = readl(dsi->regs + offset);

	writel((temp & ~mask) | (data & mask), dsi->regs + offset);
}

static void mtk_dsi_phy_timconfig(struct mtk_dsi *dsi)
{
	u32 timcon0, timcon1, timcon2, timcon3;
	u32 ui, cycle_time;

	ui = 1000 / dsi->data_rate + 0x01;
	cycle_time = 8000 / dsi->data_rate + 0x01;

	timcon0 = T_LPX | T_HS_PREP << 8 | T_HS_ZERO << 16 | T_HS_TRAIL << 24;
	timcon1 = 4 * T_LPX | (3 * T_LPX / 2) << 8 | 5 * T_LPX << 16 |
		  T_HS_EXIT << 24;
	timcon2 = ((NS_TO_CYCLE(0x64, cycle_time) + 0xa) << 24) |
		  (NS_TO_CYCLE(0x150, cycle_time) << 16);
	timcon3 = NS_TO_CYCLE(0x40, cycle_time) | (2 * T_LPX) << 16 |
		  NS_TO_CYCLE(80 + 52 * ui, cycle_time) << 8;

	writel(timcon0, dsi->regs + DSI_PHY_TIMECON0);
	writel(timcon1, dsi->regs + DSI_PHY_TIMECON1);
	writel(timcon2, dsi->regs + DSI_PHY_TIMECON2);
	writel(timcon3, dsi->regs + DSI_PHY_TIMECON3);
}

static void mtk_dsi_enable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, DSI_EN);
}

static void mtk_dsi_disable(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_EN, 0);
}

static void mtk_dsi_reset_engine(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, DSI_RESET);
	mtk_dsi_mask(dsi, DSI_CON_CTRL, DSI_RESET, 0);
}

static void mtk_dsi_clk_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
}

static void mtk_dsi_clk_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, LC_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_WAKEUP_EN, 0);
}

static void mtk_dsi_lane0_ulp_mode_enter(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_HS_TX_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
}
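
/*
 * ULPS exit for data lane 0 mirrors the clock-lane sequence above: clear
 * the ULPM request, then pulse WAKEUP_EN, which presumably lets the PHY
 * drive the lane back to the LP-11 stop state.
 */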
static void mtk_dsi_lane0_ulp_mode_leave(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_ULPM_EN, 0);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, LD0_WAKEUP_EN);
	mtk_dsi_mask(dsi, DSI_PHY_LD0CON, LD0_WAKEUP_EN, 0);
}

static bool mtk_dsi_clk_hs_state(struct mtk_dsi *dsi)
{
	u32 tmp_reg1;

	tmp_reg1 = readl(dsi->regs + DSI_PHY_LCCON);
	return ((tmp_reg1 & LC_HS_TX_EN) == 1) ? true : false;
}

static void mtk_dsi_clk_hs_mode(struct mtk_dsi *dsi, bool enter)
{
	if (enter && !mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, LC_HS_TX_EN);
	else if (!enter && mtk_dsi_clk_hs_state(dsi))
		mtk_dsi_mask(dsi, DSI_PHY_LCCON, LC_HS_TX_EN, 0);
}

static void mtk_dsi_set_mode(struct mtk_dsi *dsi)
{
	u32 vid_mode = CMD_MODE;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
			vid_mode = BURST_MODE;
		else if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
			vid_mode = SYNC_PULSE_MODE;
		else
			vid_mode = SYNC_EVENT_MODE;
	}

	writel(vid_mode, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_vm_cmd(struct mtk_dsi *dsi)
{
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, VM_CMD_EN, VM_CMD_EN);
	mtk_dsi_mask(dsi, DSI_VM_CMD_CON, TS_VFP_EN, TS_VFP_EN);
}

static void mtk_dsi_ps_control_vact(struct mtk_dsi *dsi)
{
	struct videomode *vm = &dsi->vm;
	u32 dsi_buf_bpp, ps_wc;
	u32 ps_bpp_mode;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_buf_bpp = 2;
	else
		dsi_buf_bpp = 3;

	ps_wc = vm->hactive * dsi_buf_bpp;
	ps_bpp_mode = ps_wc;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		ps_bpp_mode |= PACKED_PS_24BIT_RGB888;
		break;
	case MIPI_DSI_FMT_RGB666:
		ps_bpp_mode |= PACKED_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		ps_bpp_mode |= LOOSELY_PS_18BIT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB565:
		ps_bpp_mode |= PACKED_PS_16BIT_RGB565;
		break;
	}

	writel(vm->vactive, dsi->regs + DSI_VACT_NL);
	writel(ps_bpp_mode, dsi->regs + DSI_PSCTRL);
	writel(ps_wc, dsi->regs + DSI_HSTX_CKL_WC);
}

static void mtk_dsi_rxtx_control(struct mtk_dsi *dsi)
{
	u32 tmp_reg;

	switch (dsi->lanes) {
	case 1:
		tmp_reg = 1 << 2;
		break;
	case 2:
		tmp_reg = 3 << 2;
		break;
	case 3:
		tmp_reg = 7 << 2;
		break;
	case 4:
		tmp_reg = 0xf << 2;
		break;
	default:
		tmp_reg = 0xf << 2;
		break;
	}

	tmp_reg |= (dsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) << 6;
	tmp_reg |= (dsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET) >> 3;

	writel(tmp_reg, dsi->regs + DSI_TXRX_CTRL);
}

static void mtk_dsi_ps_control(struct mtk_dsi *dsi)
{
	u32 dsi_tmp_buf_bpp;
	u32 tmp_reg;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666:
		tmp_reg = LOOSELY_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		tmp_reg = PACKED_PS_18BIT_RGB666;
		dsi_tmp_buf_bpp = 3;
		break;
	case MIPI_DSI_FMT_RGB565:
		tmp_reg = PACKED_PS_16BIT_RGB565;
		dsi_tmp_buf_bpp = 2;
		break;
	default:
		tmp_reg = PACKED_PS_24BIT_RGB888;
		dsi_tmp_buf_bpp = 3;
		break;
	}

	tmp_reg += dsi->vm.hactive * dsi_tmp_buf_bpp & DSI_PS_WC;
	writel(tmp_reg, dsi->regs + DSI_PSCTRL);
}
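
/*
 * Program the video-mode timing registers. The horizontal values are word
 * counts in bytes (pixels * bytes per pixel); the constants 10 and 12
 * subtracted below presumably account for per-line DSI packet header and
 * CRC overhead.
 */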
static void mtk_dsi_config_vdo_timing(struct mtk_dsi *dsi)
{
	u32 horizontal_sync_active_byte;
	u32 horizontal_backporch_byte;
	u32 horizontal_frontporch_byte;
	u32 dsi_tmp_buf_bpp;

	struct videomode *vm = &dsi->vm;

	if (dsi->format == MIPI_DSI_FMT_RGB565)
		dsi_tmp_buf_bpp = 2;
	else
		dsi_tmp_buf_bpp = 3;

	writel(vm->vsync_len, dsi->regs + DSI_VSA_NL);
	writel(vm->vback_porch, dsi->regs + DSI_VBP_NL);
	writel(vm->vfront_porch, dsi->regs + DSI_VFP_NL);
	writel(vm->vactive, dsi->regs + DSI_VACT_NL);

	horizontal_sync_active_byte = (vm->hsync_len * dsi_tmp_buf_bpp - 10);

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		horizontal_backporch_byte =
			(vm->hback_porch * dsi_tmp_buf_bpp - 10);
	else
		horizontal_backporch_byte = ((vm->hback_porch + vm->hsync_len) *
			dsi_tmp_buf_bpp - 10);

	horizontal_frontporch_byte = (vm->hfront_porch * dsi_tmp_buf_bpp - 12);

	writel(horizontal_sync_active_byte, dsi->regs + DSI_HSA_WC);
	writel(horizontal_backporch_byte, dsi->regs + DSI_HBP_WC);
	writel(horizontal_frontporch_byte, dsi->regs + DSI_HFP_WC);

	mtk_dsi_ps_control(dsi);
}

static void mtk_dsi_start(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
	writel(1, dsi->regs + DSI_START);
}

static void mtk_dsi_stop(struct mtk_dsi *dsi)
{
	writel(0, dsi->regs + DSI_START);
}

static void mtk_dsi_set_cmd_mode(struct mtk_dsi *dsi)
{
	writel(CMD_MODE, dsi->regs + DSI_MODE_CTRL);
}

static void mtk_dsi_set_interrupt_enable(struct mtk_dsi *dsi)
{
	u32 inten = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	writel(inten, dsi->regs + DSI_INTEN);
}

static void mtk_dsi_irq_data_set(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data |= irq_bit;
}

static void mtk_dsi_irq_data_clear(struct mtk_dsi *dsi, u32 irq_bit)
{
	dsi->irq_data &= ~irq_bit;
}

static s32 mtk_dsi_wait_for_irq_done(struct mtk_dsi *dsi, u32 irq_flag,
				     unsigned int timeout)
{
	s32 ret = 0;
	unsigned long jiffies = msecs_to_jiffies(timeout);

	ret = wait_event_interruptible_timeout(dsi->irq_wait_queue,
					       dsi->irq_data & irq_flag,
					       jiffies);
	if (ret == 0) {
		DRM_WARN("Wait DSI IRQ(0x%08x) Timeout\n", irq_flag);

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}

	return ret;
}

static irqreturn_t mtk_dsi_irq(int irq, void *dev_id)
{
	struct mtk_dsi *dsi = dev_id;
	u32 status, tmp;
	u32 flag = LPRX_RD_RDY_INT_FLAG | CMD_DONE_INT_FLAG | VM_DONE_INT_FLAG;

	status = readl(dsi->regs + DSI_INTSTA) & flag;

	if (status) {
		do {
			mtk_dsi_mask(dsi, DSI_RACK, RACK, RACK);
			tmp = readl(dsi->regs + DSI_INTSTA);
		} while (tmp & DSI_BUSY);

		mtk_dsi_mask(dsi, DSI_INTSTA, status, 0);
		mtk_dsi_irq_data_set(dsi, status);
		wake_up_interruptible(&dsi->irq_wait_queue);
	}

	return IRQ_HANDLED;
}

static s32 mtk_dsi_switch_to_cmd_mode(struct mtk_dsi *dsi, u8 irq_flag, u32 t)
{
	mtk_dsi_irq_data_clear(dsi, irq_flag);
	mtk_dsi_set_cmd_mode(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, irq_flag, t)) {
		DRM_ERROR("failed to switch cmd mode\n");
		return -ETIME;
	} else {
		return 0;
	}
}
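
/*
 * Power handling is refcounted: both the encoder path
 * (mtk_output_dsi_enable/disable) and the DDP component path
 * (mtk_dsi_ddp_start/stop) call poweron/poweroff, and only the first and
 * last caller actually touch the clocks, the PHY and the controller.
 */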
static int mtk_dsi_poweron(struct mtk_dsi *dsi)
{
	struct device *dev = dsi->dev;
	int ret;
	u64 pixel_clock, total_bits;
	u32 htotal, htotal_bits, bit_per_pixel, overhead_cycles, overhead_bits;

	if (++dsi->refcount != 1)
		return 0;

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB565:
		bit_per_pixel = 16;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		bit_per_pixel = 18;
		break;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB888:
	default:
		bit_per_pixel = 24;
		break;
	}

	/*
	 * htotal_time = htotal * byte_per_pixel / num_lanes
	 * overhead_time = lpx + hs_prepare + hs_zero + hs_trail + hs_exit
	 * mipi_ratio = (htotal_time + overhead_time) / htotal_time
	 * data_rate = pixel_clock * bit_per_pixel * mipi_ratio / num_lanes;
	 */
	pixel_clock = dsi->vm.pixelclock;
	htotal = dsi->vm.hactive + dsi->vm.hback_porch + dsi->vm.hfront_porch +
		 dsi->vm.hsync_len;
	htotal_bits = htotal * bit_per_pixel;

	overhead_cycles = T_LPX + T_HS_PREP + T_HS_ZERO + T_HS_TRAIL +
			  T_HS_EXIT;
	overhead_bits = overhead_cycles * dsi->lanes * 8;
	total_bits = htotal_bits + overhead_bits;

	dsi->data_rate = DIV_ROUND_UP_ULL(pixel_clock * total_bits,
					  htotal * dsi->lanes);

	ret = clk_set_rate(dsi->hs_clk, dsi->data_rate);
	if (ret < 0) {
		dev_err(dev, "Failed to set data rate: %d\n", ret);
		goto err_refcount;
	}

	phy_power_on(dsi->phy);

	ret = clk_prepare_enable(dsi->engine_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable engine clock: %d\n", ret);
		goto err_phy_power_off;
	}

	ret = clk_prepare_enable(dsi->digital_clk);
	if (ret < 0) {
		dev_err(dev, "Failed to enable digital clock: %d\n", ret);
		goto err_disable_engine_clk;
	}

	mtk_dsi_enable(dsi);
	mtk_dsi_reset_engine(dsi);
	mtk_dsi_phy_timconfig(dsi);

	mtk_dsi_rxtx_control(dsi);
	mtk_dsi_ps_control_vact(dsi);
	mtk_dsi_set_vm_cmd(dsi);
	mtk_dsi_config_vdo_timing(dsi);
	mtk_dsi_set_interrupt_enable(dsi);

	mtk_dsi_clk_ulp_mode_leave(dsi);
	mtk_dsi_lane0_ulp_mode_leave(dsi);
	mtk_dsi_clk_hs_mode(dsi, 0);

	if (dsi->panel) {
		if (drm_panel_prepare(dsi->panel)) {
			DRM_ERROR("failed to prepare the panel\n");
			goto err_disable_digital_clk;
		}
	}

	return 0;
err_disable_digital_clk:
	clk_disable_unprepare(dsi->digital_clk);
err_disable_engine_clk:
	clk_disable_unprepare(dsi->engine_clk);
err_phy_power_off:
	phy_power_off(dsi->phy);
err_refcount:
	dsi->refcount--;
	return ret;
}

static void mtk_dsi_poweroff(struct mtk_dsi *dsi)
{
	if (WARN_ON(dsi->refcount == 0))
		return;

	if (--dsi->refcount != 0)
		return;

	if (!mtk_dsi_switch_to_cmd_mode(dsi, VM_DONE_INT_FLAG, 500)) {
		if (dsi->panel) {
			if (drm_panel_unprepare(dsi->panel)) {
				DRM_ERROR("failed to unprepare the panel\n");
				return;
			}
		}
	}

	mtk_dsi_reset_engine(dsi);
	mtk_dsi_lane0_ulp_mode_enter(dsi);
	mtk_dsi_clk_ulp_mode_enter(dsi);

	mtk_dsi_disable(dsi);

	clk_disable_unprepare(dsi->engine_clk);
	clk_disable_unprepare(dsi->digital_clk);

	phy_power_off(dsi->phy);
}
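
/*
 * Output enable sequence: power the controller on, program the requested
 * video/command mode, switch the clock lane to HS, kick DSI_START, then
 * enable the panel. mtk_output_dsi_disable() tears this down in reverse.
 */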
static void mtk_output_dsi_enable(struct mtk_dsi *dsi)
{
	int ret;

	if (dsi->enabled)
		return;

	ret = mtk_dsi_poweron(dsi);
	if (ret < 0) {
		DRM_ERROR("failed to power on dsi\n");
		return;
	}

	mtk_dsi_set_mode(dsi);
	mtk_dsi_clk_hs_mode(dsi, 1);

	mtk_dsi_start(dsi);

	if (dsi->panel) {
		if (drm_panel_enable(dsi->panel)) {
			DRM_ERROR("failed to enable the panel\n");
			goto err_dsi_power_off;
		}
	}

	dsi->enabled = true;

	return;
err_dsi_power_off:
	mtk_dsi_stop(dsi);
	mtk_dsi_poweroff(dsi);
}

static void mtk_output_dsi_disable(struct mtk_dsi *dsi)
{
	if (!dsi->enabled)
		return;

	if (dsi->panel) {
		if (drm_panel_disable(dsi->panel)) {
			DRM_ERROR("failed to disable the panel\n");
			return;
		}
	}

	mtk_dsi_stop(dsi);
	mtk_dsi_poweroff(dsi);

	dsi->enabled = false;
}

static void mtk_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
}

static const struct drm_encoder_funcs mtk_dsi_encoder_funcs = {
	.destroy = mtk_dsi_encoder_destroy,
};

static bool mtk_dsi_encoder_mode_fixup(struct drm_encoder *encoder,
				       const struct drm_display_mode *mode,
				       struct drm_display_mode *adjusted_mode)
{
	return true;
}

static void mtk_dsi_encoder_mode_set(struct drm_encoder *encoder,
				     struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adjusted, &dsi->vm);
}

static void mtk_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_disable(dsi);
}

static void mtk_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct mtk_dsi *dsi = encoder_to_dsi(encoder);

	mtk_output_dsi_enable(dsi);
}

static int mtk_dsi_connector_get_modes(struct drm_connector *connector)
{
	struct mtk_dsi *dsi = connector_to_dsi(connector);

	return drm_panel_get_modes(dsi->panel);
}

static const struct drm_encoder_helper_funcs mtk_dsi_encoder_helper_funcs = {
	.mode_fixup = mtk_dsi_encoder_mode_fixup,
	.mode_set = mtk_dsi_encoder_mode_set,
	.disable = mtk_dsi_encoder_disable,
	.enable = mtk_dsi_encoder_enable,
};

static const struct drm_connector_funcs mtk_dsi_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.destroy = drm_connector_cleanup,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static const struct drm_connector_helper_funcs
	mtk_dsi_connector_helper_funcs = {
	.get_modes = mtk_dsi_connector_get_modes,
};

static int mtk_dsi_create_connector(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_connector_init(drm, &dsi->conn, &mtk_dsi_connector_funcs,
				 DRM_MODE_CONNECTOR_DSI);
	if (ret) {
		DRM_ERROR("Failed to initialize connector\n");
		return ret;
	}

	drm_connector_helper_add(&dsi->conn, &mtk_dsi_connector_helper_funcs);

	dsi->conn.dpms = DRM_MODE_DPMS_OFF;
	drm_connector_attach_encoder(&dsi->conn, &dsi->encoder);

	if (dsi->panel) {
		ret = drm_panel_attach(dsi->panel, &dsi->conn);
		if (ret) {
			DRM_ERROR("Failed to attach panel to drm\n");
			goto err_connector_cleanup;
		}
	}

	return 0;

err_connector_cleanup:
	drm_connector_cleanup(&dsi->conn);
	return ret;
}

static int mtk_dsi_create_conn_enc(struct drm_device *drm, struct mtk_dsi *dsi)
{
	int ret;

	ret = drm_encoder_init(drm, &dsi->encoder, &mtk_dsi_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		DRM_ERROR("Failed to initialize encoder\n");
		return ret;
	}
	drm_encoder_helper_add(&dsi->encoder, &mtk_dsi_encoder_helper_funcs);

	/*
	 * Currently display data paths are statically assigned to a crtc each.
	 * crtc 0 is OVL0 -> COLOR0 -> AAL -> OD -> RDMA0 -> UFOE -> DSI0
	 */
	dsi->encoder.possible_crtcs = 1;

	/* If there's a bridge, attach to it and let it create the connector */
	if (dsi->bridge) {
		ret = drm_bridge_attach(&dsi->encoder, dsi->bridge, NULL);
		if (ret) {
			DRM_ERROR("Failed to attach bridge to drm\n");
			goto err_encoder_cleanup;
		}
	} else {
		/* Otherwise create our own connector and attach to a panel */
		ret = mtk_dsi_create_connector(drm, dsi);
		if (ret)
			goto err_encoder_cleanup;
	}

	return 0;

err_encoder_cleanup:
	drm_encoder_cleanup(&dsi->encoder);
	return ret;
}

static void mtk_dsi_destroy_conn_enc(struct mtk_dsi *dsi)
{
	drm_encoder_cleanup(&dsi->encoder);
	/* Skip connector cleanup if creation was delegated to the bridge */
	if (dsi->conn.dev)
		drm_connector_cleanup(&dsi->conn);
}

static void mtk_dsi_ddp_start(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweron(dsi);
}

static void mtk_dsi_ddp_stop(struct mtk_ddp_comp *comp)
{
	struct mtk_dsi *dsi = container_of(comp, struct mtk_dsi, ddp_comp);

	mtk_dsi_poweroff(dsi);
}

static const struct mtk_ddp_comp_funcs mtk_dsi_funcs = {
	.start = mtk_dsi_ddp_start,
	.stop = mtk_dsi_ddp_stop,
};

static int mtk_dsi_host_attach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	dsi->lanes = device->lanes;
	dsi->format = device->format;
	dsi->mode_flags = device->mode_flags;

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}

static int mtk_dsi_host_detach(struct mipi_dsi_host *host,
			       struct mipi_dsi_device *device)
{
	struct mtk_dsi *dsi = host_to_dsi(host);

	if (dsi->conn.dev)
		drm_helper_hpd_irq_event(dsi->conn.dev);

	return 0;
}

static void mtk_dsi_wait_for_idle(struct mtk_dsi *dsi)
{
	int ret;
	u32 val;

	ret = readl_poll_timeout(dsi->regs + DSI_INTSTA, val, !(val & DSI_BUSY),
				 4, 2000000);
	if (ret) {
		DRM_WARN("polling dsi wait not busy timeout!\n");

		mtk_dsi_enable(dsi);
		mtk_dsi_reset_engine(dsi);
	}
}

static u32 mtk_dsi_recv_cnt(u8 type, u8 *read_data)
{
	switch (type) {
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_1BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_1BYTE:
		return 1;
	case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_2BYTE:
	case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_2BYTE:
		return 2;
	case MIPI_DSI_RX_GENERIC_LONG_READ_RESPONSE:
	case MIPI_DSI_RX_DCS_LONG_READ_RESPONSE:
		return read_data[1] + read_data[2] * 16;
	case MIPI_DSI_RX_ACKNOWLEDGE_AND_ERROR_REPORT:
		DRM_INFO("type is 0x02, try again\n");
		break;
	default:
		DRM_INFO("type(0x%x) not recognized\n", type);
		break;
	}

	return 0;
}
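
/*
 * Fill the command queue for one transfer. CMDQ word 0 carries the config
 * flags (short/long packet, BTA for reads) in byte 0 and the DSI data type
 * in byte 1; bytes 2-3 hold either the two payload bytes of a short packet
 * or the word count of a long packet, whose payload is then written
 * byte-wise starting at offset 4.
 */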
static void mtk_dsi_cmdq(struct mtk_dsi *dsi, const struct mipi_dsi_msg *msg)
{
	const char *tx_buf = msg->tx_buf;
	u8 config, cmdq_size, cmdq_off, type = msg->type;
	u32 reg_val, cmdq_mask, i;

	if (MTK_DSI_HOST_IS_READ(type))
		config = BTA;
	else
		config = (msg->tx_len > 2) ? LONG_PACKET : SHORT_PACKET;

	if (msg->tx_len > 2) {
		cmdq_size = 1 + (msg->tx_len + 3) / 4;
		cmdq_off = 4;
		cmdq_mask = CONFIG | DATA_ID | DATA_0 | DATA_1;
		reg_val = (msg->tx_len << 16) | (type << 8) | config;
	} else {
		cmdq_size = 1;
		cmdq_off = 2;
		cmdq_mask = CONFIG | DATA_ID;
		reg_val = (type << 8) | config;
	}

	for (i = 0; i < msg->tx_len; i++)
		writeb(tx_buf[i], dsi->regs + DSI_CMDQ0 + cmdq_off + i);

	mtk_dsi_mask(dsi, DSI_CMDQ0, cmdq_mask, reg_val);
	mtk_dsi_mask(dsi, DSI_CMDQ_SIZE, CMDQ_SIZE, cmdq_size);
}

static ssize_t mtk_dsi_host_send_cmd(struct mtk_dsi *dsi,
				     const struct mipi_dsi_msg *msg, u8 flag)
{
	mtk_dsi_wait_for_idle(dsi);
	mtk_dsi_irq_data_clear(dsi, flag);
	mtk_dsi_cmdq(dsi, msg);
	mtk_dsi_start(dsi);

	if (!mtk_dsi_wait_for_irq_done(dsi, flag, 2000))
		return -ETIME;
	else
		return 0;
}

static ssize_t mtk_dsi_host_transfer(struct mipi_dsi_host *host,
				     const struct mipi_dsi_msg *msg)
{
	struct mtk_dsi *dsi = host_to_dsi(host);
	u32 recv_cnt, i;
	u8 read_data[16];
	void *src_addr;
	u8 irq_flag = CMD_DONE_INT_FLAG;

	if (readl(dsi->regs + DSI_MODE_CTRL) & MODE) {
		DRM_ERROR("dsi engine is not in command mode\n");
		return -EINVAL;
	}

	if (MTK_DSI_HOST_IS_READ(msg->type))
		irq_flag |= LPRX_RD_RDY_INT_FLAG;

	if (mtk_dsi_host_send_cmd(dsi, msg, irq_flag) < 0)
		return -ETIME;

	if (!MTK_DSI_HOST_IS_READ(msg->type))
		return 0;

	if (!msg->rx_buf) {
		DRM_ERROR("dsi receive buffer is NULL\n");
		return -EINVAL;
	}

	for (i = 0; i < 16; i++)
		*(read_data + i) = readb(dsi->regs + DSI_RX_DATA0 + i);

	recv_cnt = mtk_dsi_recv_cnt(read_data[0], read_data);

	if (recv_cnt > 2)
		src_addr = &read_data[4];
	else
		src_addr = &read_data[1];

	if (recv_cnt > 10)
		recv_cnt = 10;

	if (recv_cnt > msg->rx_len)
		recv_cnt = msg->rx_len;

	if (recv_cnt)
		memcpy(msg->rx_buf, src_addr, recv_cnt);

	DRM_INFO("dsi get %d byte data from the panel address(0x%x)\n",
		 recv_cnt, *((u8 *)(msg->tx_buf)));

	return recv_cnt;
}

static const struct mipi_dsi_host_ops mtk_dsi_ops = {
	.attach = mtk_dsi_host_attach,
	.detach = mtk_dsi_host_detach,
	.transfer = mtk_dsi_host_transfer,
};

static int mtk_dsi_bind(struct device *dev, struct device *master, void *data)
{
	int ret;
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	ret = mtk_ddp_comp_register(drm, &dsi->ddp_comp);
	if (ret < 0) {
		dev_err(dev, "Failed to register component %pOF: %d\n",
			dev->of_node, ret);
		return ret;
	}

	ret = mipi_dsi_host_register(&dsi->host);
	if (ret < 0) {
		dev_err(dev, "failed to register DSI host: %d\n", ret);
		goto err_ddp_comp_unregister;
	}

	ret = mtk_dsi_create_conn_enc(drm, dsi);
	if (ret) {
		DRM_ERROR("Encoder create failed with %d\n", ret);
		goto err_unregister;
	}

	return 0;

err_unregister:
	mipi_dsi_host_unregister(&dsi->host);
err_ddp_comp_unregister:
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
	return ret;
}

static void mtk_dsi_unbind(struct device *dev, struct device *master,
			   void *data)
{
	struct drm_device *drm = data;
	struct mtk_dsi *dsi = dev_get_drvdata(dev);

	mtk_dsi_destroy_conn_enc(dsi);
	mipi_dsi_host_unregister(&dsi->host);
	mtk_ddp_comp_unregister(drm, &dsi->ddp_comp);
}

static const struct component_ops mtk_dsi_component_ops = {
	.bind = mtk_dsi_bind,
	.unbind = mtk_dsi_unbind,
};

static int mtk_dsi_probe(struct platform_device *pdev)
{
	struct mtk_dsi *dsi;
	struct device *dev = &pdev->dev;
	struct resource *regs;
	int irq_num;
	int comp_id;
	int ret;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dsi->host.ops = &mtk_dsi_ops;
	dsi->host.dev = dev;

	ret = drm_of_find_panel_or_bridge(dev->of_node, 0, 0,
					  &dsi->panel, &dsi->bridge);
	if (ret)
		return ret;

	dsi->engine_clk = devm_clk_get(dev, "engine");
	if (IS_ERR(dsi->engine_clk)) {
		ret = PTR_ERR(dsi->engine_clk);
		dev_err(dev, "Failed to get engine clock: %d\n", ret);
		return ret;
	}

	dsi->digital_clk = devm_clk_get(dev, "digital");
	if (IS_ERR(dsi->digital_clk)) {
		ret = PTR_ERR(dsi->digital_clk);
		dev_err(dev, "Failed to get digital clock: %d\n", ret);
		return ret;
	}

	dsi->hs_clk = devm_clk_get(dev, "hs");
	if (IS_ERR(dsi->hs_clk)) {
		ret = PTR_ERR(dsi->hs_clk);
		dev_err(dev, "Failed to get hs clock: %d\n", ret);
		return ret;
	}

	regs = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dsi->regs = devm_ioremap_resource(dev, regs);
	if (IS_ERR(dsi->regs)) {
		ret = PTR_ERR(dsi->regs);
		dev_err(dev, "Failed to ioremap memory: %d\n", ret);
		return ret;
	}

	dsi->phy = devm_phy_get(dev, "dphy");
	if (IS_ERR(dsi->phy)) {
		ret = PTR_ERR(dsi->phy);
		dev_err(dev, "Failed to get MIPI-DPHY: %d\n", ret);
		return ret;
	}

	comp_id = mtk_ddp_comp_get_id(dev->of_node, MTK_DSI);
	if (comp_id < 0) {
		dev_err(dev, "Failed to identify by alias: %d\n", comp_id);
		return comp_id;
	}

	ret = mtk_ddp_comp_init(dev, dev->of_node, &dsi->ddp_comp, comp_id,
				&mtk_dsi_funcs);
	if (ret) {
		dev_err(dev, "Failed to initialize component: %d\n", ret);
		return ret;
	}

	irq_num = platform_get_irq(pdev, 0);
	if (irq_num < 0) {
		dev_err(&pdev->dev, "failed to get dsi irq resource\n");
		return -EPROBE_DEFER;
	}

	irq_set_status_flags(irq_num, IRQ_TYPE_LEVEL_LOW);
	ret = devm_request_irq(&pdev->dev, irq_num, mtk_dsi_irq,
			       IRQF_TRIGGER_LOW, dev_name(&pdev->dev), dsi);
	if (ret) {
		dev_err(&pdev->dev, "failed to request mediatek dsi irq\n");
		return -EPROBE_DEFER;
	}

	init_waitqueue_head(&dsi->irq_wait_queue);

	platform_set_drvdata(pdev, dsi);

	return component_add(&pdev->dev, &mtk_dsi_component_ops);
}

static int mtk_dsi_remove(struct platform_device *pdev)
{
	struct mtk_dsi *dsi = platform_get_drvdata(pdev);

	mtk_output_dsi_disable(dsi);
	component_del(&pdev->dev, &mtk_dsi_component_ops);

	return 0;
}

static const struct of_device_id mtk_dsi_of_match[] = {
	{ .compatible = "mediatek,mt2701-dsi" },
	{ .compatible = "mediatek,mt8173-dsi" },
	{ },
};

struct platform_driver mtk_dsi_driver = {
	.probe = mtk_dsi_probe,
	.remove = mtk_dsi_remove,
	.driver = {
		.name = "mtk-dsi",
		.of_match_table = mtk_dsi_of_match,
	},
};