/*
 * Copyright (C) 2017 Samsung Electronics Co.Ltd
 * Author:
 *	Andrzej Pietrasiewicz <andrzej.p@samsung.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/kernel.h>
#include <linux/component.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/clk.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>

#include <drm/drmP.h>
#include <drm/exynos_drm.h>
#include "regs-scaler.h"
#include "exynos_drm_fb.h"
#include "exynos_drm_drv.h"
#include "exynos_drm_iommu.h"
#include "exynos_drm_ipp.h"

#define scaler_read(offset)		readl(scaler->regs + (offset))
#define scaler_write(cfg, offset)	writel(cfg, scaler->regs + (offset))
#define SCALER_MAX_CLK			4
#define SCALER_AUTOSUSPEND_DELAY	2000
#define SCALER_RESET_WAIT_RETRIES	100

struct scaler_data {
	const char *clk_name[SCALER_MAX_CLK];
	unsigned int num_clk;
	const struct exynos_drm_ipp_formats *formats;
	unsigned int num_formats;
};

struct scaler_context {
	struct exynos_drm_ipp ipp;
	struct drm_device *drm_dev;
	struct device *dev;
	void __iomem *regs;
	struct clk *clock[SCALER_MAX_CLK];
	struct exynos_drm_ipp_task *task;
	const struct scaler_data *scaler_data;
};

struct scaler_format {
	u32 drm_fmt;
	u32 internal_fmt;
	u32 chroma_tile_w;
	u32 chroma_tile_h;
};

static const struct scaler_format scaler_formats[] = {
	{ DRM_FORMAT_NV12, SCALER_YUV420_2P_UV, 8, 8 },
	{ DRM_FORMAT_NV21, SCALER_YUV420_2P_VU, 8, 8 },
	{ DRM_FORMAT_YUV420, SCALER_YUV420_3P, 8, 8 },
	{ DRM_FORMAT_YUYV, SCALER_YUV422_1P_YUYV, 16, 16 },
	{ DRM_FORMAT_UYVY, SCALER_YUV422_1P_UYVY, 16, 16 },
	{ DRM_FORMAT_YVYU, SCALER_YUV422_1P_YVYU, 16, 16 },
	{ DRM_FORMAT_NV16, SCALER_YUV422_2P_UV, 8, 16 },
	{ DRM_FORMAT_NV61, SCALER_YUV422_2P_VU, 8, 16 },
	{ DRM_FORMAT_YUV422, SCALER_YUV422_3P, 8, 16 },
	{ DRM_FORMAT_NV24, SCALER_YUV444_2P_UV, 16, 16 },
	{ DRM_FORMAT_NV42, SCALER_YUV444_2P_VU, 16, 16 },
	{ DRM_FORMAT_YUV444, SCALER_YUV444_3P, 16, 16 },
	{ DRM_FORMAT_RGB565, SCALER_RGB_565, 0, 0 },
	{ DRM_FORMAT_XRGB1555, SCALER_ARGB1555, 0, 0 },
	{ DRM_FORMAT_ARGB1555, SCALER_ARGB1555, 0, 0 },
	{ DRM_FORMAT_XRGB4444, SCALER_ARGB4444, 0, 0 },
	{ DRM_FORMAT_ARGB4444, SCALER_ARGB4444, 0, 0 },
	{ DRM_FORMAT_XRGB8888, SCALER_ARGB8888, 0, 0 },
	{ DRM_FORMAT_ARGB8888, SCALER_ARGB8888, 0, 0 },
	{ DRM_FORMAT_RGBX8888, SCALER_RGBA8888, 0, 0 },
	{ DRM_FORMAT_RGBA8888, SCALER_RGBA8888, 0, 0 },
};

static const struct scaler_format *scaler_get_format(u32 drm_fmt)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(scaler_formats); i++)
		if (scaler_formats[i].drm_fmt == drm_fmt)
			return &scaler_formats[i];

	return NULL;
}

static inline int scaler_reset(struct scaler_context *scaler)
{
	int retry = SCALER_RESET_WAIT_RETRIES;

	scaler_write(SCALER_CFG_SOFT_RESET, SCALER_CFG);
	do {
		cpu_relax();
	} while (--retry > 1 &&
		 scaler_read(SCALER_CFG) & SCALER_CFG_SOFT_RESET);
	do {
		cpu_relax();
		scaler_write(1, SCALER_INT_EN);
	} while (--retry > 0 && scaler_read(SCALER_INT_EN) != 1);

	return retry ? 0 : -EIO;
}
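
/*
 * Unmask the frame-end interrupt and every "illegal configuration"
 * error condition so that a finished or failed operation is reported
 * through scaler_irq_handler().
 */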
static inline void scaler_enable_int(struct scaler_context *scaler)
{
	u32 val;

	val = SCALER_INT_EN_TIMEOUT |
		SCALER_INT_EN_ILLEGAL_BLEND |
		SCALER_INT_EN_ILLEGAL_RATIO |
		SCALER_INT_EN_ILLEGAL_DST_HEIGHT |
		SCALER_INT_EN_ILLEGAL_DST_WIDTH |
		SCALER_INT_EN_ILLEGAL_DST_V_POS |
		SCALER_INT_EN_ILLEGAL_DST_H_POS |
		SCALER_INT_EN_ILLEGAL_DST_C_SPAN |
		SCALER_INT_EN_ILLEGAL_DST_Y_SPAN |
		SCALER_INT_EN_ILLEGAL_DST_CR_BASE |
		SCALER_INT_EN_ILLEGAL_DST_CB_BASE |
		SCALER_INT_EN_ILLEGAL_DST_Y_BASE |
		SCALER_INT_EN_ILLEGAL_DST_COLOR |
		SCALER_INT_EN_ILLEGAL_SRC_HEIGHT |
		SCALER_INT_EN_ILLEGAL_SRC_WIDTH |
		SCALER_INT_EN_ILLEGAL_SRC_CV_POS |
		SCALER_INT_EN_ILLEGAL_SRC_CH_POS |
		SCALER_INT_EN_ILLEGAL_SRC_YV_POS |
		SCALER_INT_EN_ILLEGAL_SRC_YH_POS |
		SCALER_INT_EN_ILLEGAL_DST_SPAN |
		SCALER_INT_EN_ILLEGAL_SRC_Y_SPAN |
		SCALER_INT_EN_ILLEGAL_SRC_CR_BASE |
		SCALER_INT_EN_ILLEGAL_SRC_CB_BASE |
		SCALER_INT_EN_ILLEGAL_SRC_Y_BASE |
		SCALER_INT_EN_ILLEGAL_SRC_COLOR |
		SCALER_INT_EN_FRAME_END;
	scaler_write(val, SCALER_INT_EN);
}

static inline void scaler_set_src_fmt(struct scaler_context *scaler,
				      u32 src_fmt, u32 tile)
{
	u32 val;

	val = SCALER_SRC_CFG_SET_COLOR_FORMAT(src_fmt) | (tile << 10);
	scaler_write(val, SCALER_SRC_CFG);
}

static inline void scaler_set_src_base(struct scaler_context *scaler,
				       struct exynos_drm_ipp_buffer *src_buf)
{
	static unsigned int bases[] = {
		SCALER_SRC_Y_BASE,
		SCALER_SRC_CB_BASE,
		SCALER_SRC_CR_BASE,
	};
	int i;

	for (i = 0; i < src_buf->format->num_planes; ++i)
		scaler_write(src_buf->dma_addr[i], bases[i]);
}

static inline void scaler_set_src_span(struct scaler_context *scaler,
				       struct exynos_drm_ipp_buffer *src_buf)
{
	u32 val;

	val = SCALER_SRC_SPAN_SET_Y_SPAN(src_buf->buf.pitch[0] /
		src_buf->format->cpp[0]);

	if (src_buf->format->num_planes > 1)
		val |= SCALER_SRC_SPAN_SET_C_SPAN(src_buf->buf.pitch[1]);

	scaler_write(val, SCALER_SRC_SPAN);
}

static inline void scaler_set_src_luma_chroma_pos(struct scaler_context *scaler,
			struct drm_exynos_ipp_task_rect *src_pos,
			const struct scaler_format *fmt)
{
	u32 val;

	val = SCALER_SRC_Y_POS_SET_YH_POS(src_pos->x << 2);
	val |= SCALER_SRC_Y_POS_SET_YV_POS(src_pos->y << 2);
	scaler_write(val, SCALER_SRC_Y_POS);
	val = SCALER_SRC_C_POS_SET_CH_POS(
		(src_pos->x * fmt->chroma_tile_w / 16) << 2);
	val |= SCALER_SRC_C_POS_SET_CV_POS(
		(src_pos->y * fmt->chroma_tile_h / 16) << 2);
	scaler_write(val, SCALER_SRC_C_POS);
}

static inline void scaler_set_src_wh(struct scaler_context *scaler,
				     struct drm_exynos_ipp_task_rect *src_pos)
{
	u32 val;

	val = SCALER_SRC_WH_SET_WIDTH(src_pos->w);
	val |= SCALER_SRC_WH_SET_HEIGHT(src_pos->h);
	scaler_write(val, SCALER_SRC_WH);
}

static inline void scaler_set_dst_fmt(struct scaler_context *scaler,
				      u32 dst_fmt)
{
	u32 val;

	val = SCALER_DST_CFG_SET_COLOR_FORMAT(dst_fmt);
	scaler_write(val, SCALER_DST_CFG);
}

static inline void scaler_set_dst_base(struct scaler_context *scaler,
				       struct exynos_drm_ipp_buffer *dst_buf)
{
	static unsigned int bases[] = {
		SCALER_DST_Y_BASE,
		SCALER_DST_CB_BASE,
		SCALER_DST_CR_BASE,
	};
	int i;

	for (i = 0; i < dst_buf->format->num_planes; ++i)
		scaler_write(dst_buf->dma_addr[i], bases[i]);
}

static inline void scaler_set_dst_span(struct scaler_context *scaler,
				       struct exynos_drm_ipp_buffer *dst_buf)
{
	u32 val;

	val = SCALER_DST_SPAN_SET_Y_SPAN(dst_buf->buf.pitch[0] /
		dst_buf->format->cpp[0]);

	if (dst_buf->format->num_planes > 1)
		val |= SCALER_DST_SPAN_SET_C_SPAN(dst_buf->buf.pitch[1]);

	scaler_write(val, SCALER_DST_SPAN);
}

static inline void scaler_set_dst_luma_pos(struct scaler_context *scaler,
					   struct drm_exynos_ipp_task_rect *dst_pos)
{
	u32 val;

	val = SCALER_DST_WH_SET_WIDTH(dst_pos->w);
	val |= SCALER_DST_WH_SET_HEIGHT(dst_pos->h);
	scaler_write(val, SCALER_DST_WH);
}

static inline void scaler_set_dst_wh(struct scaler_context *scaler,
				     struct drm_exynos_ipp_task_rect *dst_pos)
{
	u32 val;

	val = SCALER_DST_POS_SET_H_POS(dst_pos->x);
	val |= SCALER_DST_POS_SET_V_POS(dst_pos->y);
	scaler_write(val, SCALER_DST_POS);
}

static inline void scaler_set_hv_ratio(struct scaler_context *scaler,
				       unsigned int rotation,
				       struct drm_exynos_ipp_task_rect *src_pos,
				       struct drm_exynos_ipp_task_rect *dst_pos)
{
	u32 val, h_ratio, v_ratio;

	if (drm_rotation_90_or_270(rotation)) {
		h_ratio = (src_pos->h << 16) / dst_pos->w;
		v_ratio = (src_pos->w << 16) / dst_pos->h;
	} else {
		h_ratio = (src_pos->w << 16) / dst_pos->w;
		v_ratio = (src_pos->h << 16) / dst_pos->h;
	}

	val = SCALER_H_RATIO_SET(h_ratio);
	scaler_write(val, SCALER_H_RATIO);

	val = SCALER_V_RATIO_SET(v_ratio);
	scaler_write(val, SCALER_V_RATIO);
}

static inline void scaler_set_rotation(struct scaler_context *scaler,
				       unsigned int rotation)
{
	u32 val = 0;

	if (rotation & DRM_MODE_ROTATE_90)
		val |= SCALER_ROT_CFG_SET_ROTMODE(SCALER_ROT_MODE_90);
	else if (rotation & DRM_MODE_ROTATE_180)
		val |= SCALER_ROT_CFG_SET_ROTMODE(SCALER_ROT_MODE_180);
	else if (rotation & DRM_MODE_ROTATE_270)
		val |= SCALER_ROT_CFG_SET_ROTMODE(SCALER_ROT_MODE_270);
	if (rotation & DRM_MODE_REFLECT_X)
		val |= SCALER_ROT_CFG_FLIP_X_EN;
	if (rotation & DRM_MODE_REFLECT_Y)
		val |= SCALER_ROT_CFG_FLIP_Y_EN;
	scaler_write(val, SCALER_ROT_CFG);
}

static inline void scaler_set_csc(struct scaler_context *scaler,
				  const struct drm_format_info *fmt)
{
	static const u32 csc_mtx[2][3][3] = {
		{ /* YCbCr to RGB */
			{0x254, 0x000, 0x331},
			{0x254, 0xf38, 0xe60},
			{0x254, 0x409, 0x000},
		},
		{ /* RGB to YCbCr */
			{0x084, 0x102, 0x032},
			{0xfb4, 0xf6b, 0x0e1},
			{0x0e1, 0xf44, 0xfdc},
		},
	};
	int i, j, dir;

	switch (fmt->format) {
	case DRM_FORMAT_RGB565:
	case DRM_FORMAT_XRGB1555:
	case DRM_FORMAT_ARGB1555:
	case DRM_FORMAT_XRGB4444:
	case DRM_FORMAT_ARGB4444:
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_ARGB8888:
	case DRM_FORMAT_RGBX8888:
	case DRM_FORMAT_RGBA8888:
		dir = 1;
		break;
	default:
		dir = 0;
	}

	for (i = 0; i < 3; i++)
		for (j = 0; j < 3; j++)
			scaler_write(csc_mtx[dir][i][j], SCALER_CSC_COEF(j, i));
}

static inline void scaler_set_timer(struct scaler_context *scaler,
				    unsigned int timer, unsigned int divider)
{
	u32 val;

	val = SCALER_TIMEOUT_CTRL_TIMER_ENABLE;
	val |= SCALER_TIMEOUT_CTRL_SET_TIMER_VALUE(timer);
	val |= SCALER_TIMEOUT_CTRL_SET_TIMER_DIV(divider);
	scaler_write(val, SCALER_TIMEOUT_CTRL);
}

static inline void scaler_start_hw(struct scaler_context *scaler)
{
	scaler_write(SCALER_CFG_START_CMD, SCALER_CFG);
}

static int scaler_commit(struct exynos_drm_ipp *ipp,
			 struct exynos_drm_ipp_task *task)
{
	struct scaler_context *scaler =
			container_of(ipp, struct scaler_context, ipp);

	struct drm_exynos_ipp_task_rect *src_pos = &task->src.rect;
	struct drm_exynos_ipp_task_rect *dst_pos = &task->dst.rect;
	const struct scaler_format *src_fmt, *dst_fmt;

	src_fmt = scaler_get_format(task->src.buf.fourcc);
	dst_fmt = scaler_get_format(task->dst.buf.fourcc);

	pm_runtime_get_sync(scaler->dev);
	if (scaler_reset(scaler)) {
		pm_runtime_put(scaler->dev);
		return -EIO;
	}

	scaler->task = task;

	scaler_set_src_fmt(
		scaler, src_fmt->internal_fmt, task->src.buf.modifier != 0);
	scaler_set_src_base(scaler, &task->src);
	scaler_set_src_span(scaler, &task->src);
	scaler_set_src_luma_chroma_pos(scaler, src_pos, src_fmt);
	scaler_set_src_wh(scaler, src_pos);

	scaler_set_dst_fmt(scaler, dst_fmt->internal_fmt);
	scaler_set_dst_base(scaler, &task->dst);
	scaler_set_dst_span(scaler, &task->dst);
	scaler_set_dst_luma_pos(scaler, dst_pos);
	scaler_set_dst_wh(scaler, dst_pos);

	scaler_set_hv_ratio(scaler, task->transform.rotation, src_pos, dst_pos);
	scaler_set_rotation(scaler, task->transform.rotation);

	scaler_set_csc(scaler, task->src.format);

	scaler_set_timer(scaler, 0xffff, 0xf);

	scaler_enable_int(scaler);
	scaler_start_hw(scaler);

	return 0;
}

static struct exynos_drm_ipp_funcs ipp_funcs = {
	.commit = scaler_commit,
};

static inline void scaler_disable_int(struct scaler_context *scaler)
{
	scaler_write(0, SCALER_INT_EN);
}

static inline u32 scaler_get_int_status(struct scaler_context *scaler)
{
	u32 val = scaler_read(SCALER_INT_STATUS);

	scaler_write(val, SCALER_INT_STATUS);

	return val;
}

static inline int scaler_task_done(u32 val)
{
	return val & SCALER_INT_STATUS_FRAME_END ? 0 : -EINVAL;
}
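
/*
 * Threaded interrupt handler: read and acknowledge the latched status
 * bits, mask further interrupts and complete the pending IPP task,
 * dropping the runtime PM reference taken in scaler_commit().
 */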
static irqreturn_t scaler_irq_handler(int irq, void *arg)
{
	struct scaler_context *scaler = arg;

	u32 val = scaler_get_int_status(scaler);

	scaler_disable_int(scaler);

	if (scaler->task) {
		struct exynos_drm_ipp_task *task = scaler->task;

		scaler->task = NULL;
		pm_runtime_mark_last_busy(scaler->dev);
		pm_runtime_put_autosuspend(scaler->dev);
		exynos_drm_ipp_task_done(task, scaler_task_done(val));
	}

	return IRQ_HANDLED;
}

static int scaler_bind(struct device *dev, struct device *master, void *data)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);
	struct drm_device *drm_dev = data;
	struct exynos_drm_ipp *ipp = &scaler->ipp;

	scaler->drm_dev = drm_dev;
	drm_iommu_attach_device(drm_dev, dev);

	exynos_drm_ipp_register(drm_dev, ipp, &ipp_funcs,
			DRM_EXYNOS_IPP_CAP_CROP | DRM_EXYNOS_IPP_CAP_ROTATE |
			DRM_EXYNOS_IPP_CAP_SCALE | DRM_EXYNOS_IPP_CAP_CONVERT,
			scaler->scaler_data->formats,
			scaler->scaler_data->num_formats, "scaler");

	dev_info(dev, "The exynos scaler has been probed successfully\n");

	return 0;
}

static void scaler_unbind(struct device *dev, struct device *master,
			  void *data)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);
	struct drm_device *drm_dev = data;
	struct exynos_drm_ipp *ipp = &scaler->ipp;

	exynos_drm_ipp_unregister(drm_dev, ipp);
	drm_iommu_detach_device(scaler->drm_dev, scaler->dev);
}

static const struct component_ops scaler_component_ops = {
	.bind = scaler_bind,
	.unbind = scaler_unbind,
};

static int scaler_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct resource *regs_res;
	struct scaler_context *scaler;
	int irq;
	int ret, i;

	scaler = devm_kzalloc(dev, sizeof(*scaler), GFP_KERNEL);
	if (!scaler)
		return -ENOMEM;

	scaler->scaler_data =
		(struct scaler_data *)of_device_get_match_data(dev);

	scaler->dev = dev;
	regs_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	scaler->regs = devm_ioremap_resource(dev, regs_res);
	if (IS_ERR(scaler->regs))
		return PTR_ERR(scaler->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "failed to get irq\n");
		return irq;
	}

	ret = devm_request_threaded_irq(dev, irq, NULL, scaler_irq_handler,
					IRQF_ONESHOT, "drm_scaler", scaler);
	if (ret < 0) {
		dev_err(dev, "failed to request irq\n");
		return ret;
	}

	for (i = 0; i < scaler->scaler_data->num_clk; ++i) {
		scaler->clock[i] = devm_clk_get(dev,
					scaler->scaler_data->clk_name[i]);
		if (IS_ERR(scaler->clock[i])) {
			dev_err(dev, "failed to get clock\n");
			return PTR_ERR(scaler->clock[i]);
		}
	}

	pm_runtime_use_autosuspend(dev);
	pm_runtime_set_autosuspend_delay(dev, SCALER_AUTOSUSPEND_DELAY);
	pm_runtime_enable(dev);
	platform_set_drvdata(pdev, scaler);

	ret = component_add(dev, &scaler_component_ops);
	if (ret)
		goto err_ippdrv_register;

	return 0;

err_ippdrv_register:
	pm_runtime_dont_use_autosuspend(dev);
	pm_runtime_disable(dev);
	return ret;
}

static int scaler_remove(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;

	component_del(dev, &scaler_component_ops);
	pm_runtime_dont_use_autosuspend(dev);
	pm_runtime_disable(dev);

	return 0;
}

#ifdef CONFIG_PM
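
/*
 * clk_disable_unprepare() returns void; this wrapper gives it the same
 * prototype as clk_prepare_enable() so both can be called through the
 * clk_fun pointer in scaler_clk_ctrl().
 */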
static int clk_disable_unprepare_wrapper(struct clk *clk)
{
	clk_disable_unprepare(clk);

	return 0;
}

static int scaler_clk_ctrl(struct scaler_context *scaler, bool enable)
{
	int (*clk_fun)(struct clk *clk), i;

	clk_fun = enable ? clk_prepare_enable : clk_disable_unprepare_wrapper;

	for (i = 0; i < scaler->scaler_data->num_clk; ++i)
		clk_fun(scaler->clock[i]);

	return 0;
}

static int scaler_runtime_suspend(struct device *dev)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);

	return scaler_clk_ctrl(scaler, false);
}

static int scaler_runtime_resume(struct device *dev)
{
	struct scaler_context *scaler = dev_get_drvdata(dev);

	return scaler_clk_ctrl(scaler, true);
}
#endif

static const struct dev_pm_ops scaler_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(scaler_runtime_suspend, scaler_runtime_resume, NULL)
};

static const struct drm_exynos_ipp_limit scaler_5420_two_pixel_hv_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K }) },
	{ IPP_SIZE_LIMIT(AREA, .h.align = 2, .v.align = 2) },
	{ IPP_SCALE_LIMIT(.h = { 65536 * 1 / 4, 65536 * 16 },
			  .v = { 65536 * 1 / 4, 65536 * 16 }) },
};

static const struct drm_exynos_ipp_limit scaler_5420_two_pixel_h_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K }) },
	{ IPP_SIZE_LIMIT(AREA, .h.align = 2, .v.align = 1) },
	{ IPP_SCALE_LIMIT(.h = { 65536 * 1 / 4, 65536 * 16 },
			  .v = { 65536 * 1 / 4, 65536 * 16 }) },
};

static const struct drm_exynos_ipp_limit scaler_5420_one_pixel_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K }) },
	{ IPP_SCALE_LIMIT(.h = { 65536 * 1 / 4, 65536 * 16 },
			  .v = { 65536 * 1 / 4, 65536 * 16 }) },
};

static const struct drm_exynos_ipp_limit scaler_5420_tile_limits[] = {
	{ IPP_SIZE_LIMIT(BUFFER, .h = { 16, SZ_8K }, .v = { 16, SZ_8K })},
	{ IPP_SIZE_LIMIT(AREA, .h.align = 16, .v.align = 16) },
	{ IPP_SCALE_LIMIT(.h = {1, 1}, .v = {1, 1})},
	{ }
};

#define IPP_SRCDST_TILE_FORMAT(f, l)	\
	IPP_SRCDST_MFORMAT(f, DRM_FORMAT_MOD_SAMSUNG_16_16_TILE, (l))

static const struct exynos_drm_ipp_formats exynos5420_formats[] = {
	/* SCALER_YUV420_2P_UV */
	{ IPP_SRCDST_FORMAT(NV21, scaler_5420_two_pixel_hv_limits) },

	/* SCALER_YUV420_2P_VU */
	{ IPP_SRCDST_FORMAT(NV12, scaler_5420_two_pixel_hv_limits) },

	/* SCALER_YUV420_3P */
	{ IPP_SRCDST_FORMAT(YUV420, scaler_5420_two_pixel_hv_limits) },

	/* SCALER_YUV422_1P_YUYV */
	{ IPP_SRCDST_FORMAT(YUYV, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_1P_UYVY */
	{ IPP_SRCDST_FORMAT(UYVY, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_1P_YVYU */
	{ IPP_SRCDST_FORMAT(YVYU, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_2P_UV */
	{ IPP_SRCDST_FORMAT(NV61, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_2P_VU */
	{ IPP_SRCDST_FORMAT(NV16, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV422_3P */
	{ IPP_SRCDST_FORMAT(YUV422, scaler_5420_two_pixel_h_limits) },

	/* SCALER_YUV444_2P_UV */
	{ IPP_SRCDST_FORMAT(NV42, scaler_5420_one_pixel_limits) },

	/* SCALER_YUV444_2P_VU */
	{ IPP_SRCDST_FORMAT(NV24, scaler_5420_one_pixel_limits) },

	/* SCALER_YUV444_3P */
	{ IPP_SRCDST_FORMAT(YUV444, scaler_5420_one_pixel_limits) },

	/* SCALER_RGB_565 */
	{ IPP_SRCDST_FORMAT(RGB565, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB1555 */
	{ IPP_SRCDST_FORMAT(XRGB1555, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB1555 */
	{ IPP_SRCDST_FORMAT(ARGB1555, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB4444 */
	{ IPP_SRCDST_FORMAT(XRGB4444, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB4444 */
	{ IPP_SRCDST_FORMAT(ARGB4444, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB8888 */
	{ IPP_SRCDST_FORMAT(XRGB8888, scaler_5420_one_pixel_limits) },

	/* SCALER_ARGB8888 */
	{ IPP_SRCDST_FORMAT(ARGB8888, scaler_5420_one_pixel_limits) },

	/* SCALER_RGBA8888 */
	{ IPP_SRCDST_FORMAT(RGBX8888, scaler_5420_one_pixel_limits) },

	/* SCALER_RGBA8888 */
	{ IPP_SRCDST_FORMAT(RGBA8888, scaler_5420_one_pixel_limits) },

	/* SCALER_YUV420_2P_UV TILE */
	{ IPP_SRCDST_TILE_FORMAT(NV21, scaler_5420_tile_limits) },

	/* SCALER_YUV420_2P_VU TILE */
	{ IPP_SRCDST_TILE_FORMAT(NV12, scaler_5420_tile_limits) },

	/* SCALER_YUV420_3P TILE */
	{ IPP_SRCDST_TILE_FORMAT(YUV420, scaler_5420_tile_limits) },

	/* SCALER_YUV422_1P_YUYV TILE */
	{ IPP_SRCDST_TILE_FORMAT(YUYV, scaler_5420_tile_limits) },
};

static const struct scaler_data exynos5420_data = {
	.clk_name = {"mscl"},
	.num_clk = 1,
	.formats = exynos5420_formats,
	.num_formats = ARRAY_SIZE(exynos5420_formats),
};

static const struct scaler_data exynos5433_data = {
	.clk_name = {"pclk", "aclk", "aclk_xiu"},
	.num_clk = 3,
	.formats = exynos5420_formats, /* intentional */
	.num_formats = ARRAY_SIZE(exynos5420_formats),
};

static const struct of_device_id exynos_scaler_match[] = {
	{
		.compatible = "samsung,exynos5420-scaler",
		.data = &exynos5420_data,
	}, {
		.compatible = "samsung,exynos5433-scaler",
		.data = &exynos5433_data,
	}, {
	},
};
MODULE_DEVICE_TABLE(of, exynos_scaler_match);

struct platform_driver scaler_driver = {
	.probe = scaler_probe,
	.remove = scaler_remove,
	.driver = {
		.name = "exynos-scaler",
		.owner = THIS_MODULE,
		.pm = &scaler_pm_ops,
		.of_match_table = exynos_scaler_match,
	},
};