/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>

#include <nvif/class.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <nvif/event.h>

static int
nouveau_display_vblank_handler(struct nvif_notify *notify)
{
	struct nouveau_crtc *nv_crtc =
		container_of(notify, typeof(*nv_crtc), vblank);
	drm_handle_vblank(nv_crtc->base.dev, nv_crtc->index);
	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_vblank_enable(struct drm_device *dev, int head)
{
	struct drm_crtc *crtc;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == head) {
			nvif_notify_get(&nv_crtc->vblank);
			return 0;
		}
	}
	return -EINVAL;
}

void
nouveau_display_vblank_disable(struct drm_device *dev, int head)
{
	struct drm_crtc *crtc;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == head) {
			nvif_notify_put(&nv_crtc->vblank);
			return;
		}
	}
}

/* Convert the hardware line counter into a position relative to the start
 * of active scanout; the result is negative while the head is in vblank.
 */
static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

int
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct {
		struct nv04_disp_mthd_v0 base;
		struct nv04_disp_scanoutpos_v0 scan;
	} args = {
		.base.method = NV04_DISP_SCANOUTPOS,
		.base.head = nouveau_crtc(crtc)->index,
	};
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	int ret, retry = 1;

	do {
		ret = nvif_mthd(&disp->disp, 0, &args, sizeof(args));
		if (ret != 0)
			return 0;

		if (args.scan.vline) {
			ret |= DRM_SCANOUTPOS_ACCURATE;
			ret |= DRM_SCANOUTPOS_VALID;
			break;
		}

		if (retry) ndelay(crtc->linedur_ns);
	} while (retry--);

	*hpos = args.scan.hline;
	*vpos = calc(args.scan.vblanks, args.scan.vblanke,
		     args.scan.vtotal, args.scan.vline);
	if (stime) *stime = ns_to_ktime(args.scan.time[0]);
	if (etime) *etime = ns_to_ktime(args.scan.time[1]);

	if (*vpos < 0)
		ret |= DRM_SCANOUTPOS_IN_VBLANK;
	return ret | DRM_SCANOUTPOS_VALID;
}

int
nouveau_display_scanoutpos(struct drm_device *dev, int head, unsigned int flags,
			   int *vpos, int *hpos, ktime_t *stime, ktime_t *etime)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return 0;
}

int
nouveau_display_vblstamp(struct drm_device *dev, int head, int *max_error,
			 struct timeval *time, unsigned flags)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return drm_calc_vbltimestamp_from_scanoutpos(dev,
					head, max_error, time, flags, crtc,
					&crtc->hwmode);
		}
	}

	return -EINVAL;
}

static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct drm_crtc *crtc;

	drm_vblank_cleanup(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		nvif_notify_fini(&nv_crtc->vblank);
	}
}

static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		ret = nvif_notify_init(&disp->disp, NULL,
				       nouveau_display_vblank_handler, false,
				       NV04_DISP_NTFY_VBLANK,
				       &(struct nvif_notify_head_req_v0) {
						.head = nv_crtc->index,
				       },
				       sizeof(struct nvif_notify_head_req_v0),
				       sizeof(struct nvif_notify_head_rep_v0),
				       &nv_crtc->vblank);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);
	struct nouveau_display *disp = nouveau_display(drm_fb->dev);

	if (disp->fb_dtor)
		disp->fb_dtor(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(&fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};

int
nouveau_framebuffer_init(struct drm_device *dev,
			 struct nouveau_framebuffer *nv_fb,
			 struct drm_mode_fb_cmd2 *mode_cmd,
			 struct nouveau_bo *nvbo)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_framebuffer *fb = &nv_fb->base;
	int ret;

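	/* Fill the base framebuffer fields from userspace's mode_cmd, then
	 * register the framebuffer with the DRM core.
	 */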
	drm_helper_mode_fill_fb_struct(fb, mode_cmd);
	nv_fb->nvbo = nvbo;

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		return ret;

	if (disp->fb_ctor) {
		ret = disp->fb_ctor(fb);
		if (ret)
			disp->fb_dtor(fb);
	}

	return ret;
}

static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *nouveau_fb;
	struct drm_gem_object *gem;
	int ret = -ENOMEM;

	gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
	if (!nouveau_fb)
		goto err_unref;

	ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
	if (ret)
		goto err;

	return &nouveau_fb->base;

err:
	kfree(nouveau_fb);
err_unref:
	drm_gem_object_unreference(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};

struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

#define PROP_ENUM(p,gen,n,list) do {                                           \
	struct nouveau_drm_prop_enum_list *l = (list);                         \
	int c = 0;                                                             \
	while (l->gen_mask) {                                                  \
		if (l->gen_mask & (1 << (gen)))                                \
			c++;                                                   \
		l++;                                                           \
	}                                                                      \
	if (c) {                                                               \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);        \
		l = (list);                                                    \
		c = 0;                                                         \
		while (p && l->gen_mask) {                                     \
			if (l->gen_mask & (1 << (gen))) {                      \
				drm_property_add_enum(p, c, l->type, l->name); \
				c++;                                           \
			}                                                      \
			l++;                                                   \
		}                                                              \
	}                                                                      \
} while(0)

int
nouveau_display_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}

	return ret;
}

void
nouveau_display_fini(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;
	int head;

	/* Make sure that drm and hw vblank irqs get properly disabled. */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_off(dev, head);

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}

static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	/* -90..+90 */
	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	/* -100..+100 */
	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}

int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = nv_device_resource_start(nvxx_device(&drm->device), 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (drm->device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		static const u16 oclass[] = {
			GM204_DISP,
			GM107_DISP,
			GK110_DISP,
			GK104_DISP,
			GF110_DISP,
			GT214_DISP,
			GT206_DISP,
			GT200_DISP,
			G82_DISP,
			NV50_DISP,
			NV04_DISP,
		};
		int i;

		for (i = 0, ret = -ENODEV; ret && i < ARRAY_SIZE(oclass); i++) {
			ret = nvif_object_init(nvif_object(&drm->device), NULL,
					       NVDRM_DISPLAY, oclass[i],
					       NULL, 0, &disp->disp);
		}

		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.oclass < NV50_DISP)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	nouveau_backlight_init(dev);
	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);

	nouveau_backlight_exit(dev);
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_object_fini(&disp->disp);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}

int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct drm_crtc *crtc;

	nouveau_display_fini(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_unpin(nouveau_fb->nvbo);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->cursor.nvbo) {
			if (nv_crtc->cursor.set_offset)
				nouveau_bo_unmap(nv_crtc->cursor.nvbo);
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		}
	}

	return 0;
}

void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;
	int ret, head;

	/* re-pin fb/cursors */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		ret = nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			NV_ERROR(drm, "Could not pin framebuffer\n");
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (!nv_crtc->cursor.nvbo)
			continue;

		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret && nv_crtc->cursor.set_offset)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			NV_ERROR(drm, "Could not pin/map cursor.\n");
	}

	nouveau_display_init(dev);

	/* Force CLUT to get re-loaded during modeset */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nv_crtc->lut.depth = 0;
	}

	/* Make sure that drm and hw vblank irqs get resumed if needed. */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_on(dev, head);

	/* This should ensure we don't hit a locking problem when someone
	 * wakes us up via a connector. We should never go into suspend
	 * while the display is on anyways.
	 */
	if (runtime)
		return;

	drm_helper_resume_force_mode(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		if (!nv_crtc->cursor.nvbo)
			continue;

		if (nv_crtc->cursor.set_offset)
			nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
		nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
						 nv_crtc->cursor_saved_y);
	}
}

static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &fctx->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo, chan, false, false);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 2);
	if (ret)
		goto fail;

	if (drm->device.info.family < NV_DEVICE_INFO_V0_FERMI)
		BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
	else
		BEGIN_NVC0(chan, FermiSw, NV_SW_PAGE_FLIP, 1);
	OUT_RING (chan, 0x00000000);
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, false, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}

int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event, u32 flags)
{
	const int swap_interval = (flags & DRM_MODE_PAGE_FLIP_ASYNC) ? 0 : 1;
	struct drm_device *dev = crtc->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->primary->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan;
	struct nouveau_cli *cli;
	struct nouveau_fence *fence;
	int ret;

	chan = drm->channel;
	if (!chan)
		return -ENODEV;
	cli = (void *)nvif_client(&chan->device->base);

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	if (new_bo != old_bo) {
		ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			goto fail_free;
	}

	mutex_lock(&cli->mutex);
	ret = ttm_bo_reserve(&new_bo->bo, true, false, false, NULL);
	if (ret)
		goto fail_unpin;

	/* synchronise rendering channel with the kernel's channel */
	ret = nouveau_fence_sync(new_bo, chan, false, true);
	if (ret) {
		ttm_bo_unreserve(&new_bo->bo);
		goto fail_unpin;
	}

	if (new_bo != old_bo) {
		ttm_bo_unreserve(&new_bo->bo);

		ret = ttm_bo_reserve(&old_bo->bo, true, false, false, NULL);
		if (ret)
			goto fail_unpin;
	}

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, nouveau_crtc(crtc)->index,
		  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
		  new_bo->bo.offset };

	/* Keep vblanks on during flip, for the target crtc of this flip */
	drm_vblank_get(dev, nouveau_crtc(crtc)->index);

	/* Emit a page flip */
	if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA) {
		ret = nv50_display_flip_next(crtc, fb, chan, swap_interval);
		if (ret)
			goto fail_unreserve;
	} else {
		struct nv04_display *dispnv04 = nv04_display(dev);
		int head = nouveau_crtc(crtc)->index;

		if (swap_interval) {
			ret = RING_SPACE(chan, 8);
			if (ret)
				goto fail_unreserve;

			BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1);
			OUT_RING (chan, head);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1);
			OUT_RING (chan, 0);
		}

		nouveau_bo_ref(new_bo, &dispnv04->image[head]);
	}

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	if (ret)
		goto fail_unreserve;
	mutex_unlock(&cli->mutex);

	/* Update the crtc struct and cleanup */
	crtc->primary->fb = fb;

	nouveau_bo_fence(old_bo, fence, false);
	ttm_bo_unreserve(&old_bo->bo);
	if (old_bo != new_bo)
		nouveau_bo_unpin(old_bo);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	drm_vblank_put(dev, nouveau_crtc(crtc)->index);
	ttm_bo_unreserve(&old_bo->bo);
fail_unpin:
	mutex_unlock(&cli->mutex);
	if (old_bo != new_bo)
		nouveau_bo_unpin(new_bo);
fail_free:
	kfree(s);
	return ret;
}

int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;
	int crtcid = -1;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&fctx->flip)) {
		NV_ERROR(drm, "unexpected pageflip\n");
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

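	/* Take the oldest pending flip off the list and deliver its
	 * completion event to userspace.
	 */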
	s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
	if (s->event) {
		/* Vblank timestamps/counts are only correct on >= NV-50 */
		if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA)
			crtcid = s->crtc;

		drm_send_vblank_event(dev, crtcid, s->event);
	}

	/* Give up ownership of vblank for page-flipped crtc */
	drm_vblank_put(dev, s->crtc);

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}

int
nouveau_flip_complete(void *data)
{
	struct nouveau_channel *chan = data;
	struct nouveau_drm *drm = chan->drm;
	struct nouveau_page_flip_state state;

	if (!nouveau_finish_page_flip(chan, &state)) {
		if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
			nv_set_crtc_base(drm->dev, state.crtc, state.offset +
					 state.y * state.pitch +
					 state.x * state.bpp / 8);
		}
	}

	return 0;
}

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any ; otherwise fallback to system memory */
	if (nouveau_drm(dev)->device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(dev, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(&bo->gem);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(dev, file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.vma_node);
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}