/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>

#include <nvif/class.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <nvif/event.h>

static int
nouveau_display_vblank_handler(struct nvif_notify *notify)
{
	struct nouveau_crtc *nv_crtc =
		container_of(notify, typeof(*nv_crtc), vblank);
	drm_handle_vblank(nv_crtc->base.dev, nv_crtc->index);
	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_vblank_enable(struct drm_device *dev, int head)
{
	struct drm_crtc *crtc;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == head) {
			nvif_notify_get(&nv_crtc->vblank);
			return 0;
		}
	}
	return -EINVAL;
}

void
nouveau_display_vblank_disable(struct drm_device *dev, int head)
{
	struct drm_crtc *crtc;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == head) {
			nvif_notify_put(&nv_crtc->vblank);
			return;
		}
	}
}

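/* Remap a raw scanline number so that 0 is the first line of active scanout
 * and lines inside the vertical blanking interval come out negative; the
 * DRM_SCANOUTPOS_IN_VBLANK test in nouveau_display_scanoutpos_head() relies
 * on this.
 */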
static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

int
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct {
		struct nv04_disp_mthd_v0 base;
		struct nv04_disp_scanoutpos_v0 scan;
	} args = {
		.base.method = NV04_DISP_SCANOUTPOS,
		.base.head = nouveau_crtc(crtc)->index,
	};
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	int ret, retry = 1;

	do {
		ret = nvif_mthd(&disp->disp, 0, &args, sizeof(args));
		if (ret != 0)
			return 0;

		if (args.scan.vline) {
			ret |= DRM_SCANOUTPOS_ACCURATE;
			ret |= DRM_SCANOUTPOS_VALID;
			break;
		}

		if (retry) ndelay(crtc->linedur_ns);
	} while (retry--);

	*hpos = args.scan.hline;
	*vpos = calc(args.scan.vblanks, args.scan.vblanke,
		     args.scan.vtotal, args.scan.vline);
	if (stime) *stime = ns_to_ktime(args.scan.time[0]);
	if (etime) *etime = ns_to_ktime(args.scan.time[1]);

	if (*vpos < 0)
		ret |= DRM_SCANOUTPOS_IN_VBLANK;
	return ret;
}

int
nouveau_display_scanoutpos(struct drm_device *dev, int head, unsigned int flags,
			   int *vpos, int *hpos, ktime_t *stime, ktime_t *etime)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return 0;
}

int
nouveau_display_vblstamp(struct drm_device *dev, int head, int *max_error,
			 struct timeval *time, unsigned flags)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == head) {
			return drm_calc_vbltimestamp_from_scanoutpos(dev,
					head, max_error, time, flags, crtc,
					&crtc->hwmode);
		}
	}

	return -EINVAL;
}

static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct drm_crtc *crtc;

	drm_vblank_cleanup(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		nvif_notify_fini(&nv_crtc->vblank);
	}
}

static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		ret = nvif_notify_init(&disp->disp,
				       nouveau_display_vblank_handler, false,
				       NV04_DISP_NTFY_VBLANK,
				       &(struct nvif_notify_head_req_v0) {
					.head = nv_crtc->index,
				       },
				       sizeof(struct nvif_notify_head_req_v0),
				       sizeof(struct nvif_notify_head_rep_v0),
				       &nv_crtc->vblank);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);
	struct nouveau_display *disp = nouveau_display(drm_fb->dev);

	if (disp->fb_dtor)
		disp->fb_dtor(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(&fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};

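/* Wrap a GEM-backed nouveau_bo in a DRM framebuffer: fill the framebuffer
 * from the mode_cmd description, register it with the DRM core, and let the
 * display backend attach its own per-framebuffer state through fb_ctor().
 */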
int
nouveau_framebuffer_init(struct drm_device *dev,
			 struct nouveau_framebuffer *nv_fb,
			 struct drm_mode_fb_cmd2 *mode_cmd,
			 struct nouveau_bo *nvbo)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_framebuffer *fb = &nv_fb->base;
	int ret;

	drm_helper_mode_fill_fb_struct(fb, mode_cmd);
	nv_fb->nvbo = nvbo;

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		return ret;

	if (disp->fb_ctor) {
		ret = disp->fb_ctor(fb);
		if (ret)
			disp->fb_dtor(fb);
	}

	return ret;
}

static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *nouveau_fb;
	struct drm_gem_object *gem;
	int ret = -ENOMEM;

	gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
	if (!nouveau_fb)
		goto err_unref;

	ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
	if (ret)
		goto err;

	return &nouveau_fb->base;

err:
	kfree(nouveau_fb);
err_unref:
	drm_gem_object_unreference(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};


struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

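/* gen_mask in the tables above is a bitmask of the display generations an
 * entry is exposed on (bit 0: pre-NV50, bit 1: NV50 family, bit 2: GF110 and
 * newer), matching the "gen" chosen in nouveau_display_create_properties().
 * PROP_ENUM() counts the entries valid for the running generation and builds
 * an enum property from them.
 */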
#define PROP_ENUM(p,gen,n,list) do {                                           \
	struct nouveau_drm_prop_enum_list *l = (list);                         \
	int c = 0;                                                             \
	while (l->gen_mask) {                                                  \
		if (l->gen_mask & (1 << (gen)))                                \
			c++;                                                   \
		l++;                                                           \
	}                                                                      \
	if (c) {                                                               \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);        \
		l = (list);                                                    \
		c = 0;                                                         \
		while (p && l->gen_mask) {                                     \
			if (l->gen_mask & (1 << (gen))) {                      \
				drm_property_add_enum(p, c, l->type, l->name); \
				c++;                                           \
			}                                                      \
			l++;                                                   \
		}                                                              \
	}                                                                      \
} while(0)

int
nouveau_display_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}

	/* enable flip completion events */
	nvif_notify_get(&drm->flip);
	return ret;
}

void
nouveau_display_fini(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	int head;

	/* Make sure that drm and hw vblank irqs get properly disabled. */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_off(dev, head);

	/* disable flip completion events */
	nvif_notify_put(&drm->flip);

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}

static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	/* -90..+90 */
	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	/* -100..+100 */
	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}

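/* Set up the DRM mode_config state and probe for a display object class,
 * newest first; the class that binds decides whether the nv04 or the nv50
 * modesetting backend gets created.
 */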
int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvkm_device *device = nvxx_device(&drm->device);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = device->func->resource_addr(device, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else
	if (drm->device.info.family < NV_DEVICE_INFO_V0_FERMI) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (drm->device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		static const u16 oclass[] = {
			GM204_DISP,
			GM107_DISP,
			GK110_DISP,
			GK104_DISP,
			GF110_DISP,
			GT214_DISP,
			GT206_DISP,
			GT200_DISP,
			G82_DISP,
			NV50_DISP,
			NV04_DISP,
		};
		int i;

		for (i = 0, ret = -ENODEV; ret && i < ARRAY_SIZE(oclass); i++) {
			ret = nvif_object_init(&drm->device.object,
					       NVDRM_DISPLAY, oclass[i],
					       NULL, 0, &disp->disp);
		}

		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.oclass < NV50_DISP)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	nouveau_backlight_init(dev);
	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);

	nouveau_backlight_exit(dev);
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_object_fini(&disp->disp);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}

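/* Tear the display down for suspend and unpin every scanout and cursor
 * buffer so their VRAM can be evicted; nouveau_display_resume() re-pins
 * them and restores the hardware state.
 */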
int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct drm_crtc *crtc;

	nouveau_display_fini(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_unpin(nouveau_fb->nvbo);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->cursor.nvbo) {
			if (nv_crtc->cursor.set_offset)
				nouveau_bo_unmap(nv_crtc->cursor.nvbo);
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		}
	}

	return 0;
}

void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;
	int ret, head;

	/* re-pin fb/cursors */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		ret = nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			NV_ERROR(drm, "Could not pin framebuffer\n");
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (!nv_crtc->cursor.nvbo)
			continue;

		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret && nv_crtc->cursor.set_offset)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			NV_ERROR(drm, "Could not pin/map cursor.\n");
	}

	nouveau_display_init(dev);

	/* Force CLUT to get re-loaded during modeset */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nv_crtc->lut.depth = 0;
	}

	/* Make sure that drm and hw vblank irqs get resumed if needed. */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_on(dev, head);

	/* This should ensure we don't hit a locking problem when someone
	 * wakes us up via a connector. We should never go into suspend
	 * while the display is on anyway.
	 */
	if (runtime)
		return;

	drm_helper_resume_force_mode(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		if (!nv_crtc->cursor.nvbo)
			continue;

		if (nv_crtc->cursor.set_offset)
			nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
		nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
						 nv_crtc->cursor_saved_y);
	}
}

static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &fctx->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo, chan, false, false);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 2);
	if (ret)
		goto fail;

	if (drm->device.info.family < NV_DEVICE_INFO_V0_FERMI)
		BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
	else
		BEGIN_NVC0(chan, FermiSw, NV_SW_PAGE_FLIP, 1);
	OUT_RING (chan, 0x00000000);
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, false, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}

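/* Page-flip entry point for the DRM core: pin the new framebuffer,
 * synchronise the client's rendering with the kernel channel, record the
 * flip state, and have nouveau_page_flip_emit() queue it and emit a
 * software-method flip.  Completion is delivered through
 * nouveau_flip_complete() and nouveau_finish_page_flip() below.
 */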
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event, u32 flags)
{
	const int swap_interval = (flags & DRM_MODE_PAGE_FLIP_ASYNC) ? 0 : 1;
	struct drm_device *dev = crtc->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->primary->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan;
	struct nouveau_cli *cli;
	struct nouveau_fence *fence;
	int ret;

	chan = drm->channel;
	if (!chan)
		return -ENODEV;
	cli = (void *)chan->user.client;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	if (new_bo != old_bo) {
		ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			goto fail_free;
	}

	mutex_lock(&cli->mutex);
	ret = ttm_bo_reserve(&new_bo->bo, true, false, false, NULL);
	if (ret)
		goto fail_unpin;

	/* synchronise rendering channel with the kernel's channel */
	ret = nouveau_fence_sync(new_bo, chan, false, true);
	if (ret) {
		ttm_bo_unreserve(&new_bo->bo);
		goto fail_unpin;
	}

	if (new_bo != old_bo) {
		ttm_bo_unreserve(&new_bo->bo);

		ret = ttm_bo_reserve(&old_bo->bo, true, false, false, NULL);
		if (ret)
			goto fail_unpin;
	}

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, nouveau_crtc(crtc)->index,
		  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
		  new_bo->bo.offset };

	/* Keep vblanks on during flip, for the target crtc of this flip */
	drm_vblank_get(dev, nouveau_crtc(crtc)->index);

	/* Emit a page flip */
	if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA) {
		ret = nv50_display_flip_next(crtc, fb, chan, swap_interval);
		if (ret)
			goto fail_unreserve;
	} else {
		struct nv04_display *dispnv04 = nv04_display(dev);
		int head = nouveau_crtc(crtc)->index;

		if (swap_interval) {
			ret = RING_SPACE(chan, 8);
			if (ret)
				goto fail_unreserve;

			BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1);
			OUT_RING (chan, head);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1);
			OUT_RING (chan, 0);
			BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1);
			OUT_RING (chan, 0);
		}

		nouveau_bo_ref(new_bo, &dispnv04->image[head]);
	}

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	if (ret)
		goto fail_unreserve;
	mutex_unlock(&cli->mutex);

	/* Update the crtc struct and cleanup */
	crtc->primary->fb = fb;

	nouveau_bo_fence(old_bo, fence, false);
	ttm_bo_unreserve(&old_bo->bo);
	if (old_bo != new_bo)
		nouveau_bo_unpin(old_bo);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	drm_vblank_put(dev, nouveau_crtc(crtc)->index);
	ttm_bo_unreserve(&old_bo->bo);
fail_unpin:
	mutex_unlock(&cli->mutex);
	if (old_bo != new_bo)
		nouveau_bo_unpin(new_bo);
fail_free:
	kfree(s);
	return ret;
}

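/* Called once the channel signals flip completion (see nouveau_flip_complete()
 * below): pop the oldest pending flip off the channel's flip list, send the
 * vblank event, if any, and drop the vblank reference taken in
 * nouveau_crtc_page_flip().
 */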
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;
	int crtcid = -1;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&fctx->flip)) {
		NV_ERROR(drm, "unexpected pageflip\n");
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
	if (s->event) {
		/* Vblank timestamps/counts are only correct on >= NV-50 */
		if (drm->device.info.family >= NV_DEVICE_INFO_V0_TESLA)
			crtcid = s->crtc;

		drm_send_vblank_event(dev, crtcid, s->event);
	}

	/* Give up ownership of vblank for page-flipped crtc */
	drm_vblank_put(dev, s->crtc);

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}

int
nouveau_flip_complete(struct nvif_notify *notify)
{
	struct nouveau_drm *drm = container_of(notify, typeof(*drm), flip);
	struct nouveau_channel *chan = drm->channel;
	struct nouveau_page_flip_state state;

	if (!nouveau_finish_page_flip(chan, &state)) {
		if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
			nv_set_crtc_base(drm->dev, state.crtc, state.offset +
					 state.y * state.pitch +
					 state.x * state.bpp / 8);
		}
	}

	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any; otherwise fall back to system memory */
	if (nouveau_drm(dev)->device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(dev, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(&bo->gem);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(dev, file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.vma_node);
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}