/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>

#include <nvif/class.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <nvif/cl0046.h>
#include <nvif/event.h>

static int
nouveau_display_vblank_handler(struct nvif_notify *notify)
{
	struct nouveau_crtc *nv_crtc =
		container_of(notify, typeof(*nv_crtc), vblank);
	drm_crtc_handle_vblank(&nv_crtc->base);
	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_vblank_enable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == pipe) {
			nvif_notify_get(&nv_crtc->vblank);
			return 0;
		}
	}
	return -EINVAL;
}

void
nouveau_display_vblank_disable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->index == pipe) {
			nvif_notify_put(&nv_crtc->vblank);
			return;
		}
	}
}

static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

static int
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct {
		struct nv04_disp_mthd_v0 base;
		struct nv04_disp_scanoutpos_v0 scan;
	} args = {
		.base.method = NV04_DISP_SCANOUTPOS,
		.base.head = nouveau_crtc(crtc)->index,
	};
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	struct drm_vblank_crtc *vblank = &crtc->dev->vblank[drm_crtc_index(crtc)];
	int ret, retry = 1;

	do {
		ret = nvif_mthd(&disp->disp, 0, &args, sizeof(args));
		if (ret != 0)
			return 0;

		if (args.scan.vline) {
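			/*
			 * A non-zero vline means the query returned a usable
			 * scanline, so the position can be reported as both
			 * valid and accurate; a zero vline is retried once
			 * below, after waiting roughly one scanline.
			 */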
			ret |= DRM_SCANOUTPOS_ACCURATE;
			ret |= DRM_SCANOUTPOS_VALID;
			break;
		}

		if (retry) ndelay(vblank->linedur_ns);
	} while (retry--);

	*hpos = args.scan.hline;
	*vpos = calc(args.scan.vblanks, args.scan.vblanke,
		     args.scan.vtotal, args.scan.vline);
	if (stime) *stime = ns_to_ktime(args.scan.time[0]);
	if (etime) *etime = ns_to_ktime(args.scan.time[1]);

	if (*vpos < 0)
		ret |= DRM_SCANOUTPOS_IN_VBLANK;
	return ret;
}

int
nouveau_display_scanoutpos(struct drm_device *dev, unsigned int pipe,
			   unsigned int flags, int *vpos, int *hpos,
			   ktime_t *stime, ktime_t *etime,
			   const struct drm_display_mode *mode)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == pipe) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return 0;
}

int
nouveau_display_vblstamp(struct drm_device *dev, unsigned int pipe,
			 int *max_error, struct timeval *time, unsigned flags)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == pipe) {
			struct drm_display_mode *mode;
			if (dev->mode_config.funcs->atomic_commit)
				mode = &crtc->state->adjusted_mode;
			else
				mode = &crtc->hwmode;
			return drm_calc_vbltimestamp_from_scanoutpos(dev,
					pipe, max_error, time, flags, mode);
		}
	}

	return -EINVAL;
}

static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct drm_crtc *crtc;

	drm_vblank_cleanup(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		nvif_notify_fini(&nv_crtc->vblank);
	}
}

static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		ret = nvif_notify_init(&disp->disp,
				       nouveau_display_vblank_handler, false,
				       NV04_DISP_NTFY_VBLANK,
				       &(struct nvif_notify_head_req_v0) {
						.head = nv_crtc->index,
				       },
				       sizeof(struct nvif_notify_head_req_v0),
				       sizeof(struct nvif_notify_head_rep_v0),
				       &nv_crtc->vblank);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(&fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};
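
/*
 * Wrap an existing GEM-backed buffer object in a drm_framebuffer.  The GEM
 * reference held by the caller is effectively handed over to the framebuffer
 * and is dropped again in nouveau_user_framebuffer_destroy().
 */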
int
nouveau_framebuffer_new(struct drm_device *dev,
			const struct drm_mode_fb_cmd2 *mode_cmd,
			struct nouveau_bo *nvbo,
			struct nouveau_framebuffer **pfb)
{
	struct nouveau_framebuffer *fb;
	int ret;

	if (!(fb = *pfb = kzalloc(sizeof(*fb), GFP_KERNEL)))
		return -ENOMEM;

	drm_helper_mode_fill_fb_struct(&fb->base, mode_cmd);
	fb->nvbo = nvbo;

	ret = drm_framebuffer_init(dev, &fb->base, &nouveau_framebuffer_funcs);
	if (ret)
		kfree(fb);
	return ret;
}

struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *fb;
	struct nouveau_bo *nvbo;
	struct drm_gem_object *gem;
	int ret;

	gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);
	nvbo = nouveau_gem_object(gem);

	ret = nouveau_framebuffer_new(dev, mode_cmd, nvbo, &fb);
	if (ret == 0)
		return &fb->base;

	drm_gem_object_unreference_unlocked(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};


struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

#define PROP_ENUM(p,gen,n,list) do {					\
	struct nouveau_drm_prop_enum_list *l = (list);			\
	int c = 0;							\
	while (l->gen_mask) {						\
		if (l->gen_mask & (1 << (gen)))				\
			c++;						\
		l++;							\
	}								\
	if (c) {							\
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);	\
		l = (list);						\
		c = 0;							\
		while (p && l->gen_mask) {				\
			if (l->gen_mask & (1 << (gen))) {		\
				drm_property_add_enum(p, c, l->type, l->name); \
				c++;					\
			}						\
			l++;						\
		}							\
	}								\
} while(0)

int
nouveau_display_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}

	/* enable flip completion events */
	nvif_notify_get(&drm->flip);
	return ret;
}
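
/*
 * Counterpart to nouveau_display_init().  When called for suspend the CRTCs
 * are left configured so their state can be restored on resume; otherwise
 * every CRTC is forcibly disabled before the notifiers, connector polling
 * and the display engine itself are shut down.
 */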
void
nouveau_display_fini(struct drm_device *dev, bool suspend)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	int head;

	if (!suspend)
		drm_crtc_force_disable_all(dev);

	/* Make sure that drm and hw vblank irqs get properly disabled. */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_off(dev, head);

	/* disable flip completion events */
	nvif_notify_put(&drm->flip);

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}

static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	/* -90..+90 */
	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	/* -100..+100 */
	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}

int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvkm_device *device = nvxx_device(&drm->device);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = device->func->resource_addr(device, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else
	if (drm->device.info.family < NV_DEVICE_INFO_V0_FERMI) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (drm->device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		static const u16 oclass[] = {
			GP104_DISP,
			GP100_DISP,
			GM200_DISP,
			GM107_DISP,
			GK110_DISP,
			GK104_DISP,
			GF110_DISP,
			GT214_DISP,
			GT206_DISP,
			GT200_DISP,
			G82_DISP,
			NV04_DISP,
		};
		int i;

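		/*
		 * Probe from newest to oldest display class; the first
		 * object allocation that succeeds selects the class used
		 * for the rest of the driver's lifetime.
		 */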
		for (i = 0, ret = -ENODEV; ret && i < ARRAY_SIZE(oclass); i++) {
			ret = nvif_object_init(&drm->device.object, 0,
					       oclass[i], NULL, 0, &disp->disp);
		}

		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.oclass < NV50_DISP)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	drm_mode_config_reset(dev);

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	nouveau_backlight_init(dev);
	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);

	nouveau_backlight_exit(dev);
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_object_fini(&disp->disp);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}

static int
nouveau_atomic_disable_connector(struct drm_atomic_state *state,
				 struct drm_connector *connector)
{
	struct drm_connector_state *connector_state;
	struct drm_crtc *crtc;
	struct drm_crtc_state *crtc_state;
	struct drm_plane_state *plane_state;
	struct drm_plane *plane;
	int ret;

	if (!(crtc = connector->state->crtc))
		return 0;

	connector_state = drm_atomic_get_connector_state(state, connector);
	if (IS_ERR(connector_state))
		return PTR_ERR(connector_state);

	ret = drm_atomic_set_crtc_for_connector(connector_state, NULL);
	if (ret)
		return ret;

	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	ret = drm_atomic_set_mode_for_crtc(crtc_state, NULL);
	if (ret)
		return ret;

	crtc_state->active = false;

	drm_for_each_plane_mask(plane, connector->dev, crtc_state->plane_mask) {
		plane_state = drm_atomic_get_plane_state(state, plane);
		if (IS_ERR(plane_state))
			return PTR_ERR(plane_state);

		ret = drm_atomic_set_crtc_for_plane(plane_state, NULL);
		if (ret)
			return ret;

		drm_atomic_set_fb_for_plane(plane_state, NULL);
	}

	return 0;
}

static int
nouveau_atomic_disable(struct drm_device *dev,
		       struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_atomic_state *state;
	struct drm_connector *connector;
	int ret;

	state = drm_atomic_state_alloc(dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = ctx;

	drm_for_each_connector(connector, dev) {
		ret = nouveau_atomic_disable_connector(state, connector);
		if (ret)
			break;
	}

	if (ret == 0)
		ret = drm_atomic_commit(state);
	drm_atomic_state_put(state);
	return ret;
}

static struct drm_atomic_state *
nouveau_atomic_suspend(struct drm_device *dev)
{
	struct drm_modeset_acquire_ctx ctx;
	struct drm_atomic_state *state;
	int ret;

	drm_modeset_acquire_init(&ctx, 0);

retry:
	ret = drm_modeset_lock_all_ctx(dev, &ctx);
	if (ret < 0) {
		state = ERR_PTR(ret);
		goto unlock;
	}

	state = drm_atomic_helper_duplicate_state(dev, &ctx);
	if (IS_ERR(state))
		goto unlock;

	ret = nouveau_atomic_disable(dev, &ctx);
	if (ret < 0) {
		drm_atomic_state_put(state);
		state = ERR_PTR(ret);
		goto unlock;
	}

unlock:
	if (PTR_ERR(state) == -EDEADLK) {
		drm_modeset_backoff(&ctx);
		goto retry;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	return state;
}
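
/*
 * With atomic modesetting the current display state is duplicated (except
 * for runtime suspend) so it can be restored wholesale on resume.  The
 * legacy path instead unpins the scanout and cursor buffers so they can be
 * evicted from VRAM while the device is asleep.
 */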
int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;

	if (dev->mode_config.funcs->atomic_commit) {
		if (!runtime) {
			disp->suspend = nouveau_atomic_suspend(dev);
			if (IS_ERR(disp->suspend)) {
				int ret = PTR_ERR(disp->suspend);
				disp->suspend = NULL;
				return ret;
			}
		}

		nouveau_display_fini(dev, true);
		return 0;
	}

	nouveau_display_fini(dev, true);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_unpin(nouveau_fb->nvbo);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->cursor.nvbo) {
			if (nv_crtc->cursor.set_offset)
				nouveau_bo_unmap(nv_crtc->cursor.nvbo);
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		}
	}

	return 0;
}

void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;
	int ret, head;

	if (dev->mode_config.funcs->atomic_commit) {
		nouveau_display_init(dev);
		if (disp->suspend) {
			drm_atomic_helper_resume(dev, disp->suspend);
			disp->suspend = NULL;
		}
		return;
	}

	/* re-pin fb/cursors */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		ret = nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			NV_ERROR(drm, "Could not pin framebuffer\n");
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (!nv_crtc->cursor.nvbo)
			continue;

		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret && nv_crtc->cursor.set_offset)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			NV_ERROR(drm, "Could not pin/map cursor.\n");
	}

	nouveau_display_init(dev);

	/* Force CLUT to get re-loaded during modeset */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nv_crtc->lut.depth = 0;
	}

	/* This should ensure we don't hit a locking problem when someone
	 * wakes us up via a connector. We should never go into suspend
	 * while the display is on anyways.
	 */
	if (runtime)
		return;

	drm_helper_resume_force_mode(dev);

	/* Make sure that drm and hw vblank irqs get resumed if needed.
	 */
	for (head = 0; head < dev->mode_config.num_crtc; head++)
		drm_vblank_on(dev, head);

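	/* Restore each CRTC's saved cursor offset and position now that the
	 * heads have been brought back up.
	 */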
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		if (!nv_crtc->cursor.nvbo)
			continue;

		if (nv_crtc->cursor.set_offset)
			nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
		nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
						 nv_crtc->cursor_saved_y);
	}
}

static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &fctx->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo, chan, false, false);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 2);
	if (ret)
		goto fail;

	BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
	OUT_RING  (chan, 0x00000000);
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, false, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}
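
/*
 * Legacy (pre-atomic) page flip.  The new buffer is pinned and synchronised
 * against the kernel channel, a software-method flip is emitted, and the old
 * buffer is fenced so it isn't unpinned before scanout has moved on.  A flip
 * requested with DRM_MODE_PAGE_FLIP_ASYNC uses swap_interval 0, i.e. it does
 * not wait for vblank.
 */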
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event, u32 flags)
{
	const int swap_interval = (flags & DRM_MODE_PAGE_FLIP_ASYNC) ? 0 : 1;
	struct drm_device *dev = crtc->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->primary->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan;
	struct nouveau_cli *cli;
	struct nouveau_fence *fence;
	struct nv04_display *dispnv04 = nv04_display(dev);
	int head = nouveau_crtc(crtc)->index;
	int ret;

	chan = drm->channel;
	if (!chan)
		return -ENODEV;
	cli = (void *)chan->user.client;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	if (new_bo != old_bo) {
		ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			goto fail_free;
	}

	mutex_lock(&cli->mutex);
	ret = ttm_bo_reserve(&new_bo->bo, true, false, NULL);
	if (ret)
		goto fail_unpin;

	/* synchronise rendering channel with the kernel's channel */
	ret = nouveau_fence_sync(new_bo, chan, false, true);
	if (ret) {
		ttm_bo_unreserve(&new_bo->bo);
		goto fail_unpin;
	}

	if (new_bo != old_bo) {
		ttm_bo_unreserve(&new_bo->bo);

		ret = ttm_bo_reserve(&old_bo->bo, true, false, NULL);
		if (ret)
			goto fail_unpin;
	}

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, crtc, fb->bits_per_pixel, fb->pitches[0],
		  new_bo->bo.offset };

	/* Keep vblanks on during flip, for the target crtc of this flip */
	drm_crtc_vblank_get(crtc);

	/* Emit a page flip */
	if (swap_interval) {
		ret = RING_SPACE(chan, 8);
		if (ret)
			goto fail_unreserve;

		BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1);
		OUT_RING  (chan, 0);
		BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1);
		OUT_RING  (chan, head);
		BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1);
		OUT_RING  (chan, 0);
		BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1);
		OUT_RING  (chan, 0);
	}

	nouveau_bo_ref(new_bo, &dispnv04->image[head]);

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	if (ret)
		goto fail_unreserve;
	mutex_unlock(&cli->mutex);

	/* Update the crtc struct and cleanup */
	crtc->primary->fb = fb;

	nouveau_bo_fence(old_bo, fence, false);
	ttm_bo_unreserve(&old_bo->bo);
	if (old_bo != new_bo)
		nouveau_bo_unpin(old_bo);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	drm_crtc_vblank_put(crtc);
	ttm_bo_unreserve(&old_bo->bo);
fail_unpin:
	mutex_unlock(&cli->mutex);
	if (old_bo != new_bo)
		nouveau_bo_unpin(new_bo);
fail_free:
	kfree(s);
	return ret;
}

int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&fctx->flip)) {
		NV_ERROR(drm, "unexpected pageflip\n");
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
	if (s->event) {
		drm_crtc_arm_vblank_event(s->crtc, s->event);
	} else {
		/* Give up ownership of vblank for page-flipped crtc */
		drm_crtc_vblank_put(s->crtc);
	}

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}
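
/*
 * Flip-completion notify handler: pop the oldest pending flip state and, if
 * one was found, point the CRTC at the new scanout offset computed from the
 * flipped framebuffer's pitch, bpp and panning position.
 */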
int
nouveau_flip_complete(struct nvif_notify *notify)
{
	struct nouveau_drm *drm = container_of(notify, typeof(*drm), flip);
	struct nouveau_channel *chan = drm->channel;
	struct nouveau_page_flip_state state;

	if (!nouveau_finish_page_flip(chan, &state)) {
		nv_set_crtc_base(drm->dev, drm_crtc_index(state.crtc),
				 state.offset + state.crtc->y *
				 state.pitch + state.crtc->x *
				 state.bpp / 8);
	}

	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any ; otherwise fallback to system memory */
	if (nouveau_drm(dev)->device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(dev, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(&bo->gem);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.vma_node);
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}