/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <acpi/video.h>
#include <drm/drmP.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_probe_helper.h>

#include <nvif/class.h>

#include "nouveau_fbcon.h"
#include "dispnv04/hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <nvif/cl0046.h>
#include <nvif/event.h>

static int
nouveau_display_vblank_handler(struct nvif_notify *notify)
{
	struct nouveau_crtc *nv_crtc =
		container_of(notify, typeof(*nv_crtc), vblank);
	drm_crtc_handle_vblank(&nv_crtc->base);
	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_vblank_enable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;
	struct nouveau_crtc *nv_crtc;

	crtc = drm_crtc_from_index(dev, pipe);
	if (!crtc)
		return -EINVAL;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_get(&nv_crtc->vblank);

	return 0;
}

void
nouveau_display_vblank_disable(struct drm_device *dev, unsigned int pipe)
{
	struct drm_crtc *crtc;
	struct nouveau_crtc *nv_crtc;

	crtc = drm_crtc_from_index(dev, pipe);
	if (!crtc)
		return;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_put(&nv_crtc->vblank);
}

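/* Convert the raw scanline counter into a position relative to the start
 * of active scanout: the result is negative while the line lies inside
 * the vertical blanking interval, handling both wrapped and non-wrapped
 * vblank regions.
 */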
static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

static bool
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct {
		struct nv04_disp_mthd_v0 base;
		struct nv04_disp_scanoutpos_v0 scan;
	} args = {
		.base.method = NV04_DISP_SCANOUTPOS,
		.base.head = nouveau_crtc(crtc)->index,
	};
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	struct drm_vblank_crtc *vblank = &crtc->dev->vblank[drm_crtc_index(crtc)];
	int retry = 20;
	bool ret = false;

	do {
		ret = nvif_mthd(&disp->disp.object, 0, &args, sizeof(args));
		if (ret != 0)
			return false;

		if (args.scan.vline) {
			ret = true;
			break;
		}

		if (retry) ndelay(vblank->linedur_ns);
	} while (retry--);

	*hpos = args.scan.hline;
	*vpos = calc(args.scan.vblanks, args.scan.vblanke,
		     args.scan.vtotal, args.scan.vline);
	if (stime) *stime = ns_to_ktime(args.scan.time[0]);
	if (etime) *etime = ns_to_ktime(args.scan.time[1]);

	return ret;
}

bool
nouveau_display_scanoutpos(struct drm_device *dev, unsigned int pipe,
			   bool in_vblank_irq, int *vpos, int *hpos,
			   ktime_t *stime, ktime_t *etime,
			   const struct drm_display_mode *mode)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (nouveau_crtc(crtc)->index == pipe) {
			return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
							       stime, etime);
		}
	}

	return false;
}

static void
nouveau_display_vblank_fini(struct drm_device *dev)
{
	struct drm_crtc *crtc;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		nvif_notify_fini(&nv_crtc->vblank);
	}
}

static int
nouveau_display_vblank_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;
	int ret;

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		ret = nvif_notify_init(&disp->disp.object,
				       nouveau_display_vblank_handler, false,
				       NV04_DISP_NTFY_VBLANK,
				       &(struct nvif_notify_head_req_v0) {
					.head = nv_crtc->index,
				       },
				       sizeof(struct nvif_notify_head_req_v0),
				       sizeof(struct nvif_notify_head_rep_v0),
				       &nv_crtc->vblank);
		if (ret) {
			nouveau_display_vblank_fini(dev);
			return ret;
		}
	}

	ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
	if (ret) {
		nouveau_display_vblank_fini(dev);
		return ret;
	}

	return 0;
}

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	if (fb->nvbo)
		drm_gem_object_put_unlocked(&fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};

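/* Construct a nouveau_framebuffer around an existing nouveau_bo; the
 * pre-NV50 YUV pitch restrictions are validated before the framebuffer
 * is registered with DRM.
 */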
int
nouveau_framebuffer_new(struct drm_device *dev,
			const struct drm_mode_fb_cmd2 *mode_cmd,
			struct nouveau_bo *nvbo,
			struct nouveau_framebuffer **pfb)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_framebuffer *fb;
	int ret;

	/* YUV overlays have special requirements pre-NV50 */
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA &&
	    (mode_cmd->pixel_format == DRM_FORMAT_YUYV ||
	     mode_cmd->pixel_format == DRM_FORMAT_UYVY ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV12 ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV21) &&
	    (mode_cmd->pitches[0] & 0x3f || /* align 64 */
	     mode_cmd->pitches[0] >= 0x10000 || /* at most 64k pitch */
	     (mode_cmd->pitches[1] && /* pitches for planes must match */
	      mode_cmd->pitches[0] != mode_cmd->pitches[1]))) {
		struct drm_format_name_buf format_name;
		DRM_DEBUG_KMS("Unsuitable framebuffer: format: %s; pitches: 0x%x 0x%x\n",
			      drm_get_format_name(mode_cmd->pixel_format,
						  &format_name),
			      mode_cmd->pitches[0],
			      mode_cmd->pitches[1]);
		return -EINVAL;
	}

	if (!(fb = *pfb = kzalloc(sizeof(*fb), GFP_KERNEL)))
		return -ENOMEM;

	drm_helper_mode_fill_fb_struct(dev, &fb->base, mode_cmd);
	fb->nvbo = nvbo;

	ret = drm_framebuffer_init(dev, &fb->base, &nouveau_framebuffer_funcs);
	if (ret)
		kfree(fb);
	return ret;
}

struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *fb;
	struct nouveau_bo *nvbo;
	struct drm_gem_object *gem;
	int ret;

	gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);
	nvbo = nouveau_gem_object(gem);

	ret = nouveau_framebuffer_new(dev, mode_cmd, nvbo, &fb);
	if (ret == 0)
		return &fb->base;

	drm_gem_object_put_unlocked(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};


struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

#define PROP_ENUM(p,gen,n,list) do {                                   \
	struct nouveau_drm_prop_enum_list *l = (list);                 \
	int c = 0;                                                     \
	while (l->gen_mask) {                                          \
		if (l->gen_mask & (1 << (gen)))                        \
			c++;                                           \
		l++;                                                   \
	}                                                              \
	if (c) {                                                       \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
		l = (list);                                            \
		while (p && l->gen_mask) {                             \
			if (l->gen_mask & (1 << (gen))) {              \
				drm_property_add_enum(p, l->type, l->name); \
			}                                              \
			l++;                                           \
		}                                                      \
	}                                                              \
} while(0)

static void
nouveau_display_hpd_work(struct work_struct *work)
{
	struct nouveau_drm *drm = container_of(work, typeof(*drm), hpd_work);

	pm_runtime_get_sync(drm->dev->dev);

	drm_helper_hpd_irq_event(drm->dev);

	pm_runtime_mark_last_busy(drm->dev->dev);
	pm_runtime_put_sync(drm->dev->dev);
}

#ifdef CONFIG_ACPI

/*
 * Hans de Goede: This define belongs in acpi/video.h, I've submitted a patch
 * to the acpi subsys to move it there from drivers/acpi/acpi_video.c .
 * This should be dropped once that is merged.
 */
#ifndef ACPI_VIDEO_NOTIFY_PROBE
#define ACPI_VIDEO_NOTIFY_PROBE 0x81
#endif

static int
nouveau_display_acpi_ntfy(struct notifier_block *nb, unsigned long val,
			  void *data)
{
	struct nouveau_drm *drm = container_of(nb, typeof(*drm), acpi_nb);
	struct acpi_bus_event *info = data;
	int ret;

	if (!strcmp(info->device_class, ACPI_VIDEO_CLASS)) {
		if (info->type == ACPI_VIDEO_NOTIFY_PROBE) {
			ret = pm_runtime_get(drm->dev->dev);
			if (ret == 1 || ret == -EACCES) {
				/* If the GPU is already awake, or in a state
				 * where we can't wake it up, it can handle
				 * its own hotplug events.
				 */
				pm_runtime_put_autosuspend(drm->dev->dev);
			} else if (ret == 0) {
				/* This may be the only indication we receive
				 * of a connector hotplug on a runtime
				 * suspended GPU, schedule hpd_work to check.
				 */
				NV_DEBUG(drm, "ACPI requested connector reprobe\n");
				schedule_work(&drm->hpd_work);
				pm_runtime_put_noidle(drm->dev->dev);
			} else {
				NV_WARN(drm, "Dropped ACPI reprobe event due to RPM error: %d\n",
					ret);
			}

			/* acpi-video should not generate keypresses for this */
			return NOTIFY_BAD;
		}
	}

	return NOTIFY_DONE;
}
#endif

int
nouveau_display_init(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* enable connector detection and polling for connectors without HPD
	 * support
	 */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	/* enable flip completion events */
	nvif_notify_get(&drm->flip);
	return ret;
}

void
nouveau_display_fini(struct drm_device *dev, bool suspend, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;

	if (!suspend) {
		if (drm_drv_uses_atomic_modeset(dev))
			drm_atomic_helper_shutdown(dev);
		else
			drm_helper_force_disable_all(dev);
	}

	/* disable flip completion events */
	nvif_notify_put(&drm->flip);

	/* disable hotplug interrupts */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	if (!runtime)
		cancel_work_sync(&drm->hpd_work);

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}

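/* Register the nouveau-specific connector properties; which ones are
 * exposed depends on the display generation (gen 0 = pre-NV50, gen 1 =
 * NV50 family, gen 2 = GF110 and later), matching the gen_mask bits in
 * the property tables above.
 */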
static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.object.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.object.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	/* -90..+90 */
	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	/* -100..+100 */
	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}

int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvkm_device *device = nvxx_device(&drm->client.device);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = device->func->resource_addr(device, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	if (drm->client.device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		ret = nvif_disp_ctor(&drm->client.device, 0, &disp->disp);
		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.object.oclass < NV50_DISP)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	drm_mode_config_reset(dev);

	if (dev->mode_config.num_crtc) {
		ret = nouveau_display_vblank_init(dev);
		if (ret)
			goto vblank_err;
	}

	INIT_WORK(&drm->hpd_work, nouveau_display_hpd_work);
#ifdef CONFIG_ACPI
	drm->acpi_nb.notifier_call = nouveau_display_acpi_ntfy;
	register_acpi_notifier(&drm->acpi_nb);
#endif

	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);

#ifdef CONFIG_ACPI
	unregister_acpi_notifier(&nouveau_drm(dev)->acpi_nb);
#endif
	nouveau_display_vblank_fini(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_disp_dtor(&disp->disp);

	nouveau_drm(dev)->display = NULL;
	kfree(disp);
}

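/* Tear the display down for system or runtime suspend.  Atomic drivers
 * save the full display state (system suspend only) via
 * drm_atomic_helper_suspend(); the legacy path instead unpins all scanout
 * and cursor buffers so they may be evicted from VRAM.
 */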
int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_crtc *crtc;

	if (drm_drv_uses_atomic_modeset(dev)) {
		if (!runtime) {
			disp->suspend = drm_atomic_helper_suspend(dev);
			if (IS_ERR(disp->suspend)) {
				int ret = PTR_ERR(disp->suspend);
				disp->suspend = NULL;
				return ret;
			}
		}

		nouveau_display_fini(dev, true, runtime);
		return 0;
	}

	nouveau_display_fini(dev, true, runtime);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		nouveau_bo_unpin(nouveau_fb->nvbo);
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (nv_crtc->cursor.nvbo) {
			if (nv_crtc->cursor.set_offset)
				nouveau_bo_unmap(nv_crtc->cursor.nvbo);
			nouveau_bo_unpin(nv_crtc->cursor.nvbo);
		}
	}

	return 0;
}

void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_crtc *crtc;
	int ret;

	if (drm_drv_uses_atomic_modeset(dev)) {
		nouveau_display_init(dev);
		if (disp->suspend) {
			drm_atomic_helper_resume(dev, disp->suspend);
			disp->suspend = NULL;
		}
		return;
	}

	/* re-pin fb/cursors */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_framebuffer *nouveau_fb;

		nouveau_fb = nouveau_framebuffer(crtc->primary->fb);
		if (!nouveau_fb || !nouveau_fb->nvbo)
			continue;

		ret = nouveau_bo_pin(nouveau_fb->nvbo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			NV_ERROR(drm, "Could not pin framebuffer\n");
	}

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
		if (!nv_crtc->cursor.nvbo)
			continue;

		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM, true);
		if (!ret && nv_crtc->cursor.set_offset)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			NV_ERROR(drm, "Could not pin/map cursor.\n");
	}

	nouveau_display_init(dev);

	/* Force CLUT to get re-loaded during modeset */
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		nv_crtc->lut.depth = 0;
	}

	/* This should ensure we don't hit a locking problem when someone
	 * wakes us up via a connector. We should never go into suspend
	 * while the display is on anyway.
	 */
	if (runtime)
		return;

	drm_helper_resume_force_mode(dev);

	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

		if (!nv_crtc->cursor.nvbo)
			continue;

		if (nv_crtc->cursor.set_offset)
			nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
		nv_crtc->cursor.set_pos(nv_crtc, nv_crtc->cursor_saved_x,
					nv_crtc->cursor_saved_y);
	}
}

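/* Queue the flip state on the channel's pending-flip list and emit the
 * NV_SW_PAGE_FLIP software method; *pfence is later attached to the old
 * buffer so it is not reused before the flip completes.
 */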
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &fctx->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo, chan, false, false);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 2);
	if (ret)
		goto fail;

	BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
	OUT_RING (chan, 0x00000000);
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, false, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}

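/* Legacy page-flip entry point used by the pre-NV50 display path: pins the
 * new buffer, synchronises it with the kernel channel and, unless
 * DRM_MODE_PAGE_FLIP_ASYNC was requested, emits additional NvSubImageBlit
 * methods before the flip.  Completion is reported through
 * nouveau_finish_page_flip().
 */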
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event, u32 flags,
		       struct drm_modeset_acquire_ctx *ctx)
{
	const int swap_interval = (flags & DRM_MODE_PAGE_FLIP_ASYNC) ? 0 : 1;
	struct drm_device *dev = crtc->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->primary->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan;
	struct nouveau_cli *cli;
	struct nouveau_fence *fence;
	struct nv04_display *dispnv04 = nv04_display(dev);
	int head = nouveau_crtc(crtc)->index;
	int ret;

	chan = drm->channel;
	if (!chan)
		return -ENODEV;
	cli = (void *)chan->user.client;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	if (new_bo != old_bo) {
		ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM, true);
		if (ret)
			goto fail_free;
	}

	mutex_lock(&cli->mutex);
	ret = ttm_bo_reserve(&new_bo->bo, true, false, NULL);
	if (ret)
		goto fail_unpin;

	/* synchronise rendering channel with the kernel's channel */
	ret = nouveau_fence_sync(new_bo, chan, false, true);
	if (ret) {
		ttm_bo_unreserve(&new_bo->bo);
		goto fail_unpin;
	}

	if (new_bo != old_bo) {
		ttm_bo_unreserve(&new_bo->bo);

		ret = ttm_bo_reserve(&old_bo->bo, true, false, NULL);
		if (ret)
			goto fail_unpin;
	}

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, crtc, fb->format->cpp[0] * 8, fb->pitches[0],
		  new_bo->bo.offset };

	/* Keep vblanks on during flip, for the target crtc of this flip */
	drm_crtc_vblank_get(crtc);

	/* Emit a page flip */
	if (swap_interval) {
		ret = RING_SPACE(chan, 8);
		if (ret)
			goto fail_unreserve;

		BEGIN_NV04(chan, NvSubImageBlit, 0x012c, 1);
		OUT_RING (chan, 0);
		BEGIN_NV04(chan, NvSubImageBlit, 0x0134, 1);
		OUT_RING (chan, head);
		BEGIN_NV04(chan, NvSubImageBlit, 0x0100, 1);
		OUT_RING (chan, 0);
		BEGIN_NV04(chan, NvSubImageBlit, 0x0130, 1);
		OUT_RING (chan, 0);
	}

	nouveau_bo_ref(new_bo, &dispnv04->image[head]);

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	if (ret)
		goto fail_unreserve;
	mutex_unlock(&cli->mutex);

	/* Update the crtc struct and cleanup */
	crtc->primary->fb = fb;

	nouveau_bo_fence(old_bo, fence, false);
	ttm_bo_unreserve(&old_bo->bo);
	if (old_bo != new_bo)
		nouveau_bo_unpin(old_bo);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	drm_crtc_vblank_put(crtc);
	ttm_bo_unreserve(&old_bo->bo);
fail_unpin:
	mutex_unlock(&cli->mutex);
	if (old_bo != new_bo)
		nouveau_bo_unpin(new_bo);
fail_free:
	kfree(s);
	return ret;
}

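/* Complete the oldest pending flip queued by nouveau_page_flip_emit():
 * deliver the vblank event (or drop the vblank reference taken for the
 * flip) and copy the saved state back to the caller.
 */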
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_fence_chan *fctx = chan->fence;
	struct nouveau_drm *drm = chan->drm;
	struct drm_device *dev = drm->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&fctx->flip)) {
		NV_ERROR(drm, "unexpected pageflip\n");
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
	if (s->event) {
		drm_crtc_arm_vblank_event(s->crtc, s->event);
	} else {
		/* Give up ownership of vblank for page-flipped crtc */
		drm_crtc_vblank_put(s->crtc);
	}

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}

int
nouveau_flip_complete(struct nvif_notify *notify)
{
	struct nouveau_drm *drm = container_of(notify, typeof(*drm), flip);
	struct nouveau_channel *chan = drm->channel;
	struct nouveau_page_flip_state state;

	if (!nouveau_finish_page_flip(chan, &state)) {
		nv_set_crtc_base(drm->dev, drm_crtc_index(state.crtc),
				 state.offset + state.crtc->y *
				 state.pitch + state.crtc->x *
				 state.bpp / 8);
	}

	return NVIF_NOTIFY_KEEP;
}

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_cli *cli = nouveau_cli(file_priv);
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any; otherwise fall back to system memory */
	if (nouveau_drm(dev)->client.device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(cli, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);
	drm_gem_object_put_unlocked(&bo->gem);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.vma_node);
		drm_gem_object_put_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}