/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <acpi/video.h>

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_probe_helper.h>
#include <drm/drm_vblank.h>

#include "nouveau_fbcon.h"
#include "nouveau_crtc.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include <nvif/class.h>
#include <nvif/cl0046.h>
#include <nvif/event.h>
#include <dispnv50/crc.h>

int
nouveau_display_vblank_enable(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_get(&nv_crtc->vblank);

	return 0;
}

void
nouveau_display_vblank_disable(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc;

	nv_crtc = nouveau_crtc(crtc);
	nvif_notify_put(&nv_crtc->vblank);
}

static inline int
calc(int blanks, int blanke, int total, int line)
{
	if (blanke >= blanks) {
		if (line >= blanks)
			line -= total;
	} else {
		if (line >= blanks)
			line -= total;
		line -= blanke + 1;
	}
	return line;
}

static bool
nouveau_display_scanoutpos_head(struct drm_crtc *crtc, int *vpos, int *hpos,
				ktime_t *stime, ktime_t *etime)
{
	struct {
		struct nv04_disp_mthd_v0 base;
		struct nv04_disp_scanoutpos_v0 scan;
	} args = {
		.base.method = NV04_DISP_SCANOUTPOS,
		.base.head = nouveau_crtc(crtc)->index,
	};
	struct nouveau_display *disp = nouveau_display(crtc->dev);
	struct drm_vblank_crtc *vblank = &crtc->dev->vblank[drm_crtc_index(crtc)];
	int retry = 20;
	bool ret = false;

	do {
		ret = nvif_mthd(&disp->disp.object, 0, &args, sizeof(args));
		if (ret != 0)
			return false;

		if (args.scan.vline) {
			ret = true;
			break;
		}

		if (retry) ndelay(vblank->linedur_ns);
	} while (retry--);

	*hpos = args.scan.hline;
	*vpos = calc(args.scan.vblanks, args.scan.vblanke,
		     args.scan.vtotal, args.scan.vline);
	if (stime) *stime = ns_to_ktime(args.scan.time[0]);
	if (etime) *etime = ns_to_ktime(args.scan.time[1]);

	return ret;
}

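/*
 * Matches the drm_crtc_helper_funcs.get_scanout_position() prototype and
 * simply forwards to the per-head query above; in_vblank_irq and mode are
 * unused here.
 */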
bool
nouveau_display_scanoutpos(struct drm_crtc *crtc,
			   bool in_vblank_irq, int *vpos, int *hpos,
			   ktime_t *stime, ktime_t *etime,
			   const struct drm_display_mode *mode)
{
	return nouveau_display_scanoutpos_head(crtc, vpos, hpos,
					       stime, etime);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = drm_gem_fb_destroy,
	.create_handle = drm_gem_fb_create_handle,
};

static void
nouveau_decode_mod(struct nouveau_drm *drm,
		   uint64_t modifier,
		   uint32_t *tile_mode,
		   uint8_t *kind)
{
	struct nouveau_display *disp = nouveau_display(drm->dev);
	BUG_ON(!tile_mode || !kind);

	if (modifier == DRM_FORMAT_MOD_LINEAR) {
		/* tile_mode will not be used in this case */
		*tile_mode = 0;
		*kind = 0;
	} else {
		/*
		 * Extract the block height and kind from the corresponding
		 * modifier fields.  See drm_fourcc.h for details.
		 */

		if ((modifier & (0xffull << 12)) == 0ull) {
			/* Legacy modifier.  Translate to this dev's 'kind.' */
			modifier |= disp->format_modifiers[0] & (0xffull << 12);
		}

		*tile_mode = (uint32_t)(modifier & 0xF);
		*kind = (uint8_t)((modifier >> 12) & 0xFF);

		if (drm->client.device.info.chipset >= 0xc0)
			*tile_mode <<= 4;
	}
}

void
nouveau_framebuffer_get_layout(struct drm_framebuffer *fb,
			       uint32_t *tile_mode,
			       uint8_t *kind)
{
	if (fb->flags & DRM_MODE_FB_MODIFIERS) {
		struct nouveau_drm *drm = nouveau_drm(fb->dev);

		nouveau_decode_mod(drm, fb->modifier, tile_mode, kind);
	} else {
		const struct nouveau_bo *nvbo = nouveau_gem_object(fb->obj[0]);

		*tile_mode = nvbo->mode;
		*kind = nvbo->kind;
	}
}

static const u64 legacy_modifiers[] = {
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(0),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(1),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(2),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(3),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(4),
	DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(5),
	DRM_FORMAT_MOD_INVALID
};

static int
nouveau_validate_decode_mod(struct nouveau_drm *drm,
			    uint64_t modifier,
			    uint32_t *tile_mode,
			    uint8_t *kind)
{
	struct nouveau_display *disp = nouveau_display(drm->dev);
	int mod;

	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		return -EINVAL;
	}

	BUG_ON(!disp->format_modifiers);

	for (mod = 0;
	     (disp->format_modifiers[mod] != DRM_FORMAT_MOD_INVALID) &&
	     (disp->format_modifiers[mod] != modifier);
	     mod++);

	if (disp->format_modifiers[mod] == DRM_FORMAT_MOD_INVALID) {
		for (mod = 0;
		     (legacy_modifiers[mod] != DRM_FORMAT_MOD_INVALID) &&
		     (legacy_modifiers[mod] != modifier);
		     mod++);
		if (legacy_modifiers[mod] == DRM_FORMAT_MOD_INVALID)
			return -EINVAL;
	}

	nouveau_decode_mod(drm, modifier, tile_mode, kind);

	return 0;
}

static inline uint32_t
nouveau_get_width_in_blocks(uint32_t stride)
{
	/* GOBs per block in the x direction is always one, and GOBs are
	 * 64 bytes wide
	 */
	static const uint32_t log_block_width = 6;

	return (stride + (1 << log_block_width) - 1) >> log_block_width;
}

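/*
 * Block height is given in log2 GOBs; a GOB is 4 scanlines tall on Tesla
 * (pre-Fermi) and 8 scanlines tall on Fermi and newer.
 */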
static inline uint32_t
nouveau_get_height_in_blocks(struct nouveau_drm *drm,
			     uint32_t height,
			     uint32_t log_block_height_in_gobs)
{
	uint32_t log_gob_height;
	uint32_t log_block_height;

	BUG_ON(drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA);

	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI)
		log_gob_height = 2;
	else
		log_gob_height = 3;

	log_block_height = log_block_height_in_gobs + log_gob_height;

	return (height + (1 << log_block_height) - 1) >> log_block_height;
}

static int
nouveau_check_bl_size(struct nouveau_drm *drm, struct nouveau_bo *nvbo,
		      uint32_t offset, uint32_t stride, uint32_t h,
		      uint32_t tile_mode)
{
	uint32_t gob_size, bw, bh;
	uint64_t bl_size;

	BUG_ON(drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA);

	if (drm->client.device.info.chipset >= 0xc0) {
		if (tile_mode & 0xF)
			return -EINVAL;
		tile_mode >>= 4;
	}

	if (tile_mode & 0xFFFFFFF0)
		return -EINVAL;

	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI)
		gob_size = 256;
	else
		gob_size = 512;

	bw = nouveau_get_width_in_blocks(stride);
	bh = nouveau_get_height_in_blocks(drm, h, tile_mode);

	bl_size = bw * bh * (1 << tile_mode) * gob_size;

	DRM_DEBUG_KMS("offset=%u stride=%u h=%u tile_mode=0x%02x bw=%u bh=%u gob_size=%u bl_size=%llu size=%lu\n",
		      offset, stride, h, tile_mode, bw, bh, gob_size, bl_size,
		      nvbo->bo.mem.size);

	if (bl_size + offset > nvbo->bo.mem.size)
		return -ERANGE;

	return 0;
}

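/*
 * Validate an addfb2 request against hardware limits (pre-NV50 YUV overlay
 * restrictions, supported format modifiers, per-plane size versus the
 * backing buffer object) and create the drm_framebuffer for it.
 */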
int
nouveau_framebuffer_new(struct drm_device *dev,
			const struct drm_mode_fb_cmd2 *mode_cmd,
			struct drm_gem_object *gem,
			struct drm_framebuffer **pfb)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_bo *nvbo = nouveau_gem_object(gem);
	struct drm_framebuffer *fb;
	const struct drm_format_info *info;
	unsigned int width, height, i;
	uint32_t tile_mode;
	uint8_t kind;
	int ret;

	/* YUV overlays have special requirements pre-NV50 */
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA &&
	    (mode_cmd->pixel_format == DRM_FORMAT_YUYV ||
	     mode_cmd->pixel_format == DRM_FORMAT_UYVY ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV12 ||
	     mode_cmd->pixel_format == DRM_FORMAT_NV21) &&
	    (mode_cmd->pitches[0] & 0x3f || /* align 64 */
	     mode_cmd->pitches[0] >= 0x10000 || /* at most 64k pitch */
	     (mode_cmd->pitches[1] && /* pitches for planes must match */
	      mode_cmd->pitches[0] != mode_cmd->pitches[1]))) {
		struct drm_format_name_buf format_name;
		DRM_DEBUG_KMS("Unsuitable framebuffer: format: %s; pitches: 0x%x\n 0x%x\n",
			      drm_get_format_name(mode_cmd->pixel_format,
						  &format_name),
			      mode_cmd->pitches[0],
			      mode_cmd->pitches[1]);
		return -EINVAL;
	}

	if (mode_cmd->flags & DRM_MODE_FB_MODIFIERS) {
		if (nouveau_validate_decode_mod(drm, mode_cmd->modifier[0],
						&tile_mode, &kind)) {
			DRM_DEBUG_KMS("Unsupported modifier: 0x%llx\n",
				      mode_cmd->modifier[0]);
			return -EINVAL;
		}
	} else {
		tile_mode = nvbo->mode;
		kind = nvbo->kind;
	}

	info = drm_get_format_info(dev, mode_cmd);

	for (i = 0; i < info->num_planes; i++) {
		width = drm_format_info_plane_width(info,
						    mode_cmd->width,
						    i);
		height = drm_format_info_plane_height(info,
						      mode_cmd->height,
						      i);

		if (kind) {
			ret = nouveau_check_bl_size(drm, nvbo,
						    mode_cmd->offsets[i],
						    mode_cmd->pitches[i],
						    height, tile_mode);
			if (ret)
				return ret;
		} else {
			uint32_t size = mode_cmd->pitches[i] * height;

			if (size + mode_cmd->offsets[i] > nvbo->bo.mem.size)
				return -ERANGE;
		}
	}

	if (!(fb = *pfb = kzalloc(sizeof(*fb), GFP_KERNEL)))
		return -ENOMEM;

	drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
	fb->obj[0] = gem;

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		kfree(fb);
	return ret;
}

struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				const struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct drm_framebuffer *fb;
	struct drm_gem_object *gem;
	int ret;

	gem = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	ret = nouveau_framebuffer_new(dev, mode_cmd, gem, &fb);
	if (ret == 0)
		return fb;

	drm_gem_object_put(gem);
	return ERR_PTR(ret);
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};

struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};

#define PROP_ENUM(p,gen,n,list) do {                                   \
	struct nouveau_drm_prop_enum_list *l = (list);                 \
	int c = 0;                                                     \
	while (l->gen_mask) {                                          \
		if (l->gen_mask & (1 << (gen)))                        \
			c++;                                           \
		l++;                                                   \
	}                                                              \
	if (c) {                                                       \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
		l = (list);                                            \
		while (p && l->gen_mask) {                             \
			if (l->gen_mask & (1 << (gen))) {              \
				drm_property_add_enum(p, l->type, l->name); \
			}                                              \
			l++;                                           \
		}                                                      \
	}                                                              \
} while(0)

void
nouveau_display_hpd_resume(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);

	mutex_lock(&drm->hpd_lock);
	drm->hpd_pending = ~0;
	mutex_unlock(&drm->hpd_lock);

	schedule_work(&drm->hpd_work);
}

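/*
 * Deferred hotplug handler: wakes the device, re-detects every non-MST
 * connector flagged in hpd_pending and fires a hotplug event if any
 * connector actually changed state (tracked via the epoch counter).
 */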
static void
nouveau_display_hpd_work(struct work_struct *work)
{
	struct nouveau_drm *drm = container_of(work, typeof(*drm), hpd_work);
	struct drm_device *dev = drm->dev;
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	u32 pending;
	bool changed = false;

	pm_runtime_get_sync(dev->dev);

	mutex_lock(&drm->hpd_lock);
	pending = drm->hpd_pending;
	drm->hpd_pending = 0;
	mutex_unlock(&drm->hpd_lock);

	/* Nothing to do, exit early without updating the last busy counter */
	if (!pending)
		goto noop;

	mutex_lock(&dev->mode_config.mutex);
	drm_connector_list_iter_begin(dev, &conn_iter);

	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		enum drm_connector_status old_status = connector->status;
		u64 old_epoch_counter = connector->epoch_counter;

		if (!(pending & drm_connector_mask(connector)))
			continue;

		connector->status = drm_helper_probe_detect(connector, NULL,
							    false);
		if (old_epoch_counter == connector->epoch_counter)
			continue;

		changed = true;
		drm_dbg_kms(dev, "[CONNECTOR:%d:%s] status updated from %s to %s (epoch counter %llu->%llu)\n",
			    connector->base.id, connector->name,
			    drm_get_connector_status_name(old_status),
			    drm_get_connector_status_name(connector->status),
			    old_epoch_counter, connector->epoch_counter);
	}

	drm_connector_list_iter_end(&conn_iter);
	mutex_unlock(&dev->mode_config.mutex);

	if (changed)
		drm_kms_helper_hotplug_event(dev);

	pm_runtime_mark_last_busy(drm->dev->dev);
noop:
	pm_runtime_put_sync(drm->dev->dev);
}

#ifdef CONFIG_ACPI

static int
nouveau_display_acpi_ntfy(struct notifier_block *nb, unsigned long val,
			  void *data)
{
	struct nouveau_drm *drm = container_of(nb, typeof(*drm), acpi_nb);
	struct acpi_bus_event *info = data;
	int ret;

	if (!strcmp(info->device_class, ACPI_VIDEO_CLASS)) {
		if (info->type == ACPI_VIDEO_NOTIFY_PROBE) {
			ret = pm_runtime_get(drm->dev->dev);
			if (ret == 1 || ret == -EACCES) {
				/* If the GPU is already awake, or in a state
				 * where we can't wake it up, it can handle
				 * its own hotplug events.
				 */
				pm_runtime_put_autosuspend(drm->dev->dev);
			} else if (ret == 0) {
				/* We've started resuming the GPU already, so
				 * it will handle scheduling a full reprobe
				 * itself
				 */
				NV_DEBUG(drm, "ACPI requested connector reprobe\n");
				pm_runtime_put_noidle(drm->dev->dev);
			} else {
				NV_WARN(drm, "Dropped ACPI reprobe event due to RPM error: %d\n",
					ret);
			}

			/* acpi-video should not generate keypresses for this */
			return NOTIFY_BAD;
		}
	}

	return NOTIFY_DONE;
}
#endif

int
nouveau_display_init(struct drm_device *dev, bool resume, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;
	int ret;

	/*
	 * Enable hotplug interrupts (done as early as possible, since we need
	 * them for MST)
	 */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_get(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	ret = disp->init(dev, resume, runtime);
	if (ret)
		return ret;

	/* enable connector detection and polling for connectors without HPD
	 * support
	 */
	drm_kms_helper_poll_enable(dev);

	return ret;
}

void
nouveau_display_fini(struct drm_device *dev, bool suspend, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector;
	struct drm_connector_list_iter conn_iter;

	if (!suspend) {
		if (drm_drv_uses_atomic_modeset(dev))
			drm_atomic_helper_shutdown(dev);
		else
			drm_helper_force_disable_all(dev);
	}

	/* disable hotplug interrupts */
	drm_connector_list_iter_begin(dev, &conn_iter);
	nouveau_for_each_non_mst_connector_iter(connector, &conn_iter) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nvif_notify_put(&conn->hpd);
	}
	drm_connector_list_iter_end(&conn_iter);

	if (!runtime)
		cancel_work_sync(&drm->hpd_work);

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev, runtime, suspend);
}

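/*
 * gen 0 = pre-NV50, gen 1 = NV50-family display, gen 2 = GF110 and newer;
 * the gen_mask bits in the property tables above select which enum values
 * each generation exposes.
 */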
static void
nouveau_display_create_properties(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	int gen;

	if (disp->disp.object.oclass < NV50_DISP)
		gen = 0;
	else
	if (disp->disp.object.oclass < GF110_DISP)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen < 1)
		return;

	/* -90..+90 */
	disp->vibrant_hue_property =
		drm_property_create_range(dev, 0, "vibrant hue", 0, 180);

	/* -100..+100 */
	disp->color_vibrance_property =
		drm_property_create_range(dev, 0, "color vibrance", 0, 200);
}

int
nouveau_display_create(struct drm_device *dev)
{
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nvkm_device *device = nvxx_device(&drm->client.device);
	struct nouveau_display *disp;
	int ret;

	disp = drm->display = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = device->func->resource_addr(device, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_CELSIUS) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_TESLA) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else
	if (drm->client.device.info.family < NV_DEVICE_INFO_V0_FERMI) {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	} else {
		dev->mode_config.max_width = 16384;
		dev->mode_config.max_height = 16384;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;
	dev->mode_config.allow_fb_modifiers = true;

	if (drm->client.device.info.chipset < 0x11)
		dev->mode_config.async_page_flip = false;
	else
		dev->mode_config.async_page_flip = true;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	if (nouveau_modeset != 2 && drm->vbios.dcb.entries) {
		ret = nvif_disp_ctor(&drm->client.device, "kmsDisp", 0,
				     &disp->disp);
		if (ret == 0) {
			nouveau_display_create_properties(dev);
			if (disp->disp.object.oclass < NV50_DISP)
				ret = nv04_display_create(dev);
			else
				ret = nv50_display_create(dev);
		}
	} else {
		ret = 0;
	}

	if (ret)
		goto disp_create_err;

	drm_mode_config_reset(dev);

	if (dev->mode_config.num_crtc) {
		ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
		if (ret)
			goto vblank_err;

		if (disp->disp.object.oclass >= NV50_DISP)
			nv50_crc_init(dev);
	}

	INIT_WORK(&drm->hpd_work, nouveau_display_hpd_work);
	mutex_init(&drm->hpd_lock);
#ifdef CONFIG_ACPI
	drm->acpi_nb.notifier_call = nouveau_display_acpi_ntfy;
	register_acpi_notifier(&drm->acpi_nb);
#endif

	return 0;

vblank_err:
	disp->dtor(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

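/*
 * Tear-down counterpart of nouveau_display_create(): unregisters the ACPI
 * notifier, stops polling, releases the mode config, calls the backend
 * destructor and frees the display structure.
 */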
void
nouveau_display_destroy(struct drm_device *dev)
{
	struct nouveau_display *disp = nouveau_display(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);

#ifdef CONFIG_ACPI
	unregister_acpi_notifier(&drm->acpi_nb);
#endif

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);

	if (disp->dtor)
		disp->dtor(dev);

	nvif_disp_dtor(&disp->disp);

	nouveau_drm(dev)->display = NULL;
	mutex_destroy(&drm->hpd_lock);
	kfree(disp);
}

int
nouveau_display_suspend(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);

	if (drm_drv_uses_atomic_modeset(dev)) {
		if (!runtime) {
			disp->suspend = drm_atomic_helper_suspend(dev);
			if (IS_ERR(disp->suspend)) {
				int ret = PTR_ERR(disp->suspend);
				disp->suspend = NULL;
				return ret;
			}
		}
	}

	nouveau_display_fini(dev, true, runtime);
	return 0;
}

void
nouveau_display_resume(struct drm_device *dev, bool runtime)
{
	struct nouveau_display *disp = nouveau_display(dev);

	nouveau_display_init(dev, true, runtime);

	if (drm_drv_uses_atomic_modeset(dev)) {
		if (disp->suspend) {
			drm_atomic_helper_resume(dev, disp->suspend);
			disp->suspend = NULL;
		}
		return;
	}
}

int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_cli *cli = nouveau_cli(file_priv);
	struct nouveau_bo *bo;
	uint32_t domain;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	/* Use VRAM if there is any; otherwise fall back to system memory */
	if (nouveau_drm(dev)->client.device.info.ram_size != 0)
		domain = NOUVEAU_GEM_DOMAIN_VRAM;
	else
		domain = NOUVEAU_GEM_DOMAIN_GART;

	ret = nouveau_gem_new(cli, args->size, 0, domain, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, &bo->bo.base, &args->handle);
	drm_gem_object_put(&bo->bo.base);
	return ret;
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = nouveau_gem_object(gem);
		*poffset = drm_vma_node_offset_addr(&bo->bo.base.vma_node);
		drm_gem_object_put(gem);
		return 0;
	}

	return -ENOENT;
}