// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2012-2013 Avionic Design GmbH
 * Copyright (C) 2012 NVIDIA CORPORATION. All rights reserved.
 *
 * Based on the KMS/FB CMA helpers
 * Copyright (C) 2012 Analog Device Inc.
 */

#include <linux/console.h>

#include <drm/drm_fourcc.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_modeset_helper.h>

#include "drm.h"
#include "gem.h"

#ifdef CONFIG_DRM_FBDEV_EMULATION
static inline struct tegra_fbdev *to_tegra_fbdev(struct drm_fb_helper *helper)
{
	return container_of(helper, struct tegra_fbdev, base);
}
#endif

struct tegra_bo *tegra_fb_get_plane(struct drm_framebuffer *framebuffer,
				    unsigned int index)
{
	return to_tegra_bo(drm_gem_fb_get_obj(framebuffer, index));
}

bool tegra_fb_is_bottom_up(struct drm_framebuffer *framebuffer)
{
	struct tegra_bo *bo = tegra_fb_get_plane(framebuffer, 0);

	if (bo->flags & TEGRA_BO_BOTTOM_UP)
		return true;

	return false;
}

int tegra_fb_get_tiling(struct drm_framebuffer *framebuffer,
			struct tegra_bo_tiling *tiling)
{
	uint64_t modifier = framebuffer->modifier;

	switch (modifier) {
	case DRM_FORMAT_MOD_LINEAR:
		tiling->mode = TEGRA_BO_TILING_MODE_PITCH;
		tiling->value = 0;
		break;

	case DRM_FORMAT_MOD_NVIDIA_TEGRA_TILED:
		tiling->mode = TEGRA_BO_TILING_MODE_TILED;
		tiling->value = 0;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(0):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 0;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(1):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 1;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(2):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 2;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(3):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 3;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(4):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 4;
		break;

	case DRM_FORMAT_MOD_NVIDIA_16BX2_BLOCK(5):
		tiling->mode = TEGRA_BO_TILING_MODE_BLOCK;
		tiling->value = 5;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

static const struct drm_framebuffer_funcs tegra_fb_funcs = {
	.destroy = drm_gem_fb_destroy,
	.create_handle = drm_gem_fb_create_handle,
};

static struct drm_framebuffer *tegra_fb_alloc(struct drm_device *drm,
					      const struct drm_mode_fb_cmd2 *mode_cmd,
					      struct tegra_bo **planes,
					      unsigned int num_planes)
{
	struct drm_framebuffer *fb;
	unsigned int i;
	int err;

	fb = kzalloc(sizeof(*fb), GFP_KERNEL);
	if (!fb)
		return ERR_PTR(-ENOMEM);

	drm_helper_mode_fill_fb_struct(drm, fb, mode_cmd);

	for (i = 0; i < fb->format->num_planes; i++)
		fb->obj[i] = &planes[i]->gem;

	err = drm_framebuffer_init(drm, fb, &tegra_fb_funcs);
	if (err < 0) {
		dev_err(drm->dev, "failed to initialize framebuffer: %d\n",
			err);
		kfree(fb);
		return ERR_PTR(err);
	}

	return fb;
}

struct drm_framebuffer *tegra_fb_create(struct drm_device *drm,
					struct drm_file *file,
					const struct drm_mode_fb_cmd2 *cmd)
{
	const struct drm_format_info *info = drm_get_format_info(drm, cmd);
	struct tegra_bo *planes[4];
	struct drm_gem_object *gem;
	struct drm_framebuffer *fb;
	unsigned int i;
	int err;

	for (i = 0; i < info->num_planes; i++) {
		unsigned int width = cmd->width / (i ? info->hsub : 1);
		unsigned int height = cmd->height / (i ? info->vsub : 1);
		unsigned int size, bpp;

		gem = drm_gem_object_lookup(file, cmd->handles[i]);
		if (!gem) {
			err = -ENXIO;
			goto unreference;
		}

		bpp = info->cpp[i];

		size = (height - 1) * cmd->pitches[i] +
		       width * bpp + cmd->offsets[i];

		/* reject GEM objects that are too small for this plane */
		if (gem->size < size) {
			err = -EINVAL;
			goto unreference;
		}

		planes[i] = to_tegra_bo(gem);
	}

	fb = tegra_fb_alloc(drm, cmd, planes, i);
	if (IS_ERR(fb)) {
		err = PTR_ERR(fb);
		goto unreference;
	}

	return fb;

unreference:
	while (i--)
		drm_gem_object_put_unlocked(&planes[i]->gem);

	return ERR_PTR(err);
}

#ifdef CONFIG_DRM_FBDEV_EMULATION
static int tegra_fb_mmap(struct fb_info *info, struct vm_area_struct *vma)
{
	struct drm_fb_helper *helper = info->par;
	struct tegra_bo *bo;
	int err;

	bo = tegra_fb_get_plane(helper->fb, 0);

	err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma);
	if (err < 0)
		return err;

	return __tegra_gem_mmap(&bo->gem, vma);
}

static struct fb_ops tegra_fb_ops = {
	.owner = THIS_MODULE,
	DRM_FB_HELPER_DEFAULT_OPS,
	.fb_fillrect = drm_fb_helper_sys_fillrect,
	.fb_copyarea = drm_fb_helper_sys_copyarea,
	.fb_imageblit = drm_fb_helper_sys_imageblit,
	.fb_mmap = tegra_fb_mmap,
};

static int tegra_fbdev_probe(struct drm_fb_helper *helper,
			     struct drm_fb_helper_surface_size *sizes)
{
	struct tegra_fbdev *fbdev = to_tegra_fbdev(helper);
	struct tegra_drm *tegra = helper->dev->dev_private;
	struct drm_device *drm = helper->dev;
	struct drm_mode_fb_cmd2 cmd = { 0 };
	unsigned int bytes_per_pixel;
	struct drm_framebuffer *fb;
	unsigned long offset;
	struct fb_info *info;
	struct tegra_bo *bo;
	size_t size;
	int err;

	bytes_per_pixel = DIV_ROUND_UP(sizes->surface_bpp, 8);

	cmd.width = sizes->surface_width;
	cmd.height = sizes->surface_height;
	cmd.pitches[0] = round_up(sizes->surface_width * bytes_per_pixel,
				  tegra->pitch_align);

	cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
						     sizes->surface_depth);

	size = cmd.pitches[0] * cmd.height;

	bo = tegra_bo_create(drm, size, 0);
	if (IS_ERR(bo))
		return PTR_ERR(bo);

	info = drm_fb_helper_alloc_fbi(helper);
	if (IS_ERR(info)) {
		dev_err(drm->dev, "failed to allocate framebuffer info\n");
		drm_gem_object_put_unlocked(&bo->gem);
		return PTR_ERR(info);
	}

	fbdev->fb = tegra_fb_alloc(drm, &cmd, &bo, 1);
	if (IS_ERR(fbdev->fb)) {
		err = PTR_ERR(fbdev->fb);
		dev_err(drm->dev, "failed to allocate DRM framebuffer: %d\n",
			err);
		drm_gem_object_put_unlocked(&bo->gem);
		return PTR_ERR(fbdev->fb);
	}

	fb = fbdev->fb;
	helper->fb = fb;
	helper->fbdev = info;

	info->fbops = &tegra_fb_ops;

	drm_fb_helper_fill_info(info, helper, sizes);

	offset = info->var.xoffset * bytes_per_pixel +
		 info->var.yoffset * fb->pitches[0];

	if (bo->pages) {
		/* map page-backed buffers into the kernel so fbdev can access them */
		bo->vaddr = vmap(bo->pages, bo->num_pages, VM_MAP,
				 pgprot_writecombine(PAGE_KERNEL));
		if (!bo->vaddr) {
			dev_err(drm->dev, "failed to vmap() framebuffer\n");
			err = -ENOMEM;
			goto destroy;
		}
	}

	drm->mode_config.fb_base = (resource_size_t)bo->iova;
	info->screen_base = (void __iomem *)bo->vaddr + offset;
	info->screen_size = size;
	info->fix.smem_start = (unsigned long)(bo->iova + offset);
	info->fix.smem_len = size;

	return 0;

destroy:
	drm_framebuffer_remove(fb);
	return err;
}

static const struct drm_fb_helper_funcs tegra_fb_helper_funcs = {
	.fb_probe = tegra_fbdev_probe,
};

static struct tegra_fbdev *tegra_fbdev_create(struct drm_device *drm)
{
	struct tegra_fbdev *fbdev;

	fbdev = kzalloc(sizeof(*fbdev), GFP_KERNEL);
	if (!fbdev) {
		dev_err(drm->dev, "failed to allocate DRM fbdev\n");
		return ERR_PTR(-ENOMEM);
	}

	drm_fb_helper_prepare(drm, &fbdev->base, &tegra_fb_helper_funcs);

	return fbdev;
}

static void tegra_fbdev_free(struct tegra_fbdev *fbdev)
{
	kfree(fbdev);
}

static int tegra_fbdev_init(struct tegra_fbdev *fbdev,
			    unsigned int preferred_bpp,
			    unsigned int num_crtc,
			    unsigned int max_connectors)
{
	struct drm_device *drm = fbdev->base.dev;
	int err;

	err = drm_fb_helper_init(drm, &fbdev->base, max_connectors);
	if (err < 0) {
		dev_err(drm->dev, "failed to initialize DRM FB helper: %d\n",
			err);
		return err;
	}

	err = drm_fb_helper_single_add_all_connectors(&fbdev->base);
	if (err < 0) {
		dev_err(drm->dev, "failed to add connectors: %d\n", err);
		goto fini;
	}

	err = drm_fb_helper_initial_config(&fbdev->base, preferred_bpp);
	if (err < 0) {
		dev_err(drm->dev, "failed to set initial configuration: %d\n",
			err);
		goto fini;
	}

	return 0;

fini:
	drm_fb_helper_fini(&fbdev->base);
	return err;
}

static void tegra_fbdev_exit(struct tegra_fbdev *fbdev)
{
	drm_fb_helper_unregister_fbi(&fbdev->base);

	if (fbdev->fb) {
		struct tegra_bo *bo = tegra_fb_get_plane(fbdev->fb, 0);

		/* Undo the special mapping we made in fbdev probe. */
		if (bo && bo->pages) {
			vunmap(bo->vaddr);
			bo->vaddr = NULL;
		}

		drm_framebuffer_remove(fbdev->fb);
	}

	drm_fb_helper_fini(&fbdev->base);
	tegra_fbdev_free(fbdev);
}
#endif

int tegra_drm_fb_prepare(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	tegra->fbdev = tegra_fbdev_create(drm);
	if (IS_ERR(tegra->fbdev))
		return PTR_ERR(tegra->fbdev);
#endif

	return 0;
}

void tegra_drm_fb_free(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	tegra_fbdev_free(tegra->fbdev);
#endif
}

int tegra_drm_fb_init(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;
	int err;

	err = tegra_fbdev_init(tegra->fbdev, 32, drm->mode_config.num_crtc,
			       drm->mode_config.num_connector);
	if (err < 0)
		return err;
#endif

	return 0;
}

void tegra_drm_fb_exit(struct drm_device *drm)
{
#ifdef CONFIG_DRM_FBDEV_EMULATION
	struct tegra_drm *tegra = drm->dev_private;

	tegra_fbdev_exit(tegra->fbdev);
#endif
}