1 // SPDX-License-Identifier: GPL-2.0-or-later
2 
3 #include <drm/drm_gem_vram_helper.h>
4 #include <drm/drm_device.h>
5 #include <drm/drm_mode.h>
6 #include <drm/drm_prime.h>
7 #include <drm/drm_vram_mm_helper.h>
8 #include <drm/ttm/ttm_page_alloc.h>
9 
10 /**
11  * DOC: overview
12  *
13  * This library provides a GEM buffer object that is backed by video RAM
14  * (VRAM). It can be used for framebuffer devices with dedicated memory.
15  */
16 
17 /*
18  * Buffer-objects helpers
19  */
20 
21 static void drm_gem_vram_cleanup(struct drm_gem_vram_object *gbo)
22 {
23 	/* We got here via ttm_bo_put(), which means that the
24 	 * TTM buffer object in 'bo' has already been cleaned
25 	 * up; only release the GEM object.
26 	 */
27 	drm_gem_object_release(&gbo->gem);
28 }
29 
static void drm_gem_vram_destroy(struct drm_gem_vram_object *gbo)
{
	/* Release the embedded GEM object, then free the memory. */
	drm_gem_vram_cleanup(gbo);
	kfree(gbo);
}
35 
static void ttm_buffer_object_destroy(struct ttm_buffer_object *bo)
{
	/* TTM destroy callback; hand over to the GEM VRAM destructor. */
	drm_gem_vram_destroy(drm_gem_vram_of_bo(bo));
}
42 
43 static void drm_gem_vram_placement(struct drm_gem_vram_object *gbo,
44 				   unsigned long pl_flag)
45 {
46 	unsigned int i;
47 	unsigned int c = 0;
48 
49 	gbo->placement.placement = gbo->placements;
50 	gbo->placement.busy_placement = gbo->placements;
51 
52 	if (pl_flag & TTM_PL_FLAG_VRAM)
53 		gbo->placements[c++].flags = TTM_PL_FLAG_WC |
54 					     TTM_PL_FLAG_UNCACHED |
55 					     TTM_PL_FLAG_VRAM;
56 
57 	if (pl_flag & TTM_PL_FLAG_SYSTEM)
58 		gbo->placements[c++].flags = TTM_PL_MASK_CACHING |
59 					     TTM_PL_FLAG_SYSTEM;
60 
61 	if (!c)
62 		gbo->placements[c++].flags = TTM_PL_MASK_CACHING |
63 					     TTM_PL_FLAG_SYSTEM;
64 
65 	gbo->placement.num_placement = c;
66 	gbo->placement.num_busy_placement = c;
67 
68 	for (i = 0; i < c; ++i) {
69 		gbo->placements[i].fpfn = 0;
70 		gbo->placements[i].lpfn = 0;
71 	}
72 }
73 
74 static int drm_gem_vram_init(struct drm_device *dev,
75 			     struct ttm_bo_device *bdev,
76 			     struct drm_gem_vram_object *gbo,
77 			     size_t size, unsigned long pg_align,
78 			     bool interruptible)
79 {
80 	int ret;
81 	size_t acc_size;
82 
83 	ret = drm_gem_object_init(dev, &gbo->gem, size);
84 	if (ret)
85 		return ret;
86 
87 	acc_size = ttm_bo_dma_acc_size(bdev, size, sizeof(*gbo));
88 
89 	gbo->bo.bdev = bdev;
90 	drm_gem_vram_placement(gbo, TTM_PL_FLAG_VRAM | TTM_PL_FLAG_SYSTEM);
91 
92 	ret = ttm_bo_init(bdev, &gbo->bo, size, ttm_bo_type_device,
93 			  &gbo->placement, pg_align, interruptible, acc_size,
94 			  NULL, NULL, ttm_buffer_object_destroy);
95 	if (ret)
96 		goto err_drm_gem_object_release;
97 
98 	return 0;
99 
100 err_drm_gem_object_release:
101 	drm_gem_object_release(&gbo->gem);
102 	return ret;
103 }
104 
105 /**
106  * drm_gem_vram_create() - Creates a VRAM-backed GEM object
107  * @dev:		the DRM device
108  * @bdev:		the TTM BO device backing the object
109  * @size:		the buffer size in bytes
110  * @pg_align:		the buffer's alignment in multiples of the page size
111  * @interruptible:	sleep interruptible if waiting for memory
112  *
113  * Returns:
114  * A new instance of &struct drm_gem_vram_object on success, or
115  * an ERR_PTR()-encoded error code otherwise.
116  */
117 struct drm_gem_vram_object *drm_gem_vram_create(struct drm_device *dev,
118 						struct ttm_bo_device *bdev,
119 						size_t size,
120 						unsigned long pg_align,
121 						bool interruptible)
122 {
123 	struct drm_gem_vram_object *gbo;
124 	int ret;
125 
126 	gbo = kzalloc(sizeof(*gbo), GFP_KERNEL);
127 	if (!gbo)
128 		return ERR_PTR(-ENOMEM);
129 
130 	ret = drm_gem_vram_init(dev, bdev, gbo, size, pg_align, interruptible);
131 	if (ret < 0)
132 		goto err_kfree;
133 
134 	return gbo;
135 
136 err_kfree:
137 	kfree(gbo);
138 	return ERR_PTR(ret);
139 }
140 EXPORT_SYMBOL(drm_gem_vram_create);
141 
142 /**
143  * drm_gem_vram_put() - Releases a reference to a VRAM-backed GEM object
144  * @gbo:	the GEM VRAM object
145  *
146  * See ttm_bo_put() for more information.
147  */
148 void drm_gem_vram_put(struct drm_gem_vram_object *gbo)
149 {
150 	ttm_bo_put(&gbo->bo);
151 }
152 EXPORT_SYMBOL(drm_gem_vram_put);
153 
154 /**
155  * drm_gem_vram_lock() - Locks a VRAM-backed GEM object
156  * @gbo:	the GEM VRAM object
157  * @no_wait:	don't wait for buffer object to become available
158  *
159  * See ttm_bo_reserve() for more information.
160  *
161  * Returns:
162  * 0 on success, or
163  * a negative error code otherwise
164  */
165 int drm_gem_vram_lock(struct drm_gem_vram_object *gbo, bool no_wait)
166 {
167 	return ttm_bo_reserve(&gbo->bo, true, no_wait, NULL);
168 }
169 EXPORT_SYMBOL(drm_gem_vram_lock);
170 
171 /**
172  * drm_gem_vram_unlock() - \
173 	Release a reservation acquired by drm_gem_vram_lock()
174  * @gbo:	the GEM VRAM object
175  *
176  * See ttm_bo_unreserve() for more information.
177  */
178 void drm_gem_vram_unlock(struct drm_gem_vram_object *gbo)
179 {
180 	ttm_bo_unreserve(&gbo->bo);
181 }
182 EXPORT_SYMBOL(drm_gem_vram_unlock);
183 
184 /**
185  * drm_gem_vram_mmap_offset() - Returns a GEM VRAM object's mmap offset
186  * @gbo:	the GEM VRAM object
187  *
188  * See drm_vma_node_offset_addr() for more information.
189  *
190  * Returns:
191  * The buffer object's offset for userspace mappings on success, or
192  * 0 if no offset is allocated.
193  */
194 u64 drm_gem_vram_mmap_offset(struct drm_gem_vram_object *gbo)
195 {
196 	return drm_vma_node_offset_addr(&gbo->bo.vma_node);
197 }
198 EXPORT_SYMBOL(drm_gem_vram_mmap_offset);
199 
200 /**
201  * drm_gem_vram_offset() - \
202 	Returns a GEM VRAM object's offset in video memory
203  * @gbo:	the GEM VRAM object
204  *
205  * This function returns the buffer object's offset in the device's video
206  * memory. The buffer object has to be pinned to %TTM_PL_VRAM.
207  *
208  * Returns:
209  * The buffer object's offset in video memory on success, or
210  * a negative errno code otherwise.
211  */
212 s64 drm_gem_vram_offset(struct drm_gem_vram_object *gbo)
213 {
214 	if (WARN_ON_ONCE(!gbo->pin_count))
215 		return (s64)-ENODEV;
216 	return gbo->bo.offset;
217 }
218 EXPORT_SYMBOL(drm_gem_vram_offset);
219 
220 /**
221  * drm_gem_vram_pin() - Pins a GEM VRAM object in a region.
222  * @gbo:	the GEM VRAM object
223  * @pl_flag:	a bitmask of possible memory regions
224  *
225  * Pinning a buffer object ensures that it is not evicted from
226  * a memory region. A pinned buffer object has to be unpinned before
227  * it can be pinned to another region.
228  *
229  * Returns:
230  * 0 on success, or
231  * a negative error code otherwise.
232  */
233 int drm_gem_vram_pin(struct drm_gem_vram_object *gbo, unsigned long pl_flag)
234 {
235 	int i, ret;
236 	struct ttm_operation_ctx ctx = { false, false };
237 
238 	ret = ttm_bo_reserve(&gbo->bo, true, false, NULL);
239 	if (ret < 0)
240 		return ret;
241 
242 	if (gbo->pin_count)
243 		goto out;
244 
245 	drm_gem_vram_placement(gbo, pl_flag);
246 	for (i = 0; i < gbo->placement.num_placement; ++i)
247 		gbo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT;
248 
249 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
250 	if (ret < 0)
251 		goto err_ttm_bo_unreserve;
252 
253 out:
254 	++gbo->pin_count;
255 	ttm_bo_unreserve(&gbo->bo);
256 
257 	return 0;
258 
259 err_ttm_bo_unreserve:
260 	ttm_bo_unreserve(&gbo->bo);
261 	return ret;
262 }
263 EXPORT_SYMBOL(drm_gem_vram_pin);
264 
265 /**
266  * drm_gem_vram_pin_locked() - Pins a GEM VRAM object in a region.
267  * @gbo:	the GEM VRAM object
268  * @pl_flag:	a bitmask of possible memory regions
269  *
270  * Pinning a buffer object ensures that it is not evicted from
271  * a memory region. A pinned buffer object has to be unpinned before
272  * it can be pinned to another region.
273  *
274  * This function pins a GEM VRAM object that has already been
275  * locked. Use drm_gem_vram_pin() if possible.
276  *
277  * Returns:
278  * 0 on success, or
279  * a negative error code otherwise.
280  */
281 int drm_gem_vram_pin_locked(struct drm_gem_vram_object *gbo,
282 			    unsigned long pl_flag)
283 {
284 	int i, ret;
285 	struct ttm_operation_ctx ctx = { false, false };
286 
287 	lockdep_assert_held(&gbo->bo.resv->lock.base);
288 
289 	if (gbo->pin_count) {
290 		++gbo->pin_count;
291 		return 0;
292 	}
293 
294 	drm_gem_vram_placement(gbo, pl_flag);
295 	for (i = 0; i < gbo->placement.num_placement; ++i)
296 		gbo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT;
297 
298 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
299 	if (ret < 0)
300 		return ret;
301 
302 	gbo->pin_count = 1;
303 
304 	return 0;
305 }
306 EXPORT_SYMBOL(drm_gem_vram_pin_locked);
307 
308 /**
309  * drm_gem_vram_unpin() - Unpins a GEM VRAM object
310  * @gbo:	the GEM VRAM object
311  *
312  * Returns:
313  * 0 on success, or
314  * a negative error code otherwise.
315  */
316 int drm_gem_vram_unpin(struct drm_gem_vram_object *gbo)
317 {
318 	int i, ret;
319 	struct ttm_operation_ctx ctx = { false, false };
320 
321 	ret = ttm_bo_reserve(&gbo->bo, true, false, NULL);
322 	if (ret < 0)
323 		return ret;
324 
325 	if (WARN_ON_ONCE(!gbo->pin_count))
326 		goto out;
327 
328 	--gbo->pin_count;
329 	if (gbo->pin_count)
330 		goto out;
331 
332 	for (i = 0; i < gbo->placement.num_placement ; ++i)
333 		gbo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
334 
335 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
336 	if (ret < 0)
337 		goto err_ttm_bo_unreserve;
338 
339 out:
340 	ttm_bo_unreserve(&gbo->bo);
341 
342 	return 0;
343 
344 err_ttm_bo_unreserve:
345 	ttm_bo_unreserve(&gbo->bo);
346 	return ret;
347 }
348 EXPORT_SYMBOL(drm_gem_vram_unpin);
349 
350 /**
351  * drm_gem_vram_unpin_locked() - Unpins a GEM VRAM object
352  * @gbo:	the GEM VRAM object
353  *
354  * This function unpins a GEM VRAM object that has already been
355  * locked. Use drm_gem_vram_unpin() if possible.
356  *
357  * Returns:
358  * 0 on success, or
359  * a negative error code otherwise.
360  */
361 int drm_gem_vram_unpin_locked(struct drm_gem_vram_object *gbo)
362 {
363 	int i, ret;
364 	struct ttm_operation_ctx ctx = { false, false };
365 
366 	lockdep_assert_held(&gbo->bo.resv->lock.base);
367 
368 	if (WARN_ON_ONCE(!gbo->pin_count))
369 		return 0;
370 
371 	--gbo->pin_count;
372 	if (gbo->pin_count)
373 		return 0;
374 
375 	for (i = 0; i < gbo->placement.num_placement ; ++i)
376 		gbo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
377 
378 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
379 	if (ret < 0)
380 		return ret;
381 
382 	return 0;
383 }
384 EXPORT_SYMBOL(drm_gem_vram_unpin_locked);
385 
386 /**
387  * drm_gem_vram_kmap_at() - Maps a GEM VRAM object into kernel address space
388  * @gbo:	the GEM VRAM object
389  * @map:	establish a mapping if necessary
390  * @is_iomem:	returns true if the mapped memory is I/O memory, or false \
391 	otherwise; can be NULL
392  * @kmap:	the mapping's kmap object
393  *
394  * This function maps the buffer object into the kernel's address space
395  * or returns the current mapping. If the parameter map is false, the
396  * function only queries the current mapping, but does not establish a
397  * new one.
398  *
399  * Returns:
400  * The buffers virtual address if mapped, or
401  * NULL if not mapped, or
402  * an ERR_PTR()-encoded error code otherwise.
403  */
404 void *drm_gem_vram_kmap_at(struct drm_gem_vram_object *gbo, bool map,
405 			   bool *is_iomem, struct ttm_bo_kmap_obj *kmap)
406 {
407 	int ret;
408 
409 	if (kmap->virtual || !map)
410 		goto out;
411 
412 	ret = ttm_bo_kmap(&gbo->bo, 0, gbo->bo.num_pages, kmap);
413 	if (ret)
414 		return ERR_PTR(ret);
415 
416 out:
417 	if (!is_iomem)
418 		return kmap->virtual;
419 	if (!kmap->virtual) {
420 		*is_iomem = false;
421 		return NULL;
422 	}
423 	return ttm_kmap_obj_virtual(kmap, is_iomem);
424 }
425 EXPORT_SYMBOL(drm_gem_vram_kmap_at);
426 
427 /**
428  * drm_gem_vram_kmap() - Maps a GEM VRAM object into kernel address space
429  * @gbo:	the GEM VRAM object
430  * @map:	establish a mapping if necessary
431  * @is_iomem:	returns true if the mapped memory is I/O memory, or false \
432 	otherwise; can be NULL
433  *
434  * This function maps the buffer object into the kernel's address space
435  * or returns the current mapping. If the parameter map is false, the
436  * function only queries the current mapping, but does not establish a
437  * new one.
438  *
439  * Returns:
440  * The buffers virtual address if mapped, or
441  * NULL if not mapped, or
442  * an ERR_PTR()-encoded error code otherwise.
443  */
444 void *drm_gem_vram_kmap(struct drm_gem_vram_object *gbo, bool map,
445 			bool *is_iomem)
446 {
447 	return drm_gem_vram_kmap_at(gbo, map, is_iomem, &gbo->kmap);
448 }
449 EXPORT_SYMBOL(drm_gem_vram_kmap);
450 
451 /**
452  * drm_gem_vram_kunmap_at() - Unmaps a GEM VRAM object
453  * @gbo:	the GEM VRAM object
454  * @kmap:	the mapping's kmap object
455  */
456 void drm_gem_vram_kunmap_at(struct drm_gem_vram_object *gbo,
457 			    struct ttm_bo_kmap_obj *kmap)
458 {
459 	if (!kmap->virtual)
460 		return;
461 
462 	ttm_bo_kunmap(kmap);
463 	kmap->virtual = NULL;
464 }
465 EXPORT_SYMBOL(drm_gem_vram_kunmap_at);
466 
467 /**
468  * drm_gem_vram_kunmap() - Unmaps a GEM VRAM object
469  * @gbo:	the GEM VRAM object
470  */
471 void drm_gem_vram_kunmap(struct drm_gem_vram_object *gbo)
472 {
473 	drm_gem_vram_kunmap_at(gbo, &gbo->kmap);
474 }
475 EXPORT_SYMBOL(drm_gem_vram_kunmap);
476 
477 /**
478  * drm_gem_vram_fill_create_dumb() - \
479 	Helper for implementing &struct drm_driver.dumb_create
480  * @file:		the DRM file
481  * @dev:		the DRM device
482  * @bdev:		the TTM BO device managing the buffer object
483  * @pg_align:		the buffer's alignment in multiples of the page size
484  * @interruptible:	sleep interruptible if waiting for memory
485  * @args:		the arguments as provided to \
486 				&struct drm_driver.dumb_create
487  *
488  * This helper function fills &struct drm_mode_create_dumb, which is used
489  * by &struct drm_driver.dumb_create. Implementations of this interface
490  * should forwards their arguments to this helper, plus the driver-specific
491  * parameters.
492  *
493  * Returns:
494  * 0 on success, or
495  * a negative error code otherwise.
496  */
497 int drm_gem_vram_fill_create_dumb(struct drm_file *file,
498 				  struct drm_device *dev,
499 				  struct ttm_bo_device *bdev,
500 				  unsigned long pg_align,
501 				  bool interruptible,
502 				  struct drm_mode_create_dumb *args)
503 {
504 	size_t pitch, size;
505 	struct drm_gem_vram_object *gbo;
506 	int ret;
507 	u32 handle;
508 
509 	pitch = args->width * ((args->bpp + 7) / 8);
510 	size = pitch * args->height;
511 
512 	size = roundup(size, PAGE_SIZE);
513 	if (!size)
514 		return -EINVAL;
515 
516 	gbo = drm_gem_vram_create(dev, bdev, size, pg_align, interruptible);
517 	if (IS_ERR(gbo))
518 		return PTR_ERR(gbo);
519 
520 	ret = drm_gem_handle_create(file, &gbo->gem, &handle);
521 	if (ret)
522 		goto err_drm_gem_object_put_unlocked;
523 
524 	drm_gem_object_put_unlocked(&gbo->gem);
525 
526 	args->pitch = pitch;
527 	args->size = size;
528 	args->handle = handle;
529 
530 	return 0;
531 
532 err_drm_gem_object_put_unlocked:
533 	drm_gem_object_put_unlocked(&gbo->gem);
534 	return ret;
535 }
536 EXPORT_SYMBOL(drm_gem_vram_fill_create_dumb);
537 
538 /*
539  * Helpers for struct ttm_bo_driver
540  */
541 
542 static bool drm_is_gem_vram(struct ttm_buffer_object *bo)
543 {
544 	return (bo->destroy == ttm_buffer_object_destroy);
545 }
546 
547 /**
548  * drm_gem_vram_bo_driver_evict_flags() - \
549 	Implements &struct ttm_bo_driver.evict_flags
550  * @bo:	TTM buffer object. Refers to &struct drm_gem_vram_object.bo
551  * @pl:	TTM placement information.
552  */
553 void drm_gem_vram_bo_driver_evict_flags(struct ttm_buffer_object *bo,
554 					struct ttm_placement *pl)
555 {
556 	struct drm_gem_vram_object *gbo;
557 
558 	/* TTM may pass BOs that are not GEM VRAM BOs. */
559 	if (!drm_is_gem_vram(bo))
560 		return;
561 
562 	gbo = drm_gem_vram_of_bo(bo);
563 	drm_gem_vram_placement(gbo, TTM_PL_FLAG_SYSTEM);
564 	*pl = gbo->placement;
565 }
566 EXPORT_SYMBOL(drm_gem_vram_bo_driver_evict_flags);
567 
568 /**
569  * drm_gem_vram_bo_driver_verify_access() - \
570 	Implements &struct ttm_bo_driver.verify_access
571  * @bo:		TTM buffer object. Refers to &struct drm_gem_vram_object.bo
572  * @filp:	File pointer.
573  *
574  * Returns:
575  * 0 on success, or
576  * a negative errno code otherwise.
577  */
578 int drm_gem_vram_bo_driver_verify_access(struct ttm_buffer_object *bo,
579 					 struct file *filp)
580 {
581 	struct drm_gem_vram_object *gbo = drm_gem_vram_of_bo(bo);
582 
583 	return drm_vma_node_verify_access(&gbo->gem.vma_node,
584 					  filp->private_data);
585 }
586 EXPORT_SYMBOL(drm_gem_vram_bo_driver_verify_access);
587 
/**
 * drm_gem_vram_mm_funcs - Functions for &struct drm_vram_mm
 *
 * Most users of &struct drm_gem_vram_object will also use
 * &struct drm_vram_mm. This instance of &struct drm_vram_mm_funcs
 * can be used to connect both.
 */
const struct drm_vram_mm_funcs drm_gem_vram_mm_funcs = {
	.evict_flags = drm_gem_vram_bo_driver_evict_flags,
	.verify_access = drm_gem_vram_bo_driver_verify_access
};
EXPORT_SYMBOL(drm_gem_vram_mm_funcs);
600 
601 /*
602  * Helpers for struct drm_driver
603  */
604 
/**
 * drm_gem_vram_driver_gem_free_object_unlocked() - \
	Implements &struct drm_driver.gem_free_object_unlocked
 * @gem:	GEM object. Refers to &struct drm_gem_vram_object.gem
 */
void drm_gem_vram_driver_gem_free_object_unlocked(struct drm_gem_object *gem)
{
	/* Drop the reference held by the GEM object. */
	drm_gem_vram_put(drm_gem_vram_of_gem(gem));
}
EXPORT_SYMBOL(drm_gem_vram_driver_gem_free_object_unlocked);
617 
/**
 * drm_gem_vram_driver_dumb_create() - \
	Implements &struct drm_driver.dumb_create
 * @file:		the DRM file
 * @dev:		the DRM device
 * @args:		the arguments as provided to \
				&struct drm_driver.dumb_create
 *
 * This function requires the driver to use @drm_device.vram_mm for its
 * instance of VRAM MM.
 *
 * Returns:
 * 0 on success, or
 * a negative error code otherwise.
 */
int drm_gem_vram_driver_dumb_create(struct drm_file *file,
				    struct drm_device *dev,
				    struct drm_mode_create_dumb *args)
{
	if (WARN_ONCE(!dev->vram_mm, "VRAM MM not initialized"))
		return -EINVAL;

	/* Use default page alignment and non-interruptible waits. */
	return drm_gem_vram_fill_create_dumb(file, dev, &dev->vram_mm->bdev, 0,
					     false, args);
}
EXPORT_SYMBOL(drm_gem_vram_driver_dumb_create);
644 
645 /**
646  * drm_gem_vram_driver_dumb_mmap_offset() - \
647 	Implements &struct drm_driver.dumb_mmap_offset
648  * @file:	DRM file pointer.
649  * @dev:	DRM device.
650  * @handle:	GEM handle
651  * @offset:	Returns the mapping's memory offset on success
652  *
653  * Returns:
654  * 0 on success, or
655  * a negative errno code otherwise.
656  */
657 int drm_gem_vram_driver_dumb_mmap_offset(struct drm_file *file,
658 					 struct drm_device *dev,
659 					 uint32_t handle, uint64_t *offset)
660 {
661 	struct drm_gem_object *gem;
662 	struct drm_gem_vram_object *gbo;
663 
664 	gem = drm_gem_object_lookup(file, handle);
665 	if (!gem)
666 		return -ENOENT;
667 
668 	gbo = drm_gem_vram_of_gem(gem);
669 	*offset = drm_gem_vram_mmap_offset(gbo);
670 
671 	drm_gem_object_put_unlocked(gem);
672 
673 	return 0;
674 }
675 EXPORT_SYMBOL(drm_gem_vram_driver_dumb_mmap_offset);
676 
677 /*
678  * PRIME helpers for struct drm_driver
679  */
680 
681 /**
682  * drm_gem_vram_driver_gem_prime_pin() - \
683 	Implements &struct drm_driver.gem_prime_pin
684  * @gem:	The GEM object to pin
685  *
686  * Returns:
687  * 0 on success, or
688  * a negative errno code otherwise.
689  */
690 int drm_gem_vram_driver_gem_prime_pin(struct drm_gem_object *gem)
691 {
692 	struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);
693 
694 	return drm_gem_vram_pin(gbo, DRM_GEM_VRAM_PL_FLAG_VRAM);
695 }
696 EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_pin);
697 
/**
 * drm_gem_vram_driver_gem_prime_unpin() - \
	Implements &struct drm_driver.gem_prime_unpin
 * @gem:	The GEM object to unpin
 */
void drm_gem_vram_driver_gem_prime_unpin(struct drm_gem_object *gem)
{
	/* Balances the pin done in drm_gem_vram_driver_gem_prime_pin(). */
	drm_gem_vram_unpin(drm_gem_vram_of_gem(gem));
}
EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_unpin);
710 
711 /**
712  * drm_gem_vram_driver_gem_prime_vmap() - \
713 	Implements &struct drm_driver.gem_prime_vmap
714  * @gem:	The GEM object to map
715  *
716  * Returns:
717  * The buffers virtual address on success, or
718  * NULL otherwise.
719  */
720 void *drm_gem_vram_driver_gem_prime_vmap(struct drm_gem_object *gem)
721 {
722 	struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);
723 	int ret;
724 	void *base;
725 
726 	ret = drm_gem_vram_pin(gbo, DRM_GEM_VRAM_PL_FLAG_VRAM);
727 	if (ret)
728 		return NULL;
729 	base = drm_gem_vram_kmap(gbo, true, NULL);
730 	if (IS_ERR(base)) {
731 		drm_gem_vram_unpin(gbo);
732 		return NULL;
733 	}
734 	return base;
735 }
736 EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_vmap);
737 
/**
 * drm_gem_vram_driver_gem_prime_vunmap() - \
	Implements &struct drm_driver.gem_prime_vunmap
 * @gem:	The GEM object to unmap
 * @vaddr:	The mapping's base address
 */
void drm_gem_vram_driver_gem_prime_vunmap(struct drm_gem_object *gem,
					  void *vaddr)
{
	struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);

	/* Unmap first, then release the pin taken by vmap. */
	drm_gem_vram_kunmap(gbo);
	drm_gem_vram_unpin(gbo);
}
EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_vunmap);
753 
/**
 * drm_gem_vram_driver_gem_prime_mmap() - \
	Implements &struct drm_driver.gem_prime_mmap
 * @gem:	The GEM object to map
 * @vma:	The VMA describing the mapping
 *
 * Returns:
 * 0 on success, or
 * a negative errno code otherwise.
 */
int drm_gem_vram_driver_gem_prime_mmap(struct drm_gem_object *gem,
				       struct vm_area_struct *vma)
{
	struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);

	/* drm_gem_prime_mmap() resolves the fake offset via the GEM
	 * object's vma_node, whereas this helper's offsets live on the
	 * TTM BO's vma_node (see drm_gem_vram_mmap_offset()); copy the
	 * start so the lookup matches.
	 * NOTE(review): this reaches into drm_vma_offset_node internals
	 * (vm_node.start) — presumably fragile; confirm against the
	 * GEM/TTM mmap paths before relying on it.
	 */
	gbo->gem.vma_node.vm_node.start = gbo->bo.vma_node.vm_node.start;
	return drm_gem_prime_mmap(gem, vma);
}
EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_mmap);
773