radeon.h: diff between commits e0273728564a395a13cfed70e34da4f2613d2d44 (old) and 721604a15b934f0a8d1909acb8017f029128be2f (new)
1/*
2 * Copyright 2008 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 * Copyright 2009 Jerome Glisse.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation

--- 93 unchanged lines hidden ---

102#define RADEON_MAX_USEC_TIMEOUT 100000 /* 100 ms */
103#define RADEON_FENCE_JIFFIES_TIMEOUT (HZ / 2)
104/* RADEON_IB_POOL_SIZE must be a power of 2 */
105#define RADEON_IB_POOL_SIZE 16
106#define RADEON_DEBUGFS_MAX_COMPONENTS 32
107#define RADEONFB_CONN_LIMIT 4
108#define RADEON_BIOS_NUM_SCRATCH 8
109
110/* max number of rings */
111#define RADEON_NUM_RINGS 3
112
113/* internal ring indices */
114/* r1xx+ has gfx CP ring */
115#define RADEON_RING_TYPE_GFX_INDEX 0
116
117/* cayman has 2 compute CP rings */
118#define CAYMAN_RING_TYPE_CP1_INDEX 1
119#define CAYMAN_RING_TYPE_CP2_INDEX 2
120
121/* hardcode these limits for now */
122#define RADEON_VA_RESERVED_SIZE (8 << 20)
123#define RADEON_IB_VM_MAX_SIZE (64 << 10)
124
110/*
111 * Errata workarounds.
112 */
113enum radeon_pll_errata {
114 CHIP_ERRATA_R300_CG = 0x00000001,
115 CHIP_ERRATA_PLL_DUMMYREADS = 0x00000002,
116 CHIP_ERRATA_PLL_DELAY = 0x00000004
117};

--- 69 unchanged lines hidden ---

187extern int evergreen_get_temp(struct radeon_device *rdev);
188extern int sumo_get_temp(struct radeon_device *rdev);
189
190/*
191 * Fences.
192 */
193struct radeon_fence_driver {
194 uint32_t scratch_reg;
125/*
126 * Errata workarounds.
127 */
128enum radeon_pll_errata {
129 CHIP_ERRATA_R300_CG = 0x00000001,
130 CHIP_ERRATA_PLL_DUMMYREADS = 0x00000002,
131 CHIP_ERRATA_PLL_DELAY = 0x00000004
132};

--- 69 unchanged lines hidden ---

202extern int evergreen_get_temp(struct radeon_device *rdev);
203extern int sumo_get_temp(struct radeon_device *rdev);
204
205/*
206 * Fences.
207 */
208struct radeon_fence_driver {
209 uint32_t scratch_reg;
210 uint64_t gpu_addr;
211 volatile uint32_t *cpu_addr;
195 atomic_t seq;
196 uint32_t last_seq;
197 unsigned long last_jiffies;
198 unsigned long last_timeout;
199 wait_queue_head_t queue;
212 atomic_t seq;
213 uint32_t last_seq;
214 unsigned long last_jiffies;
215 unsigned long last_timeout;
216 wait_queue_head_t queue;
200 rwlock_t lock;
201 struct list_head created;
217 struct list_head created;
202 struct list_head emited;
218 struct list_head emitted;
203 struct list_head signaled;
204 bool initialized;
205};
206
207struct radeon_fence {
208 struct radeon_device *rdev;
209 struct kref kref;
210 struct list_head list;
211 /* protected by radeon_fence.lock */
212 uint32_t seq;
219 struct list_head signaled;
220 bool initialized;
221};
222
223struct radeon_fence {
224 struct radeon_device *rdev;
225 struct kref kref;
226 struct list_head list;
227 /* protected by radeon_fence.lock */
228 uint32_t seq;
213 bool emited;
229 bool emitted;
214 bool signaled;
230 bool signaled;
231 /* RB, DMA, etc. */
232 int ring;
215};
216
233};
234
235int radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring);
217int radeon_fence_driver_init(struct radeon_device *rdev);
218void radeon_fence_driver_fini(struct radeon_device *rdev);
236int radeon_fence_driver_init(struct radeon_device *rdev);
237void radeon_fence_driver_fini(struct radeon_device *rdev);
219int radeon_fence_create(struct radeon_device *rdev, struct radeon_fence **fence);
238int radeon_fence_create(struct radeon_device *rdev, struct radeon_fence **fence, int ring);
220int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence *fence);
239int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence *fence);
221void radeon_fence_process(struct radeon_device *rdev);
240void radeon_fence_process(struct radeon_device *rdev, int ring);
222bool radeon_fence_signaled(struct radeon_fence *fence);
223int radeon_fence_wait(struct radeon_fence *fence, bool interruptible);
241bool radeon_fence_signaled(struct radeon_fence *fence);
242int radeon_fence_wait(struct radeon_fence *fence, bool interruptible);
224int radeon_fence_wait_next(struct radeon_device *rdev);
225int radeon_fence_wait_last(struct radeon_device *rdev);
243int radeon_fence_wait_next(struct radeon_device *rdev, int ring);
244int radeon_fence_wait_last(struct radeon_device *rdev, int ring);
226struct radeon_fence *radeon_fence_ref(struct radeon_fence *fence);
227void radeon_fence_unref(struct radeon_fence **fence);
245struct radeon_fence *radeon_fence_ref(struct radeon_fence *fence);
246void radeon_fence_unref(struct radeon_fence **fence);
247int radeon_fence_count_emitted(struct radeon_device *rdev, int ring);
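
The fence entry points above now take an explicit ring index. Below is a minimal sketch of how a caller might drive them after this change; example_fence_roundtrip() is a hypothetical helper written only against the declarations in this header, not code from the patch, and error handling is trimmed.

/* Hypothetical caller, for illustration only: create a fence against the
 * GFX ring, emit it once work has been queued, then block until the GPU
 * passes it.  Only functions declared in this header are used. */
static int example_fence_roundtrip(struct radeon_device *rdev)
{
	struct radeon_fence *fence = NULL;
	int r;

	r = radeon_fence_create(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX);
	if (r)
		return r;

	/* ... submit commands on the GFX ring here ... */

	r = radeon_fence_emit(rdev, fence);
	if (!r)
		r = radeon_fence_wait(fence, true);	/* interruptible wait */

	radeon_fence_unref(&fence);
	return r;
}
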
228
229/*
230 * Tiling registers
231 */
232struct radeon_surface_reg {
233 struct radeon_bo *bo;
234};
235

--- 5 unchanged lines hidden ---

241struct radeon_mman {
242 struct ttm_bo_global_ref bo_global_ref;
243 struct drm_global_reference mem_global_ref;
244 struct ttm_bo_device bdev;
245 bool mem_global_referenced;
246 bool initialized;
247};
248
248
249/*
250 * Tiling registers
251 */
252struct radeon_surface_reg {
253 struct radeon_bo *bo;
254};
255

--- 5 unchanged lines hidden ---

261struct radeon_mman {
262 struct ttm_bo_global_ref bo_global_ref;
263 struct drm_global_reference mem_global_ref;
264 struct ttm_bo_device bdev;
265 bool mem_global_referenced;
266 bool initialized;
267};
268
269/* bo virtual address in a specific vm */
270struct radeon_bo_va {
271 /* bo list is protected by bo being reserved */
272 struct list_head bo_list;
273 /* vm list is protected by vm mutex */
274 struct list_head vm_list;
275 /* constant after initialization */
276 struct radeon_vm *vm;
277 struct radeon_bo *bo;
278 uint64_t soffset;
279 uint64_t eoffset;
280 uint32_t flags;
281 bool valid;
282};
283
249struct radeon_bo {
250 /* Protected by gem.mutex */
251 struct list_head list;
252 /* Protected by tbo.reserved */
253 u32 placements[3];
254 struct ttm_placement placement;
255 struct ttm_buffer_object tbo;
256 struct ttm_bo_kmap_obj kmap;
257 unsigned pin_count;
258 void *kptr;
259 u32 tiling_flags;
260 u32 pitch;
261 int surface_reg;
284struct radeon_bo {
285 /* Protected by gem.mutex */
286 struct list_head list;
287 /* Protected by tbo.reserved */
288 u32 placements[3];
289 struct ttm_placement placement;
290 struct ttm_buffer_object tbo;
291 struct ttm_bo_kmap_obj kmap;
292 unsigned pin_count;
293 void *kptr;
294 u32 tiling_flags;
295 u32 pitch;
296 int surface_reg;
297 /* list of all virtual addresses to which this bo
298 * is associated
299 */
300 struct list_head va;
262 /* Constant after initialization */
263 struct radeon_device *rdev;
264 struct drm_gem_object gem_base;
265};
266#define gem_to_radeon_bo(gobj) container_of((gobj), struct radeon_bo, gem_base)
267
268struct radeon_bo_list {
269 struct ttm_validate_buffer tv;
270 struct radeon_bo *bo;
271 uint64_t gpu_offset;
272 unsigned rdomain;
273 unsigned wdomain;
274 u32 tiling_flags;
275};
276
301 /* Constant after initialization */
302 struct radeon_device *rdev;
303 struct drm_gem_object gem_base;
304};
305#define gem_to_radeon_bo(gobj) container_of((gobj), struct radeon_bo, gem_base)
306
307struct radeon_bo_list {
308 struct ttm_validate_buffer tv;
309 struct radeon_bo *bo;
310 uint64_t gpu_offset;
311 unsigned rdomain;
312 unsigned wdomain;
313 u32 tiling_flags;
314};
315
316/* sub-allocation manager; it has to be protected by another lock.
317 * By design this is a helper for other parts of the driver,
318 * like the indirect buffer or semaphore code, which both have their
319 * own locking.
320 *
321 * The principle is simple: we keep a list of sub-allocations in offset
322 * order (the first entry has offset == 0, the last entry has the highest
323 * offset).
324 *
325 * When allocating a new object we first check whether there is room at
326 * the end, i.e. total_size - (last_object_offset + last_object_size) >=
327 * alloc_size. If so, we allocate the new object there.
328 *
329 * When there is not enough room at the end, we start waiting on
330 * each sub-object until we reach object_offset + object_size >=
331 * alloc_size; that object then becomes the sub-object we return.
332 *
333 * Alignment can't be bigger than the page size.
334 *
335 * Holes are not considered for allocation, to keep things simple.
336 * The assumption is that there won't be holes (all objects use the same
337 * alignment).
338 */
339struct radeon_sa_manager {
340 struct radeon_bo *bo;
341 struct list_head sa_bo;
342 unsigned size;
343 uint64_t gpu_addr;
344 void *cpu_ptr;
345 uint32_t domain;
346};
347
348struct radeon_sa_bo;
349
350/* sub-allocation buffer */
351struct radeon_sa_bo {
352 struct list_head list;
353 struct radeon_sa_manager *manager;
354 unsigned offset;
355 unsigned size;
356};
357
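
The comment above describes the allocator's end-of-buffer test in prose. As an illustration only, that check could look roughly like the sketch below; sa_fits_at_end() is a hypothetical helper, not part of the patch, and it assumes the sa_bo list is kept sorted by offset as the comment states.

/* Hypothetical helper: is there room after the last sub-allocation?
 * Relies on the list being ordered by offset (last entry == highest
 * offset), as described in the comment above. */
static bool sa_fits_at_end(struct radeon_sa_manager *sa_manager,
			   unsigned alloc_size)
{
	struct radeon_sa_bo *last;

	if (list_empty(&sa_manager->sa_bo))
		return alloc_size <= sa_manager->size;

	last = list_entry(sa_manager->sa_bo.prev, struct radeon_sa_bo, list);
	return sa_manager->size - (last->offset + last->size) >= alloc_size;
}
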
277/*
278 * GEM objects.
279 */
280struct radeon_gem {
281 struct mutex mutex;
282 struct list_head objects;
283};
284

--- 13 unchanged lines hidden ---

298int radeon_mode_dumb_mmap(struct drm_file *filp,
299 struct drm_device *dev,
300 uint32_t handle, uint64_t *offset_p);
301int radeon_mode_dumb_destroy(struct drm_file *file_priv,
302 struct drm_device *dev,
303 uint32_t handle);
304
305/*
358/*
359 * GEM objects.
360 */
361struct radeon_gem {
362 struct mutex mutex;
363 struct list_head objects;
364};
365

--- 13 unchanged lines hidden ---

379int radeon_mode_dumb_mmap(struct drm_file *filp,
380 struct drm_device *dev,
381 uint32_t handle, uint64_t *offset_p);
382int radeon_mode_dumb_destroy(struct drm_file *file_priv,
383 struct drm_device *dev,
384 uint32_t handle);
385
386/*
387 * Semaphores.
388 */
389struct radeon_ring;
390
391#define RADEON_SEMAPHORE_BO_SIZE 256
392
393struct radeon_semaphore_driver {
394 rwlock_t lock;
395 struct list_head bo;
396};
397
398struct radeon_semaphore_bo;
399
400/* everything here is constant */
401struct radeon_semaphore {
402 struct list_head list;
403 uint64_t gpu_addr;
404 uint32_t *cpu_ptr;
405 struct radeon_semaphore_bo *bo;
406};
407
408struct radeon_semaphore_bo {
409 struct list_head list;
410 struct radeon_ib *ib;
411 struct list_head free;
412 struct radeon_semaphore semaphores[RADEON_SEMAPHORE_BO_SIZE/8];
413 unsigned nused;
414};
415
416void radeon_semaphore_driver_fini(struct radeon_device *rdev);
417int radeon_semaphore_create(struct radeon_device *rdev,
418 struct radeon_semaphore **semaphore);
419void radeon_semaphore_emit_signal(struct radeon_device *rdev, int ring,
420 struct radeon_semaphore *semaphore);
421void radeon_semaphore_emit_wait(struct radeon_device *rdev, int ring,
422 struct radeon_semaphore *semaphore);
423void radeon_semaphore_free(struct radeon_device *rdev,
424 struct radeon_semaphore *semaphore);
425
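
A short sketch of cross-ring synchronization with this API, for illustration only: example_ring_to_ring_sync() is a hypothetical helper; it assumes the caller already holds the ring locks and has reserved space for the semaphore packets, and in real code the semaphore could only be freed once both rings have executed them.

/* Hypothetical use of the semaphore API: block the first Cayman compute
 * ring until the GFX ring signals. */
static int example_ring_to_ring_sync(struct radeon_device *rdev)
{
	struct radeon_semaphore *sem;
	int r;

	r = radeon_semaphore_create(rdev, &sem);
	if (r)
		return r;

	radeon_semaphore_emit_signal(rdev, RADEON_RING_TYPE_GFX_INDEX, sem);
	radeon_semaphore_emit_wait(rdev, CAYMAN_RING_TYPE_CP1_INDEX, sem);

	/* must not be freed before the GPU has consumed both packets;
	 * done here only to keep the sketch short */
	radeon_semaphore_free(rdev, sem);
	return 0;
}
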
426/*
306 * GART structures, functions & helpers
307 */
308struct radeon_mc;
309
310#define RADEON_GPU_PAGE_SIZE 4096
311#define RADEON_GPU_PAGE_MASK (RADEON_GPU_PAGE_SIZE - 1)
312#define RADEON_GPU_PAGE_SHIFT 12
427 * GART structures, functions & helpers
428 */
429struct radeon_mc;
430
431#define RADEON_GPU_PAGE_SIZE 4096
432#define RADEON_GPU_PAGE_MASK (RADEON_GPU_PAGE_SIZE - 1)
433#define RADEON_GPU_PAGE_SHIFT 12
434#define RADEON_GPU_PAGE_ALIGN(a) (((a) + RADEON_GPU_PAGE_MASK) & ~RADEON_GPU_PAGE_MASK)
313
314struct radeon_gart {
315 dma_addr_t table_addr;
316 struct radeon_bo *robj;
317 void *ptr;
318 unsigned num_gpu_pages;
319 unsigned num_cpu_pages;
320 unsigned table_size;
321 struct page **pages;
322 dma_addr_t *pages_addr;
435
436struct radeon_gart {
437 dma_addr_t table_addr;
438 struct radeon_bo *robj;
439 void *ptr;
440 unsigned num_gpu_pages;
441 unsigned num_cpu_pages;
442 unsigned table_size;
443 struct page **pages;
444 dma_addr_t *pages_addr;
323 bool *ttm_alloced;
324 bool ready;
325};
326
327int radeon_gart_table_ram_alloc(struct radeon_device *rdev);
328void radeon_gart_table_ram_free(struct radeon_device *rdev);
329int radeon_gart_table_vram_alloc(struct radeon_device *rdev);
330void radeon_gart_table_vram_free(struct radeon_device *rdev);
331int radeon_gart_table_vram_pin(struct radeon_device *rdev);

--- 97 unchanged lines hidden ---

429};
430
431#define RADEON_MAX_HPD_PINS 6
432#define RADEON_MAX_CRTCS 6
433#define RADEON_MAX_HDMI_BLOCKS 2
434
435struct radeon_irq {
436 bool installed;
445 bool ready;
446};
447
448int radeon_gart_table_ram_alloc(struct radeon_device *rdev);
449void radeon_gart_table_ram_free(struct radeon_device *rdev);
450int radeon_gart_table_vram_alloc(struct radeon_device *rdev);
451void radeon_gart_table_vram_free(struct radeon_device *rdev);
452int radeon_gart_table_vram_pin(struct radeon_device *rdev);

--- 97 unchanged lines hidden ---

550};
551
552#define RADEON_MAX_HPD_PINS 6
553#define RADEON_MAX_CRTCS 6
554#define RADEON_MAX_HDMI_BLOCKS 2
555
556struct radeon_irq {
557 bool installed;
437 bool sw_int;
558 bool sw_int[RADEON_NUM_RINGS];
438 bool crtc_vblank_int[RADEON_MAX_CRTCS];
439 bool pflip[RADEON_MAX_CRTCS];
440 wait_queue_head_t vblank_queue;
441 bool hpd[RADEON_MAX_HPD_PINS];
442 bool gui_idle;
443 bool gui_idle_acked;
444 wait_queue_head_t idle_queue;
445 bool hdmi[RADEON_MAX_HDMI_BLOCKS];
446 spinlock_t sw_lock;
559 bool crtc_vblank_int[RADEON_MAX_CRTCS];
560 bool pflip[RADEON_MAX_CRTCS];
561 wait_queue_head_t vblank_queue;
562 bool hpd[RADEON_MAX_HPD_PINS];
563 bool gui_idle;
564 bool gui_idle_acked;
565 wait_queue_head_t idle_queue;
566 bool hdmi[RADEON_MAX_HDMI_BLOCKS];
567 spinlock_t sw_lock;
447 int sw_refcount;
568 int sw_refcount[RADEON_NUM_RINGS];
448 union radeon_irq_stat_regs stat_regs;
449 spinlock_t pflip_lock[RADEON_MAX_CRTCS];
450 int pflip_refcount[RADEON_MAX_CRTCS];
451};
452
453int radeon_irq_kms_init(struct radeon_device *rdev);
454void radeon_irq_kms_fini(struct radeon_device *rdev);
569 union radeon_irq_stat_regs stat_regs;
570 spinlock_t pflip_lock[RADEON_MAX_CRTCS];
571 int pflip_refcount[RADEON_MAX_CRTCS];
572};
573
574int radeon_irq_kms_init(struct radeon_device *rdev);
575void radeon_irq_kms_fini(struct radeon_device *rdev);
455void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev);
456void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev);
576void radeon_irq_kms_sw_irq_get(struct radeon_device *rdev, int ring);
577void radeon_irq_kms_sw_irq_put(struct radeon_device *rdev, int ring);
457void radeon_irq_kms_pflip_irq_get(struct radeon_device *rdev, int crtc);
458void radeon_irq_kms_pflip_irq_put(struct radeon_device *rdev, int crtc);
459
460/*
578void radeon_irq_kms_pflip_irq_get(struct radeon_device *rdev, int crtc);
579void radeon_irq_kms_pflip_irq_put(struct radeon_device *rdev, int crtc);
580
581/*
461 * CP & ring.
582 * CP & rings.
462 */
583 */
584
463struct radeon_ib {
585struct radeon_ib {
464 struct list_head list;
586 struct radeon_sa_bo sa_bo;
465 unsigned idx;
587 unsigned idx;
588 uint32_t length_dw;
466 uint64_t gpu_addr;
589 uint64_t gpu_addr;
467 struct radeon_fence *fence;
468 uint32_t *ptr;
590 uint32_t *ptr;
469 uint32_t length_dw;
470 bool free;
591 struct radeon_fence *fence;
592 unsigned vm_id;
471};
472
473/*
474 * locking -
475 * mutex protects scheduled_ibs, ready, alloc_bm
476 */
477struct radeon_ib_pool {
593};
594
595/*
596 * locking -
597 * mutex protects scheduled_ibs, ready, alloc_bm
598 */
599struct radeon_ib_pool {
478 struct mutex mutex;
479 struct radeon_bo *robj;
480 struct list_head bogus_ib;
481 struct radeon_ib ibs[RADEON_IB_POOL_SIZE];
482 bool ready;
483 unsigned head_id;
600 struct mutex mutex;
601 struct radeon_sa_manager sa_manager;
602 struct radeon_ib ibs[RADEON_IB_POOL_SIZE];
603 bool ready;
604 unsigned head_id;
484};
485
605};
606
486struct radeon_cp {
607struct radeon_ring {
487 struct radeon_bo *ring_obj;
488 volatile uint32_t *ring;
489 unsigned rptr;
608 struct radeon_bo *ring_obj;
609 volatile uint32_t *ring;
610 unsigned rptr;
611 unsigned rptr_offs;
612 unsigned rptr_reg;
490 unsigned wptr;
491 unsigned wptr_old;
613 unsigned wptr;
614 unsigned wptr_old;
615 unsigned wptr_reg;
492 unsigned ring_size;
493 unsigned ring_free_dw;
494 int count_dw;
495 uint64_t gpu_addr;
496 uint32_t align_mask;
497 uint32_t ptr_mask;
498 struct mutex mutex;
499 bool ready;
616 unsigned ring_size;
617 unsigned ring_free_dw;
618 int count_dw;
619 uint64_t gpu_addr;
620 uint32_t align_mask;
621 uint32_t ptr_mask;
622 struct mutex mutex;
623 bool ready;
624 u32 ptr_reg_shift;
625 u32 ptr_reg_mask;
626 u32 nop;
500};
501
502/*
627};
628
629/*
630 * VM
631 */
632struct radeon_vm {
633 struct list_head list;
634 struct list_head va;
635 int id;
636 unsigned last_pfn;
637 u64 pt_gpu_addr;
638 u64 *pt;
639 struct radeon_sa_bo sa_bo;
640 struct mutex mutex;
641 /* last fence for cs using this vm */
642 struct radeon_fence *fence;
643};
644
645struct radeon_vm_funcs {
646 int (*init)(struct radeon_device *rdev);
647 void (*fini)(struct radeon_device *rdev);
648 /* cs mutex must be locked for schedule_ib */
649 int (*bind)(struct radeon_device *rdev, struct radeon_vm *vm, int id);
650 void (*unbind)(struct radeon_device *rdev, struct radeon_vm *vm);
651 void (*tlb_flush)(struct radeon_device *rdev, struct radeon_vm *vm);
652 uint32_t (*page_flags)(struct radeon_device *rdev,
653 struct radeon_vm *vm,
654 uint32_t flags);
655 void (*set_page)(struct radeon_device *rdev, struct radeon_vm *vm,
656 unsigned pfn, uint64_t addr, uint32_t flags);
657};
658
659struct radeon_vm_manager {
660 struct list_head lru_vm;
661 uint32_t use_bitmap;
662 struct radeon_sa_manager sa_manager;
663 uint32_t max_pfn;
664 /* fields constant after init */
665 const struct radeon_vm_funcs *funcs;
666 /* number of VMIDs */
667 unsigned nvm;
668 /* vram base address for page table entry */
669 u64 vram_base_offset;
670};
671
672/*
673 * file private structure
674 */
675struct radeon_fpriv {
676 struct radeon_vm vm;
677};
678
679/*
503 * R6xx+ IH ring
504 */
505struct r600_ih {
506 struct radeon_bo *ring_obj;
507 volatile uint32_t *ring;
508 unsigned rptr;
680 * R6xx+ IH ring
681 */
682struct r600_ih {
683 struct radeon_bo *ring_obj;
684 volatile uint32_t *ring;
685 unsigned rptr;
686 unsigned rptr_offs;
509 unsigned wptr;
510 unsigned wptr_old;
511 unsigned ring_size;
512 uint64_t gpu_addr;
513 uint32_t ptr_mask;
514 spinlock_t lock;
515 bool enabled;
516};

--- 27 unchanged lines hidden (view full) ---

544 u32 state_offset;
545 u32 state_len;
546 u32 vb_used, vb_total;
547 struct radeon_ib *vb_ib;
548};
549
550void r600_blit_suspend(struct radeon_device *rdev);
551
687 unsigned wptr;
688 unsigned wptr_old;
689 unsigned ring_size;
690 uint64_t gpu_addr;
691 uint32_t ptr_mask;
692 spinlock_t lock;
693 bool enabled;
694};

--- 27 unchanged lines hidden (view full) ---

722 u32 state_offset;
723 u32 state_len;
724 u32 vb_used, vb_total;
725 struct radeon_ib *vb_ib;
726};
727
728void r600_blit_suspend(struct radeon_device *rdev);
729
552int radeon_ib_get(struct radeon_device *rdev, struct radeon_ib **ib);
730int radeon_ib_get(struct radeon_device *rdev, int ring,
731 struct radeon_ib **ib, unsigned size);
553void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib **ib);
732void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib **ib);
733bool radeon_ib_try_free(struct radeon_device *rdev, struct radeon_ib *ib);
554int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib);
555int radeon_ib_pool_init(struct radeon_device *rdev);
556void radeon_ib_pool_fini(struct radeon_device *rdev);
734int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib);
735int radeon_ib_pool_init(struct radeon_device *rdev);
736void radeon_ib_pool_fini(struct radeon_device *rdev);
737int radeon_ib_pool_start(struct radeon_device *rdev);
738int radeon_ib_pool_suspend(struct radeon_device *rdev);
557int radeon_ib_test(struct radeon_device *rdev);
739int radeon_ib_test(struct radeon_device *rdev);
558extern void radeon_ib_bogus_add(struct radeon_device *rdev, struct radeon_ib *ib);
559/* Ring access between begin & end cannot sleep */
740/* Ring access between begin & end cannot sleep */
560void radeon_ring_free_size(struct radeon_device *rdev);
561int radeon_ring_alloc(struct radeon_device *rdev, unsigned ndw);
562int radeon_ring_lock(struct radeon_device *rdev, unsigned ndw);
563void radeon_ring_commit(struct radeon_device *rdev);
564void radeon_ring_unlock_commit(struct radeon_device *rdev);
565void radeon_ring_unlock_undo(struct radeon_device *rdev);
566int radeon_ring_test(struct radeon_device *rdev);
567int radeon_ring_init(struct radeon_device *rdev, unsigned ring_size);
568void radeon_ring_fini(struct radeon_device *rdev);
741int radeon_ring_index(struct radeon_device *rdev, struct radeon_ring *cp);
742void radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *cp);
743int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
744int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
745void radeon_ring_commit(struct radeon_device *rdev, struct radeon_ring *cp);
746void radeon_ring_unlock_commit(struct radeon_device *rdev, struct radeon_ring *cp);
747void radeon_ring_unlock_undo(struct radeon_device *rdev, struct radeon_ring *cp);
748int radeon_ring_test(struct radeon_device *rdev, struct radeon_ring *cp);
749int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ring_size,
750 unsigned rptr_offs, unsigned rptr_reg, unsigned wptr_reg,
751 u32 ptr_reg_shift, u32 ptr_reg_mask, u32 nop);
752void radeon_ring_fini(struct radeon_device *rdev, struct radeon_ring *cp);
569
570
571/*
572 * CS.
573 */
574struct radeon_cs_reloc {
575 struct drm_gem_object *gobj;
576 struct radeon_bo *robj;
577 struct radeon_bo_list lobj;
578 uint32_t handle;
579 uint32_t flags;
580};
581
582struct radeon_cs_chunk {
583 uint32_t chunk_id;
584 uint32_t length_dw;
753
754
755/*
756 * CS.
757 */
758struct radeon_cs_reloc {
759 struct drm_gem_object *gobj;
760 struct radeon_bo *robj;
761 struct radeon_bo_list lobj;
762 uint32_t handle;
763 uint32_t flags;
764};
765
766struct radeon_cs_chunk {
767 uint32_t chunk_id;
768 uint32_t length_dw;
585 int kpage_idx[2];
586 uint32_t *kpage[2];
769 int kpage_idx[2];
770 uint32_t *kpage[2];
587 uint32_t *kdata;
771 uint32_t *kdata;
588 void __user *user_ptr;
589 int last_copied_page;
590 int last_page_index;
772 void __user *user_ptr;
773 int last_copied_page;
774 int last_page_index;
591};
592
593struct radeon_cs_parser {
594 struct device *dev;
595 struct radeon_device *rdev;
596 struct drm_file *filp;
597 /* chunks */
598 unsigned nchunks;

--- 4 unchanged lines hidden ---

603 /* relocations */
604 unsigned nrelocs;
605 struct radeon_cs_reloc *relocs;
606 struct radeon_cs_reloc **relocs_ptr;
607 struct list_head validated;
608 /* indices of various chunks */
609 int chunk_ib_idx;
610 int chunk_relocs_idx;
775};
776
777struct radeon_cs_parser {
778 struct device *dev;
779 struct radeon_device *rdev;
780 struct drm_file *filp;
781 /* chunks */
782 unsigned nchunks;

--- 4 unchanged lines hidden ---

787 /* relocations */
788 unsigned nrelocs;
789 struct radeon_cs_reloc *relocs;
790 struct radeon_cs_reloc **relocs_ptr;
791 struct list_head validated;
792 /* indices of various chunks */
793 int chunk_ib_idx;
794 int chunk_relocs_idx;
795 int chunk_flags_idx;
611 struct radeon_ib *ib;
612 void *track;
613 unsigned family;
614 int parser_error;
796 struct radeon_ib *ib;
797 void *track;
798 unsigned family;
799 int parser_error;
615 bool keep_tiling_flags;
800 u32 cs_flags;
801 u32 ring;
802 s32 priority;
616};
617
618extern int radeon_cs_update_pages(struct radeon_cs_parser *p, int pg_idx);
619extern int radeon_cs_finish_pages(struct radeon_cs_parser *p);
620extern u32 radeon_get_ib_value(struct radeon_cs_parser *p, int idx);
621
622struct radeon_cs_packet {
623 unsigned idx;

--- 240 unchanged lines hidden ---

864 */
865void radeon_benchmark(struct radeon_device *rdev, int test_number);
866
867
868/*
869 * Testing
870 */
871void radeon_test_moves(struct radeon_device *rdev);
803};
804
805extern int radeon_cs_update_pages(struct radeon_cs_parser *p, int pg_idx);
806extern int radeon_cs_finish_pages(struct radeon_cs_parser *p);
807extern u32 radeon_get_ib_value(struct radeon_cs_parser *p, int idx);
808
809struct radeon_cs_packet {
810 unsigned idx;

--- 240 unchanged lines hidden ---

1051 */
1052void radeon_benchmark(struct radeon_device *rdev, int test_number);
1053
1054
1055/*
1056 * Testing
1057 */
1058void radeon_test_moves(struct radeon_device *rdev);
1059void radeon_test_ring_sync(struct radeon_device *rdev,
1060 struct radeon_ring *cpA,
1061 struct radeon_ring *cpB);
1062void radeon_test_syncing(struct radeon_device *rdev);
872
873
874/*
875 * Debugfs
876 */
1063
1064
1065/*
1066 * Debugfs
1067 */
1068struct radeon_debugfs {
1069 struct drm_info_list *files;
1070 unsigned num_files;
1071};
1072
877int radeon_debugfs_add_files(struct radeon_device *rdev,
878 struct drm_info_list *files,
879 unsigned nfiles);
880int radeon_debugfs_fence_init(struct radeon_device *rdev);
881
882
883/*
884 * ASIC specific functions.
885 */
886struct radeon_asic {
887 int (*init)(struct radeon_device *rdev);
888 void (*fini)(struct radeon_device *rdev);
889 int (*resume)(struct radeon_device *rdev);
890 int (*suspend)(struct radeon_device *rdev);
891 void (*vga_set_state)(struct radeon_device *rdev, bool state);
1073int radeon_debugfs_add_files(struct radeon_device *rdev,
1074 struct drm_info_list *files,
1075 unsigned nfiles);
1076int radeon_debugfs_fence_init(struct radeon_device *rdev);
1077
1078
1079/*
1080 * ASIC specific functions.
1081 */
1082struct radeon_asic {
1083 int (*init)(struct radeon_device *rdev);
1084 void (*fini)(struct radeon_device *rdev);
1085 int (*resume)(struct radeon_device *rdev);
1086 int (*suspend)(struct radeon_device *rdev);
1087 void (*vga_set_state)(struct radeon_device *rdev, bool state);
892 bool (*gpu_is_lockup)(struct radeon_device *rdev);
1088 bool (*gpu_is_lockup)(struct radeon_device *rdev, struct radeon_ring *cp);
893 int (*asic_reset)(struct radeon_device *rdev);
894 void (*gart_tlb_flush)(struct radeon_device *rdev);
895 int (*gart_set_page)(struct radeon_device *rdev, int i, uint64_t addr);
896 int (*cp_init)(struct radeon_device *rdev, unsigned ring_size);
897 void (*cp_fini)(struct radeon_device *rdev);
898 void (*cp_disable)(struct radeon_device *rdev);
1089 int (*asic_reset)(struct radeon_device *rdev);
1090 void (*gart_tlb_flush)(struct radeon_device *rdev);
1091 int (*gart_set_page)(struct radeon_device *rdev, int i, uint64_t addr);
1092 int (*cp_init)(struct radeon_device *rdev, unsigned ring_size);
1093 void (*cp_fini)(struct radeon_device *rdev);
1094 void (*cp_disable)(struct radeon_device *rdev);
899 void (*cp_commit)(struct radeon_device *rdev);
900 void (*ring_start)(struct radeon_device *rdev);
1095 void (*ring_start)(struct radeon_device *rdev);
901 int (*ring_test)(struct radeon_device *rdev);
902 void (*ring_ib_execute)(struct radeon_device *rdev, struct radeon_ib *ib);
1096
1097 struct {
1098 void (*ib_execute)(struct radeon_device *rdev, struct radeon_ib *ib);
1099 int (*ib_parse)(struct radeon_device *rdev, struct radeon_ib *ib);
1100 void (*emit_fence)(struct radeon_device *rdev, struct radeon_fence *fence);
1101 void (*emit_semaphore)(struct radeon_device *rdev, struct radeon_ring *cp,
1102 struct radeon_semaphore *semaphore, bool emit_wait);
1103 } ring[RADEON_NUM_RINGS];
1104
1105 int (*ring_test)(struct radeon_device *rdev, struct radeon_ring *cp);
903 int (*irq_set)(struct radeon_device *rdev);
904 int (*irq_process)(struct radeon_device *rdev);
905 u32 (*get_vblank_counter)(struct radeon_device *rdev, int crtc);
1106 int (*irq_set)(struct radeon_device *rdev);
1107 int (*irq_process)(struct radeon_device *rdev);
1108 u32 (*get_vblank_counter)(struct radeon_device *rdev, int crtc);
906 void (*fence_ring_emit)(struct radeon_device *rdev, struct radeon_fence *fence);
907 int (*cs_parse)(struct radeon_cs_parser *p);
908 int (*copy_blit)(struct radeon_device *rdev,
909 uint64_t src_offset,
910 uint64_t dst_offset,
911 unsigned num_gpu_pages,
912 struct radeon_fence *fence);
913 int (*copy_dma)(struct radeon_device *rdev,
914 uint64_t src_offset,

--- 212 unchanged lines hidden ---

1127int radeon_gem_set_domain_ioctl(struct drm_device *dev, void *data,
1128 struct drm_file *filp);
1129int radeon_gem_mmap_ioctl(struct drm_device *dev, void *data,
1130 struct drm_file *filp);
1131int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
1132 struct drm_file *filp);
1133int radeon_gem_wait_idle_ioctl(struct drm_device *dev, void *data,
1134 struct drm_file *filp);
1109 int (*cs_parse)(struct radeon_cs_parser *p);
1110 int (*copy_blit)(struct radeon_device *rdev,
1111 uint64_t src_offset,
1112 uint64_t dst_offset,
1113 unsigned num_gpu_pages,
1114 struct radeon_fence *fence);
1115 int (*copy_dma)(struct radeon_device *rdev,
1116 uint64_t src_offset,

--- 212 unchanged lines hidden (view full) ---

1329int radeon_gem_set_domain_ioctl(struct drm_device *dev, void *data,
1330 struct drm_file *filp);
1331int radeon_gem_mmap_ioctl(struct drm_device *dev, void *data,
1332 struct drm_file *filp);
1333int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
1334 struct drm_file *filp);
1335int radeon_gem_wait_idle_ioctl(struct drm_device *dev, void *data,
1336 struct drm_file *filp);
1337int radeon_gem_va_ioctl(struct drm_device *dev, void *data,
1338 struct drm_file *filp);
1135int radeon_cs_ioctl(struct drm_device *dev, void *data, struct drm_file *filp);
1136int radeon_gem_set_tiling_ioctl(struct drm_device *dev, void *data,
1137 struct drm_file *filp);
1138int radeon_gem_get_tiling_ioctl(struct drm_device *dev, void *data,
1139 struct drm_file *filp);
1140
1141/* VRAM scratch page for HDP bug, default vram page */
1142struct r600_vram_scratch {

--- 83 unchanged lines hidden ---

1226 void __iomem *rio_mem;
1227 resource_size_t rio_mem_size;
1228 struct radeon_clock clock;
1229 struct radeon_mc mc;
1230 struct radeon_gart gart;
1231 struct radeon_mode_info mode_info;
1232 struct radeon_scratch scratch;
1233 struct radeon_mman mman;
1339int radeon_cs_ioctl(struct drm_device *dev, void *data, struct drm_file *filp);
1340int radeon_gem_set_tiling_ioctl(struct drm_device *dev, void *data,
1341 struct drm_file *filp);
1342int radeon_gem_get_tiling_ioctl(struct drm_device *dev, void *data,
1343 struct drm_file *filp);
1344
1345/* VRAM scratch page for HDP bug, default vram page */
1346struct r600_vram_scratch {

--- 83 unchanged lines hidden ---

1430 void __iomem *rio_mem;
1431 resource_size_t rio_mem_size;
1432 struct radeon_clock clock;
1433 struct radeon_mc mc;
1434 struct radeon_gart gart;
1435 struct radeon_mode_info mode_info;
1436 struct radeon_scratch scratch;
1437 struct radeon_mman mman;
1234 struct radeon_fence_driver fence_drv;
1235 struct radeon_cp cp;
1236 /* cayman compute rings */
1237 struct radeon_cp cp1;
1238 struct radeon_cp cp2;
1438 rwlock_t fence_lock;
1439 struct radeon_fence_driver fence_drv[RADEON_NUM_RINGS];
1440 struct radeon_semaphore_driver semaphore_drv;
1441 struct radeon_ring ring[RADEON_NUM_RINGS];
1239 struct radeon_ib_pool ib_pool;
1240 struct radeon_irq irq;
1241 struct radeon_asic *asic;
1242 struct radeon_gem gem;
1243 struct radeon_pm pm;
1244 uint32_t bios_scratch[RADEON_BIOS_NUM_SCRATCH];
1245 struct radeon_mutex cs_mutex;
1246 struct radeon_wb wb;

--- 27 unchanged lines hidden ---

1274 uint8_t audio_category_code;
1275
1276 struct notifier_block acpi_nb;
1277 /* only one userspace can use Hyperz features or CMASK at a time */
1278 struct drm_file *hyperz_filp;
1279 struct drm_file *cmask_filp;
1280 /* i2c buses */
1281 struct radeon_i2c_chan *i2c_bus[RADEON_MAX_I2C_BUS];
1442 struct radeon_ib_pool ib_pool;
1443 struct radeon_irq irq;
1444 struct radeon_asic *asic;
1445 struct radeon_gem gem;
1446 struct radeon_pm pm;
1447 uint32_t bios_scratch[RADEON_BIOS_NUM_SCRATCH];
1448 struct radeon_mutex cs_mutex;
1449 struct radeon_wb wb;

--- 27 unchanged lines hidden ---

1477 uint8_t audio_category_code;
1478
1479 struct notifier_block acpi_nb;
1480 /* only one userspace can use Hyperz features or CMASK at a time */
1481 struct drm_file *hyperz_filp;
1482 struct drm_file *cmask_filp;
1483 /* i2c buses */
1484 struct radeon_i2c_chan *i2c_bus[RADEON_MAX_I2C_BUS];
1485 /* debugfs */
1486 struct radeon_debugfs debugfs[RADEON_DEBUGFS_MAX_COMPONENTS];
1487 unsigned debugfs_count;
1488 /* virtual memory */
1489 struct radeon_vm_manager vm_manager;
1282};
1283
1284int radeon_device_init(struct radeon_device *rdev,
1285 struct drm_device *ddev,
1286 struct pci_dev *pdev,
1287 uint32_t flags);
1288void radeon_device_fini(struct radeon_device *rdev);
1289int radeon_gpu_wait_for_idle(struct radeon_device *rdev);

--- 119 unchanged lines hidden ---

1409void radeon_combios_fini(struct radeon_device *rdev);
1410int radeon_atombios_init(struct radeon_device *rdev);
1411void radeon_atombios_fini(struct radeon_device *rdev);
1412
1413
1414/*
1415 * RING helpers.
1416 */
1490};
1491
1492int radeon_device_init(struct radeon_device *rdev,
1493 struct drm_device *ddev,
1494 struct pci_dev *pdev,
1495 uint32_t flags);
1496void radeon_device_fini(struct radeon_device *rdev);
1497int radeon_gpu_wait_for_idle(struct radeon_device *rdev);

--- 119 unchanged lines hidden ---

1617void radeon_combios_fini(struct radeon_device *rdev);
1618int radeon_atombios_init(struct radeon_device *rdev);
1619void radeon_atombios_fini(struct radeon_device *rdev);
1620
1621
1622/*
1623 * RING helpers.
1624 */
1417
1418#if DRM_DEBUG_CODE == 0
1625#if DRM_DEBUG_CODE == 0
1419static inline void radeon_ring_write(struct radeon_device *rdev, uint32_t v)
1626static inline void radeon_ring_write(struct radeon_ring *ring, uint32_t v)
1420{
1627{
1421 rdev->cp.ring[rdev->cp.wptr++] = v;
1422 rdev->cp.wptr &= rdev->cp.ptr_mask;
1423 rdev->cp.count_dw--;
1424 rdev->cp.ring_free_dw--;
1628 ring->ring[ring->wptr++] = v;
1629 ring->wptr &= ring->ptr_mask;
1630 ring->count_dw--;
1631 ring->ring_free_dw--;
1425}
1426#else
1427/* With debugging this is just too big to inline */
1632}
1633#else
1634/* With debugging this is just too big to inline */
1428void radeon_ring_write(struct radeon_device *rdev, uint32_t v);
1635void radeon_ring_write(struct radeon_ring *ring, uint32_t v);
1429#endif
1430
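
Since radeon_ring_write() now takes the ring rather than the device, callers select the ring explicitly. A minimal sketch follows, assuming the lock/write/commit pattern from the ring declarations earlier in this header; example_emit_nops() is a hypothetical helper, not code from the patch.

/* Hypothetical caller: reserve two dwords on the GFX ring, write two
 * NOPs using the ring's own nop opcode, then commit. */
static int example_emit_nops(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r;

	r = radeon_ring_lock(rdev, ring, 2);
	if (r)
		return r;
	radeon_ring_write(ring, ring->nop);
	radeon_ring_write(ring, ring->nop);
	radeon_ring_unlock_commit(rdev, ring);
	return 0;
}
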
1431/*
1432 * ASICs macro.
1433 */
1434#define radeon_init(rdev) (rdev)->asic->init((rdev))
1435#define radeon_fini(rdev) (rdev)->asic->fini((rdev))
1436#define radeon_resume(rdev) (rdev)->asic->resume((rdev))
1437#define radeon_suspend(rdev) (rdev)->asic->suspend((rdev))
1438#define radeon_cs_parse(p) rdev->asic->cs_parse((p))
1439#define radeon_vga_set_state(rdev, state) (rdev)->asic->vga_set_state((rdev), (state))
1636#endif
1637
1638/*
1639 * ASICs macro.
1640 */
1641#define radeon_init(rdev) (rdev)->asic->init((rdev))
1642#define radeon_fini(rdev) (rdev)->asic->fini((rdev))
1643#define radeon_resume(rdev) (rdev)->asic->resume((rdev))
1644#define radeon_suspend(rdev) (rdev)->asic->suspend((rdev))
1645#define radeon_cs_parse(p) rdev->asic->cs_parse((p))
1646#define radeon_vga_set_state(rdev, state) (rdev)->asic->vga_set_state((rdev), (state))
1440#define radeon_gpu_is_lockup(rdev) (rdev)->asic->gpu_is_lockup((rdev))
1647#define radeon_gpu_is_lockup(rdev, cp) (rdev)->asic->gpu_is_lockup((rdev), (cp))
1441#define radeon_asic_reset(rdev) (rdev)->asic->asic_reset((rdev))
1442#define radeon_gart_tlb_flush(rdev) (rdev)->asic->gart_tlb_flush((rdev))
1443#define radeon_gart_set_page(rdev, i, p) (rdev)->asic->gart_set_page((rdev), (i), (p))
1648#define radeon_asic_reset(rdev) (rdev)->asic->asic_reset((rdev))
1649#define radeon_gart_tlb_flush(rdev) (rdev)->asic->gart_tlb_flush((rdev))
1650#define radeon_gart_set_page(rdev, i, p) (rdev)->asic->gart_set_page((rdev), (i), (p))
1444#define radeon_cp_commit(rdev) (rdev)->asic->cp_commit((rdev))
1445#define radeon_ring_start(rdev) (rdev)->asic->ring_start((rdev))
1651#define radeon_ring_start(rdev) (rdev)->asic->ring_start((rdev))
1446#define radeon_ring_test(rdev) (rdev)->asic->ring_test((rdev))
1447#define radeon_ring_ib_execute(rdev, ib) (rdev)->asic->ring_ib_execute((rdev), (ib))
1652#define radeon_ring_test(rdev, cp) (rdev)->asic->ring_test((rdev), (cp))
1653#define radeon_ring_ib_execute(rdev, r, ib) (rdev)->asic->ring[(r)].ib_execute((rdev), (ib))
1654#define radeon_ring_ib_parse(rdev, r, ib) (rdev)->asic->ring[(r)].ib_parse((rdev), (ib))
1448#define radeon_irq_set(rdev) (rdev)->asic->irq_set((rdev))
1449#define radeon_irq_process(rdev) (rdev)->asic->irq_process((rdev))
1450#define radeon_get_vblank_counter(rdev, crtc) (rdev)->asic->get_vblank_counter((rdev), (crtc))
1655#define radeon_irq_set(rdev) (rdev)->asic->irq_set((rdev))
1656#define radeon_irq_process(rdev) (rdev)->asic->irq_process((rdev))
1657#define radeon_get_vblank_counter(rdev, crtc) (rdev)->asic->get_vblank_counter((rdev), (crtc))
1451#define radeon_fence_ring_emit(rdev, fence) (rdev)->asic->fence_ring_emit((rdev), (fence))
1658#define radeon_fence_ring_emit(rdev, r, fence) (rdev)->asic->ring[(r)].emit_fence((rdev), (fence))
1659#define radeon_semaphore_ring_emit(rdev, r, cp, semaphore, emit_wait) (rdev)->asic->ring[(r)].emit_semaphore((rdev), (cp), (semaphore), (emit_wait))
1452#define radeon_copy_blit(rdev, s, d, np, f) (rdev)->asic->copy_blit((rdev), (s), (d), (np), (f))
1453#define radeon_copy_dma(rdev, s, d, np, f) (rdev)->asic->copy_dma((rdev), (s), (d), (np), (f))
1454#define radeon_copy(rdev, s, d, np, f) (rdev)->asic->copy((rdev), (s), (d), (np), (f))
1455#define radeon_get_engine_clock(rdev) (rdev)->asic->get_engine_clock((rdev))
1456#define radeon_set_engine_clock(rdev, e) (rdev)->asic->set_engine_clock((rdev), (e))
1457#define radeon_get_memory_clock(rdev) (rdev)->asic->get_memory_clock((rdev))
1458#define radeon_set_memory_clock(rdev, e) (rdev)->asic->set_memory_clock((rdev), (e))
1459#define radeon_get_pcie_lanes(rdev) (rdev)->asic->get_pcie_lanes((rdev))

--- 38 unchanged lines hidden ---

1498extern bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo);
1499extern void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base);
1500extern void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
1501extern int radeon_resume_kms(struct drm_device *dev);
1502extern int radeon_suspend_kms(struct drm_device *dev, pm_message_t state);
1503extern void radeon_ttm_set_active_vram_size(struct radeon_device *rdev, u64 size);
1504
1505/*
1660#define radeon_copy_blit(rdev, s, d, np, f) (rdev)->asic->copy_blit((rdev), (s), (d), (np), (f))
1661#define radeon_copy_dma(rdev, s, d, np, f) (rdev)->asic->copy_dma((rdev), (s), (d), (np), (f))
1662#define radeon_copy(rdev, s, d, np, f) (rdev)->asic->copy((rdev), (s), (d), (np), (f))
1663#define radeon_get_engine_clock(rdev) (rdev)->asic->get_engine_clock((rdev))
1664#define radeon_set_engine_clock(rdev, e) (rdev)->asic->set_engine_clock((rdev), (e))
1665#define radeon_get_memory_clock(rdev) (rdev)->asic->get_memory_clock((rdev))
1666#define radeon_set_memory_clock(rdev, e) (rdev)->asic->set_memory_clock((rdev), (e))
1667#define radeon_get_pcie_lanes(rdev) (rdev)->asic->get_pcie_lanes((rdev))

--- 38 unchanged lines hidden ---

1706extern bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo);
1707extern void radeon_vram_location(struct radeon_device *rdev, struct radeon_mc *mc, u64 base);
1708extern void radeon_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
1709extern int radeon_resume_kms(struct drm_device *dev);
1710extern int radeon_suspend_kms(struct drm_device *dev, pm_message_t state);
1711extern void radeon_ttm_set_active_vram_size(struct radeon_device *rdev, u64 size);
1712
1713/*
1714 * vm
1715 */
1716int radeon_vm_manager_init(struct radeon_device *rdev);
1717void radeon_vm_manager_fini(struct radeon_device *rdev);
1718int radeon_vm_manager_start(struct radeon_device *rdev);
1719int radeon_vm_manager_suspend(struct radeon_device *rdev);
1720int radeon_vm_init(struct radeon_device *rdev, struct radeon_vm *vm);
1721void radeon_vm_fini(struct radeon_device *rdev, struct radeon_vm *vm);
1722int radeon_vm_bind(struct radeon_device *rdev, struct radeon_vm *vm);
1723void radeon_vm_unbind(struct radeon_device *rdev, struct radeon_vm *vm);
1724int radeon_vm_bo_update_pte(struct radeon_device *rdev,
1725 struct radeon_vm *vm,
1726 struct radeon_bo *bo,
1727 struct ttm_mem_reg *mem);
1728void radeon_vm_bo_invalidate(struct radeon_device *rdev,
1729 struct radeon_bo *bo);
1730int radeon_vm_bo_add(struct radeon_device *rdev,
1731 struct radeon_vm *vm,
1732 struct radeon_bo *bo,
1733 uint64_t offset,
1734 uint32_t flags);
1735int radeon_vm_bo_rmv(struct radeon_device *rdev,
1736 struct radeon_vm *vm,
1737 struct radeon_bo *bo);
1738
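
As an illustration of how these entry points fit together, a rough sketch follows; example_vm_map_bo() is hypothetical, the offset/flags/mem arguments are placeholders supplied by the caller, and their exact semantics are defined by the implementation rather than by this header.

/* Hypothetical helper: reserve a VA range for a BO in a VM and fill in
 * its page table entries, undoing the add on failure. */
static int example_vm_map_bo(struct radeon_device *rdev, struct radeon_vm *vm,
			     struct radeon_bo *bo, uint64_t offset,
			     uint32_t flags, struct ttm_mem_reg *mem)
{
	int r;

	r = radeon_vm_bo_add(rdev, vm, bo, offset, flags);
	if (r)
		return r;

	r = radeon_vm_bo_update_pte(rdev, vm, bo, mem);
	if (r)
		radeon_vm_bo_rmv(rdev, vm, bo);
	return r;
}
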
1739
1740/*
1506 * R600 vram scratch functions
1507 */
1508int r600_vram_scratch_init(struct radeon_device *rdev);
1509void r600_vram_scratch_fini(struct radeon_device *rdev);
1510
1511/*
1512 * r600 functions used by radeon_encoder.c
1513 */

--- 17 unchanged lines hidden ---
1741 * R600 vram scratch functions
1742 */
1743int r600_vram_scratch_init(struct radeon_device *rdev);
1744void r600_vram_scratch_fini(struct radeon_device *rdev);
1745
1746/*
1747 * r600 functions used by radeon_encoder.c
1748 */

--- 17 unchanged lines hidden ---