/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2020 Intel Corporation
 */

#ifndef __GEN6_PPGTT_H__
#define __GEN6_PPGTT_H__

#include "intel_gtt.h"

/*
 * struct gen6_ppgtt - gen6-specific wrapper around the generic i915 ppgtt.
 *
 * @base must remain the first member: to_gen6_ppgtt() below relies on it
 * sitting at offset 0 (enforced by the BUILD_BUG_ON there) so the upcast
 * via container_of() is a no-op.
 */
struct gen6_ppgtt {
	/* Generic ppgtt; must be first (see to_gen6_ppgtt()). */
	struct i915_ppgtt base;

	/* Serialises flushing of the page directory. */
	struct mutex flush;
	/* GGTT vma backing the page directory. */
	struct i915_vma *vma;
	/* iomem mapping of the page-directory entries. */
	gen6_pte_t __iomem *pd_addr;
	/* Page-directory offset programmed into the HW. */
	u32 pp_dir;

	/* Pin refcount; pin/unpin transitions guarded by @pin_mutex. */
	atomic_t pin_count;
	struct mutex pin_mutex;

	/* Request a sweep for page tables that are no longer referenced. */
	bool scan_for_unused_pt;
};

/* Index of the PTE within its page table for a gen6 GTT address. */
static inline u32 gen6_pte_index(u32 addr)
{
	return i915_pte_index(addr, GEN6_PDE_SHIFT);
}

/* Number of PTEs needed to map @length bytes starting at @addr. */
static inline u32 gen6_pte_count(u32 addr, u32 length)
{
	return i915_pte_count(addr, length, GEN6_PDE_SHIFT);
}

/* Index of the PDE covering a gen6 GTT address. */
static inline u32 gen6_pde_index(u32 addr)
{
	return i915_pde_index(addr, GEN6_PDE_SHIFT);
}

#define __to_gen6_ppgtt(base) container_of(base, struct gen6_ppgtt, base)

/*
 * Upcast from the generic ppgtt to the gen6 container. Safe only because
 * gen6_ppgtt embeds its base at offset 0, which the BUILD_BUG_ON verifies
 * at compile time.
 */
static inline struct gen6_ppgtt *to_gen6_ppgtt(struct i915_ppgtt *base)
{
	BUILD_BUG_ON(offsetof(struct gen6_ppgtt, base));
	return __to_gen6_ppgtt(base);
}
/*
 * gen6_for_each_pde() iterates over every pde from start until start+length.
 * If start and start+length are not perfectly divisible, the macro will round
 * down and up as needed. Start=0 and length=2G effectively iterates over
 * every PDE in the system. The macro modifies ALL its parameters except 'pd',
 * so each of the other parameters should preferably be a simple variable, or
 * at most an lvalue with no side-effects!
 */
#define gen6_for_each_pde(pt, pd, start, length, iter)			\
	for (iter = gen6_pde_index(start);				\
	     length > 0 && iter < I915_PDES &&				\
		     (pt = i915_pt_entry(pd, iter), true);		\
	     /*
	      * Advance start/length by however much of the current PDE
	      * span remains (clamped to length), then step the index.
	      * Keep this ordering: start/length must be updated before
	      * the next loop-condition check uses them.
	      */							\
	     ({ u32 temp = ALIGN(start + 1, 1 << GEN6_PDE_SHIFT);	\
		temp = min(temp - start, length);			\
		start += temp, length -= temp; }), ++iter)

/*
 * Iterate over all I915_PDES page-directory entries of @pd, regardless of
 * whether they are in use; @pt and @iter are written each iteration.
 */
#define gen6_for_all_pdes(pt, pd, iter)					\
	for (iter = 0;							\
	     iter < I915_PDES &&					\
		     (pt = i915_pt_entry(pd, iter), true);		\
	     ++iter)

/* Pin/unpin management of the gen6 ppgtt backing storage. */
int gen6_ppgtt_pin(struct i915_ppgtt *base);
void gen6_ppgtt_unpin(struct i915_ppgtt *base);
void gen6_ppgtt_unpin_all(struct i915_ppgtt *base);
/* Enable full-ppgtt on gen6/gen7 hardware respectively. */
void gen6_ppgtt_enable(struct intel_gt *gt);
void gen7_ppgtt_enable(struct intel_gt *gt);
/* Allocate and initialise a gen6 ppgtt for @gt. */
struct i915_ppgtt *gen6_ppgtt_create(struct intel_gt *gt);

#endif