/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2020 Intel Corporation
 */

#ifndef __GEN6_PPGTT_H__
#define __GEN6_PPGTT_H__

#include "intel_gtt.h"

/* Forward declaration only; a pointer is passed to gen6_ppgtt_pin() below. */
struct i915_gem_ww_ctx;

/*
 * Gen6/7-specific per-process GTT state, wrapping the generic
 * struct i915_ppgtt. Obtain one via gen6_ppgtt_create() and downcast
 * with to_gen6_ppgtt().
 */
struct gen6_ppgtt {
	/* Must be the first member: to_gen6_ppgtt() relies on offset 0. */
	struct i915_ppgtt base;

	/*
	 * NOTE(review): presumably serialises PD/PTE flushes (see the
	 * separate pin_mutex below for pinning) — confirm against gen6_ppgtt.c.
	 */
	struct mutex flush;
	/* assumes this VMA backs the page directory itself — TODO confirm */
	struct i915_vma *vma;
	/* iomem mapping through which the PD's PTE entries are written */
	gen6_pte_t __iomem *pd_addr;
	/* value programmed as the HW page-directory base (PP_DIR) */
	u32 pp_dir;

	/* Pin refcount, managed by gen6_ppgtt_pin()/gen6_ppgtt_unpin(). */
	atomic_t pin_count;
	struct mutex pin_mutex;

	/* When set, look for page tables that can be reclaimed/freed. */
	bool scan_for_unused_pt;
};

/* Index of the PTE within its page table for a GTT address. */
static inline u32 gen6_pte_index(u32 addr)
{
	return i915_pte_index(addr, GEN6_PDE_SHIFT);
}

/* Number of PTEs spanned by [addr, addr + length) within one page table. */
static inline u32 gen6_pte_count(u32 addr, u32 length)
{
	return i915_pte_count(addr, length, GEN6_PDE_SHIFT);
}

/* Index of the PDE within the page directory for a GTT address. */
static inline u32 gen6_pde_index(u32 addr)
{
	return i915_pde_index(addr, GEN6_PDE_SHIFT);
}

#define __to_gen6_ppgtt(base) container_of(base, struct gen6_ppgtt, base)

/*
 * Downcast a generic i915_ppgtt to its gen6 container. The BUILD_BUG_ON
 * proves 'base' sits at offset 0, so the container_of is a plain cast.
 */
static inline struct gen6_ppgtt *to_gen6_ppgtt(struct i915_ppgtt *base)
{
	BUILD_BUG_ON(offsetof(struct gen6_ppgtt, base));
	return __to_gen6_ppgtt(base);
}

/*
 * gen6_for_each_pde() iterates over every pde from start until start+length.
 * If start and start+length are not perfectly divisible, the macro will round
 * down and up as needed. Start=0 and length=2G effectively iterates over
 * every PDE in the system. The macro modifies ALL its parameters except 'pd',
 * so each of the other parameters should preferably be a simple variable, or
 * at most an lvalue with no side-effects!
 */
#define gen6_for_each_pde(pt, pd, start, length, iter)			\
	for (iter = gen6_pde_index(start);				\
	     length > 0 && iter < I915_PDES &&				\
		     (pt = i915_pt_entry(pd, iter), true);		\
	     ({ u32 temp = ALIGN(start+1, 1 << GEN6_PDE_SHIFT);		\
		    temp = min(temp - start, length);			\
		    start += temp, length -= temp; }), ++iter)

/* Iterate every PDE slot (0..I915_PDES-1), regardless of address range. */
#define gen6_for_all_pdes(pt, pd, iter)					\
	for (iter = 0;							\
	     iter < I915_PDES &&					\
		     (pt = i915_pt_entry(pd, iter), true);		\
	     ++iter)

/* Pin/unpin take the generic i915_ppgtt; implementations downcast to gen6. */
int gen6_ppgtt_pin(struct i915_ppgtt *base, struct i915_gem_ww_ctx *ww);
void gen6_ppgtt_unpin(struct i915_ppgtt *base);
void gen6_ppgtt_unpin_all(struct i915_ppgtt *base);
void gen6_ppgtt_enable(struct intel_gt *gt);
void gen7_ppgtt_enable(struct intel_gt *gt);
struct i915_ppgtt *gen6_ppgtt_create(struct intel_gt *gt);

#endif