12c86e55dSMatthew Auld /* SPDX-License-Identifier: MIT */
22c86e55dSMatthew Auld /*
32c86e55dSMatthew Auld * Copyright © 2020 Intel Corporation
42c86e55dSMatthew Auld */
52c86e55dSMatthew Auld
62c86e55dSMatthew Auld #ifndef __GEN6_PPGTT_H__
72c86e55dSMatthew Auld #define __GEN6_PPGTT_H__
82c86e55dSMatthew Auld
92c86e55dSMatthew Auld #include "intel_gtt.h"
102c86e55dSMatthew Auld
1147b08693SMaarten Lankhorst struct i915_gem_ww_ctx;
1247b08693SMaarten Lankhorst
/* Gen6-specific ppgtt state wrapped around the common i915_ppgtt base. */
struct gen6_ppgtt {
	struct i915_ppgtt base;

	/* NOTE(review): presumably serialises page-directory flushes — confirm in gen6_ppgtt.c */
	struct mutex flush;
	/* vma backing the page directory (pinned via gen6_ppgtt_pin()) */
	struct i915_vma *vma;
	/* iomem mapping through which the PDEs/PTEs are written */
	gen6_pte_t __iomem *pd_addr;
	/* NOTE(review): looks like the PP_DIR_BASE value for this ppgtt — verify against users */
	u32 pp_dir;

	/* number of outstanding pins; see gen6_ppgtt_pin()/gen6_ppgtt_unpin() */
	atomic_t pin_count;

	/* request a scan for page tables that are no longer referenced */
	bool scan_for_unused_pt;
};
252c86e55dSMatthew Auld
/* Index of the PTE covering @addr within its page table. */
static inline u32 gen6_pte_index(u32 addr)
{
	const u32 shift = GEN6_PDE_SHIFT;

	return i915_pte_index(addr, shift);
}
302c86e55dSMatthew Auld
/* Number of PTEs needed to map [@addr, @addr + @length) within one PD entry. */
static inline u32 gen6_pte_count(u32 addr, u32 length)
{
	const u32 shift = GEN6_PDE_SHIFT;

	return i915_pte_count(addr, length, shift);
}
352c86e55dSMatthew Auld
/* Index of the PDE covering @addr within the page directory. */
static inline u32 gen6_pde_index(u32 addr)
{
	const u32 shift = GEN6_PDE_SHIFT;

	return i915_pde_index(addr, shift);
}
402c86e55dSMatthew Auld
#define __to_gen6_ppgtt(base) container_of(base, struct gen6_ppgtt, base)

/* Downcast the common i915_ppgtt to its enclosing gen6_ppgtt. */
static inline struct gen6_ppgtt *to_gen6_ppgtt(struct i915_ppgtt *base)
{
	/* enforce that 'base' is the first member, so the cast is a no-op */
	BUILD_BUG_ON(offsetof(struct gen6_ppgtt, base));
	return __to_gen6_ppgtt(base);
}
482c86e55dSMatthew Auld
/*
 * gen6_for_each_pde() iterates over every pde from start until start+length.
 * If start and start+length are not perfectly divisible, the macro will round
 * down and up as needed. Start=0 and length=2G effectively iterates over
 * every PDE in the system. The macro modifies ALL its parameters except 'pd',
 * so each of the other parameters should preferably be a simple variable, or
 * at most an lvalue with no side-effects!
 *
 * The increment step uses a GCC statement expression to advance 'start' and
 * shrink 'length' by the span covered by the current PDE before bumping 'iter'.
 */
#define gen6_for_each_pde(pt, pd, start, length, iter)			\
	for (iter = gen6_pde_index(start);				\
	     length > 0 && iter < I915_PDES &&				\
		     (pt = i915_pt_entry(pd, iter), true);		\
	     ({ u32 temp = ALIGN(start + 1, 1 << GEN6_PDE_SHIFT);	\
		    temp = min(temp - start, length);			\
		    start += temp; length -= temp; }), ++iter)
642c86e55dSMatthew Auld
/*
 * gen6_for_all_pdes() visits every PDE slot in 'pd' in order, regardless of
 * address range. 'pt' is set to the page table for each slot (may be NULL —
 * callers should check); 'iter' must be a plain variable.
 */
#define gen6_for_all_pdes(pt, pd, iter)					\
	for (iter = 0;							\
	     iter < I915_PDES &&					\
		     (pt = i915_pt_entry(pd, iter), true);		\
	     ++iter)
702c86e55dSMatthew Auld
/* Pin the ppgtt's page directory; may lock objects via the ww context. */
int gen6_ppgtt_pin(struct i915_ppgtt *base, struct i915_gem_ww_ctx *ww);
/* Drop one pin taken by gen6_ppgtt_pin(). */
void gen6_ppgtt_unpin(struct i915_ppgtt *base);
/* Enable ppgtt support on the GT (gen6 / gen7 hardware variants). */
void gen6_ppgtt_enable(struct intel_gt *gt);
void gen7_ppgtt_enable(struct intel_gt *gt);
/* Allocate and initialise a gen6 ppgtt; returns ERR_PTR on failure (presumably — confirm in gen6_ppgtt.c). */
struct i915_ppgtt *gen6_ppgtt_create(struct intel_gt *gt);
762c86e55dSMatthew Auld
772c86e55dSMatthew Auld #endif
78