Lines matching references to hmmu_id

In gaudi2_is_hmmu_enabled(), where hmmu_id is a function argument:

    5554  bool gaudi2_is_hmmu_enabled(struct hl_device *hdev, int dcore_id, int hmmu_id)
    5559  hw_cap = HW_CAP_DCORE0_DMMU0 << (NUM_OF_HMMU_PER_DCORE * dcore_id + hmmu_id);
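The shift at line 5559 is the whole enable check: each HMMU owns one capability bit, indexed dcore-major and HMMU-minor. Below is a minimal compile-and-run sketch of that check, assuming placeholder values for NUM_OF_HMMU_PER_DCORE and for the bit position of HW_CAP_DCORE0_DMMU0 (the real definitions live in the gaudi2 driver headers); hmmu_is_enabled() is a stand-in name, not the driver function.

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative values; the real NUM_OF_HMMU_PER_DCORE and the bit position
     * of HW_CAP_DCORE0_DMMU0 come from the gaudi2 driver headers. */
    #define NUM_OF_HMMU_PER_DCORE   4
    #define HW_CAP_DCORE0_DMMU0     (1ULL << 0)

    /* Mirrors lines 5554/5559: every HMMU owns one capability bit, laid out
     * dcore-major (dcore_id) then HMMU-minor (hmmu_id) within the dcore. */
    static bool hmmu_is_enabled(uint64_t hw_cap_initialized, int dcore_id, int hmmu_id)
    {
            uint64_t hw_cap = HW_CAP_DCORE0_DMMU0 <<
                              (NUM_OF_HMMU_PER_DCORE * dcore_id + hmmu_id);

            return !!(hw_cap_initialized & hw_cap);
    }

    int main(void)
    {
            /* With these placeholder values, dcore 1 / HMMU 2 maps to bit 6. */
            uint64_t caps = HW_CAP_DCORE0_DMMU0 << (NUM_OF_HMMU_PER_DCORE * 1 + 2);

            return hmmu_is_enabled(caps, 1, 2) ? 0 : 1;
    }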
In get_hmmu_stlb_base(), where hmmu_id is a function argument:

    5568  static inline u32 get_hmmu_stlb_base(int dcore_id, int hmmu_id)
    5572  offset = (u32) (dcore_id * DCORE_OFFSET + hmmu_id * DCORE_HMMU_OFFSET);
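Line 5572 shows the addressing scheme behind every per-HMMU register access: a per-dcore stride plus a per-HMMU stride within the dcore. A hedged sketch follows, with made-up strides standing in for DCORE_OFFSET / DCORE_HMMU_OFFSET; DCORE0_HMMU0_STLB_BASE is an invented name for the first STLB block's MMIO base, which the listing does not show.

    #include <stdint.h>
    #include <stdio.h>

    /* Placeholder constants: DCORE0_HMMU0_STLB_BASE is an invented name for the
     * first STLB block's MMIO base, and the strides stand in for the driver's
     * DCORE_OFFSET / DCORE_HMMU_OFFSET. */
    #define DCORE0_HMMU0_STLB_BASE  0x01000000u
    #define DCORE_OFFSET            0x00200000u
    #define DCORE_HMMU_OFFSET       0x00040000u

    /* Same shape as line 5572: per-dcore stride plus per-HMMU stride inside
     * the dcore, added to the base of the first STLB block. */
    static inline uint32_t hmmu_stlb_base(int dcore_id, int hmmu_id)
    {
            uint32_t offset = (uint32_t)(dcore_id * DCORE_OFFSET +
                                         hmmu_id * DCORE_HMMU_OFFSET);

            return DCORE0_HMMU0_STLB_BASE + offset;
    }

    int main(void)
    {
            printf("dcore 2, hmmu 3 -> 0x%08x\n",
                   (unsigned int)hmmu_stlb_base(2, 3));
            return 0;
    }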
In gaudi2_hmmu_invalidate_cache_trigger(), where hmmu_id is a function argument:

    5612  int dcore_id, int hmmu_id,
    5615  u32 stlb_base = get_hmmu_stlb_base(dcore_id, hmmu_id);
In gaudi2_hmmu_invalidate_cache_status_poll(), where hmmu_id is a function argument:

    5621  int dcore_id, int hmmu_id,
    5624  u32 stlb_base = get_hmmu_stlb_base(dcore_id, hmmu_id);
In gaudi2_hmmus_invalidate_cache(), where hmmu_id is a local variable:

    5632  int dcore_id, hmmu_id;
    5636  for (hmmu_id = 0 ; hmmu_id < NUM_OF_HMMU_PER_DCORE ; hmmu_id++) {
    5637  if (!gaudi2_is_hmmu_enabled(hdev, dcore_id, hmmu_id))
    5640  gaudi2_hmmu_invalidate_cache_trigger(hdev, dcore_id, hmmu_id, inv_params);
    5646  for (hmmu_id = 0 ; hmmu_id < NUM_OF_HMMU_PER_DCORE ; hmmu_id++) {
    5649  if (!gaudi2_is_hmmu_enabled(hdev, dcore_id, hmmu_id))
    5652  rc = gaudi2_hmmu_invalidate_cache_status_poll(hdev, dcore_id, hmmu_id,
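Lines 5632-5652 outline the two-pass structure of gaudi2_hmmus_invalidate_cache(): one sweep over every enabled HMMU to trigger the invalidation, then a second sweep to poll each one for completion, presumably so the invalidations overlap instead of running serially. Here is a self-contained model of that structure, assuming an illustrative NUM_OF_DCORES and using printf/return-0 stubs in place of the real enable check, trigger, and status poll.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative counts; stand-ins for the driver's NUM_OF_DCORES and
     * NUM_OF_HMMU_PER_DCORE. */
    #define NUM_OF_DCORES           4
    #define NUM_OF_HMMU_PER_DCORE   4

    /* Stubs standing in for gaudi2_is_hmmu_enabled(),
     * gaudi2_hmmu_invalidate_cache_trigger() and
     * gaudi2_hmmu_invalidate_cache_status_poll(). */
    static bool hmmu_is_enabled(int dcore_id, int hmmu_id)
    {
            (void)dcore_id; (void)hmmu_id;
            return true;
    }

    static void hmmu_invalidate_trigger(int dcore_id, int hmmu_id)
    {
            printf("trigger dcore %d hmmu %d\n", dcore_id, hmmu_id);
    }

    static int hmmu_invalidate_poll(int dcore_id, int hmmu_id)
    {
            printf("poll    dcore %d hmmu %d\n", dcore_id, hmmu_id);
            return 0;
    }

    static int hmmus_invalidate_cache(void)
    {
            int dcore_id, hmmu_id, rc = 0;

            /* Pass 1: kick the invalidation on every enabled HMMU. */
            for (dcore_id = 0 ; dcore_id < NUM_OF_DCORES ; dcore_id++)
                    for (hmmu_id = 0 ; hmmu_id < NUM_OF_HMMU_PER_DCORE ; hmmu_id++) {
                            if (!hmmu_is_enabled(dcore_id, hmmu_id))
                                    continue;

                            hmmu_invalidate_trigger(dcore_id, hmmu_id);
                    }

            /* Pass 2: only then wait for each enabled HMMU to finish. */
            for (dcore_id = 0 ; dcore_id < NUM_OF_DCORES ; dcore_id++)
                    for (hmmu_id = 0 ; hmmu_id < NUM_OF_HMMU_PER_DCORE ; hmmu_id++) {
                            if (!hmmu_is_enabled(dcore_id, hmmu_id))
                                    continue;

                            rc = hmmu_invalidate_poll(dcore_id, hmmu_id);
                            if (rc)
                                    return rc;
                    }

            return rc;
    }

    int main(void)
    {
            return hmmus_invalidate_cache();
    }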
In gaudi2_dcore_hmmu_init(), where hmmu_id is a function argument:

    5858  int hmmu_id)
    5866  dmmu_seq = NUM_OF_HMMU_PER_DCORE * dcore_id + hmmu_id;
    5876  offset = (u32) (dcore_id * DCORE_OFFSET + hmmu_id * DCORE_HMMU_OFFSET);
In gaudi2_hbm_mmu_init(), where hmmu_id is a local variable:

    5911  int rc, dcore_id, hmmu_id;
    5914  for (hmmu_id = 0 ; hmmu_id < NUM_OF_HMMU_PER_DCORE; hmmu_id++) {
    5915  rc = gaudi2_dcore_hmmu_init(hdev, dcore_id, hmmu_id);
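Lines 5858-5915 show the matching bring-up path: gaudi2_hbm_mmu_init() walks every (dcore_id, hmmu_id) pair and calls gaudi2_dcore_hmmu_init(), which derives both a flat sequence number (dmmu_seq, the same linear formula as the capability bit at line 5559) and a register-block offset from that pair. A sketch of that shape under the same placeholder strides; the real register programming is omitted, and dcore_hmmu_init()/hbm_mmu_init() are stand-in names.

    #include <stdio.h>

    /* Illustrative counts and strides; the real values come from the gaudi2
     * headers (NUM_OF_DCORES, NUM_OF_HMMU_PER_DCORE, DCORE_OFFSET,
     * DCORE_HMMU_OFFSET). */
    #define NUM_OF_DCORES           4
    #define NUM_OF_HMMU_PER_DCORE   4
    #define DCORE_OFFSET            0x00200000u
    #define DCORE_HMMU_OFFSET       0x00040000u

    /* Models gaudi2_dcore_hmmu_init(): one HMMU is identified both by a flat
     * sequence number and by an MMIO offset, each derived from the same pair.
     * The real function programs the HMMU/STLB registers at that offset; here
     * we only report the derived values. */
    static int dcore_hmmu_init(int dcore_id, int hmmu_id)
    {
            unsigned int dmmu_seq = NUM_OF_HMMU_PER_DCORE * dcore_id + hmmu_id;
            unsigned int offset = dcore_id * DCORE_OFFSET +
                                  hmmu_id * DCORE_HMMU_OFFSET;

            printf("init hmmu seq %u at offset 0x%08x\n", dmmu_seq, offset);
            return 0;
    }

    /* Models gaudi2_hbm_mmu_init(): walk every dcore/HMMU pair and stop on
     * the first failure. */
    static int hbm_mmu_init(void)
    {
            int rc, dcore_id, hmmu_id;

            for (dcore_id = 0 ; dcore_id < NUM_OF_DCORES ; dcore_id++)
                    for (hmmu_id = 0 ; hmmu_id < NUM_OF_HMMU_PER_DCORE ; hmmu_id++) {
                            rc = dcore_hmmu_init(dcore_id, hmmu_id);
                            if (rc)
                                    return rc;
                    }

            return 0;
    }

    int main(void)
    {
            return hbm_mmu_init();
    }

Because dmmu_seq and offset reuse the same formulas as the lookup paths above, the enable check, the STLB addressing, and the init sequence all agree on which capability bit and which register block belong to a given (dcore_id, hmmu_id).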