Searched refs: num_compute_rings  (Results 1 – 11 of 11)  sorted by relevance
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
  200  if (adev->gfx.num_compute_rings > 1 &&  in amdgpu_gfx_is_high_priority_compute_queue()
  213  adev->gfx.num_compute_rings);  in amdgpu_gfx_compute_queue_acquire()
  445  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in amdgpu_gfx_mqd_sw_init()
  446  j = i + xcc_id * adev->gfx.num_compute_rings;  in amdgpu_gfx_mqd_sw_init()
  486  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in amdgpu_gfx_mqd_sw_fini()
  487  j = i + xcc_id * adev->gfx.num_compute_rings;  in amdgpu_gfx_mqd_sw_fini()
  514  adev->gfx.num_compute_rings)) {  in amdgpu_gfx_disable_kcq()
  519  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in amdgpu_gfx_disable_kcq()
  520  j = i + xcc_id * adev->gfx.num_compute_rings;  in amdgpu_gfx_disable_kcq()
  610  adev->gfx.num_compute_rings +  in amdgpu_gfx_enable_kcq()
  [all …]
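A recurring idiom in the amdgpu_gfx.c hits (445/446, 486/487, 519/520) is recovering a flat ring index from a per-partition one: compute rings for all XCC partitions live in a single array, and partition xcc_id's ring i sits at i + xcc_id * num_compute_rings. A minimal standalone sketch of that arithmetic follows; NUM_COMPUTE_RINGS, NUM_XCC, and struct ring are illustrative stand-ins, not the kernel's types or values.

#include <stdio.h>

#define NUM_COMPUTE_RINGS 4   /* stand-in for adev->gfx.num_compute_rings */
#define NUM_XCC           2   /* stand-in for the XCC partition count */

struct ring { int global_idx; };

static struct ring compute_ring[NUM_COMPUTE_RINGS * NUM_XCC];

static void init_rings_for_xcc(int xcc_id)
{
	for (int i = 0; i < NUM_COMPUTE_RINGS; i++) {
		/* same offset arithmetic as the mqd_sw_init/sw_fini hits */
		int j = i + xcc_id * NUM_COMPUTE_RINGS;
		compute_ring[j].global_idx = j;
	}
}

int main(void)
{
	for (int xcc = 0; xcc < NUM_XCC; xcc++)
		init_rings_for_xcc(xcc);
	for (int j = 0; j < NUM_COMPUTE_RINGS * NUM_XCC; j++)
		printf("ring %d\n", compute_ring[j].global_idx);
	return 0;
}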
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
  469  adev->gfx.num_compute_rings * num_xcc * GFX9_MEC_HPD_SIZE;  in gfx_v9_4_3_mec_init()
  747  ring = &adev->gfx.compute_ring[xcc_id * adev->gfx.num_compute_rings +  in gfx_v9_4_3_compute_ring_init()
  762  (ring_id + xcc_id * adev->gfx.num_compute_rings) *  in gfx_v9_4_3_compute_ring_init()
  883  for (i = 0; i < adev->gfx.num_compute_rings * num_xcc; i++)  in gfx_v9_4_3_sw_fini()
  1842  for (j = 0; j < adev->gfx.num_compute_rings; j++) {  in gfx_v9_4_3_xcc_kcq_fini_register()
  1843  ring = &adev->gfx.compute_ring[j + xcc_id * adev->gfx.num_compute_rings];  in gfx_v9_4_3_xcc_kcq_fini_register()
  1889  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v9_4_3_xcc_kcq_resume()
  1890  ring = &adev->gfx.compute_ring[i + xcc_id * adev->gfx.num_compute_rings];  in gfx_v9_4_3_xcc_kcq_resume()
  1934  for (j = 0; j < adev->gfx.num_compute_rings; j++) {  in gfx_v9_4_3_xcc_cp_resume()
  1936  [j + xcc_id * adev->gfx.num_compute_rings];  in gfx_v9_4_3_xcc_cp_resume()
  [all …]
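The gfx_v9_4_3_mec_init() hit at 469 sizes the MEC's HPD backing buffer as one fixed-size slot per compute ring, multiplied across XCC partitions; the other gfx versions below (736, 1700, 1312, 4202) do the same without the num_xcc factor. A tiny sketch of that sizing arithmetic; the 4096-byte slot size matches GFX9_MEC_HPD_SIZE in the gfx_v9 sources but should be treated as an assumption here, and the counts are example values.

#include <stdio.h>

#define GFX9_MEC_HPD_SIZE 4096  /* per-queue HPD slot; value assumed */

int main(void)
{
	unsigned num_compute_rings = 8;  /* example value */
	unsigned num_xcc = 4;            /* example partition count */

	/* one HPD slot per ring, for every XCC, as in gfx_v9_4_3_mec_init() */
	size_t mec_hpd_size =
		(size_t)num_compute_rings * num_xcc * GFX9_MEC_HPD_SIZE;

	printf("MEC HPD buffer: %zu bytes\n", mec_hpd_size);
	return 0;
}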
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
  290  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in suspend_resume_compute_scheduler()
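This single hit is the loop that pauses and restarts each kernel compute queue's DRM GPU scheduler when KFD takes over or releases the hardware queues. A rough, self-contained sketch of the shape of that loop; the struct names, the ready check, and the stop/start bodies are stand-ins, not the kernel's scheduler API.

#include <stdbool.h>
#include <stdio.h>

struct sched { bool running; };
struct ring  { struct sched sched; bool ready; };

/* toggle every compute ring's scheduler; everything but the loop bound
 * is an assumption about what the kernel function does */
static void suspend_resume(struct ring *rings, unsigned num_compute_rings,
			   bool suspend)
{
	for (unsigned i = 0; i < num_compute_rings; i++) {
		if (!rings[i].ready)
			continue;
		rings[i].sched.running = !suspend;
		printf("ring %u scheduler %s\n", i,
		       suspend ? "stopped" : "started");
	}
}

int main(void)
{
	struct ring rings[4] = { {{true}, true}, {{true}, true},
				 {{true}, false}, {{true}, true} };
	suspend_resume(rings, 4, true);   /* hand queues over */
	suspend_resume(rings, 4, false);  /* take them back */
	return 0;
}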
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c
  1312  mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE;  in gfx_v8_0_mec_init()
  2051  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v8_0_sw_fini()
  4337  r = amdgpu_ring_alloc(kiq_ring, (8 * adev->gfx.num_compute_rings) + 8);  in gfx_v8_0_kiq_kcq_enable()
  4351  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v8_0_kiq_kcq_enable()
  4705  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v8_0_kcq_resume()
  4748  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v8_0_cp_test_all_rings()
  4812  r = amdgpu_ring_alloc(kiq_ring, 6 * adev->gfx.num_compute_rings);  in gfx_v8_0_kcq_disable()
  4816  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v8_0_kcq_disable()
  5015  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v8_0_pre_soft_reset()
  5110  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v8_0_post_soft_reset()
  [all …]
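Two of the gfx_v8_0 hits (4337, 4812) budget KIQ ring space as a linear function of num_compute_rings before emitting one packet per queue: 8 dwords per map plus an 8-dword preamble on enable, and 6 dwords per unmap on disable. The per-packet sizes below are read off that alloc arithmetic, not from packet definitions, and ring_alloc() is a stand-in for amdgpu_ring_alloc().

#include <stdbool.h>
#include <stdio.h>

/* stand-in for amdgpu_ring_alloc(): reserve ndw dwords or fail */
static bool ring_alloc(unsigned ndw)
{
	printf("reserving %u dwords\n", ndw);
	return true;
}

int main(void)
{
	unsigned num_compute_rings = 8;

	/* enable path: fixed preamble plus a fixed-size packet per KCQ */
	if (!ring_alloc(8 * num_compute_rings + 8))
		return 1;

	/* disable path: one 6-dword unmap packet per KCQ */
	if (!ring_alloc(6 * num_compute_rings))
		return 1;
	return 0;
}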
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c
  2712  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v7_0_cp_compute_fini()
  3053  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v7_0_cp_compute_resume()
  3063  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v7_0_cp_compute_resume()
  4181  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),  in gfx_v7_0_early_init()
  4487  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v7_0_sw_fini()
  4838  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v7_0_eop_irq()
  4863  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v7_0_fault()
  5053  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v7_0_set_ring_funcs()
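The gfx_v7_0_early_init() hit at 4181 (and its twins at 3033, 4530, 7419, and 4649 in the gfx_v6/v9/v10/v11 results) is where num_compute_rings is set: the requested kernel-compute-queue count, clamped to a hard maximum. The second argument is truncated in the hit; presumably it is AMDGPU_MAX_COMPUTE_RINGS, as in the mainline driver. A tiny sketch under that assumption; the bound's value and get_num_kcq()'s behavior are assumptions too.

#include <stdio.h>

#define AMDGPU_MAX_COMPUTE_RINGS 8	/* assumed upper bound on KCQs */

static unsigned min_u(unsigned a, unsigned b) { return a < b ? a : b; }

/* stand-in for amdgpu_gfx_get_num_kcq(): the amdgpu.num_kcq= module
 * parameter when set, otherwise a hardware default (assumption) */
static unsigned get_num_kcq(void) { return 12; }

int main(void)
{
	unsigned num_compute_rings =
		min_u(get_num_kcq(), AMDGPU_MAX_COMPUTE_RINGS);
	printf("num_compute_rings = %u\n", num_compute_rings);	/* -> 8 */
	return 0;
}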
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h
  395  unsigned num_compute_rings;  member (of struct amdgpu_gfx)
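This is the declaration every other result dereferences as adev->gfx.num_compute_rings. For orientation, a hedged stub of the surrounding layout; the neighboring member, the array bound, and its value are assumptions, only num_compute_rings itself comes from the hit above.

#include <stdio.h>

#define AMDGPU_MAX_COMPUTE_RINGS 8	/* assumed bound */

struct ring_stub { int id; };		/* stand-in for struct amdgpu_ring */

struct gfx_stub {			/* stand-in for struct amdgpu_gfx */
	struct ring_stub compute_ring[AMDGPU_MAX_COMPUTE_RINGS];
	unsigned num_compute_rings;	/* how many of compute_ring[] are in use */
};

int main(void)
{
	struct gfx_stub gfx = { .num_compute_rings = 4 };
	printf("%u of %d rings in use\n", gfx.num_compute_rings,
	       AMDGPU_MAX_COMPUTE_RINGS);
	return 0;
}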
drivers/gpu/drm/amd/amdgpu/gfx_v11_0.c
  736  mec_hpd_size = adev->gfx.num_compute_rings * GFX11_MEC_HPD_SIZE;  in gfx_v11_0_mec_init()
  1508  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v11_0_sw_fini()
  4079  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v11_0_kcq_resume()
  4158  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v11_0_cp_resume()
  4566  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v11_0_check_soft_reset()
  4649  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),  in gfx_v11_0_early_init()
  5879  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v11_0_eop_irq()
  5957  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v11_0_handle_priv_fault()
  6208  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v11_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
  1700  mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE;  in gfx_v9_0_mec_init()
  2196  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v9_0_sw_fini()
  3650  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v9_0_kcq_resume()
  3714  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v9_0_cp_resume()
  4530  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),  in gfx_v9_0_early_init()
  5925  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v9_0_eop_irq()
  5955  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v9_0_fault()
  7060  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v9_0_set_ring_funcs()
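The *_eop_irq() hits (5925 here, with counterparts at 4838, 5879, and 8930 in the gfx_v7/v11/v10 results) all fan an end-of-pipe interrupt out by scanning the compute rings for the one whose me/pipe/queue ids match the interrupt source. A standalone sketch of that dispatch; the id fields mirror the kernel's ring struct, everything else is a stand-in.

#include <stdio.h>

struct ring { int me, pipe, queue; };

/* deliver an EOP interrupt to the matching compute ring; in the kernel
 * the match triggers fence processing rather than a printf */
static void eop_irq(struct ring *rings, unsigned num_compute_rings,
		    int me, int pipe, int queue)
{
	for (unsigned i = 0; i < num_compute_rings; i++) {
		struct ring *r = &rings[i];
		if (r->me == me && r->pipe == pipe && r->queue == queue)
			printf("fence work for compute ring %u\n", i);
	}
}

int main(void)
{
	struct ring rings[4] = { {1, 0, 0}, {1, 0, 1}, {1, 1, 0}, {1, 1, 1} };
	eop_irq(rings, 4, 1, 1, 0);	/* matches ring 2 */
	return 0;
}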
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c
  3033  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),  in gfx_v6_0_early_init()
  3085  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v6_0_sw_init()
  3118  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v6_0_sw_fini()
  3521  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v6_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
  4202  mec_hpd_size = adev->gfx.num_compute_rings * GFX10_MEC_HPD_SIZE;  in gfx_v10_0_mec_init()
  4667  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v10_0_sw_fini()
  6822  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v10_0_kcq_resume()
  6891  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v10_0_cp_resume()
  7419  adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev),  in gfx_v10_0_early_init()
  8930  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v10_0_eop_irq()
  9008  for (i = 0; i < adev->gfx.num_compute_rings; i++) {  in gfx_v10_0_handle_priv_fault()
  9269  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in gfx_v10_0_set_ring_funcs()
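The kcq_resume()/cp_resume() pairs (6822 and 6891 here, with counterparts in the gfx_v8/v9/v11 results) bring kernel compute queues up in two passes over num_compute_rings: first initialize each queue's MQD, then test every ring. A schematic two-pass sketch; the MQD and ring-test steps are stand-ins for the real KIQ programming and amdgpu_ring_test_helper().

#include <stdbool.h>
#include <stdio.h>

struct ring { bool mqd_loaded; bool ready; };

static int kcq_resume(struct ring *rings, unsigned num_compute_rings)
{
	/* pass 1: load each KCQ's MQD (stand-in for KIQ map) */
	for (unsigned i = 0; i < num_compute_rings; i++)
		rings[i].mqd_loaded = true;

	/* pass 2: test every ring before declaring it usable */
	for (unsigned i = 0; i < num_compute_rings; i++) {
		if (!rings[i].mqd_loaded)
			return -1;
		rings[i].ready = true;	/* stand-in for the ring test */
	}
	return 0;
}

int main(void)
{
	struct ring rings[8] = {0};
	printf("kcq_resume: %d\n", kcq_resume(rings, 8));
	return 0;
}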
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
  386  for (i = 0; i < adev->gfx.num_compute_rings; i++)  in amdgpu_hw_ip_info()
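This last hit is where num_compute_rings becomes visible to userspace: the AMDGPU_INFO hardware-IP query counts how many of the first num_compute_rings compute rings are actually schedulable and reports that count. A sketch of that counting loop; the sched_ready flag is a stand-in for the ring's scheduler-ready state.

#include <stdbool.h>
#include <stdio.h>

struct ring { bool sched_ready; };

int main(void)
{
	/* four rings set up by software; one failed its ring test */
	struct ring compute_ring[4] = { {true}, {true}, {false}, {true} };
	unsigned num_compute_rings = 4;
	unsigned num_rings = 0;

	/* count only schedulable rings, as the ioctl handler appears to do */
	for (unsigned i = 0; i < num_compute_rings; i++)
		if (compute_ring[i].sched_ready)
			++num_rings;

	printf("hw_ip_info: %u compute rings available\n", num_rings); /* 3 */
	return 0;
}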