Home
last modified time | relevance | path

Searched refs: NUM_XCC (Results 1 – 14 of 14), sorted by relevance

/openbmc/linux/drivers/gpu/drm/amd/amdkfd/
kfd_mqd_manager_v9.c:139 NUM_XCC(node->xcc_mask), in allocate_mqd()
530 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in init_mqd_hiq_v9_4_3()
625 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in init_mqd_v9_4_3()
652 NUM_XCC(mm->dev->xcc_mask); in init_mqd_v9_4_3()
685 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in update_mqd_v9_4_3()
778 for (xcc = 0; xcc < NUM_XCC(mm->dev->xcc_mask); xcc++) { in get_wave_state_v9_4_3()
kfd_mqd_manager.c:80 NUM_XCC(dev->xcc_mask); in allocate_sdma_mqd()
108 int inc = cu_inc * NUM_XCC(mm->dev->xcc_mask); in mqd_symmetrically_map_cu_mask()
kfd_topology.c:478 NUM_XCC(dev->gpu->xcc_mask)) : 0); in node_show()
544 NUM_XCC(dev->gpu->xcc_mask)); in node_show()
1112 buf[7] = (ffs(gpu->xcc_mask) - 1) | (NUM_XCC(gpu->xcc_mask) << 16); in kfd_generate_gpu_id()
1612 end = start + NUM_XCC(knode->xcc_mask); in fill_in_l2_l3_pcache()
1712 end = start + NUM_XCC(kdev->xcc_mask); in kfd_fill_cache_non_crat_info()
kfd_debug.c:1068 device_info.num_xcc = NUM_XCC(pdd->dev->xcc_mask); in kfd_dbg_trap_device_snapshot()
kfd_device.c:773 (1U << NUM_XCC(kfd->adev->gfx.xcc_mask)) - 1; in kgd2kfd_device_init()
kfd_process_queue_manager.c:1041 num_xccs = NUM_XCC(q->device->xcc_mask); in pqm_debugfs_mqds()
kfd_device_queue_manager.c:2473 NUM_XCC(dqm->dev->xcc_mask)); in allocate_hiq_sdma_mqd()
/openbmc/linux/drivers/gpu/drm/amd/amdgpu/
gfxhub_v1_2.c:70 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_setup_vm_pt_regs()
427 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_gart_enable()
466 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_gart_disable()
524 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_set_fault_enable_default()
573 xcc_mask = GENMASK(NUM_XCC(adev->gfx.xcc_mask) - 1, 0); in gfxhub_v1_2_init()
aqua_vanjaram.c:321 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_get_xcc_per_xcp()
414 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_get_auto_mode()
438 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in __aqua_vanjaram_is_valid_mode()
499 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in aqua_vanjaram_switch_partition_mode()
gfx_v9_4_3.c:187 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_set_kiq_pm4_funcs()
196 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_init_golden_registers()
461 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_mec_init()
630 NUM_XCC(adev->gfx.xcc_mask) / in gfx_v9_4_3_switch_compute_partition()
636 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_switch_compute_partition()
788 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_sw_init()
882 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_sw_fini()
1015 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_constants_init()
1094 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_init_rlcg_reg_access_ctrl()
1186 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_rlc_stop()
[all …]
amdgpu_gfx.c:214 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_compute_queue_acquire()
910 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_ras_error_func()
1238 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in amdgpu_gfx_set_compute_partition()
1283 switch (NUM_XCC(adev->gfx.xcc_mask)) { in amdgpu_gfx_get_available_compute_partition()
amdgpu_gfx.h:70 #define NUM_XCC(x) hweight16(x) (macro definition)
gmc_v9_0.c:1888 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gmc_v9_0_init_acpi_mem_ranges()
2113 NUM_XCC(adev->gfx.xcc_mask)); in gmc_v9_0_sw_init()
amdgpu_ras.c:339 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_ras_instance_mask_check()