
Searched refs:xcp (Results 1 – 14 of 14) sorted by relevance

/openbmc/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_xcp.c
66 struct amdgpu_xcp *xcp; in amdgpu_xcp_run_transition() local
72 xcp = &xcp_mgr->xcp[xcp_id]; in amdgpu_xcp_run_transition()
74 xcp_ip = &xcp->ip[i]; in amdgpu_xcp_run_transition()
108 struct amdgpu_xcp *xcp; in __amdgpu_xcp_add_block() local
113 xcp = &xcp_mgr->xcp[xcp_id]; in __amdgpu_xcp_add_block()
117 xcp->valid = true; in __amdgpu_xcp_add_block()
148 xcp_mgr->xcp[i].id = i; in amdgpu_xcp_init()
296 struct amdgpu_xcp *xcp; in amdgpu_xcp_get_partition() local
303 xcp = &xcp_mgr->xcp[i]; in amdgpu_xcp_get_partition()
304 if ((xcp->valid) && (xcp->ip[ip].valid) && in amdgpu_xcp_get_partition()
[all …]
amdgpu_xcp.h
94 struct amdgpu_xcp xcp[MAX_XCP]; member
110 struct amdgpu_xcp *xcp, uint8_t *mem_id);
135 int amdgpu_xcp_get_inst_details(struct amdgpu_xcp *xcp,
172 if (xcp_mgr->xcp[*from].valid) in amdgpu_get_next_xcp()
173 return &xcp_mgr->xcp[*from]; in amdgpu_get_next_xcp()
180 #define for_each_xcp(xcp_mgr, xcp, i) \ argument
181 for (i = 0, xcp = amdgpu_get_next_xcp(xcp_mgr, &i); xcp; \
182 xcp = amdgpu_get_next_xcp(xcp_mgr, &i))
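The for_each_xcp() macro and amdgpu_get_next_xcp() helper listed above walk only the partitions whose valid flag is set. The fragment below is a minimal, self-contained sketch of that iteration pattern; the struct layouts and the next_valid_xcp() helper are simplified stand-ins invented for illustration, not the driver's real definitions.

#include <stdbool.h>
#include <stdio.h>

#define MAX_XCP 8

/* Toy stand-ins for the driver structures referenced in amdgpu_xcp.h. */
struct amdgpu_xcp {
	bool valid;
	int id;
};

struct amdgpu_xcp_mgr {
	struct amdgpu_xcp xcp[MAX_XCP];
};

/* Simplified helper: return the next valid partition at or after *from. */
static struct amdgpu_xcp *next_valid_xcp(struct amdgpu_xcp_mgr *mgr, int *from)
{
	while (*from < MAX_XCP) {
		struct amdgpu_xcp *xcp = &mgr->xcp[(*from)++];

		if (xcp->valid)
			return xcp;
	}
	return NULL;
}

/* Same loop shape as the for_each_xcp() macro shown in the listing. */
#define for_each_xcp(mgr, xcp, i)                       \
	for (i = 0, xcp = next_valid_xcp(mgr, &i); xcp; \
	     xcp = next_valid_xcp(mgr, &i))

int main(void)
{
	struct amdgpu_xcp_mgr mgr = { 0 };
	struct amdgpu_xcp *xcp;
	int i;

	/* Mark two partitions valid, roughly what amdgpu_xcp_init() does. */
	for (i = 0; i < 2; i++) {
		mgr.xcp[i].id = i;
		mgr.xcp[i].valid = true;
	}

	for_each_xcp(&mgr, xcp, i)
		printf("partition %d is valid\n", xcp->id);

	return 0;
}
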
aqua_vanjaram.c
103 if (adev->xcp_mgr->xcp[xcp_id].ip[ip_blk].inst_mask & inst_mask) { in aqua_vanjaram_set_xcp_id()
117 num_gpu_sched = &adev->xcp_mgr->xcp[sel_xcp_id] in aqua_vanjaram_xcp_gpu_sched_update()
119 adev->xcp_mgr->xcp[sel_xcp_id].gpu_sched[ring->funcs->type][ring->hw_prio] in aqua_vanjaram_xcp_gpu_sched_update()
133 atomic_set(&adev->xcp_mgr->xcp[i].ref_cnt, 0); in aqua_vanjaram_xcp_sched_list_update()
134 memset(adev->xcp_mgr->xcp[i].gpu_sched, 0, sizeof(adev->xcp_mgr->xcp->gpu_sched)); in aqua_vanjaram_xcp_sched_list_update()
194 total_ref_cnt = atomic_read(&adev->xcp_mgr->xcp[i].ref_cnt); in aqua_vanjaram_select_scheds()
203 if (adev->xcp_mgr->xcp[sel_xcp_id].gpu_sched[hw_ip][hw_prio].num_scheds) { in aqua_vanjaram_select_scheds()
205 *scheds = adev->xcp_mgr->xcp[fpriv->xcp_id].gpu_sched[hw_ip][hw_prio].sched; in aqua_vanjaram_select_scheds()
206 atomic_inc(&adev->xcp_mgr->xcp[sel_xcp_id].ref_cnt); in aqua_vanjaram_select_scheds()
557 struct amdgpu_xcp *xcp, uint8_t *mem_id) in aqua_vanjaram_get_xcp_mem_id() argument
[all …]
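The aqua_vanjaram_select_scheds() lines above read each partition's ref_cnt, pick a partition, and take a reference on the one selected. Below is a compressed user-space sketch of the ref-count bookkeeping suggested by that snippet, assuming C11 atomics; the xcp_slot type, select_least_used_xcp() name, and the least-referenced selection policy are illustrative, not the driver's actual API or exact policy.

#include <stdatomic.h>
#include <stdio.h>

#define NUM_XCP 4

/* Illustrative stand-in for the per-partition state holding ref_cnt. */
struct xcp_slot {
	atomic_int ref_cnt;
};

/* Scan the partitions, choose the least-referenced one, and pin it. */
static int select_least_used_xcp(struct xcp_slot *slots, int n)
{
	int sel = 0;
	int least = atomic_load(&slots[0].ref_cnt);

	for (int i = 1; i < n; i++) {
		int refs = atomic_load(&slots[i].ref_cnt);

		if (refs < least) {
			least = refs;
			sel = i;
		}
	}

	/* Mirrors the atomic_inc() on the chosen partition's ref_cnt. */
	atomic_fetch_add(&slots[sel].ref_cnt, 1);
	return sel;
}

int main(void)
{
	struct xcp_slot slots[NUM_XCP] = { 0 };

	atomic_store(&slots[0].ref_cnt, 2);
	atomic_store(&slots[1].ref_cnt, 1);

	printf("selected partition %d\n",
	       select_least_used_xcp(slots, NUM_XCP));
	return 0;
}
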
amdgpu_amdkfd.c
422 struct amdgpu_xcp *xcp) in amdgpu_amdkfd_get_local_mem_info() argument
426 if (xcp) { in amdgpu_amdkfd_get_local_mem_info()
429 KFD_XCP_MEMORY_SIZE(adev, xcp->id); in amdgpu_amdkfd_get_local_mem_info()
432 KFD_XCP_MEMORY_SIZE(adev, xcp->id); in amdgpu_amdkfd_get_local_mem_info()
amdgpu_amdkfd.h
234 struct amdgpu_xcp *xcp);
344 (adev)->xcp_mgr->xcp[(xcp_id)].mem_id : -1)
/openbmc/linux/arch/mips/math-emu/
cp1emu.c
993 if (delay_slot(xcp)) { in cop1Emulate()
996 clear_delay_slot(xcp); in cop1Emulate()
1003 if (delay_slot(xcp)) { in cop1Emulate()
1206 if (delay_slot(xcp)) in cop1Emulate()
1233 set_delay_slot(xcp); in cop1Emulate()
1245 bcpc = xcp->cp0_epc; in cop1Emulate()
1319 xcp->cp0_epc = bcpc; in cop1Emulate()
1331 xcp->cp0_epc = bcpc; in cop1Emulate()
1384 xcp->cp0_epc = contpc; in cop1Emulate()
1385 clear_delay_slot(xcp); in cop1Emulate()
[all …]
dsemul.c
292 bool do_dsemulret(struct pt_regs *xcp) in do_dsemulret() argument
301 xcp->cp0_epc = current->thread.bd_emu_cont_pc; in do_dsemulret()
302 pr_debug("dsemulret to 0x%08lx\n", xcp->cp0_epc); in do_dsemulret()
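The cp1emu.c and dsemul.c hits above revolve around one field of the saved register frame, xcp->cp0_epc. When a branch's delay-slot instruction has to be emulated out of line, a continuation PC is remembered in the thread state, and the do_dsemulret() path writes it back so the task resumes at the right place. The toy sketch below only illustrates that save/restore shape; fake_regs, fake_thread, and the helper names are invented stand-ins for pt_regs and the kernel's thread struct.

#include <stdio.h>

/* Invented stand-ins for the kernel's pt_regs and thread state. */
struct fake_regs {
	unsigned long cp0_epc;		/* PC captured at exception time */
};

struct fake_thread {
	unsigned long bd_emu_cont_pc;	/* where to resume after emulation */
};

/* Record where execution should continue once the delay slot is handled. */
static void remember_continuation(struct fake_thread *t, unsigned long cont_pc)
{
	t->bd_emu_cont_pc = cont_pc;
}

/* Counterpart of the do_dsemulret() return path shown in the listing. */
static void dsemul_ret(struct fake_regs *xcp, struct fake_thread *t)
{
	xcp->cp0_epc = t->bd_emu_cont_pc;
}

int main(void)
{
	struct fake_regs regs = { .cp0_epc = 0x400100UL };
	struct fake_thread thread = { 0 };

	remember_continuation(&thread, 0x400200UL);	/* branch target */
	dsemul_ret(&regs, &thread);

	printf("resuming at 0x%08lx\n", regs.cp0_epc);
	return 0;
}
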
/openbmc/linux/arch/mips/include/asm/
dsemul.h
52 extern bool do_dsemulret(struct pt_regs *xcp);
54 static inline bool do_dsemulret(struct pt_regs *xcp) in do_dsemulret() argument
fpu_emulator.h
168 extern int fpu_emulator_cop1Handler(struct pt_regs *xcp,
/openbmc/linux/drivers/gpu/drm/amd/amdkfd/
kfd_device.c
765 node->xcp = amdgpu_get_next_xcp(kfd->adev->xcp_mgr, &xcp_idx); in kgd2kfd_device_init()
767 if (node->xcp) { in kgd2kfd_device_init()
768 amdgpu_xcp_get_inst_details(node->xcp, AMDGPU_XCP_GFX, in kgd2kfd_device_init()
776 if (node->xcp) { in kgd2kfd_device_init()
778 node->node_id, node->xcp->mem_id, in kgd2kfd_device_init()
808 &node->local_mem_info, node->xcp); in kgd2kfd_device_init()
kfd_migrate.c
522 node->xcp ? node->xcp->id : 0); in svm_migrate_ram_to_vram()
565 node->xcp ? node->xcp->id : 0); in svm_migrate_ram_to_vram()
kfd_priv.h
271 struct amdgpu_xcp *xcp; member
1490 if (node->xcp) in kfd_devcgroup_check_permission()
1491 ddev = node->xcp->ddev; in kfd_devcgroup_check_permission()
kfd_topology.c
1184 dev->gpu->xcp); in kfd_fill_mem_clk_max_info()
1931 if (gpu->xcp && !gpu->xcp->ddev) { in kfd_topology_add_device()
1997 if (gpu->xcp) in kfd_topology_add_device()
1998 dev->node_props.drm_render_minor = gpu->xcp->ddev->render->index; in kfd_topology_add_device()
kfd_svm.c
564 if (node->xcp) in svm_range_vram_node_new()
565 bp.xcp_id_plus1 = node->xcp->id + 1; in svm_range_vram_node_new()
1245 (!bo_node->xcp || !node->xcp || bo_node->xcp->mem_id == node->xcp->mem_id)) in svm_range_get_pte_flags()
1998 if (adev->kfd.dev->nodes[i]->xcp) in svm_range_set_max_pages()
1999 id = adev->kfd.dev->nodes[i]->xcp->id; in svm_range_set_max_pages()
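The svm_range_vram_node_new() hit above passes node->xcp->id + 1 as xcp_id_plus1, which suggests an encoding where 0 means "no specific partition" and any positive value is the partition id shifted by one. A tiny sketch of that presumed encoding, with invented helper names:

#include <stdio.h>

/* Assumption: 0 means "no partition requested"; otherwise id + 1. */
static unsigned int encode_xcp_id_plus1(int xcp_id)
{
	return xcp_id < 0 ? 0 : (unsigned int)xcp_id + 1;
}

static int decode_xcp_id(unsigned int xcp_id_plus1)
{
	return xcp_id_plus1 ? (int)xcp_id_plus1 - 1 : -1;
}

int main(void)
{
	printf("%u %d\n", encode_xcp_id_plus1(2), decode_xcp_id(3));	/* 3 2 */
	printf("%u %d\n", encode_xcp_id_plus1(-1), decode_xcp_id(0));	/* 0 -1 */
	return 0;
}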