Lines Matching refs:gpu

22 static void a4xx_dump(struct msm_gpu *gpu);
23 static bool a4xx_idle(struct msm_gpu *gpu);
25 static void a4xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit) in a4xx_submit() argument
37 if (gpu->cur_ctx_seqno == submit->queue->ctx->seqno) in a4xx_submit()
69 adreno_flush(gpu, ring, REG_A4XX_CP_RB_WPTR); in a4xx_submit()
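The adreno_flush() call that ends the submit path is, in essence, a write-pointer publish: after a memory barrier it copies the ring's new wptr into the register it is handed (REG_A4XX_CP_RB_WPTR here) so the CP starts fetching. A minimal standalone sketch of that kick, with invented names (fake_ring, ring_emit, ring_kick) and a dword-indexed MMIO pointer standing in for the driver's own types:

#include <stdint.h>

/* Toy model of kicking a command processor: commands land in the ring
 * first, then the write pointer is published through MMIO. */
struct fake_ring {
	uint32_t buf[1024];          /* ring contents, in dwords */
	uint32_t wptr;               /* next free slot */
	volatile uint32_t *mmio;     /* mapped register space, dword indexed */
	uint32_t wptr_reg;           /* e.g. the CP_RB_WPTR register offset */
};

static void ring_emit(struct fake_ring *ring, uint32_t dword)
{
	ring->buf[ring->wptr] = dword;
	ring->wptr = (ring->wptr + 1) % 1024;
}

static void ring_kick(struct fake_ring *ring)
{
	/* The real driver issues a barrier here so the CP can never see
	 * the new wptr before the ring contents it points at. */
	__sync_synchronize();
	ring->mmio[ring->wptr_reg] = ring->wptr;
}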
76 static void a4xx_enable_hwcg(struct msm_gpu *gpu) in a4xx_enable_hwcg() argument
78 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a4xx_enable_hwcg()
81 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_TP(i), 0x02222202); in a4xx_enable_hwcg()
83 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_TP(i), 0x00002222); in a4xx_enable_hwcg()
85 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_TP(i), 0x0E739CE7); in a4xx_enable_hwcg()
87 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_TP(i), 0x00111111); in a4xx_enable_hwcg()
89 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_SP(i), 0x22222222); in a4xx_enable_hwcg()
91 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_SP(i), 0x00222222); in a4xx_enable_hwcg()
93 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_SP(i), 0x00000104); in a4xx_enable_hwcg()
95 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_SP(i), 0x00000081); in a4xx_enable_hwcg()
96 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_UCHE, 0x22222222); in a4xx_enable_hwcg()
97 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_UCHE, 0x02222222); in a4xx_enable_hwcg()
98 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL3_UCHE, 0x00000000); in a4xx_enable_hwcg()
99 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL4_UCHE, 0x00000000); in a4xx_enable_hwcg()
100 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_UCHE, 0x00004444); in a4xx_enable_hwcg()
101 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_UCHE, 0x00001112); in a4xx_enable_hwcg()
103 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_RB(i), 0x22222222); in a4xx_enable_hwcg()
108 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_RB(i), in a4xx_enable_hwcg()
111 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2_RB(i), in a4xx_enable_hwcg()
119 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_MARB_CCU(i), in a4xx_enable_hwcg()
124 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_RB_MARB_CCU(i), in a4xx_enable_hwcg()
129 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_RB_MARB_CCU_L1(i), in a4xx_enable_hwcg()
134 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_MODE_GPC, 0x02222222); in a4xx_enable_hwcg()
135 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_GPC, 0x04100104); in a4xx_enable_hwcg()
136 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_GPC, 0x00022222); in a4xx_enable_hwcg()
137 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_COM_DCOM, 0x00000022); in a4xx_enable_hwcg()
138 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_COM_DCOM, 0x0000010F); in a4xx_enable_hwcg()
139 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_COM_DCOM, 0x00000022); in a4xx_enable_hwcg()
140 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_TSE_RAS_RBBM, 0x00222222); in a4xx_enable_hwcg()
141 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_TSE_RAS_RBBM, 0x00004104); in a4xx_enable_hwcg()
142 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_TSE_RAS_RBBM, 0x00000222); in a4xx_enable_hwcg()
143 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL_HLSQ, 0x00000000); in a4xx_enable_hwcg()
144 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_HYST_HLSQ, 0x00000000); in a4xx_enable_hwcg()
145 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ, 0x00220000); in a4xx_enable_hwcg()
149 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL, 0); in a4xx_enable_hwcg()
151 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL, 0xAAAAAAAA); in a4xx_enable_hwcg()
152 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_CTL2, 0); in a4xx_enable_hwcg()
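Every write in a4xx_enable_hwcg() goes through the driver's gpu_write()/gpu_read() accessors, which are thin MMIO wrappers; the REG_A4XX_* constants are understood to be dword offsets into the mapped register block. A rough standalone model of those accessors plus the read-modify-write used for REG_A4XX_RBBM_CLOCK_DELAY_HLSQ further down (simplified; the kernel helpers build on readl()/writel() and add tracing):

#include <stdint.h>

/* Simplified stand-in for struct msm_gpu: just the mapped registers. */
struct toy_gpu {
	volatile uint32_t *mmio;     /* ioremap()ed register block */
};

/* Registers are indexed by dword, i.e. byte offset = reg << 2. */
static inline void toy_gpu_write(struct toy_gpu *gpu, uint32_t reg, uint32_t data)
{
	gpu->mmio[reg] = data;
}

static inline uint32_t toy_gpu_read(struct toy_gpu *gpu, uint32_t reg)
{
	return gpu->mmio[reg];
}

/* Clear `mask`, then set `bits` -- the shape of the CLOCK_DELAY_HLSQ
 * update in a4xx_hw_init() below. */
static inline void toy_gpu_rmw(struct toy_gpu *gpu, uint32_t reg,
			       uint32_t mask, uint32_t bits)
{
	toy_gpu_write(gpu, reg, (toy_gpu_read(gpu, reg) & ~mask) | bits);
}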
156 static bool a4xx_me_init(struct msm_gpu *gpu) in a4xx_me_init() argument
158 struct msm_ringbuffer *ring = gpu->rb[0]; in a4xx_me_init()
179 adreno_flush(gpu, ring, REG_A4XX_CP_RB_WPTR); in a4xx_me_init()
180 return a4xx_idle(gpu); in a4xx_me_init()
183 static int a4xx_hw_init(struct msm_gpu *gpu) in a4xx_hw_init() argument
185 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a4xx_hw_init()
191 gpu_write(gpu, REG_A4XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003); in a4xx_hw_init()
193 gpu_write(gpu, REG_A4XX_VBIF_ABIT_SORT, 0x0001001F); in a4xx_hw_init()
194 gpu_write(gpu, REG_A4XX_VBIF_ABIT_SORT_CONF, 0x000000A4); in a4xx_hw_init()
195 gpu_write(gpu, REG_A4XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000001); in a4xx_hw_init()
196 gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF0, 0x18181818); in a4xx_hw_init()
197 gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF1, 0x00000018); in a4xx_hw_init()
198 gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF0, 0x18181818); in a4xx_hw_init()
199 gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF1, 0x00000018); in a4xx_hw_init()
200 gpu_write(gpu, REG_A4XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003); in a4xx_hw_init()
202 gpu_write(gpu, REG_A4XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000001); in a4xx_hw_init()
203 gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF0, 0x18181818); in a4xx_hw_init()
204 gpu_write(gpu, REG_A4XX_VBIF_IN_RD_LIM_CONF1, 0x00000018); in a4xx_hw_init()
205 gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF0, 0x18181818); in a4xx_hw_init()
206 gpu_write(gpu, REG_A4XX_VBIF_IN_WR_LIM_CONF1, 0x00000018); in a4xx_hw_init()
207 gpu_write(gpu, REG_A4XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003); in a4xx_hw_init()
213 gpu_write(gpu, REG_A4XX_RBBM_GPU_BUSY_MASKED, 0xffffffff); in a4xx_hw_init()
216 gpu_write(gpu, REG_A4XX_RBBM_SP_HYST_CNT, 0x10); in a4xx_hw_init()
217 gpu_write(gpu, REG_A4XX_RBBM_WAIT_IDLE_CLOCKS_CTL, 0x10); in a4xx_hw_init()
220 gpu_write(gpu, REG_A4XX_RBBM_WAIT_IDLE_CLOCKS_CTL2, 0x30); in a4xx_hw_init()
224 gpu_write(gpu, REG_A4XX_RBBM_AHB_CTL0, 0x00000001); in a4xx_hw_init()
227 gpu_write(gpu, REG_A4XX_RBBM_AHB_CTL1, 0xa6ffffff); in a4xx_hw_init()
230 gpu_write(gpu, REG_A4XX_RBBM_RBBM_CTL, 0x00000030); in a4xx_hw_init()
236 gpu_write(gpu, REG_A4XX_RBBM_INTERFACE_HANG_INT_CTL, in a4xx_hw_init()
239 gpu_write(gpu, REG_A4XX_RB_GMEM_BASE_ADDR, in a4xx_hw_init()
243 gpu_write(gpu, REG_A4XX_RBBM_PERFCTR_CTL, 0x01); in a4xx_hw_init()
248 gpu_write(gpu, REG_A4XX_CP_PERFCTR_CP_SEL_0, CP_ALWAYS_COUNT); in a4xx_hw_init()
251 gpu_write(gpu, REG_A4XX_UCHE_CACHE_WAYS_VFD, 0x07); in a4xx_hw_init()
254 gpu_write(gpu, REG_A4XX_UCHE_TRAP_BASE_LO, 0xffff0000); in a4xx_hw_init()
255 gpu_write(gpu, REG_A4XX_UCHE_TRAP_BASE_HI, 0xffff0000); in a4xx_hw_init()
257 gpu_write(gpu, REG_A4XX_CP_DEBUG, (1 << 25) | in a4xx_hw_init()
263 gpu_write(gpu, REG_A4XX_RBBM_SP_REGFILE_SLEEP_CNTL_0, in a4xx_hw_init()
265 gpu_write(gpu, REG_A4XX_RBBM_SP_REGFILE_SLEEP_CNTL_1, in a4xx_hw_init()
269 a4xx_enable_hwcg(gpu); in a4xx_hw_init()
277 val = gpu_read(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ); in a4xx_hw_init()
280 gpu_write(gpu, REG_A4XX_RBBM_CLOCK_DELAY_HLSQ, val); in a4xx_hw_init()
284 gpu_write(gpu, REG_A4XX_CP_PROTECT_CTRL, 0x00000007); in a4xx_hw_init()
287 gpu_write(gpu, REG_A4XX_CP_PROTECT(0), 0x62000010); in a4xx_hw_init()
288 gpu_write(gpu, REG_A4XX_CP_PROTECT(1), 0x63000020); in a4xx_hw_init()
289 gpu_write(gpu, REG_A4XX_CP_PROTECT(2), 0x64000040); in a4xx_hw_init()
290 gpu_write(gpu, REG_A4XX_CP_PROTECT(3), 0x65000080); in a4xx_hw_init()
291 gpu_write(gpu, REG_A4XX_CP_PROTECT(4), 0x66000100); in a4xx_hw_init()
292 gpu_write(gpu, REG_A4XX_CP_PROTECT(5), 0x64000200); in a4xx_hw_init()
295 gpu_write(gpu, REG_A4XX_CP_PROTECT(6), 0x67000800); in a4xx_hw_init()
296 gpu_write(gpu, REG_A4XX_CP_PROTECT(7), 0x64001600); in a4xx_hw_init()
300 gpu_write(gpu, REG_A4XX_CP_PROTECT(8), 0x60003300); in a4xx_hw_init()
303 gpu_write(gpu, REG_A4XX_CP_PROTECT(9), 0x60003800); in a4xx_hw_init()
306 gpu_write(gpu, REG_A4XX_CP_PROTECT(10), 0x61003980); in a4xx_hw_init()
309 gpu_write(gpu, REG_A4XX_CP_PROTECT(11), 0x6e010000); in a4xx_hw_init()
311 gpu_write(gpu, REG_A4XX_RBBM_INT_0_MASK, A4XX_INT0_MASK); in a4xx_hw_init()
313 ret = adreno_hw_init(gpu); in a4xx_hw_init()
321 gpu_write(gpu, REG_A4XX_CP_RB_CNTL, in a4xx_hw_init()
325 gpu_write(gpu, REG_A4XX_CP_RB_BASE, lower_32_bits(gpu->rb[0]->iova)); in a4xx_hw_init()
331 gpu_write(gpu, REG_A4XX_CP_ME_RAM_WADDR, 0); in a4xx_hw_init()
333 gpu_write(gpu, REG_A4XX_CP_ME_RAM_DATA, ptr[i]); in a4xx_hw_init()
340 gpu_write(gpu, REG_A4XX_CP_PFP_UCODE_ADDR, 0); in a4xx_hw_init()
342 gpu_write(gpu, REG_A4XX_CP_PFP_UCODE_DATA, ptr[i]); in a4xx_hw_init()
345 gpu_write(gpu, REG_A4XX_CP_ME_CNTL, 0); in a4xx_hw_init()
347 return a4xx_me_init(gpu) ? 0 : -EINVAL; in a4xx_hw_init()
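The microcode loads near the end of a4xx_hw_init() follow the usual auto-incrementing address/data pattern: reset the write address once, then stream firmware words into the data register (CP_ME_RAM_WADDR/CP_ME_RAM_DATA for the ME, CP_PFP_UCODE_ADDR/CP_PFP_UCODE_DATA for the PFP), and finally release the CP via CP_ME_CNTL. A compact illustrative sketch of that loop (load_ucode() and the raw mmio pointer are invented for the example):

#include <stddef.h>
#include <stdint.h>

/* Stream a firmware image into an address/data register pair where the
 * hardware auto-increments the address after each data write. */
static void load_ucode(volatile uint32_t *mmio, uint32_t addr_reg,
		       uint32_t data_reg, const uint32_t *fw, size_t ndwords)
{
	size_t i;

	mmio[addr_reg] = 0;              /* start writing at word 0 */
	for (i = 0; i < ndwords; i++)
		mmio[data_reg] = fw[i];  /* address advances by itself */
}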
350 static void a4xx_recover(struct msm_gpu *gpu) in a4xx_recover() argument
354 adreno_dump_info(gpu); in a4xx_recover()
358 gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i)); in a4xx_recover()
363 a4xx_dump(gpu); in a4xx_recover()
365 gpu_write(gpu, REG_A4XX_RBBM_SW_RESET_CMD, 1); in a4xx_recover()
366 gpu_read(gpu, REG_A4XX_RBBM_SW_RESET_CMD); in a4xx_recover()
367 gpu_write(gpu, REG_A4XX_RBBM_SW_RESET_CMD, 0); in a4xx_recover()
368 adreno_recover(gpu); in a4xx_recover()
371 static void a4xx_destroy(struct msm_gpu *gpu) in a4xx_destroy() argument
373 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a4xx_destroy()
376 DBG("%s", gpu->name); in a4xx_destroy()
385 static bool a4xx_idle(struct msm_gpu *gpu) in a4xx_idle() argument
388 if (!adreno_idle(gpu, gpu->rb[0])) in a4xx_idle()
392 if (spin_until(!(gpu_read(gpu, REG_A4XX_RBBM_STATUS) & in a4xx_idle()
394 DRM_ERROR("%s: timeout waiting for GPU to idle!\n", gpu->name); in a4xx_idle()
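a4xx_idle() first waits for the ring to drain (adreno_idle()) and then uses spin_until() to poll RBBM_STATUS until the GPU-busy bit clears, logging a timeout otherwise. A self-contained sketch of that poll-with-deadline shape, using a caller-supplied read callback and a plain retry budget instead of the kernel's jiffies-based timeout:

#include <stdbool.h>
#include <stdint.h>

/* Poll a status register until the busy mask clears, or give up after
 * max_tries attempts; the caller is expected to log the timeout. */
static bool wait_for_idle(uint32_t (*read_status)(void *ctx), void *ctx,
			  uint32_t busy_mask, unsigned int max_tries)
{
	unsigned int i;

	for (i = 0; i < max_tries; i++) {
		if (!(read_status(ctx) & busy_mask))
			return true;     /* hardware reports idle */
		/* a real driver would cpu_relax()/udelay() here */
	}
	return false;
}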
402 static irqreturn_t a4xx_irq(struct msm_gpu *gpu) in a4xx_irq() argument
406 status = gpu_read(gpu, REG_A4XX_RBBM_INT_0_STATUS); in a4xx_irq()
407 DBG("%s: Int status %08x", gpu->name, status); in a4xx_irq()
410 uint32_t reg = gpu_read(gpu, REG_A4XX_CP_PROTECT_STATUS); in a4xx_irq()
416 gpu_write(gpu, REG_A4XX_RBBM_INT_CLEAR_CMD, status); in a4xx_irq()
418 msm_gpu_retire(gpu); in a4xx_irq()
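The interrupt path is the familiar latch/ack/handle sequence: read RBBM_INT_0_STATUS, dump CP_PROTECT_STATUS if a protect fault is flagged, write the latched bits back to RBBM_INT_CLEAR_CMD, then let retirement run. A minimal sketch of that shape (the register arguments and the handle_retire() hook are placeholders, not driver API):

#include <stdint.h>

/* Read the latched interrupt status, acknowledge exactly those bits,
 * then hand off to the retirement path. */
static void handle_gpu_irq(volatile uint32_t *mmio, uint32_t status_reg,
			   uint32_t clear_reg, void (*handle_retire)(void))
{
	uint32_t status = mmio[status_reg];

	mmio[clear_reg] = status;        /* ack only what was latched */
	handle_retire();                 /* cf. msm_gpu_retire() above */
}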
551 static struct msm_gpu_state *a4xx_gpu_state_get(struct msm_gpu *gpu) in a4xx_gpu_state_get() argument
558 adreno_gpu_state_get(gpu, state); in a4xx_gpu_state_get()
560 state->rbbm_status = gpu_read(gpu, REG_A4XX_RBBM_STATUS); in a4xx_gpu_state_get()
565 static void a4xx_dump(struct msm_gpu *gpu) in a4xx_dump() argument
568 gpu_read(gpu, REG_A4XX_RBBM_STATUS)); in a4xx_dump()
569 adreno_dump(gpu); in a4xx_dump()
572 static int a4xx_pm_resume(struct msm_gpu *gpu) { in a4xx_pm_resume() argument
573 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a4xx_pm_resume()
576 ret = msm_gpu_pm_resume(gpu); in a4xx_pm_resume()
583 gpu_write(gpu, REG_A4XX_RBBM_POWER_CNTL_IP, 0x778000); in a4xx_pm_resume()
586 reg = gpu_read(gpu, REG_A4XX_RBBM_POWER_STATUS); in a4xx_pm_resume()
592 static int a4xx_pm_suspend(struct msm_gpu *gpu) { in a4xx_pm_suspend() argument
593 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a4xx_pm_suspend()
596 ret = msm_gpu_pm_suspend(gpu); in a4xx_pm_suspend()
602 gpu_write(gpu, REG_A4XX_RBBM_POWER_CNTL_IP, 0x778001); in a4xx_pm_suspend()
607 static int a4xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value) in a4xx_get_timestamp() argument
609 *value = gpu_read64(gpu, REG_A4XX_RBBM_PERFCTR_CP_0_LO); in a4xx_get_timestamp()
614 static u64 a4xx_gpu_busy(struct msm_gpu *gpu, unsigned long *out_sample_rate) in a4xx_gpu_busy() argument
618 busy_cycles = gpu_read64(gpu, REG_A4XX_RBBM_PERFCTR_RBBM_1_LO); in a4xx_gpu_busy()
619 *out_sample_rate = clk_get_rate(gpu->core_clk); in a4xx_gpu_busy()
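Both a4xx_get_timestamp() and a4xx_gpu_busy() read their counters through gpu_read64(), which is understood to assemble a 64-bit value from a LO/HI register pair at adjacent dword offsets. A hedged standalone sketch of that read; note the simple lo-then-hi sequence can in principle tear across a carry between halves, which is acceptable for coarse busy/timestamp sampling:

#include <stdint.h>

/* Combine adjacent LO/HI performance-counter registers into one
 * 64-bit value; the HI word sits at the next dword offset. */
static uint64_t read_counter64(volatile uint32_t *mmio, uint32_t lo_reg)
{
	uint64_t lo = mmio[lo_reg];
	uint64_t hi = mmio[lo_reg + 1];

	return (hi << 32) | lo;
}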
624 static u32 a4xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a4xx_get_rptr() argument
626 ring->memptrs->rptr = gpu_read(gpu, REG_A4XX_CP_RB_RPTR); in a4xx_get_rptr()
658 struct msm_gpu *gpu; in a4xx_gpu_init() local
678 gpu = &adreno_gpu->base; in a4xx_gpu_init()
680 gpu->perfcntrs = NULL; in a4xx_gpu_init()
681 gpu->num_perfcntrs = 0; in a4xx_gpu_init()
696 if (!gpu->aspace) { in a4xx_gpu_init()
731 icc_set_bw(icc_path, 0, Bps_to_icc(gpu->fast_rate) * 8); in a4xx_gpu_init()
732 icc_set_bw(ocmem_icc_path, 0, Bps_to_icc(gpu->fast_rate) * 8); in a4xx_gpu_init()
734 return gpu; in a4xx_gpu_init()