Lines matching "cs", "-", "out" in drivers/gpu/drm/i915/gvt/mmio_context.c

2  * Copyright(c) 2011-2016 Intel Corporation. All rights reserved.
20 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
168 struct intel_gvt *gvt = engine->i915->gvt; in load_render_mocs()
169 struct intel_uncore *uncore = engine->uncore; in load_render_mocs()
170 u32 cnt = gvt->engine_mmio_list.mocs_mmio_offset_list_cnt; in load_render_mocs()
171 u32 *regs = gvt->engine_mmio_list.mocs_mmio_offset_list; in load_render_mocs()
180 if (!HAS_ENGINE(engine->gt, ring_id)) in load_render_mocs()
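
The hits above are from load_render_mocs(), which runs once to snapshot the host's MOCS registers into a host-side cache (gen9_render_mocs in this file) so later vGPU switches can diff against the host values; the HAS_ENGINE() check skips rings the GT does not have. A minimal sketch of that one-shot snapshot, with illustrative names (read_mmio32, mocs_cache and NUM_ENGINES are assumptions, not the driver's identifiers):

    #include <stdbool.h>
    #include <stdint.h>

    #define NUM_ENGINES 4    /* illustrative; the driver walks its engine list */
    #define MOCS_SIZE   64   /* matches the driver's GEN9_MOCS_SIZE */

    extern uint32_t read_mmio32(uint32_t offset);    /* assumed MMIO accessor */

    static uint32_t mocs_cache[NUM_ENGINES][MOCS_SIZE];
    static bool mocs_cached;

    static void snapshot_mocs(const uint32_t base[NUM_ENGINES])
    {
        if (mocs_cached)                /* host values are captured only once */
            return;
        for (int e = 0; e < NUM_ENGINES; e++)
            for (int i = 0; i < MOCS_SIZE; i++)     /* 4-byte register stride */
                mocs_cache[e][i] = read_mmio32(base[e] + 4 * i);
        mocs_cached = true;
    }
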
204 u32 *cs; in restore_context_mmio_for_inhibit() local
207 struct intel_gvt *gvt = vgpu->gvt; in restore_context_mmio_for_inhibit()
208 int ring_id = req->engine->id; in restore_context_mmio_for_inhibit()
209 int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id]; in restore_context_mmio_for_inhibit()
214 ret = req->engine->emit_flush(req, EMIT_BARRIER); in restore_context_mmio_for_inhibit()
218 cs = intel_ring_begin(req, count * 2 + 2); in restore_context_mmio_for_inhibit()
219 if (IS_ERR(cs)) in restore_context_mmio_for_inhibit()
220 return PTR_ERR(cs); in restore_context_mmio_for_inhibit()
222 *cs++ = MI_LOAD_REGISTER_IMM(count); in restore_context_mmio_for_inhibit()
223 for (mmio = gvt->engine_mmio_list.mmio; in restore_context_mmio_for_inhibit()
224 i915_mmio_reg_valid(mmio->reg); mmio++) { in restore_context_mmio_for_inhibit()
225 if (mmio->id != ring_id || !mmio->in_context) in restore_context_mmio_for_inhibit()
228 *cs++ = i915_mmio_reg_offset(mmio->reg); in restore_context_mmio_for_inhibit()
229 *cs++ = vgpu_vreg_t(vgpu, mmio->reg) | (mmio->mask << 16); in restore_context_mmio_for_inhibit()
231 *(cs-2), *(cs-1), vgpu->id, ring_id); in restore_context_mmio_for_inhibit()
234 *cs++ = MI_NOOP; in restore_context_mmio_for_inhibit()
235 intel_ring_advance(req, cs); in restore_context_mmio_for_inhibit()
237 ret = req->engine->emit_flush(req, EMIT_BARRIER); in restore_context_mmio_for_inhibit()
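
The count * 2 + 2 reservation above is the exact dword budget of the batch being built: one MI_LOAD_REGISTER_IMM header, two dwords (offset, value) per register, and a trailing MI_NOOP to keep the emission even-dword aligned. A self-contained sketch of that payload layout; the MI_LRI encoding mirrors i915's MI_LOAD_REGISTER_IMM(x) macro (opcode 0x22, length field 2*x-1), and emit_lri is an illustrative name:

    #include <stddef.h>
    #include <stdint.h>

    #define MI_LRI(n) ((0x22u << 23) | (2u * (n) - 1)) /* header: opcode+length */

    static size_t emit_lri(uint32_t *cs, const uint32_t *offsets,
                           const uint32_t *values, unsigned int n)
    {
        size_t len = 0;

        cs[len++] = MI_LRI(n);              /* 1 dword */
        for (unsigned int i = 0; i < n; i++) {
            cs[len++] = offsets[i];         /* register offset */
            cs[len++] = values[i];          /* immediate value to load */
        }
        cs[len++] = 0;                      /* MI_NOOP pad: total n * 2 + 2 */
        return len;
    }
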
249 u32 *cs; in restore_render_mocs_control_for_inhibit() local
251 cs = intel_ring_begin(req, 2 * GEN9_MOCS_SIZE + 2); in restore_render_mocs_control_for_inhibit()
252 if (IS_ERR(cs)) in restore_render_mocs_control_for_inhibit()
253 return PTR_ERR(cs); in restore_render_mocs_control_for_inhibit()
255 *cs++ = MI_LOAD_REGISTER_IMM(GEN9_MOCS_SIZE); in restore_render_mocs_control_for_inhibit()
258 *cs++ = i915_mmio_reg_offset(GEN9_GFX_MOCS(index)); in restore_render_mocs_control_for_inhibit()
259 *cs++ = vgpu_vreg_t(vgpu, GEN9_GFX_MOCS(index)); in restore_render_mocs_control_for_inhibit()
261 *(cs-2), *(cs-1), vgpu->id, req->engine->id); in restore_render_mocs_control_for_inhibit()
265 *cs++ = MI_NOOP; in restore_render_mocs_control_for_inhibit()
266 intel_ring_advance(req, cs); in restore_render_mocs_control_for_inhibit()
276 u32 *cs; in restore_render_mocs_l3cc_for_inhibit() local
278 cs = intel_ring_begin(req, 2 * GEN9_MOCS_SIZE / 2 + 2); in restore_render_mocs_l3cc_for_inhibit()
279 if (IS_ERR(cs)) in restore_render_mocs_l3cc_for_inhibit()
280 return PTR_ERR(cs); in restore_render_mocs_l3cc_for_inhibit()
282 *cs++ = MI_LOAD_REGISTER_IMM(GEN9_MOCS_SIZE / 2); in restore_render_mocs_l3cc_for_inhibit()
285 *cs++ = i915_mmio_reg_offset(GEN9_LNCFCMOCS(index)); in restore_render_mocs_l3cc_for_inhibit()
286 *cs++ = vgpu_vreg_t(vgpu, GEN9_LNCFCMOCS(index)); in restore_render_mocs_l3cc_for_inhibit()
288 *(cs-2), *(cs-1), vgpu->id, req->engine->id); in restore_render_mocs_l3cc_for_inhibit()
292 *cs++ = MI_NOOP; in restore_render_mocs_l3cc_for_inhibit()
293 intel_ring_advance(req, cs); in restore_render_mocs_l3cc_for_inhibit()
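
restore_render_mocs_control_for_inhibit() and restore_render_mocs_l3cc_for_inhibit() reuse the same LRI pattern for the MOCS tables: GEN9_MOCS_SIZE (64) GFX_MOCS control registers, but only GEN9_MOCS_SIZE / 2 (32) LNCFCMOCS registers, because each L3CC register packs two 16-bit table entries. The packing, sketched (l3cc_pack is an illustrative helper; the even/odd split is the usual gen9 convention):

    #include <stdint.h>

    /* Two 16-bit L3 cacheability entries share one 32-bit LNCFCMOCS
     * register, so 64 table entries need only 32 register writes. */
    static inline uint32_t l3cc_pack(uint16_t even_entry, uint16_t odd_entry)
    {
        return (uint32_t)even_entry | ((uint32_t)odd_entry << 16);
    }
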
307 u32 *cs; in intel_vgpu_restore_inhibit_context() local
309 cs = intel_ring_begin(req, 2); in intel_vgpu_restore_inhibit_context()
310 if (IS_ERR(cs)) in intel_vgpu_restore_inhibit_context()
311 return PTR_ERR(cs); in intel_vgpu_restore_inhibit_context()
313 *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE; in intel_vgpu_restore_inhibit_context()
314 *cs++ = MI_NOOP; in intel_vgpu_restore_inhibit_context()
315 intel_ring_advance(req, cs); in intel_vgpu_restore_inhibit_context()
319 goto out; in intel_vgpu_restore_inhibit_context()
322 if (req->engine->id != RCS0) in intel_vgpu_restore_inhibit_context()
323 goto out; in intel_vgpu_restore_inhibit_context()
327 goto out; in intel_vgpu_restore_inhibit_context()
331 goto out; in intel_vgpu_restore_inhibit_context()
333 out: in intel_vgpu_restore_inhibit_context()
334 cs = intel_ring_begin(req, 2); in intel_vgpu_restore_inhibit_context()
335 if (IS_ERR(cs)) in intel_vgpu_restore_inhibit_context()
336 return PTR_ERR(cs); in intel_vgpu_restore_inhibit_context()
338 *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE; in intel_vgpu_restore_inhibit_context()
339 *cs++ = MI_NOOP; in intel_vgpu_restore_inhibit_context()
340 intel_ring_advance(req, cs); in intel_vgpu_restore_inhibit_context()
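
intel_vgpu_restore_inhibit_context() brackets the whole restore between MI_ARB_ON_OFF | MI_ARB_DISABLE and MI_ARB_ENABLE, and every failure path jumps to the out: label so arbitration is re-enabled even when a restore step fails; non-render engines skip the MOCS restores entirely. The control flow, reduced to plain C with illustrative helper names:

    extern int emit_arb(int enable);   /* assumed: emits the MI_ARB_ON_OFF pair */
    extern int restore_ctx_mmio(void); /* stands in for the LRI batches above */
    extern int restore_mocs(void);

    static int restore_inhibit(int is_rcs0)
    {
        int ret = emit_arb(0);          /* disable preemption arbitration */
        if (ret)
            return ret;

        ret = restore_ctx_mmio();
        if (ret)
            goto out;
        if (!is_rcs0)                   /* only render keeps MOCS in context */
            goto out;
        ret = restore_mocs();
    out:
        {
            int ret2 = emit_arb(1);     /* always re-enable arbitration */
            if (ret2)                   /* a later failure wins, as in the */
                return ret2;            /* driver's out: block             */
        }
        return ret;
    }
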
356 struct intel_uncore *uncore = engine->uncore; in handle_tlb_pending_event()
357 struct intel_vgpu_submission *s = &vgpu->submission; in handle_tlb_pending_event()
358 u32 *regs = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list; in handle_tlb_pending_event()
359 u32 cnt = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list_cnt; in handle_tlb_pending_event()
366 if (drm_WARN_ON(&engine->i915->drm, engine->id >= cnt)) in handle_tlb_pending_event()
369 if (!test_and_clear_bit(engine->id, (void *)s->tlb_handle_pending)) in handle_tlb_pending_event()
372 reg = _MMIO(regs[engine->id]); in handle_tlb_pending_event()
381 if (engine->id == RCS0 && GRAPHICS_VER(engine->i915) >= 9) in handle_tlb_pending_event()
390 engine->name); in handle_tlb_pending_event()
396 gvt_dbg_core("invalidate TLB for ring %s\n", engine->name); in handle_tlb_pending_event()
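
handle_tlb_pending_event() consumes a per-engine "TLB invalidate pending" bit with test_and_clear_bit(), writes 0x1 to the engine's TLB register, and polls until the hardware clears it (the render engine on Gen9+ additionally needs render forcewake, per the GRAPHICS_VER check above). The atomic consume step, sketched with C11 atomics (tlb_pending and take_pending are illustrative names):

    #include <stdatomic.h>
    #include <stdbool.h>

    static _Atomic unsigned long tlb_pending;    /* one bit per engine id */

    /* Returns true only for the caller that actually cleared the bit, so
     * the invalidate-and-poll sequence runs at most once per pending flag. */
    static bool take_pending(unsigned int engine_id)
    {
        unsigned long bit = 1UL << engine_id;
        return atomic_fetch_and(&tlb_pending, ~bit) & bit;
    }
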
409 struct intel_uncore *uncore = engine->uncore; in switch_mocs()
414 if (drm_WARN_ON(&engine->i915->drm, engine->id >= ARRAY_SIZE(regs))) in switch_mocs()
417 if (engine->id == RCS0 && GRAPHICS_VER(engine->i915) == 9) in switch_mocs()
423 offset.reg = regs[engine->id]; in switch_mocs()
428 old_v = gen9_render_mocs.control_table[engine->id][i]; in switch_mocs()
432 new_v = gen9_render_mocs.control_table[engine->id][i]; in switch_mocs()
440 if (engine->id == RCS0) { in switch_mocs()
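
switch_mocs() walks the per-engine MOCS range (and, for RCS0, the shared L3CC range) and writes a register only when the outgoing value differs from the incoming one, reading each side either from the vGPU's virtual registers or from the host cache captured by load_render_mocs(); the Gen9 render engine returns early because its MOCS registers are saved and restored with the context image. The write-avoidance loop, sketched (write_mmio32 and the table names are illustrative):

    #include <stdint.h>

    #define MOCS_SIZE 64

    extern void write_mmio32(uint32_t offset, uint32_t value); /* assumed */

    static void switch_table(uint32_t base, const uint32_t old_tbl[MOCS_SIZE],
                             const uint32_t new_tbl[MOCS_SIZE])
    {
        for (int i = 0; i < MOCS_SIZE; i++)
            if (old_tbl[i] != new_tbl[i])       /* skip unchanged entries */
                write_mmio32(base + 4 * i, new_tbl[i]);
    }
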
464 const u32 *reg_state = ce->lrc_reg_state; in is_inhibit_context()
477 struct intel_uncore *uncore = engine->uncore; in switch_mmio()
482 if (GRAPHICS_VER(engine->i915) >= 9) in switch_mmio()
485 for (mmio = engine->i915->gvt->engine_mmio_list.mmio; in switch_mmio()
486 i915_mmio_reg_valid(mmio->reg); mmio++) { in switch_mmio()
487 if (mmio->id != engine->id) in switch_mmio()
494 if (GRAPHICS_VER(engine->i915) == 9 && mmio->in_context) in switch_mmio()
499 vgpu_vreg_t(pre, mmio->reg) = in switch_mmio()
500 intel_uncore_read_fw(uncore, mmio->reg); in switch_mmio()
501 if (mmio->mask) in switch_mmio()
502 vgpu_vreg_t(pre, mmio->reg) &= in switch_mmio()
503 ~(mmio->mask << 16); in switch_mmio()
504 old_v = vgpu_vreg_t(pre, mmio->reg); in switch_mmio()
506 old_v = mmio->value = in switch_mmio()
507 intel_uncore_read_fw(uncore, mmio->reg); in switch_mmio()
512 s = &next->submission; in switch_mmio()
518 if (mmio->in_context && in switch_mmio()
519 !is_inhibit_context(s->shadow[engine->id])) in switch_mmio()
522 if (mmio->mask) in switch_mmio()
523 new_v = vgpu_vreg_t(next, mmio->reg) | in switch_mmio()
524 (mmio->mask << 16); in switch_mmio()
526 new_v = vgpu_vreg_t(next, mmio->reg); in switch_mmio()
528 if (mmio->in_context) in switch_mmio()
530 if (mmio->mask) in switch_mmio()
531 new_v = mmio->value | (mmio->mask << 16); in switch_mmio()
533 new_v = mmio->value; in switch_mmio()
536 intel_uncore_write_fw(uncore, mmio->reg, new_v); in switch_mmio()
538 trace_render_mmio(pre ? pre->id : 0, in switch_mmio()
539 next ? next->id : 0, in switch_mmio()
541 i915_mmio_reg_offset(mmio->reg), in switch_mmio()
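
switch_mmio() saves the outgoing vGPU's register state and loads the incoming one, using the i915 masked-register convention visible above: the top 16 bits of a write select which low bits take effect, so loading ORs in (mask << 16) and saving strips the mask bits with &= ~(mask << 16). On Gen9 the in-context registers are skipped here because the inhibit-context LRI path already covers them, and the restore side skips in-context registers unless is_inhibit_context() (the hit at file line 464) reports that the incoming shadow context will not reload them from its image. The convention, sketched with illustrative helper names:

    #include <stdint.h>

    /* Masked MMIO register: a write changes only the low bits whose
     * mirror bit in the high half is set. */
    static inline uint32_t masked_set(uint16_t mask, uint16_t value)
    {
        return ((uint32_t)mask << 16) | value;   /* arm the mask + payload */
    }

    static inline uint32_t strip_mask(uint32_t saved, uint16_t mask)
    {
        return saved & ~((uint32_t)mask << 16);  /* store without mask bits */
    }
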
550 * intel_gvt_switch_mmio - switch mmio context of specific engine
563 engine->name)) in intel_gvt_switch_mmio()
566 gvt_dbg_render("switch ring %s from %s to %s\n", engine->name, in intel_gvt_switch_mmio()
574 intel_uncore_forcewake_get(engine->uncore, FORCEWAKE_ALL); in intel_gvt_switch_mmio()
576 intel_uncore_forcewake_put(engine->uncore, FORCEWAKE_ALL); in intel_gvt_switch_mmio()
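
intel_gvt_switch_mmio() takes FORCEWAKE_ALL once around the whole switch because switch_mmio() uses the raw intel_uncore_read_fw()/intel_uncore_write_fw() accessors, which skip per-access forcewake handling. A sketch of the bracket; the call between the get/put (file line 575, not shown in the hits) is inferred from the surrounding context:

    intel_uncore_forcewake_get(engine->uncore, FORCEWAKE_ALL);
    switch_mmio(pre, next, engine);   /* raw *_fw() accessors are safe inside */
    intel_uncore_forcewake_put(engine->uncore, FORCEWAKE_ALL);
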
580 * intel_gvt_init_engine_mmio_context - Initiate the engine mmio list
588 if (GRAPHICS_VER(gvt->gt->i915) >= 9) { in intel_gvt_init_engine_mmio_context()
589 gvt->engine_mmio_list.mmio = gen9_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
590 gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
591 gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
592 gvt->engine_mmio_list.mocs_mmio_offset_list = gen9_mocs_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
593 gvt->engine_mmio_list.mocs_mmio_offset_list_cnt = ARRAY_SIZE(gen9_mocs_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
595 gvt->engine_mmio_list.mmio = gen8_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
596 gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
597 gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
600 for (mmio = gvt->engine_mmio_list.mmio; in intel_gvt_init_engine_mmio_context()
601 i915_mmio_reg_valid(mmio->reg); mmio++) { in intel_gvt_init_engine_mmio_context()
602 if (mmio->in_context) { in intel_gvt_init_engine_mmio_context()
603 gvt->engine_mmio_list.ctx_mmio_count[mmio->id]++; in intel_gvt_init_engine_mmio_context()
604 intel_gvt_mmio_set_sr_in_ctx(gvt, mmio->reg.reg); in intel_gvt_init_engine_mmio_context()
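
intel_gvt_init_engine_mmio_context() selects the Gen9 or Gen8 register tables, then pre-counts the in-context entries per engine and flags each such register with intel_gvt_mmio_set_sr_in_ctx(); the resulting ctx_mmio_count[ring_id] is exactly the count later passed to MI_LOAD_REGISTER_IMM() in restore_context_mmio_for_inhibit(). The counting pass, sketched with illustrative types:

    struct engine_mmio_entry { int id; int in_context; int valid; };

    static void count_ctx_mmio(const struct engine_mmio_entry *table,
                               unsigned int ctx_mmio_count[])
    {
        /* table ends with an invalid entry, like the driver's
         * i915_mmio_reg_valid() sentinel */
        for (const struct engine_mmio_entry *m = table; m->valid; m++)
            if (m->in_context)
                ctx_mmio_count[m->id]++;  /* sizes the per-engine LRI later */
    }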