/openbmc/linux/arch/arm64/kvm/vgic/
vgic.c
    98  return &vcpu->arch.vgic_cpu.private_irqs[intid];  in vgic_get_irq()
   154  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_flush_pending_lpis() [local]
   158  raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);  in vgic_flush_pending_lpis()
   160  list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {  in vgic_flush_pending_lpis()
   170  raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);  in vgic_flush_pending_lpis()
   301  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_sort_ap_list() [local]
   303  lockdep_assert_held(&vgic_cpu->ap_list_lock);  in vgic_sort_ap_list()
   305  list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp);  in vgic_sort_ap_list()
   381  raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);  in vgic_queue_irq_unlock()
   398  raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock,  in vgic_queue_irq_unlock()
   [all …]
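The vgic_flush_pending_lpis() and vgic_queue_irq_unlock() hits above all follow the same discipline: the per-vCPU ap_list_lock is taken with raw_spin_lock_irqsave(), and ap_list_head is walked with the _safe list iterator so entries may be unlinked during the walk. A minimal sketch of that shape, with the per-IRQ handling simplified (the real function also clears the IRQ's vcpu pointer and drops a reference, which the listing truncates):

static void flush_pending_lpis_sketch(struct kvm_vcpu *vcpu)
{
	struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
	struct vgic_irq *irq, *tmp;
	unsigned long flags;

	/* IRQs off while holding the AP list lock, as in the hits above. */
	raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

	list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
		if (irq->intid >= VGIC_MIN_LPI)
			list_del(&irq->ap_list);	/* _safe iterator permits unlinking */
	}

	raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
}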
vgic-mmio-v3.c
   240  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_lpis_enabled() [local]
   242  return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS;  in vgic_lpis_enabled()
   248  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_mmio_read_v3r_ctlr() [local]
   251  val = atomic_read(&vgic_cpu->ctlr);  in vgic_mmio_read_v3r_ctlr()
   262  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_mmio_write_v3r_ctlr() [local]
   273  ctlr = atomic_cmpxchg_acquire(&vgic_cpu->ctlr,  in vgic_mmio_write_v3r_ctlr()
   281  atomic_set_release(&vgic_cpu->ctlr, 0);  in vgic_mmio_write_v3r_ctlr()
   283  ctlr = atomic_cmpxchg_acquire(&vgic_cpu->ctlr, 0,  in vgic_mmio_write_v3r_ctlr()
   295  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_mmio_vcpu_rdist_is_last() [local]
   296  struct vgic_redist_region *iter, *rdreg = vgic_cpu->rdreg;  in vgic_mmio_vcpu_rdist_is_last()
   [all …]
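These redistributor CTLR hits treat vgic_cpu->ctlr as an atomic_t: plain reads use atomic_read(), and the EnableLPIs transition is done with atomic_cmpxchg_acquire()/atomic_set_release() so only one writer wins each transition. A hedged sketch of the enable/disable shape (the kernel's write handler does more around these operations than shown here):

static void redist_ctlr_write_sketch(struct vgic_cpu *vgic_cpu, bool enable_lpis)
{
	if (enable_lpis) {
		/* Only the caller that observes 0 performs the enable work. */
		if (atomic_cmpxchg_acquire(&vgic_cpu->ctlr, 0,
					   GICR_CTLR_ENABLE_LPIS) != 0)
			return;
		/* ... map/sync LPI state for this redistributor ... */
	} else {
		/* ... quiesce LPIs, then publish the disabled state ... */
		atomic_set_release(&vgic_cpu->ctlr, 0);
	}
}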
vgic-v2.c
    31  struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2;  in vgic_v2_set_underflow()
    51  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_v2_fold_lr_state() [local]
    52  struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;  in vgic_v2_fold_lr_state()
    59  for (lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++) {  in vgic_v2_fold_lr_state()
   197  vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val;  in vgic_v2_populate_lr()
   202  vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0;  in vgic_v2_clear_lr()
   207  struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;  in vgic_v2_set_vmcr()
   234  struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;  in vgic_v2_get_vmcr()
   267  vcpu->arch.vgic_cpu.vgic_v2.vgic_vmcr = 0;  in vgic_v2_enable()
   270  vcpu->arch.vgic_cpu.vgic_v2.vgic_hcr = GICH_HCR_EN;  in vgic_v2_enable()
   [all …]
vgic-init.c
   196  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in kvm_vgic_vcpu_init() [local]
   201  vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;  in kvm_vgic_vcpu_init()
   203  INIT_LIST_HEAD(&vgic_cpu->ap_list_head);  in kvm_vgic_vcpu_init()
   204  raw_spin_lock_init(&vgic_cpu->ap_list_lock);  in kvm_vgic_vcpu_init()
   205  atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);  in kvm_vgic_vcpu_init()
   212  struct vgic_irq *irq = &vgic_cpu->private_irqs[i];  in kvm_vgic_vcpu_init()
   288  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_init() [local]
   291  struct vgic_irq *irq = &vgic_cpu->private_irqs[i];  in vgic_init()
   373  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in __kvm_vgic_vcpu_destroy() [local]
   381  INIT_LIST_HEAD(&vgic_cpu->ap_list_head);  in __kvm_vgic_vcpu_destroy()
   [all …]
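Read in order, the kvm_vgic_vcpu_init() hits give the per-vCPU setup sequence: mark the redistributor I/O device as not yet mapped, initialise the AP list head and its raw spinlock, clear the vLPI count, then initialise each entry of the private-interrupt (SGI/PPI) array. A rough sketch, assuming VGIC_NR_PRIVATE_IRQS bounds the loop and with the per-IRQ field setup simplified:

static void vgic_vcpu_init_sketch(struct kvm_vcpu *vcpu)
{
	struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
	int i;

	vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;	/* redistributor not mapped yet */

	INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
	raw_spin_lock_init(&vgic_cpu->ap_list_lock);
	atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);

	for (i = 0; i < VGIC_NR_PRIVATE_IRQS; i++) {
		struct vgic_irq *irq = &vgic_cpu->private_irqs[i];

		/* Illustrative per-IRQ defaults; the real init sets more fields. */
		irq->intid = i;
		irq->target_vcpu = vcpu;
		raw_spin_lock_init(&irq->irq_lock);
	}
}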
vgic-v3.c
    24  struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_set_underflow()
    37  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_v3_fold_lr_state() [local]
    38  struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3;  in vgic_v3_fold_lr_state()
   186  vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val;  in vgic_v3_populate_lr()
   191  vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0;  in vgic_v3_clear_lr()
   196  struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_set_vmcr()
   226  struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_get_vmcr()
   262  struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_enable()
   281  vcpu->arch.vgic_cpu.pendbaser = INITIAL_PENDBASER_VALUE;  in vgic_v3_enable()
   286  vcpu->arch.vgic_cpu.num_id_bits = (kvm_vgic_global_state.ich_vtr_el2 &  in vgic_v3_enable()
   [all …]
vgic-v4.c
    98  raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);  in vgic_v4_doorbell_handler()
    99  vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true;  in vgic_v4_doorbell_handler()
   100  raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);  in vgic_v4_doorbell_handler()
   117  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_enable_vsgis()
   215  struct its_vpe *vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_get_vlpi_state()
   264  dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_init()
   341  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_put()
   351  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_load()
   387  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_commit()
   447  .vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe,  in kvm_vgic_v4_set_forwarding()
   [all …]
vgic-mmio-v2.c
   373  return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr;  in vgic_mmio_read_apr()
   375  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_mmio_read_apr()
   399  vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val;  in vgic_mmio_write_apr()
   401  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_mmio_write_apr()
vgic.h
   300  struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu;  in vgic_v3_max_apr_idx()
vgic-its.c
   372  map.vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in update_affinity()
   430  gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);  in its_sync_lpi_pending_table()
  1374  if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.its_vm)  in vgic_its_invall()
  1375  its_invall_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe);  in vgic_its_invall()
  1869  if (!(vcpu->arch.vgic_cpu.pendbaser & GICR_PENDBASER_PTZ))  in vgic_enable_lpis()
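The vgic-its.c hits connect the cached GICR_PENDBASER value to LPI handling: its_sync_lpi_pending_table() derives the pending-table address from vcpu->arch.vgic_cpu.pendbaser, and vgic_enable_lpis() only scans it when the guest has not set the PTZ (pending table zero) bit. In outline, with the actual table scan omitted:

static void enable_lpis_sketch(struct kvm_vcpu *vcpu)
{
	u64 pendbaser = vcpu->arch.vgic_cpu.pendbaser;

	if (pendbaser & GICR_PENDBASER_PTZ)
		return;	/* guest declared the pending table zeroed, nothing to scan */

	/* Otherwise read the pending bitmap at GICR_PENDBASER_ADDRESS(pendbaser)
	 * from guest memory and mark the corresponding LPIs pending. */
}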
/openbmc/linux/arch/arm64/kvm/
vgic-sys-reg-v3.c
    17  struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;  in set_gic_ctlr()
    62  struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;  in get_gic_ctlr()
   210  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in set_apr_reg()
   220  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in get_apr_reg()
   293  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in get_gic_sre()
sys_regs.c
   356  p->regval = vcpu->arch.vgic_cpu.vgic_v3.vgic_sre;  in access_gic_sre()
/openbmc/linux/arch/arm64/kvm/hyp/nvhe/
switch.c
   119  __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_save_state()
   120  __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_save_state()
   128  __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_restore_state()
   129  __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_restore_state()
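The nVHE switch.c hits pin down the ordering around the world switch: on the way out the GICv3 CPU interface is saved before its traps are deactivated, and on the way in the traps are activated before the state is restored. In outline (the kernel wraps these calls in a GICv3 capability check that the listing truncates):

static void hyp_vgic_save_sketch(struct kvm_vcpu *vcpu)
{
	__vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
	__vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
}

static void hyp_vgic_restore_sketch(struct kvm_vcpu *vcpu)
{
	__vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
	__vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
}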
hyp-main.c
    49  hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3 = host_vcpu->arch.vgic_cpu.vgic_v3;  in flush_hyp_vcpu()
    55  struct vgic_v3_cpu_if *hyp_cpu_if = &hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3;  in sync_hyp_vcpu()
    56  struct vgic_v3_cpu_if *host_cpu_if = &host_vcpu->arch.vgic_cpu.vgic_v3;  in sync_hyp_vcpu()
/openbmc/linux/include/kvm/
arm_vgic.h
   325  struct vgic_cpu {  [struct]
/openbmc/linux/arch/arm64/include/asm/
kvm_host.h
   552  struct vgic_cpu vgic_cpu;  [member]
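The two header hits explain why every access in this listing goes through &vcpu->arch.vgic_cpu: arm_vgic.h defines struct vgic_cpu and kvm_host.h embeds it by value in the per-vCPU arch state, so the GIC CPU-interface state needs no separate allocation. A minimal self-contained model of that containment (all names other than vgic_cpu are illustrative stand-ins for the kernel types):

/* Illustrative containment model; not the kernel's real definitions. */
struct vgic_cpu_model {
	int dummy_state;	/* stands in for the per-vCPU GIC CPU-interface fields */
};

struct kvm_vcpu_arch_model {
	struct vgic_cpu_model vgic_cpu;	/* embedded by value, as in kvm_host.h:552 */
};

struct kvm_vcpu_model {
	struct kvm_vcpu_arch_model arch;
};

static inline struct vgic_cpu_model *vgic_cpu_of(struct kvm_vcpu_model *vcpu)
{
	return &vcpu->arch.vgic_cpu;	/* the access pattern used throughout the listing */
}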
kvm_emulate.h
   114  if (atomic_read(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count) ||  in vcpu_clear_wfx_traps()
/openbmc/linux/arch/arm64/kvm/hyp/
vgic-v3-sr.c
   487  unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;  in __vgic_v3_highest_priority_lr()
   526  unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;  in __vgic_v3_find_active_lr()