Lines Matching +full:migration +full:- +full:compat +full:- +full:aarch64

18 #include "qemu/main-loop.h"
24 #include "target/arm/cpu-features.h"
36 return env->gicv3state; in icc_cs_from_env()
44 * banked even for AArch64, unlike the other CPU system registers. in gicv3_use_ns_bank()
52 return 7 - cs->vprebits; in icv_min_vbpr()
58 int aprmax = 1 << (cs->vprebits - 5); in ich_num_aprs()
59 assert(aprmax <= ARRAY_SIZE(cs->ich_apr[0])); in ich_num_aprs()
113 return extract64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VBPR0_SHIFT, in read_vbpr()
116 return extract64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VBPR1_SHIFT, in read_vbpr()
135 cs->ich_vmcr_el2 = deposit64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VBPR0_SHIFT, in write_vbpr()
138 cs->ich_vmcr_el2 = deposit64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VBPR1_SHIFT, in write_vbpr()
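The read/write helpers above use QEMU's extract64()/deposit64() bit-field helpers (from include/qemu/bitops.h). For readers outside the QEMU tree, a minimal re-derivation with the same semantics (a sketch, assuming 0 < length <= 64 and start + length <= 64):

#include <stdint.h>

static inline uint64_t extract64_sketch(uint64_t value, int start, int length)
{
    /* Return 'length' bits of 'value' beginning at bit 'start'. */
    return (value >> start) & (~0ULL >> (64 - length));
}

static inline uint64_t deposit64_sketch(uint64_t value, int start, int length,
                                        uint64_t fieldval)
{
    /* Replace 'length' bits of 'value' starting at 'start' with 'fieldval'. */
    uint64_t mask = (~0ULL >> (64 - length)) << start;
    return (value & ~mask) | ((fieldval << start) & mask);
}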
150 return (~0U << (8 - cs->vpribits)) & 0xff; in icv_fullprio_mask()
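For concreteness, the arithmetic in icv_min_vbpr(), ich_num_aprs() and icv_fullprio_mask() can be checked in isolation. This is an illustrative sketch only; the vprebits/vpribits values of 5 are assumed example values, not taken from the listing:

#include <assert.h>

static void check_virt_prio_arithmetic(void)
{
    int vprebits = 5, vpribits = 5;             /* assumed example values */

    int min_vbpr = 7 - vprebits;                /* lowest writable ICV_BPR: 2 */
    int num_aprs = 1 << (vprebits - 5);         /* one 32-bit APR word in use */
    unsigned prio_mask = (~0U << (8 - vpribits)) & 0xff;   /* 0xf8 */

    assert(min_vbpr == 2 && num_aprs == 1 && prio_mask == 0xf8);
}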
161 if (cs->ich_apr[GICV3_G1NS][0] & ICV_AP1R_EL1_NMI) { in ich_highest_active_virt_prio()
166 uint32_t apr = cs->ich_apr[GICV3_G0][i] | in ich_highest_active_virt_prio()
167 cs->ich_apr[GICV3_G1NS][i]; in ich_highest_active_virt_prio()
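The scan above ORs the Group 0 and Group 1 active-priority words and, in the full source, converts the lowest set bit back into a running priority. A sketch of that reverse mapping, consistent with the aprbit computation shown later in icv_activate_irq():

static int apr_bit_to_prio(int word, int bit, int vprebits)
{
    /* Bit (word * 32 + bit) of the APR array encodes preemption level
     * word * 32 + bit; shifting it back into the top vprebits bits of an
     * 8-bit priority recovers the active group priority.
     */
    return (word * 32 + bit) << (8 - vprebits);
}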
183 * pseudocode. If no pending virtual interrupts, return -1. in hppvi_index()
190 ARMCPU *cpu = ARM_CPU(cs->cpu); in hppvi_index()
191 CPUARMState *env = &cpu->env; in hppvi_index()
192 int idx = -1; in hppvi_index()
201 if (!(cs->ich_vmcr_el2 & (ICH_VMCR_EL2_VENG0 | ICH_VMCR_EL2_VENG1))) { in hppvi_index()
206 for (i = 0; i < cs->num_list_regs; i++) { in hppvi_index()
207 uint64_t lr = cs->ich_lr_el2[i]; in hppvi_index()
218 if (!(cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG1)) { in hppvi_index()
222 if (!(cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG0)) { in hppvi_index()
240 * when we are in Non-Secure state. in hppvi_index()
242 if (cs->hppvlpi.prio < prio && !arm_is_secure(env)) { in hppvi_index()
243 if (cs->hppvlpi.grp == GICV3_G0) { in hppvi_index()
244 if (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG0) { in hppvi_index()
248 if (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG1) { in hppvi_index()
279 if (group == GICV3_G1NS && cs->ich_vmcr_el2 & ICH_VMCR_EL2_VCBPR) { in icv_gprio_mask()
286 bpr--; in icv_gprio_mask()
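Architecturally, a binary point of bpr splits an 8-bit priority so that bits [7:bpr+1] form the group priority; the bpr-- above reflects that a Group 1 BPR value behaves like a Group 0 value one smaller. A small helper restating that relation (an illustration of the architectural rule, not the exact function body, which the listing elides):

static unsigned group_prio_mask(int bpr)
{
    /* Bits [7:bpr+1] of an 8-bit priority are group priority; the rest
     * are subpriority and are ignored for preemption decisions.
     * e.g. bpr == 2  ->  mask 0xf8, group priority in bits [7:3].
     */
    return (~0U << (bpr + 1)) & 0xff;
}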
297 * Compare also icc_hppi_can_preempt() which is the non-virtual in icv_hppi_can_preempt()
304 if (!(cs->ich_hcr_el2 & ICH_HCR_EL2_EN)) { in icv_hppi_can_preempt()
315 vpmr = extract64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VPMR_SHIFT, in icv_hppi_can_preempt()
341 !(cs->ich_apr[GICV3_G1NS][0] & ICV_AP1R_EL1_NMI)) { in icv_hppi_can_preempt()
352 * We can assume we're Non-secure because hppvi_index() already in icv_hppvlpi_can_preempt()
357 if (!(cs->ich_hcr_el2 & ICH_HCR_EL2_EN)) { in icv_hppvlpi_can_preempt()
362 vpmr = extract64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VPMR_SHIFT, in icv_hppvlpi_can_preempt()
365 if (cs->hppvlpi.prio >= vpmr) { in icv_hppvlpi_can_preempt()
376 mask = icv_gprio_mask(cs, cs->hppvlpi.grp); in icv_hppvlpi_can_preempt()
382 if ((cs->hppvlpi.prio & mask) < (rprio & mask)) { in icv_hppvlpi_can_preempt()
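A quick worked example of the masked comparison above (all numbers assumed for illustration): with a group-priority mask of 0xf8, a pending priority of 0x41 preempts a running priority of 0x48 because (0x41 & 0xf8) == 0x40 is numerically lower (i.e. higher priority) than (0x48 & 0xf8) == 0x48, whereas 0x43 cannot preempt 0x41 since both mask to the same group priority 0x40.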
404 for (i = 0; i < cs->num_list_regs; i++) { in eoi_maintenance_interrupt_state()
405 uint64_t lr = cs->ich_lr_el2[i]; in eoi_maintenance_interrupt_state()
420 if (validcount < 2 && (cs->ich_hcr_el2 & ICH_HCR_EL2_UIE)) { in eoi_maintenance_interrupt_state()
423 if (!seenpending && (cs->ich_hcr_el2 & ICH_HCR_EL2_NPIE)) { in eoi_maintenance_interrupt_state()
443 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_LRENPIE) && in maintenance_interrupt_state()
444 (cs->ich_hcr_el2 & ICH_HCR_EL2_EOICOUNT_MASK)) { in maintenance_interrupt_state()
448 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_VGRP0EIE) && in maintenance_interrupt_state()
449 (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG0)) { in maintenance_interrupt_state()
453 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_VGRP0DIE) && in maintenance_interrupt_state()
454 !(cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG0)) { in maintenance_interrupt_state()

457 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_VGRP1EIE) && in maintenance_interrupt_state()
458 (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG1)) { in maintenance_interrupt_state()
462 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_VGRP1DIE) && in maintenance_interrupt_state()
463 !(cs->ich_vmcr_el2 & ICH_VMCR_EL2_VENG1)) { in maintenance_interrupt_state()
488 cs->hppvlpi.irq, cs->hppvlpi.grp, in gicv3_cpuif_virt_irq_fiq_update()
489 cs->hppvlpi.prio); in gicv3_cpuif_virt_irq_fiq_update()
492 if (cs->hppvlpi.grp == GICV3_G0) { in gicv3_cpuif_virt_irq_fiq_update()
499 uint64_t lr = cs->ich_lr_el2[idx]; in gicv3_cpuif_virt_irq_fiq_update()
505 * non-maskable property. in gicv3_cpuif_virt_irq_fiq_update()
520 qemu_set_irq(cs->parent_vfiq, fiqlevel); in gicv3_cpuif_virt_irq_fiq_update()
521 qemu_set_irq(cs->parent_virq, irqlevel); in gicv3_cpuif_virt_irq_fiq_update()
522 qemu_set_irq(cs->parent_vnmi, nmilevel); in gicv3_cpuif_virt_irq_fiq_update()
534 * to the GIC as a per-CPU interrupt. This means that it in gicv3_cpuif_virt_update()
545 ARMCPU *cpu = ARM_CPU(cs->cpu); in gicv3_cpuif_virt_update()
550 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_EN) && in gicv3_cpuif_virt_update()
556 qemu_set_irq(cpu->gicv3_maintenance_interrupt, maintlevel); in gicv3_cpuif_virt_update()
562 int regno = ri->opc2 & 3; in icv_ap_read()
563 int grp = (ri->crm & 1) ? GICV3_G1NS : GICV3_G0; in icv_ap_read()
564 uint64_t value = cs->ich_apr[grp][regno]; in icv_ap_read()
566 trace_gicv3_icv_ap_read(ri->crm & 1, regno, gicv3_redist_affid(cs), value); in icv_ap_read()
574 int regno = ri->opc2 & 3; in icv_ap_write()
575 int grp = (ri->crm & 1) ? GICV3_G1NS : GICV3_G0; in icv_ap_write()
577 trace_gicv3_icv_ap_write(ri->crm & 1, regno, gicv3_redist_affid(cs), value); in icv_ap_write()
579 if (cs->nmi_support) { in icv_ap_write()
580 cs->ich_apr[grp][regno] = value & (0xFFFFFFFFU | ICV_AP1R_EL1_NMI); in icv_ap_write()
582 cs->ich_apr[grp][regno] = value & 0xFFFFFFFFU; in icv_ap_write()
592 int grp = (ri->crm == 8) ? GICV3_G0 : GICV3_G1NS; in icv_bpr_read()
596 if (grp == GICV3_G1NS && (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VCBPR)) { in icv_bpr_read()
609 trace_gicv3_icv_bpr_read(ri->crm == 8 ? 0 : 1, gicv3_redist_affid(cs), bpr); in icv_bpr_read()
618 int grp = (ri->crm == 8) ? GICV3_G0 : GICV3_G1NS; in icv_bpr_write()
620 trace_gicv3_icv_bpr_write(ri->crm == 8 ? 0 : 1, in icv_bpr_write()
623 if (grp == GICV3_G1NS && (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VCBPR)) { in icv_bpr_write()
638 value = extract64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VPMR_SHIFT, in icv_pmr_read()
654 cs->ich_vmcr_el2 = deposit64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VPMR_SHIFT, in icv_pmr_write()
666 enbit = ri->opc2 & 1 ? ICH_VMCR_EL2_VENG1_SHIFT : ICH_VMCR_EL2_VENG0_SHIFT; in icv_igrpen_read()
667 value = extract64(cs->ich_vmcr_el2, enbit, 1); in icv_igrpen_read()
669 trace_gicv3_icv_igrpen_read(ri->opc2 & 1 ? 1 : 0, in icv_igrpen_read()
680 trace_gicv3_icv_igrpen_write(ri->opc2 & 1 ? 1 : 0, in icv_igrpen_write()
683 enbit = ri->opc2 & 1 ? ICH_VMCR_EL2_VENG1_SHIFT : ICH_VMCR_EL2_VENG0_SHIFT; in icv_igrpen_write()
685 cs->ich_vmcr_el2 = deposit64(cs->ich_vmcr_el2, enbit, 1, value); in icv_igrpen_write()
698 ((cs->vpribits - 1) << ICC_CTLR_EL1_PRIBITS_SHIFT); in icv_ctlr_read()
700 if (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VEOIM) { in icv_ctlr_read()
704 if (cs->ich_vmcr_el2 & ICH_VMCR_EL2_VCBPR) { in icv_ctlr_read()
719 cs->ich_vmcr_el2 = deposit64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VCBPR_SHIFT, in icv_ctlr_write()
721 cs->ich_vmcr_el2 = deposit64(cs->ich_vmcr_el2, ICH_VMCR_EL2_VEOIM_SHIFT, in icv_ctlr_write()
732 if (cs->ich_apr[GICV3_G1NS][0] & ICV_AP1R_EL1_NMI) { in icv_rpr_read()
743 int grp = ri->crm == 8 ? GICV3_G0 : GICV3_G1NS; in icv_hppir_read()
748 if (cs->hppvlpi.grp == grp) { in icv_hppir_read()
749 value = cs->hppvlpi.irq; in icv_hppir_read()
752 uint64_t lr = cs->ich_lr_el2[idx]; in icv_hppir_read()
760 trace_gicv3_icv_hppir_read(ri->crm == 8 ? 0 : 1, in icv_hppir_read()
772 int prio = ich_lr_prio(cs->ich_lr_el2[idx]) & mask; in icv_activate_irq()
773 bool nmi = cs->ich_lr_el2[idx] & ICH_LR_EL2_NMI; in icv_activate_irq()
774 int aprbit = prio >> (8 - cs->vprebits); in icv_activate_irq()
778 cs->ich_lr_el2[idx] &= ~ICH_LR_EL2_STATE_PENDING_BIT; in icv_activate_irq()
779 cs->ich_lr_el2[idx] |= ICH_LR_EL2_STATE_ACTIVE_BIT; in icv_activate_irq()
782 cs->ich_apr[grp][regno] |= ICV_AP1R_EL1_NMI; in icv_activate_irq()
784 cs->ich_apr[grp][regno] |= (1U << regbit); in icv_activate_irq()
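The bookkeeping above maps an activated priority onto a single bit across the active-priority registers. A minimal standalone sketch of that mapping (the array size of 4 is the maximum implied by the listing's ARRAY_SIZE asserts):

#include <stdint.h>

static void set_active_prio_bit(uint32_t apr[4], int prio, int vprebits)
{
    int aprbit = prio >> (8 - vprebits);   /* drop the subpriority bits   */
    int regno  = aprbit / 32;              /* which ICH_AP<g>R<n> word    */
    int regbit = aprbit % 32;              /* which bit within that word  */

    apr[regno] |= 1U << regbit;
}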
790 uint32_t mask = icv_gprio_mask(cs, cs->hppvlpi.grp); in icv_activate_vlpi()
791 int prio = cs->hppvlpi.prio & mask; in icv_activate_vlpi()
792 int aprbit = prio >> (8 - cs->vprebits); in icv_activate_vlpi()
796 cs->ich_apr[cs->hppvlpi.grp][regno] |= (1U << regbit); in icv_activate_vlpi()
797 gicv3_redist_vlpi_pending(cs, cs->hppvlpi.irq, 0); in icv_activate_vlpi()
803 int grp = ri->crm == 8 ? GICV3_G0 : GICV3_G1NS; in icv_iar_read()
809 if (cs->hppvlpi.grp == grp && icv_hppvlpi_can_preempt(cs)) { in icv_iar_read()
810 intid = cs->hppvlpi.irq; in icv_iar_read()
814 uint64_t lr = cs->ich_lr_el2[idx]; in icv_iar_read()
816 bool nmi = env->cp15.sctlr_el[el] & SCTLR_NMI && lr & ICH_LR_EL2_NMI; in icv_iar_read()
828 cs->ich_lr_el2[idx] &= ~ICH_LR_EL2_STATE_PENDING_BIT; in icv_iar_read()
836 trace_gicv3_icv_iar_read(ri->crm == 8 ? 0 : 1, in icv_iar_read()
851 uint64_t lr = cs->ich_lr_el2[idx]; in icv_nmiar1_read()
864 cs->ich_lr_el2[idx] &= ~ICH_LR_EL2_STATE_PENDING_BIT; in icv_nmiar1_read()
888 return (~0U << (8 - cs->pribits)) & 0xff; in icc_fullprio_mask()
894 return 7 - cs->prebits; in icc_min_bpr()
905 int aprmax = 1 << MAX(cs->prebits - 5, 0); in icc_num_aprs()
906 assert(aprmax <= ARRAY_SIZE(cs->icc_apr[0])); in icc_num_aprs()
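On the physical interface prebits can be as small as 4 (the g_assert later in the listing allows pribits down to 4), so prebits - 5 may be negative; the MAX(..., 0) clamp keeps the shift count valid and yields a single active-priority register in that case. The virtual-interface counterpart ich_num_aprs() needs no clamp because vprebits is asserted to be at least 5 further down.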
917 if (cs->nmi_support) { in icc_highest_active_prio()
924 * prioritization of NMI vs non-NMI. in icc_highest_active_prio()
926 if (cs->icc_apr[GICV3_G1][0] & ICC_AP1R_EL1_NMI) { in icc_highest_active_prio()
929 if (cs->icc_apr[GICV3_G1NS][0] & ICC_AP1R_EL1_NMI) { in icc_highest_active_prio()
930 return (cs->gic->gicd_ctlr & GICD_CTLR_DS) ? 0 : 0x80; in icc_highest_active_prio()
935 uint32_t apr = cs->icc_apr[GICV3_G0][i] | in icc_highest_active_prio()
936 cs->icc_apr[GICV3_G1][i] | cs->icc_apr[GICV3_G1NS][i]; in icc_highest_active_prio()
968 if ((group == GICV3_G1 && cs->icc_ctlr_el1[GICV3_S] & ICC_CTLR_EL1_CBPR) || in icc_gprio_mask()
970 cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_CBPR)) { in icc_gprio_mask()
974 bpr = cs->icc_bpr[group] & 7; in icc_gprio_mask()
978 bpr--; in icc_gprio_mask()
990 return cs->hppi.prio == 0xff || (cs->icc_igrpen[cs->hppi.grp] == 0); in icc_no_enabled_hppi()
1000 ARMCPU *cpu = ARM_CPU(cs->cpu); in icc_hppi_can_preempt()
1001 CPUARMState *env = &cpu->env; in icc_hppi_can_preempt()
1007 if (cs->hppi.nmi) { in icc_hppi_can_preempt()
1008 if (!(cs->gic->gicd_ctlr & GICD_CTLR_DS) && in icc_hppi_can_preempt()
1009 cs->hppi.grp == GICV3_G1NS) { in icc_hppi_can_preempt()
1010 if (cs->icc_pmr_el1 < 0x80) { in icc_hppi_can_preempt()
1013 if (arm_is_secure(env) && cs->icc_pmr_el1 == 0x80) { in icc_hppi_can_preempt()
1017 } else if (cs->hppi.prio >= cs->icc_pmr_el1) { in icc_hppi_can_preempt()
1028 mask = icc_gprio_mask(cs, cs->hppi.grp); in icc_hppi_can_preempt()
1033 if ((cs->hppi.prio & mask) < (rprio & mask)) { in icc_hppi_can_preempt()
1037 if (cs->hppi.nmi && (cs->hppi.prio & mask) == (rprio & mask)) { in icc_hppi_can_preempt()
1038 if (!(cs->icc_apr[cs->hppi.grp][0] & ICC_AP1R_EL1_NMI)) { in icc_hppi_can_preempt()
1052 ARMCPU *cpu = ARM_CPU(cs->cpu); in gicv3_cpuif_update()
1053 CPUARMState *env = &cpu->env; in gicv3_cpuif_update()
1057 trace_gicv3_cpuif_update(gicv3_redist_affid(cs), cs->hppi.irq, in gicv3_cpuif_update()
1058 cs->hppi.grp, cs->hppi.prio); in gicv3_cpuif_update()
1060 if (cs->hppi.grp == GICV3_G1 && !arm_feature(env, ARM_FEATURE_EL3)) { in gicv3_cpuif_update()
1061 /* If a Security-enabled GIC sends a G1S interrupt to a in gicv3_cpuif_update()
1062 * Security-disabled CPU, we must treat it as if it were G0. in gicv3_cpuif_update()
1064 cs->hppi.grp = GICV3_G0; in gicv3_cpuif_update()
1073 switch (cs->hppi.grp) { in gicv3_cpuif_update()
1090 } else if (cs->hppi.nmi) { in gicv3_cpuif_update()
1099 qemu_set_irq(cs->parent_fiq, fiqlevel); in gicv3_cpuif_update()
1100 qemu_set_irq(cs->parent_irq, irqlevel); in gicv3_cpuif_update()
1101 qemu_set_irq(cs->parent_nmi, nmilevel); in gicv3_cpuif_update()
1107 uint32_t value = cs->icc_pmr_el1; in icc_pmr_read()
1114 (env->cp15.scr_el3 & SCR_FIQ)) { in icc_pmr_read()
1143 (env->cp15.scr_el3 & SCR_FIQ)) { in icc_pmr_write()
1147 if (!(cs->icc_pmr_el1 & 0x80)) { in icc_pmr_write()
1154 cs->icc_pmr_el1 = value; in icc_pmr_write()
1163 uint32_t mask = icc_gprio_mask(cs, cs->hppi.grp); in icc_activate_irq()
1164 int prio = cs->hppi.prio & mask; in icc_activate_irq()
1165 int aprbit = prio >> (8 - cs->prebits); in icc_activate_irq()
1168 bool nmi = cs->hppi.nmi; in icc_activate_irq()
1171 cs->icc_apr[cs->hppi.grp][regno] |= ICC_AP1R_EL1_NMI; in icc_activate_irq()
1173 cs->icc_apr[cs->hppi.grp][regno] |= (1U << regbit); in icc_activate_irq()
1177 cs->gicr_iactiver0 = deposit32(cs->gicr_iactiver0, irq, 1, 1); in icc_activate_irq()
1178 cs->gicr_ipendr0 = deposit32(cs->gicr_ipendr0, irq, 1, 0); in icc_activate_irq()
1181 gicv3_gicd_active_set(cs->gic, irq); in icc_activate_irq()
1182 gicv3_gicd_pending_clear(cs->gic, irq); in icc_activate_irq()
1183 gicv3_update(cs->gic, irq, 1); in icc_activate_irq()
1205 irq_is_secure = (!(cs->gic->gicd_ctlr & GICD_CTLR_DS) && in icc_hppir0_value()
1206 (cs->hppi.grp != GICV3_G1NS)); in icc_hppir0_value()
1208 if (cs->hppi.grp != GICV3_G0 && !arm_is_el3_or_mon(env)) { in icc_hppir0_value()
1216 if (cs->hppi.grp != GICV3_G0) { in icc_hppir0_value()
1223 return cs->hppi.irq; in icc_hppir0_value()
1242 irq_is_secure = (!(cs->gic->gicd_ctlr & GICD_CTLR_DS) && in icc_hppir1_value()
1243 (cs->hppi.grp != GICV3_G1NS)); in icc_hppir1_value()
1245 if (cs->hppi.grp == GICV3_G0) { in icc_hppir1_value()
1251 /* Secure interrupts not visible in Non-secure */ in icc_hppir1_value()
1255 /* Group 1 non-secure interrupts not visible in Secure EL1 */ in icc_hppir1_value()
1259 return cs->hppi.irq; in icc_hppir1_value()
1302 if (cs->hppi.nmi && env->cp15.sctlr_el[el] & SCTLR_NMI) { in icc_iar1_read()
1329 if (!cs->hppi.nmi) { in icc_nmiar1_read()
1361 uint64_t *papr = &cs->icc_apr[grp][i]; in icc_drop_prio()
1367 if (i == 0 && cs->nmi_support && (*papr & ICC_AP1R_EL1_NMI)) { in icc_drop_prio()
1373 *papr &= *papr - 1; in icc_drop_prio()
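The *papr &= *papr - 1 line uses the standard clear-lowest-set-bit idiom: the lowest-numbered set bit corresponds to the highest active priority, which is the one being dropped. A self-contained illustration (the values are made up):

#include <assert.h>
#include <stdint.h>

static void demo_clear_lowest_set_bit(void)
{
    uint32_t apr = 0x00000140;   /* bits 6 and 8 set: two active priorities */
    apr &= apr - 1;              /* clears bit 6, the lowest-numbered set bit */
    assert(apr == 0x00000100);   /* only the lower-priority entry remains */
}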
1387 return cs->icc_ctlr_el3 & ICC_CTLR_EL3_EOIMODE_EL3; in icc_eoi_split()
1390 return cs->icc_ctlr_el1[GICV3_S] & ICC_CTLR_EL1_EOIMODE; in icc_eoi_split()
1392 return cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_EOIMODE; in icc_eoi_split()
1406 if (cs->nmi_support) { in icc_highest_active_group()
1407 if (cs->icc_apr[GICV3_G1][0] & ICC_AP1R_EL1_NMI) { in icc_highest_active_group()
1410 if (cs->icc_apr[GICV3_G1NS][0] & ICC_AP1R_EL1_NMI) { in icc_highest_active_group()
1415 for (i = 0; i < ARRAY_SIZE(cs->icc_apr[0]); i++) { in icc_highest_active_group()
1416 int g0ctz = ctz32(cs->icc_apr[GICV3_G0][i]); in icc_highest_active_group()
1417 int g1ctz = ctz32(cs->icc_apr[GICV3_G1][i]); in icc_highest_active_group()
1418 int g1nsctz = ctz32(cs->icc_apr[GICV3_G1NS][i]); in icc_highest_active_group()
1430 /* No set active bits? UNPREDICTABLE; return -1 so the caller in icc_highest_active_group()
1433 return -1; in icc_highest_active_group()
1439 cs->gicr_iactiver0 = deposit32(cs->gicr_iactiver0, irq, 1, 0); in icc_deactivate_irq()
1442 gicv3_gicd_active_clear(cs->gic, irq); in icc_deactivate_irq()
1443 gicv3_update(cs->gic, irq, 1); in icc_deactivate_irq()
1452 return cs->ich_vmcr_el2 & ICH_VMCR_EL2_VEOIM; in icv_eoi_split()
1458 * of the corresponding list register, or -1 if there is no match. in icv_find_active()
1463 for (i = 0; i < cs->num_list_regs; i++) { in icv_find_active()
1464 uint64_t lr = cs->ich_lr_el2[i]; in icv_find_active()
1471 return -1; in icv_find_active()
1477 uint64_t lr = cs->ich_lr_el2[idx]; in icv_deactivate_irq()
1488 /* Clear the 'active' part of the state, so ActivePending->Pending in icv_deactivate_irq()
1489 * and Active->Invalid. in icv_deactivate_irq()
1492 cs->ich_lr_el2[idx] = lr; in icv_deactivate_irq()
1498 int eoicount = extract64(cs->ich_hcr_el2, ICH_HCR_EL2_EOICOUNT_SHIFT, in icv_increment_eoicount()
1501 cs->ich_hcr_el2 = deposit64(cs->ich_hcr_el2, ICH_HCR_EL2_EOICOUNT_SHIFT, in icv_increment_eoicount()
1519 uint64_t *papr0 = &cs->ich_apr[GICV3_G0][i]; in icv_drop_prio()
1520 uint64_t *papr1 = &cs->ich_apr[GICV3_G1NS][i]; in icv_drop_prio()
1527 if (i == 0 && cs->nmi_support && (*papr1 & ICV_AP1R_EL1_NMI)) { in icv_drop_prio()
1533 /* We can't just use the bit-twiddling hack icc_drop_prio() does in icv_drop_prio()
1541 *papr0 &= *papr0 - 1; in icv_drop_prio()
1544 *papr1 &= *papr1 - 1; in icv_drop_prio()
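A sketch of the idea the loop above implements: clear the single lowest-numbered set bit across the Group 0 and Group 1 words, with Group 0 winning ties. This is a restatement for illustration, not the exact QEMU control flow, and the helper name is hypothetical:

#include <stdint.h>

static void drop_one_virt_prio(uint32_t *apr0, uint32_t *apr1)
{
    /* __builtin_ctz() is undefined for 0, so treat an empty word as "no bit" */
    int b0 = *apr0 ? __builtin_ctz(*apr0) : 32;
    int b1 = *apr1 ? __builtin_ctz(*apr1) : 32;

    if (b0 <= b1 && b0 < 32) {
        *apr0 &= *apr0 - 1;      /* Group 0 owns the highest active priority */
    } else if (b1 < 32) {
        *apr1 &= *apr1 - 1;      /* Group 1 owns it */
    }
}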
1590 int grp = ri->crm == 8 ? GICV3_G0 : GICV3_G1NS; in icv_eoir_write()
1594 trace_gicv3_icv_eoir_write(ri->crm == 8 ? 0 : 1, in icv_eoir_write()
1625 uint64_t lr = cs->ich_lr_el2[idx]; in icv_eoir_write()
1652 bool is_eoir0 = ri->crm == 8; in icc_eoir_write()
1662 if ((irq >= cs->gic->num_irq) && in icc_eoir_write()
1663 !(cs->gic->lpi_enable && (irq >= GICV3_LPI_INTID_START))) { in icc_eoir_write()
1665 * 1. If software writes the ID of a spurious interrupt [ie 1020-1023] in icc_eoir_write()
1667 * 2. If software writes the number of a non-existent interrupt in icc_eoir_write()
1681 if (!(cs->gic->gicd_ctlr & GICD_CTLR_DS) in icc_eoir_write()
1747 int grp = (ri->crm == 8) ? GICV3_G0 : GICV3_G1; in icc_bpr_read()
1760 (cs->icc_ctlr_el1[GICV3_S] & ICC_CTLR_EL1_CBPR)) { in icc_bpr_read()
1768 (cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_CBPR)) { in icc_bpr_read()
1774 bpr = cs->icc_bpr[grp]; in icc_bpr_read()
1780 trace_gicv3_icc_bpr_read(ri->crm == 8 ? 0 : 1, gicv3_redist_affid(cs), bpr); in icc_bpr_read()
1789 int grp = (ri->crm == 8) ? GICV3_G0 : GICV3_G1; in icc_bpr_write()
1797 trace_gicv3_icc_bpr_write(ri->crm == 8 ? 0 : 1, in icc_bpr_write()
1805 (cs->icc_ctlr_el1[GICV3_S] & ICC_CTLR_EL1_CBPR)) { in icc_bpr_write()
1813 (cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_CBPR)) { in icc_bpr_write()
1823 cs->icc_bpr[grp] = value & 7; in icc_bpr_write()
1832 int regno = ri->opc2 & 3; in icc_ap_read()
1833 int grp = (ri->crm & 1) ? GICV3_G1 : GICV3_G0; in icc_ap_read()
1843 value = cs->icc_apr[grp][regno]; in icc_ap_read()
1845 trace_gicv3_icc_ap_read(ri->crm & 1, regno, gicv3_redist_affid(cs), value); in icc_ap_read()
1854 int regno = ri->opc2 & 3; in icc_ap_write()
1855 int grp = (ri->crm & 1) ? GICV3_G1 : GICV3_G0; in icc_ap_write()
1862 trace_gicv3_icc_ap_write(ri->crm & 1, regno, gicv3_redist_affid(cs), value); in icc_ap_write()
1868 /* It's not possible to claim that a Non-secure interrupt is active in icc_ap_write()
1869 * at a priority outside the Non-secure range (128..255), since this in icc_ap_write()
1877 if (cs->nmi_support) { in icc_ap_write()
1878 cs->icc_apr[grp][regno] = value & (0xFFFFFFFFU | ICC_AP1R_EL1_NMI); in icc_ap_write()
1880 cs->icc_apr[grp][regno] = value & 0xFFFFFFFFU; in icc_ap_write()
1901 if (irq >= cs->gic->num_irq) { in icc_dir_write()
1910 int grp = gicv3_irq_group(cs->gic, cs, irq); in icc_dir_write()
1912 single_sec_state = cs->gic->gicd_ctlr & GICD_CTLR_DS; in icc_dir_write()
1920 route_fiq_to_el3 = env->cp15.scr_el3 & SCR_FIQ; in icc_dir_write()
1921 route_irq_to_el3 = env->cp15.scr_el3 & SCR_IRQ; in icc_dir_write()
1980 !arm_is_secure(env) && (env->cp15.scr_el3 & SCR_FIQ)) { in icc_rpr_read()
1986 /* Non-idle priority: show the Non-secure view of it */ in icc_rpr_read()
1991 if (cs->nmi_support) { in icc_rpr_read()
1994 if (cs->icc_apr[GICV3_G1NS][0] & ICC_AP1R_EL1_NMI) { in icc_rpr_read()
1998 if (cs->icc_apr[GICV3_G1NS][0] & ICC_AP1R_EL1_NMI) { in icc_rpr_read()
2001 if (cs->icc_apr[GICV3_G1][0] & ICC_AP1R_EL1_NMI) { in icc_rpr_read()
2014 GICv3State *s = cs->gic; in icc_generate_sgi()
2025 if (grp == GICV3_G1 && s->gicd_ctlr & GICD_CTLR_DS) { in icc_generate_sgi()
2036 for (i = 0; i < s->num_cpu; i++) { in icc_generate_sgi()
2037 GICv3CPUState *ocs = &s->cpu[i]; in icc_generate_sgi()
2050 if (ocs->gicr_typer >> 40 != aff) { in icc_generate_sgi()
2053 aff0 = extract64(ocs->gicr_typer, 32, 8); in icc_generate_sgi()
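The two lines above compare affinity fields packed into GICR_TYPER: architecturally, bits [63:32] hold Aff3.Aff2.Aff1.Aff0, so shifting right by 40 isolates the upper three affinity levels and extract64(..., 32, 8) isolates Aff0 (which is then matched against the SGI target list). A hypothetical helper making the split explicit:

#include <stdint.h>

static void split_gicr_typer_affinity(uint64_t gicr_typer,
                                      uint32_t *aff3_aff2_aff1, uint32_t *aff0)
{
    *aff3_aff2_aff1 = (uint32_t)(gicr_typer >> 40);           /* Aff3.Aff2.Aff1 */
    *aff0 = (uint32_t)((gicr_typer >> 32) & 0xff);            /* Aff0 */
}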
2103 int grp = ri->opc2 & 1 ? GICV3_G1 : GICV3_G0; in icc_igrpen_read()
2114 value = cs->icc_igrpen[grp]; in icc_igrpen_read()
2115 trace_gicv3_icc_igrpen_read(ri->opc2 & 1 ? 1 : 0, in icc_igrpen_read()
2124 int grp = ri->opc2 & 1 ? GICV3_G1 : GICV3_G0; in icc_igrpen_write()
2131 trace_gicv3_icc_igrpen_write(ri->opc2 & 1 ? 1 : 0, in icc_igrpen_write()
2138 cs->icc_igrpen[grp] = value & ICC_IGRPEN_ENABLE; in icc_igrpen_write()
2148 value = cs->icc_igrpen[GICV3_G1NS] | (cs->icc_igrpen[GICV3_G1] << 1); in icc_igrpen1_el3_read()
2161 cs->icc_igrpen[GICV3_G1NS] = extract32(value, 0, 1); in icc_igrpen1_el3_write()
2162 cs->icc_igrpen[GICV3_G1] = extract32(value, 1, 1); in icc_igrpen1_el3_write()
2176 value = cs->icc_ctlr_el1[bank]; in icc_ctlr_el1_read()
2196 * for us PMHE is RAZ/WI (we don't implement 1-of-N interrupts or in icc_ctlr_el1_write()
2197 * the associated priority-based routing of them); in icc_ctlr_el1_write()
2201 ((cs->gic->gicd_ctlr & GICD_CTLR_DS) == 0)) { in icc_ctlr_el1_write()
2207 cs->icc_ctlr_el1[bank] &= ~mask; in icc_ctlr_el1_write()
2208 cs->icc_ctlr_el1[bank] |= (value & mask); in icc_ctlr_el1_write()
2218 value = cs->icc_ctlr_el3; in icc_ctlr_el3_read()
2219 if (cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_EOIMODE) { in icc_ctlr_el3_read()
2222 if (cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_CBPR) { in icc_ctlr_el3_read()
2225 if (cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_EOIMODE) { in icc_ctlr_el3_read()
2228 if (cs->icc_ctlr_el1[GICV3_NS] & ICC_CTLR_EL1_CBPR) { in icc_ctlr_el3_read()
2245 cs->icc_ctlr_el1[GICV3_NS] &= ~(ICC_CTLR_EL1_CBPR | ICC_CTLR_EL1_EOIMODE); in icc_ctlr_el3_write()
2247 cs->icc_ctlr_el1[GICV3_NS] |= ICC_CTLR_EL1_EOIMODE; in icc_ctlr_el3_write()
2250 cs->icc_ctlr_el1[GICV3_NS] |= ICC_CTLR_EL1_CBPR; in icc_ctlr_el3_write()
2253 cs->icc_ctlr_el1[GICV3_S] &= ~(ICC_CTLR_EL1_CBPR | ICC_CTLR_EL1_EOIMODE); in icc_ctlr_el3_write()
2255 cs->icc_ctlr_el1[GICV3_S] |= ICC_CTLR_EL1_EOIMODE; in icc_ctlr_el3_write()
2258 cs->icc_ctlr_el1[GICV3_S] |= ICC_CTLR_EL1_CBPR; in icc_ctlr_el3_write()
2264 cs->icc_ctlr_el3 &= ~mask; in icc_ctlr_el3_write()
2265 cs->icc_ctlr_el3 |= (value & mask); in icc_ctlr_el3_write()
2276 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_TC) && in gicv3_irqfiq_access()
2282 if ((env->cp15.scr_el3 & (SCR_FIQ | SCR_IRQ)) == (SCR_FIQ | SCR_IRQ)) { in gicv3_irqfiq_access()
2311 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_TDIR) && in gicv3_dir_access()
2339 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_TALL0) && in gicv3_fiq_access()
2345 if (env->cp15.scr_el3 & SCR_FIQ) { in gicv3_fiq_access()
2375 if ((cs->ich_hcr_el2 & ICH_HCR_EL2_TALL1) && in gicv3_irq_access()
2381 if (env->cp15.scr_el3 & SCR_IRQ) { in gicv3_irq_access()
2408 cs->icc_ctlr_el1[GICV3_S] = ICC_CTLR_EL1_A3V | in icc_reset()
2410 ((cs->pribits - 1) << ICC_CTLR_EL1_PRIBITS_SHIFT); in icc_reset()
2411 cs->icc_ctlr_el1[GICV3_NS] = ICC_CTLR_EL1_A3V | in icc_reset()
2413 ((cs->pribits - 1) << ICC_CTLR_EL1_PRIBITS_SHIFT); in icc_reset()
2414 cs->icc_pmr_el1 = 0; in icc_reset()
2415 cs->icc_bpr[GICV3_G0] = icc_min_bpr(cs); in icc_reset()
2416 cs->icc_bpr[GICV3_G1] = icc_min_bpr(cs); in icc_reset()
2417 cs->icc_bpr[GICV3_G1NS] = icc_min_bpr_ns(cs); in icc_reset()
2418 memset(cs->icc_apr, 0, sizeof(cs->icc_apr)); in icc_reset()
2419 memset(cs->icc_igrpen, 0, sizeof(cs->icc_igrpen)); in icc_reset()
2420 cs->icc_ctlr_el3 = ICC_CTLR_EL3_NDS | ICC_CTLR_EL3_A3V | in icc_reset()
2422 ((cs->pribits - 1) << ICC_CTLR_EL3_PRIBITS_SHIFT); in icc_reset()
2424 memset(cs->ich_apr, 0, sizeof(cs->ich_apr)); in icc_reset()
2425 cs->ich_hcr_el2 = 0; in icc_reset()
2426 memset(cs->ich_lr_el2, 0, sizeof(cs->ich_lr_el2)); in icc_reset()
2427 cs->ich_vmcr_el2 = ICH_VMCR_EL2_VFIQEN | in icc_reset()
2689 int regno = ri->opc2 & 3; in ich_ap_read()
2690 int grp = (ri->crm & 1) ? GICV3_G1NS : GICV3_G0; in ich_ap_read()
2693 value = cs->ich_apr[grp][regno]; in ich_ap_read()
2694 trace_gicv3_ich_ap_read(ri->crm & 1, regno, gicv3_redist_affid(cs), value); in ich_ap_read()
2702 int regno = ri->opc2 & 3; in ich_ap_write()
2703 int grp = (ri->crm & 1) ? GICV3_G1NS : GICV3_G0; in ich_ap_write()
2705 trace_gicv3_ich_ap_write(ri->crm & 1, regno, gicv3_redist_affid(cs), value); in ich_ap_write()
2707 if (cs->nmi_support) { in ich_ap_write()
2708 cs->ich_apr[grp][regno] = value & (0xFFFFFFFFU | ICV_AP1R_EL1_NMI); in ich_ap_write()
2710 cs->ich_apr[grp][regno] = value & 0xFFFFFFFFU; in ich_ap_write()
2718 uint64_t value = cs->ich_hcr_el2; in ich_hcr_read()
2737 cs->ich_hcr_el2 = value; in ich_hcr_write()
2744 uint64_t value = cs->ich_vmcr_el2; in ich_vmcr_read()
2762 cs->ich_vmcr_el2 = value; in ich_vmcr_write()
2775 int regno = ri->opc2 | ((ri->crm & 1) << 3); in ich_lr_read()
2779 * 64-bit reads of the whole LR in ich_lr_read()
2780 * 32-bit reads of the low half of the LR in ich_lr_read()
2781 * 32-bit reads of the high half of the LR in ich_lr_read()
2783 if (ri->state == ARM_CP_STATE_AA32) { in ich_lr_read()
2784 if (ri->crm >= 14) { in ich_lr_read()
2785 value = extract64(cs->ich_lr_el2[regno], 32, 32); in ich_lr_read()
2788 value = extract64(cs->ich_lr_el2[regno], 0, 32); in ich_lr_read()
2792 value = cs->ich_lr_el2[regno]; in ich_lr_read()
2803 int regno = ri->opc2 | ((ri->crm & 1) << 3); in ich_lr_write()
2806 * 64-bit writes to the whole LR in ich_lr_write()
2807 * 32-bit writes to the low half of the LR in ich_lr_write()
2808 * 32-bit writes to the high half of the LR in ich_lr_write()
2810 if (ri->state == ARM_CP_STATE_AA32) { in ich_lr_write()
2811 if (ri->crm >= 14) { in ich_lr_write()
2813 value = deposit64(cs->ich_lr_el2[regno], 32, 32, value); in ich_lr_write()
2816 value = deposit64(cs->ich_lr_el2[regno], 0, 32, value); in ich_lr_write()
2823 if (cs->vpribits < 8) { in ich_lr_write()
2825 8 - cs->vpribits, 0); in ich_lr_write()
2829 if (!cs->nmi_support) { in ich_lr_write()
2833 cs->ich_lr_el2[regno] = value; in ich_lr_write()
2842 value = ((cs->num_list_regs - 1) << ICH_VTR_EL2_LISTREGS_SHIFT) in ich_vtr_read()
2845 | ((cs->vprebits - 1) << ICH_VTR_EL2_PREBITS_SHIFT) in ich_vtr_read()
2846 | ((cs->vpribits - 1) << ICH_VTR_EL2_PRIBITS_SHIFT); in ich_vtr_read()
2848 if (cs->gic->revision < 4) { in ich_vtr_read()
2880 for (i = 0; i < cs->num_list_regs; i++) { in ich_elrsr_read()
2881 uint64_t lr = cs->ich_lr_el2[i]; in ich_elrsr_read()
3026 for (i = 0; i < s->num_cpu; i++) { in gicv3_init_cpuif()
3028 GICv3CPUState *cs = &s->cpu[i]; in gicv3_init_cpuif()
3034 * cpu->gic_num_lrs in gicv3_init_cpuif()
3035 * cpu->gic_vpribits in gicv3_init_cpuif()
3036 * cpu->gic_vprebits in gicv3_init_cpuif()
3037 * cpu->gic_pribits in gicv3_init_cpuif()
3044 * So instead we define the regs with no ri->opaque info, and in gicv3_init_cpuif()
3056 * that is a property of the GIC device in s->nmi_support; in gicv3_init_cpuif()
3057 * cs->nmi_support indicates the CPU interface's support. in gicv3_init_cpuif()
3060 cs->nmi_support = true; in gicv3_init_cpuif()
3067 * of migration, we have a compat property that forces use in gicv3_init_cpuif()
3070 if (s->force_8bit_prio) { in gicv3_init_cpuif()
3071 cs->pribits = 8; in gicv3_init_cpuif()
3073 cs->pribits = cpu->gic_pribits ?: 5; in gicv3_init_cpuif()
3086 cs->prebits = cs->pribits; in gicv3_init_cpuif()
3087 if (cs->prebits == 8) { in gicv3_init_cpuif()
3088 cs->prebits--; in gicv3_init_cpuif()
3094 g_assert(cs->pribits >= 4 && cs->pribits <= 8); in gicv3_init_cpuif()
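A condensed restatement of the priority-bit selection above (defaults as shown in the listing; the function name is hypothetical):

static void pick_priority_bits(int force_8bit_prio, int gic_pribits,
                               int *pribits, int *prebits)
{
    /* The migration-compat property forces 8 priority bits; otherwise use
     * the CPU's advertised value, defaulting to 5 when it provides none.
     */
    *pribits = force_8bit_prio ? 8 : (gic_pribits ? gic_pribits : 5);

    /* Preemption bits track priority bits, except that 8 priority bits
     * implies 7 preemption bits, since 8 preemption bits cannot be
     * expressed.
     */
    *prebits = (*pribits == 8) ? 7 : *pribits;
}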
3100 if (cs->prebits >= 6) { in gicv3_init_cpuif()
3103 if (cs->prebits == 7) { in gicv3_init_cpuif()
3107 if (arm_feature(&cpu->env, ARM_FEATURE_EL2)) { in gicv3_init_cpuif()
3110 cs->num_list_regs = cpu->gic_num_lrs ?: 4; in gicv3_init_cpuif()
3111 cs->vpribits = cpu->gic_vpribits ?: 5; in gicv3_init_cpuif()
3112 cs->vprebits = cpu->gic_vprebits ?: 5; in gicv3_init_cpuif()
3118 g_assert(cs->vprebits <= cs->vpribits); in gicv3_init_cpuif()
3119 g_assert(cs->vprebits >= 5 && cs->vprebits <= 7); in gicv3_init_cpuif()
3120 g_assert(cs->vpribits >= 5 && cs->vpribits <= 8); in gicv3_init_cpuif()
3124 for (j = 0; j < cs->num_list_regs; j++) { in gicv3_init_cpuif()
3125 /* Note that the AArch64 LRs are 64-bit; the AArch32 LRs in gicv3_init_cpuif()
3127 * same encoding as the AArch64 LR) and LRC (the high part). in gicv3_init_cpuif()
3150 if (cs->vprebits >= 6) { in gicv3_init_cpuif()
3153 if (cs->vprebits == 7) { in gicv3_init_cpuif()
3161 * the non-TCG case this is OK, as EL2 and EL3 can't exist. in gicv3_init_cpuif()
3165 assert(!arm_feature(&cpu->env, ARM_FEATURE_EL2)); in gicv3_init_cpuif()
3166 assert(!arm_feature(&cpu->env, ARM_FEATURE_EL3)); in gicv3_init_cpuif()