Lines Matching +full:intc +full:nr-irqs

4  * Copyright (c) 2019-2022, IBM Corporation.
7 * COPYING file in the top-level directory.
26 #include "hw/qdev-properties.h"
72 * fifos of the VC sub-engine in case of overflow.
74 * 0 - IPI,
75 * 1 - HWD,
76 * 2 - NxC,
77 * 3 - INT,
78 * 4 - OS-Queue,
79 * 5 - Pool-Queue,
80 * 6 - Hard-Queue
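A minimal illustrative sketch of the queue indices listed above as a C enum; the type and constant names are assumptions, not identifiers taken from pnv_xive2.c:

/* Hypothetical names; the index values follow the comment above. */
typedef enum VcOverflowQueue {
    VC_QUEUE_IPI  = 0,   /* IPI */
    VC_QUEUE_HWD  = 1,   /* HWD */
    VC_QUEUE_NXC  = 2,   /* NxC */
    VC_QUEUE_INT  = 3,   /* INT */
    VC_QUEUE_OS   = 4,   /* OS-Queue */
    VC_QUEUE_POOL = 5,   /* Pool-Queue */
    VC_QUEUE_HARD = 6,   /* Hard-Queue */
} VcOverflowQueue;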
86 qemu_log_mask(LOG_GUEST_ERROR, "XIVE[%x] - " fmt "\n", \
87 (xive)->chip->chip_id, ## __VA_ARGS__);
94 uint8_t blk = xive->chip->chip_id; in pnv_xive2_block_id()
95 uint64_t cfg_val = xive->cq_regs[CQ_XIVE_CFG >> 3]; in pnv_xive2_block_id()
115 for (i = 0; i < pnv->num_chips; i++) { in pnv_xive2_get_remote()
116 Pnv10Chip *chip10 = PNV10_CHIP(pnv->chips[i]); in pnv_xive2_get_remote()
117 PnvXive2 *xive = &chip10->xive; in pnv_xive2_get_remote()
150 idx_max = vst_tsize / info->size - 1; in pnv_xive2_vst_addr_direct()
154 info->name, idx, idx_max); in pnv_xive2_vst_addr_direct()
159 return vst_addr + idx * info->size; in pnv_xive2_vst_addr_direct()
177 xive2_error(xive, "VST: invalid %s entry %x !?", info->name, idx); in pnv_xive2_vst_addr_indirect()
185 xive2_error(xive, "VST: invalid %s page shift %d", info->name, in pnv_xive2_vst_addr_indirect()
190 vst_per_page = (1ull << page_shift) / info->size; in pnv_xive2_vst_addr_indirect()
201 xive2_error(xive, "VST: invalid %s entry %x !?", info->name, idx); in pnv_xive2_vst_addr_indirect()
212 info->name, idx); in pnv_xive2_vst_addr_indirect()
223 xive->pc_regs[PC_NXC_PROC_CONFIG >> 3]); in pnv_xive2_nvc_table_compress_shift()
230 xive->pc_regs[PC_NXC_PROC_CONFIG >> 3]); in pnv_xive2_nvg_table_compress_shift()
240 if (blk >= info->max_blocks) { in pnv_xive2_vst_addr()
242 blk, info->name, idx); in pnv_xive2_vst_addr()
246 vsd = xive->vsds[type][blk]; in pnv_xive2_vst_addr()
249 blk, info->name, idx); in pnv_xive2_vst_addr()
281 return -1; in pnv_xive2_vst_read()
286 info->size); in pnv_xive2_vst_read()
289 " for VST %s %x/%x\n", addr, info->name, blk, idx); in pnv_xive2_vst_read()
290 return -1; in pnv_xive2_vst_read()
295 #define XIVE_VST_WORD_ALL -1
305 return -1; in pnv_xive2_vst_write()
311 info->size); in pnv_xive2_vst_write()
321 "for VST %s %x/%x\n", addr, info->name, blk, idx); in pnv_xive2_vst_write()
322 return -1; in pnv_xive2_vst_write()
334 return -1; in pnv_xive2_get_pq()
337 *pq = xive_source_esb_get(&xive->ipi_source, idx); in pnv_xive2_get_pq()
348 return -1; in pnv_xive2_set_pq()
351 *pq = xive_source_esb_set(&xive->ipi_source, idx, *pq); in pnv_xive2_set_pq()
414 int ic_topo_id = xive->chip->chip_id; in pnv_xive2_inject_notify()
425 return -1; in pnv_xive2_inject_notify()
446 blk = GETFIELD(VC_ENDC_WATCH_BLOCK_ID, xive->vc_regs[spec_reg]); in pnv_xive2_end_update()
447 idx = GETFIELD(VC_ENDC_WATCH_INDEX, xive->vc_regs[spec_reg]); in pnv_xive2_end_update()
450 endc_watch[i] = cpu_to_be64(xive->vc_regs[data_reg + i]); in pnv_xive2_end_update()
468 blk = GETFIELD(VC_ENDC_WATCH_BLOCK_ID, xive->vc_regs[spec_reg]); in pnv_xive2_end_cache_load()
469 idx = GETFIELD(VC_ENDC_WATCH_INDEX, xive->vc_regs[spec_reg]); in pnv_xive2_end_cache_load()
476 xive->vc_regs[data_reg + i] = be64_to_cpu(endc_watch[i]); in pnv_xive2_end_cache_load()
525 return -1; in pnv_xive2_nxc_to_table_type()
533 uint32_t idx, table_type = -1; in pnv_xive2_nxc_update()
541 nxc_type = GETFIELD(PC_NXC_WATCH_NXC_TYPE, xive->pc_regs[spec_reg]); in pnv_xive2_nxc_update()
542 blk = GETFIELD(PC_NXC_WATCH_BLOCK_ID, xive->pc_regs[spec_reg]); in pnv_xive2_nxc_update()
543 idx = GETFIELD(PC_NXC_WATCH_INDEX, xive->pc_regs[spec_reg]); in pnv_xive2_nxc_update()
548 nxc_watch[i] = cpu_to_be64(xive->pc_regs[data_reg + i]); in pnv_xive2_nxc_update()
558 uint32_t idx, table_type = -1; in pnv_xive2_nxc_cache_load()
566 nxc_type = GETFIELD(PC_NXC_WATCH_NXC_TYPE, xive->pc_regs[spec_reg]); in pnv_xive2_nxc_cache_load()
567 blk = GETFIELD(PC_NXC_WATCH_BLOCK_ID, xive->pc_regs[spec_reg]); in pnv_xive2_nxc_cache_load()
568 idx = GETFIELD(PC_NXC_WATCH_INDEX, xive->pc_regs[spec_reg]); in pnv_xive2_nxc_cache_load()
578 xive->pc_regs[data_reg + i] = be64_to_cpu(nxc_watch[i]); in pnv_xive2_nxc_cache_load()
589 return -1; in pnv_xive2_get_eas()
600 if (xive->cq_regs[CQ_XIVE_CFG >> 3] & CQ_XIVE_CFG_GEN1_TIMA_OS) { in pnv_xive2_get_config()
604 if (xive->cq_regs[CQ_XIVE_CFG >> 3] & CQ_XIVE_CFG_EN_VP_SAVE_RESTORE) { in pnv_xive2_get_config()
609 xive->cq_regs[CQ_XIVE_CFG >> 3]) == CQ_XIVE_CFG_THREADID_8BITS) { in pnv_xive2_get_config()
623 return xive->tctxt_regs[reg >> 3] & PPC_BIT(bit); in pnv_xive2_is_cpu_enabled()
632 PnvChip *chip = xive->chip; in pnv_xive2_match_nvt()
636 xive->cq_regs[CQ_XIVE_CFG >> 3] & CQ_XIVE_CFG_GEN1_TIMA_OS; in pnv_xive2_match_nvt()
638 for (i = 0; i < chip->nr_cores; i++) { in pnv_xive2_match_nvt()
639 PnvCore *pc = chip->cores[i]; in pnv_xive2_match_nvt()
642 for (j = 0; j < cc->nr_threads; j++) { in pnv_xive2_match_nvt()
643 PowerPCCPU *cpu = pc->threads[j]; in pnv_xive2_match_nvt()
651 tctx = XIVE_TCTX(pnv_cpu_state(cpu)->intc); in pnv_xive2_match_nvt()
667 if (ring != -1) { in pnv_xive2_match_nvt()
668 if (match->tctx) { in pnv_xive2_match_nvt()
675 match->ring = ring; in pnv_xive2_match_nvt()
676 match->tctx = tctx; in pnv_xive2_match_nvt()
690 if (xive->cq_regs[CQ_XIVE_CFG >> 3] & CQ_XIVE_CFG_GEN1_TIMA_OS) { in pnv_xive2_presenter_get_config()
709 XivePresenter *xptr = XIVE_TCTX(pnv_cpu_state(cpu)->intc)->xptr; in pnv_xive2_tm_get_xive()
739 uint8_t tsel = GETFIELD(CQ_TAR_SELECT, xive->cq_regs[CQ_TAR >> 3]); in pnv_xive2_stt_set_data()
741 xive->cq_regs[CQ_TAR >> 3]); in pnv_xive2_stt_set_data()
748 xive->tables[tsel][entry] = val; in pnv_xive2_stt_set_data()
752 return -1; in pnv_xive2_stt_set_data()
755 if (xive->cq_regs[CQ_TAR >> 3] & CQ_TAR_AUTOINC) { in pnv_xive2_stt_set_data()
756 xive->cq_regs[CQ_TAR >> 3] = SETFIELD(CQ_TAR_ENTRY_SELECT, in pnv_xive2_stt_set_data()
757 xive->cq_regs[CQ_TAR >> 3], ++entry); in pnv_xive2_stt_set_data()
768 Xive2EndSource *end_xsrc = &xive->end_source; in pnv_xive2_vst_set_exclusive()
769 XiveSource *xsrc = &xive->ipi_source; in pnv_xive2_vst_set_exclusive()
779 xive2_error(xive, "VST: invalid %s page shift %d", info->name, in pnv_xive2_vst_set_exclusive()
788 info->name, vst_addr, page_shift); in pnv_xive2_vst_set_exclusive()
793 xive->vsds[type][blk] = vsd; in pnv_xive2_vst_set_exclusive()
808 if (memory_region_is_mapped(&xsrc->esb_mmio)) { in pnv_xive2_vst_set_exclusive()
809 memory_region_del_subregion(&xive->esb_mmio, &xsrc->esb_mmio); in pnv_xive2_vst_set_exclusive()
812 memory_region_set_size(&xsrc->esb_mmio, vst_tsize * SBE_PER_BYTE in pnv_xive2_vst_set_exclusive()
813 * (1ull << xsrc->esb_shift)); in pnv_xive2_vst_set_exclusive()
816 memory_region_add_subregion(&xive->esb_mmio, 0, &xsrc->esb_mmio); in pnv_xive2_vst_set_exclusive()
826 if (memory_region_is_mapped(&end_xsrc->esb_mmio)) { in pnv_xive2_vst_set_exclusive()
827 memory_region_del_subregion(&xive->end_mmio, &end_xsrc->esb_mmio); in pnv_xive2_vst_set_exclusive()
830 memory_region_set_size(&end_xsrc->esb_mmio, (vst_tsize / info->size) in pnv_xive2_vst_set_exclusive()
831 * (1ull << end_xsrc->esb_shift)); in pnv_xive2_vst_set_exclusive()
833 memory_region_add_subregion(&xive->end_mmio, 0, &end_xsrc->esb_mmio); in pnv_xive2_vst_set_exclusive()
850 * Both PC and VC sub-engines are configured as each uses the Virtual
878 xive->vsds[type][blk] = vsd; in pnv_xive2_vst_set_data()
894 xive->vc_regs[VC_VSD_TABLE_ADDR >> 3]); in pnv_xive2_vc_vst_set_data()
896 xive->vc_regs[VC_VSD_TABLE_ADDR >> 3]); in pnv_xive2_vc_vst_set_data()
919 * Pages 10-255: Reserved
920 * Pages 256-383: Direct mapped Thread Context Area (reads & writes)
922 * Pages 384-511: Reserved
942 { "xive-ic-cq", 0, 1, &pnv_xive2_ic_cq_ops },
943 { "xive-ic-vc", 1, 1, &pnv_xive2_ic_vc_ops },
944 { "xive-ic-pc", 2, 1, &pnv_xive2_ic_pc_ops },
945 { "xive-ic-tctxt", 3, 1, &pnv_xive2_ic_tctxt_ops },
946 { "xive-ic-notify", 4, 1, &pnv_xive2_ic_notify_ops },
948 { "xive-ic-sync", 6, 2, &pnv_xive2_ic_sync_ops },
949 { "xive-ic-lsi", 8, 2, &pnv_xive2_ic_lsi_ops },
950 /* pages 10-255 reserved */
951 { "xive-ic-tm-indirect", 256, 128, &pnv_xive2_ic_tm_indirect_ops },
952 /* pages 384-511 reserved */
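For orientation, a minimal sketch of how a row of the region table above could be turned into a byte offset inside the IC BAR, consistent with the pgoff << ic_shift computation used later when the sub-regions are mapped; the struct and helper below are illustrative assumptions, not the model's actual definitions:

#include <stdint.h>

/* Illustrative only: one entry of the IC region table above. */
struct IcRegionDesc {
    const char *name;    /* e.g. "xive-ic-cq" */
    uint32_t    pgoff;   /* first page within the IC BAR */
    uint32_t    pgsize;  /* number of pages */
};

/* Byte offset of a region inside the IC BAR; ic_shift is 12 (4K) or 16 (64K). */
static inline uint64_t ic_region_offset(const struct IcRegionDesc *r,
                                        unsigned ic_shift)
{
    return (uint64_t)r->pgoff << ic_shift;
}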
969 val = xive->cq_regs[reg]; in pnv_xive2_ic_cq_read()
1003 xive->ic_shift = val & CQ_IC_BAR_64K ? 16 : 12; in pnv_xive2_ic_cq_write()
1005 xive->ic_base = 0; in pnv_xive2_ic_cq_write()
1006 if (xive->cq_regs[reg] & CQ_IC_BAR_VALID) { in pnv_xive2_ic_cq_write()
1007 for (i = 0; i < ARRAY_SIZE(xive->ic_mmios); i++) { in pnv_xive2_ic_cq_write()
1008 memory_region_del_subregion(&xive->ic_mmio, in pnv_xive2_ic_cq_write()
1009 &xive->ic_mmios[i]); in pnv_xive2_ic_cq_write()
1011 memory_region_del_subregion(sysmem, &xive->ic_mmio); in pnv_xive2_ic_cq_write()
1014 xive->ic_base = val & ~(CQ_IC_BAR_VALID | CQ_IC_BAR_64K); in pnv_xive2_ic_cq_write()
1015 if (!(xive->cq_regs[reg] & CQ_IC_BAR_VALID)) { in pnv_xive2_ic_cq_write()
1016 for (i = 0; i < ARRAY_SIZE(xive->ic_mmios); i++) { in pnv_xive2_ic_cq_write()
1017 memory_region_add_subregion(&xive->ic_mmio, in pnv_xive2_ic_cq_write()
1018 pnv_xive2_ic_regions[i].pgoff << xive->ic_shift, in pnv_xive2_ic_cq_write()
1019 &xive->ic_mmios[i]); in pnv_xive2_ic_cq_write()
1021 memory_region_add_subregion(sysmem, xive->ic_base, in pnv_xive2_ic_cq_write()
1022 &xive->ic_mmio); in pnv_xive2_ic_cq_write()
1028 xive->tm_shift = val & CQ_TM_BAR_64K ? 16 : 12; in pnv_xive2_ic_cq_write()
1030 xive->tm_base = 0; in pnv_xive2_ic_cq_write()
1031 if (xive->cq_regs[reg] & CQ_TM_BAR_VALID) { in pnv_xive2_ic_cq_write()
1032 memory_region_del_subregion(sysmem, &xive->tm_mmio); in pnv_xive2_ic_cq_write()
1035 xive->tm_base = val & ~(CQ_TM_BAR_VALID | CQ_TM_BAR_64K); in pnv_xive2_ic_cq_write()
1036 if (!(xive->cq_regs[reg] & CQ_TM_BAR_VALID)) { in pnv_xive2_ic_cq_write()
1037 memory_region_add_subregion(sysmem, xive->tm_base, in pnv_xive2_ic_cq_write()
1038 &xive->tm_mmio); in pnv_xive2_ic_cq_write()
1044 xive->esb_shift = val & CQ_BAR_64K ? 16 : 12; in pnv_xive2_ic_cq_write()
1046 xive->esb_base = 0; in pnv_xive2_ic_cq_write()
1047 if (xive->cq_regs[reg] & CQ_BAR_VALID) { in pnv_xive2_ic_cq_write()
1048 memory_region_del_subregion(sysmem, &xive->esb_mmio); in pnv_xive2_ic_cq_write()
1051 xive->esb_base = val & CQ_BAR_ADDR; in pnv_xive2_ic_cq_write()
1052 if (!(xive->cq_regs[reg] & CQ_BAR_VALID)) { in pnv_xive2_ic_cq_write()
1053 memory_region_set_size(&xive->esb_mmio, in pnv_xive2_ic_cq_write()
1055 memory_region_add_subregion(sysmem, xive->esb_base, in pnv_xive2_ic_cq_write()
1056 &xive->esb_mmio); in pnv_xive2_ic_cq_write()
1062 xive->end_shift = val & CQ_BAR_64K ? 16 : 12; in pnv_xive2_ic_cq_write()
1064 xive->end_base = 0; in pnv_xive2_ic_cq_write()
1065 if (xive->cq_regs[reg] & CQ_BAR_VALID) { in pnv_xive2_ic_cq_write()
1066 memory_region_del_subregion(sysmem, &xive->end_mmio); in pnv_xive2_ic_cq_write()
1069 xive->end_base = val & CQ_BAR_ADDR; in pnv_xive2_ic_cq_write()
1070 if (!(xive->cq_regs[reg] & CQ_BAR_VALID)) { in pnv_xive2_ic_cq_write()
1071 memory_region_set_size(&xive->end_mmio, in pnv_xive2_ic_cq_write()
1073 memory_region_add_subregion(sysmem, xive->end_base, in pnv_xive2_ic_cq_write()
1074 &xive->end_mmio); in pnv_xive2_ic_cq_write()
1080 xive->nvc_shift = val & CQ_BAR_64K ? 16 : 12; in pnv_xive2_ic_cq_write()
1082 xive->nvc_base = 0; in pnv_xive2_ic_cq_write()
1083 if (xive->cq_regs[reg] & CQ_BAR_VALID) { in pnv_xive2_ic_cq_write()
1084 memory_region_del_subregion(sysmem, &xive->nvc_mmio); in pnv_xive2_ic_cq_write()
1087 xive->nvc_base = val & CQ_BAR_ADDR; in pnv_xive2_ic_cq_write()
1088 if (!(xive->cq_regs[reg] & CQ_BAR_VALID)) { in pnv_xive2_ic_cq_write()
1089 memory_region_set_size(&xive->nvc_mmio, in pnv_xive2_ic_cq_write()
1091 memory_region_add_subregion(sysmem, xive->nvc_base, in pnv_xive2_ic_cq_write()
1092 &xive->nvc_mmio); in pnv_xive2_ic_cq_write()
1098 xive->nvpg_shift = val & CQ_BAR_64K ? 16 : 12; in pnv_xive2_ic_cq_write()
1100 xive->nvpg_base = 0; in pnv_xive2_ic_cq_write()
1101 if (xive->cq_regs[reg] & CQ_BAR_VALID) { in pnv_xive2_ic_cq_write()
1102 memory_region_del_subregion(sysmem, &xive->nvpg_mmio); in pnv_xive2_ic_cq_write()
1105 xive->nvpg_base = val & CQ_BAR_ADDR; in pnv_xive2_ic_cq_write()
1106 if (!(xive->cq_regs[reg] & CQ_BAR_VALID)) { in pnv_xive2_ic_cq_write()
1107 memory_region_set_size(&xive->nvpg_mmio, in pnv_xive2_ic_cq_write()
1109 memory_region_add_subregion(sysmem, xive->nvpg_base, in pnv_xive2_ic_cq_write()
1110 &xive->nvpg_mmio); in pnv_xive2_ic_cq_write()
1127 xive->cq_regs[reg] = val; in pnv_xive2_ic_cq_write()
1150 for (i = 3; i >= 0; i--) { in pnv_xive2_cache_watch_assign()
1154 val = 3 - i; in pnv_xive2_cache_watch_assign()
1164 uint8_t engine_bit = 3 - watch_engine; in pnv_xive2_cache_watch_release()
1174 xive->vc_regs[VC_ENDC_CFG >> 3]); in pnv_xive2_endc_cache_watch_assign()
1175 uint64_t state = xive->vc_regs[VC_ENDC_WATCH_ASSIGN >> 3]; in pnv_xive2_endc_cache_watch_assign()
1187 xive->vc_regs[VC_ENDC_WATCH_ASSIGN >> 3] = state; in pnv_xive2_endc_cache_watch_assign()
1195 uint64_t state = xive->vc_regs[VC_ENDC_WATCH_ASSIGN >> 3]; in pnv_xive2_endc_cache_watch_release()
1198 xive->vc_regs[VC_ENDC_WATCH_ASSIGN >> 3] = state; in pnv_xive2_endc_cache_watch_release()
1215 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1222 xive->vc_regs[reg] &= ~VC_ESBC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_vc_read()
1223 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1227 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1234 xive->vc_regs[reg] &= ~VC_EASC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_vc_read()
1235 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1243 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1253 watch_engine = (offset - VC_ENDC_WATCH0_SPEC) >> 6; in pnv_xive2_ic_vc_read()
1254 xive->vc_regs[reg] &= ~(VC_ENDC_WATCH_FULL | VC_ENDC_WATCH_CONFLICT); in pnv_xive2_ic_vc_read()
1256 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1267 watch_engine = (offset - VC_ENDC_WATCH0_DATA0) >> 6; in pnv_xive2_ic_vc_read()
1269 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1276 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1280 xive->vc_regs[reg] &= ~VC_ENDC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_vc_read()
1281 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1288 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1292 xive->vc_regs[reg] &= ~VC_AT_MACRO_KILL_VALID; in pnv_xive2_ic_vc_read()
1293 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1300 val = xive->vc_regs[reg]; in pnv_xive2_ic_vc_read()
1338 xive->vc_regs[VC_ESBC_FLUSH_CTRL >> 3] |= VC_ESBC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_vc_write()
1354 xive->vc_regs[VC_EASC_FLUSH_CTRL >> 3] |= VC_EASC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_vc_write()
1385 watch_engine = (offset - VC_ENDC_WATCH0_DATA0) >> 6; in pnv_xive2_ic_vc_write()
1386 xive->vc_regs[reg] = val; in pnv_xive2_ic_vc_write()
1393 xive->vc_regs[VC_ENDC_FLUSH_CTRL >> 3] |= VC_ENDC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_vc_write()
1424 xive->vc_regs[reg] = val; in pnv_xive2_ic_vc_write()
1444 xive->pc_regs[PC_NXC_PROC_CONFIG >> 3]); in pnv_xive2_nxc_cache_watch_assign()
1445 uint64_t state = xive->pc_regs[PC_NXC_WATCH_ASSIGN >> 3]; in pnv_xive2_nxc_cache_watch_assign()
1457 xive->pc_regs[PC_NXC_WATCH_ASSIGN >> 3] = state; in pnv_xive2_nxc_cache_watch_assign()
1465 uint64_t state = xive->pc_regs[PC_NXC_WATCH_ASSIGN >> 3]; in pnv_xive2_nxc_cache_watch_release()
1468 xive->pc_regs[PC_NXC_WATCH_ASSIGN >> 3] = state; in pnv_xive2_nxc_cache_watch_release()
1475 uint64_t val = -1; in pnv_xive2_ic_pc_read()
1485 val = xive->pc_regs[reg]; in pnv_xive2_ic_pc_read()
1493 val = xive->pc_regs[reg]; in pnv_xive2_ic_pc_read()
1503 watch_engine = (offset - PC_NXC_WATCH0_SPEC) >> 6; in pnv_xive2_ic_pc_read()
1504 xive->pc_regs[reg] &= ~(PC_NXC_WATCH_FULL | PC_NXC_WATCH_CONFLICT); in pnv_xive2_ic_pc_read()
1506 val = xive->pc_regs[reg]; in pnv_xive2_ic_pc_read()
1517 watch_engine = (offset - PC_NXC_WATCH0_DATA0) >> 6; in pnv_xive2_ic_pc_read()
1519 val = xive->pc_regs[reg]; in pnv_xive2_ic_pc_read()
1526 val = xive->pc_regs[reg]; in pnv_xive2_ic_pc_read()
1530 xive->pc_regs[reg] &= ~PC_NXC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_pc_read()
1531 val = xive->pc_regs[reg]; in pnv_xive2_ic_pc_read()
1538 xive->pc_regs[reg] &= ~PC_AT_KILL_VALID; in pnv_xive2_ic_pc_read()
1539 val = xive->pc_regs[reg]; in pnv_xive2_ic_pc_read()
1552 xive->pc_regs[PC_VSD_TABLE_ADDR >> 3]); in pnv_xive2_pc_vst_set_data()
1554 xive->pc_regs[PC_VSD_TABLE_ADDR >> 3]); in pnv_xive2_pc_vst_set_data()
1570 * The Xive2Router model combines both VC and PC sub-engines. We in pnv_xive2_ic_pc_write()
1606 watch_engine = (offset - PC_NXC_WATCH0_DATA0) >> 6; in pnv_xive2_ic_pc_write()
1607 xive->pc_regs[reg] = val; in pnv_xive2_ic_pc_write()
1613 xive->pc_regs[PC_NXC_FLUSH_CTRL >> 3] |= PC_NXC_FLUSH_CTRL_POLL_VALID; in pnv_xive2_ic_pc_write()
1632 xive->pc_regs[reg] = val; in pnv_xive2_ic_pc_write()
1654 uint64_t val = -1; in pnv_xive2_ic_tctxt_read()
1663 val = xive->tctxt_regs[reg]; in pnv_xive2_ic_tctxt_read()
1668 val = xive->tctxt_regs[TCTXT_EN0 >> 3]; in pnv_xive2_ic_tctxt_read()
1672 val = xive->tctxt_regs[TCTXT_EN1 >> 3]; in pnv_xive2_ic_tctxt_read()
1675 val = xive->tctxt_regs[reg]; in pnv_xive2_ic_tctxt_read()
1696 xive->tctxt_regs[reg] = val; in pnv_xive2_ic_tctxt_write()
1700 xive->tctxt_regs[TCTXT_EN0 >> 3] |= val; in pnv_xive2_ic_tctxt_write()
1703 xive->tctxt_regs[TCTXT_EN1 >> 3] |= val; in pnv_xive2_ic_tctxt_write()
1706 xive->tctxt_regs[TCTXT_EN0 >> 3] &= ~val; in pnv_xive2_ic_tctxt_write()
1709 xive->tctxt_regs[TCTXT_EN1 >> 3] &= ~val; in pnv_xive2_ic_tctxt_write()
1712 xive->tctxt_regs[reg] = val; in pnv_xive2_ic_tctxt_write()
1741 uint64_t val = -1; in pnv_xive2_xscom_read()
1808 * 0x000 - 0x7FF IPI interrupt (NPU)
1809 * 0x800 - 0xFFF HW interrupt triggers (PSI, PHB)
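A small sketch, assuming a single 4K notify sub-page, of how a trigger offset could be classified against the split documented above; the helper name is hypothetical:

#include <stdbool.h>
#include <stdint.h>

/* Hypothetical helper: 0x000-0x7FF is the IPI range, 0x800-0xFFF the HW range. */
static inline bool notify_offset_is_hw_trigger(uint64_t offset)
{
    return (offset & 0x800) != 0;
}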
1844 /* TODO: check IPI notify sub-page routing */ in pnv_xive2_ic_notify_write()
1865 return -1; in pnv_xive2_ic_notify_read()
1888 return -1; in pnv_xive2_ic_lsi_read()
1937 return -1; in pnv_xive2_ic_sync_read()
1953 hwaddr pg_offset_mask = (1ull << xive->ic_shift) - 1; in pnv_xive2_ic_sync_write()
2030 return xive->chip->chip_id << 8 | offset >> xive->ic_shift; in pnv_xive2_ic_tm_get_pir()
2042 return offset & ((1ull << xive->ic_shift) - 1); in pnv_xive2_ic_tm_get_hw_page_offset()
2047 PnvChip *chip = xive->chip; in pnv_xive2_get_indirect_tctx()
2060 return XIVE_TCTX(pnv_cpu_state(cpu)->intc); in pnv_xive2_get_indirect_tctx()
2071 uint64_t val = -1; in pnv_xive2_ic_tm_indirect_read()
2122 XiveTCTX *tctx = XIVE_TCTX(pnv_cpu_state(cpu)->intc); in pnv_xive2_tm_write()
2132 XiveTCTX *tctx = XIVE_TCTX(pnv_cpu_state(cpu)->intc); in pnv_xive2_tm_read()
2158 return -1; in pnv_xive2_nvc_read()
2189 return -1; in pnv_xive2_nvpg_read()
2229 XiveSource *xsrc = &xive->ipi_source; in pnv_xive2_reset()
2230 Xive2EndSource *end_xsrc = &xive->end_source; in pnv_xive2_reset()
2232 xive->cq_regs[CQ_XIVE_CAP >> 3] = xive->capabilities; in pnv_xive2_reset()
2233 xive->cq_regs[CQ_XIVE_CFG >> 3] = xive->config; in pnv_xive2_reset()
2236 xive->cq_regs[CQ_XIVE_CFG >> 3] |= in pnv_xive2_reset()
2237 SETFIELD(CQ_XIVE_CFG_HYP_HARD_BLOCK_ID, 0ull, xive->chip->chip_id); in pnv_xive2_reset()
2240 xive->vc_regs[VC_ENDC_CFG >> 3] = in pnv_xive2_reset()
2242 xive->pc_regs[PC_NXC_PROC_CONFIG >> 3] = in pnv_xive2_reset()
2246 xive->ic_shift = xive->esb_shift = xive->end_shift = 16; in pnv_xive2_reset()
2247 xive->nvc_shift = xive->nvpg_shift = xive->tm_shift = 16; in pnv_xive2_reset()
2250 if (memory_region_is_mapped(&xsrc->esb_mmio)) { in pnv_xive2_reset()
2251 memory_region_del_subregion(&xive->esb_mmio, &xsrc->esb_mmio); in pnv_xive2_reset()
2254 if (memory_region_is_mapped(&end_xsrc->esb_mmio)) { in pnv_xive2_reset()
2255 memory_region_del_subregion(&xive->end_mmio, &end_xsrc->esb_mmio); in pnv_xive2_reset()
2260 * Maximum number of IRQs and ENDs supported by HW. Will be tuned by
2270 XiveSource *xsrc = &xive->ipi_source; in pnv_xive2_realize()
2271 Xive2EndSource *end_xsrc = &xive->end_source; in pnv_xive2_realize()
2275 pxc->parent_realize(dev, &local_err); in pnv_xive2_realize()
2281 assert(xive->chip); in pnv_xive2_realize()
2291 object_property_set_int(OBJECT(xsrc), "nr-irqs", PNV_XIVE2_NR_IRQS, in pnv_xive2_realize()
2301 object_property_set_int(OBJECT(end_xsrc), "nr-ends", PNV_XIVE2_NR_ENDS, in pnv_xive2_realize()
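The two property writes above are truncated by the match; a hedged sketch of the complete call pattern follows, assuming realize-time errors are treated as fatal (the error argument is an assumption, not quoted from the source):

/* Sketch only: the trailing error argument is assumed, not quoted. */
object_property_set_int(OBJECT(xsrc), "nr-irqs", PNV_XIVE2_NR_IRQS,
                        &error_fatal);
object_property_set_int(OBJECT(end_xsrc), "nr-ends", PNV_XIVE2_NR_ENDS,
                        &error_fatal);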
2312 memory_region_init_io(&xive->xscom_regs, OBJECT(dev), in pnv_xive2_realize()
2313 &pnv_xive2_xscom_ops, xive, "xscom-xive", in pnv_xive2_realize()
2317 xive->ic_shift = 16; in pnv_xive2_realize()
2318 memory_region_init(&xive->ic_mmio, OBJECT(dev), "xive-ic", in pnv_xive2_realize()
2321 for (i = 0; i < ARRAY_SIZE(xive->ic_mmios); i++) { in pnv_xive2_realize()
2322 memory_region_init_io(&xive->ic_mmios[i], OBJECT(dev), in pnv_xive2_realize()
2325 pnv_xive2_ic_regions[i].pgsize << xive->ic_shift); in pnv_xive2_realize()
2331 xive->esb_shift = 16; in pnv_xive2_realize()
2332 xive->end_shift = 16; in pnv_xive2_realize()
2333 memory_region_init(&xive->esb_mmio, OBJECT(xive), "xive-esb", in pnv_xive2_realize()
2335 memory_region_init(&xive->end_mmio, OBJECT(xive), "xive-end", in pnv_xive2_realize()
2339 xive->nvc_shift = 16; in pnv_xive2_realize()
2340 xive->nvpg_shift = 16; in pnv_xive2_realize()
2341 memory_region_init_io(&xive->nvc_mmio, OBJECT(dev), in pnv_xive2_realize()
2343 "xive-nvc", PNV10_XIVE2_NVC_SIZE); in pnv_xive2_realize()
2345 memory_region_init_io(&xive->nvpg_mmio, OBJECT(dev), in pnv_xive2_realize()
2347 "xive-nvpg", PNV10_XIVE2_NVPG_SIZE); in pnv_xive2_realize()
2350 xive->tm_shift = 16; in pnv_xive2_realize()
2351 memory_region_init_io(&xive->tm_mmio, OBJECT(dev), &pnv_xive2_tm_ops, in pnv_xive2_realize()
2352 xive, "xive-tima", PNV10_XIVE2_TM_SIZE); in pnv_xive2_realize()
2358 DEFINE_PROP_UINT64("ic-bar", PnvXive2, ic_base, 0),
2359 DEFINE_PROP_UINT64("esb-bar", PnvXive2, esb_base, 0),
2360 DEFINE_PROP_UINT64("end-bar", PnvXive2, end_base, 0),
2361 DEFINE_PROP_UINT64("nvc-bar", PnvXive2, nvc_base, 0),
2362 DEFINE_PROP_UINT64("nvpg-bar", PnvXive2, nvpg_base, 0),
2363 DEFINE_PROP_UINT64("tm-bar", PnvXive2, tm_base, 0),
2376 object_initialize_child(obj, "ipi_source", &xive->ipi_source, in pnv_xive2_instance_init()
2378 object_initialize_child(obj, "end_source", &xive->end_source, in pnv_xive2_instance_init()
2385 const char compat_p10[] = "ibm,power10-xive-x"; in pnv_xive2_dt_xscom()
2413 xdc->dt_xscom = pnv_xive2_dt_xscom; in pnv_xive2_class_init()
2415 dc->desc = "PowerNV XIVE2 Interrupt Controller (POWER10)"; in pnv_xive2_class_init()
2417 &pxc->parent_realize); in pnv_xive2_class_init()
2420 xrc->get_eas = pnv_xive2_get_eas; in pnv_xive2_class_init()
2421 xrc->get_pq = pnv_xive2_get_pq; in pnv_xive2_class_init()
2422 xrc->set_pq = pnv_xive2_set_pq; in pnv_xive2_class_init()
2423 xrc->get_end = pnv_xive2_get_end; in pnv_xive2_class_init()
2424 xrc->write_end = pnv_xive2_write_end; in pnv_xive2_class_init()
2425 xrc->get_nvp = pnv_xive2_get_nvp; in pnv_xive2_class_init()
2426 xrc->write_nvp = pnv_xive2_write_nvp; in pnv_xive2_class_init()
2427 xrc->get_nvgc = pnv_xive2_get_nvgc; in pnv_xive2_class_init()
2428 xrc->write_nvgc = pnv_xive2_write_nvgc; in pnv_xive2_class_init()
2429 xrc->get_config = pnv_xive2_get_config; in pnv_xive2_class_init()
2430 xrc->get_block_id = pnv_xive2_get_block_id; in pnv_xive2_class_init()
2432 xnc->notify = pnv_xive2_notify; in pnv_xive2_class_init()
2434 xpc->match_nvt = pnv_xive2_match_nvt; in pnv_xive2_class_init()
2435 xpc->get_config = pnv_xive2_presenter_get_config; in pnv_xive2_class_init()
2465 uint64_t vsd = xive->vsds[VST_ESB][blk]; in type_init()
2477 uint64_t vsd = xive->vsds[type][blk]; in pnv_xive2_vst_per_subpage()
2493 xive2_error(xive, "VST: invalid %s entry!?", info->name); in pnv_xive2_vst_per_subpage()
2501 xive2_error(xive, "VST: invalid %s page shift %d", info->name, in pnv_xive2_vst_per_subpage()
2506 return (1ull << page_shift) / info->size; in pnv_xive2_vst_per_subpage()
2513 uint8_t chip_id = xive->chip->chip_id; in pnv_xive2_pic_print_info()
2524 blk, srcno0, srcno0 + nr_esbs - 1); in pnv_xive2_pic_print_info()
2525 xive_source_pic_print_info(&xive->ipi_source, srcno0, buf); in pnv_xive2_pic_print_info()
2528 blk, srcno0, srcno0 + nr_esbs - 1); in pnv_xive2_pic_print_info()
2552 chip_id, blk, 0, XIVE2_NVP_COUNT - 1); in pnv_xive2_pic_print_info()
2561 chip_id, blk, 0, XIVE2_NVP_COUNT - 1); in pnv_xive2_pic_print_info()
2570 chip_id, blk, 0, XIVE2_NVP_COUNT - 1); in pnv_xive2_pic_print_info()