/*
 * QEMU PowerPC XIVE2 internal structure definitions (POWER10)
 *
 * Copyright (c) 2019-2024, IBM Corporation.
 *
 * SPDX-License-Identifier: GPL-2.0-or-later
 */

#ifndef PPC_XIVE2_REGS_H
#define PPC_XIVE2_REGS_H

#include "qemu/bswap.h"

/*
 * Thread Interrupt Management Area (TIMA)
 *
 * In Gen1 mode (P9 compat mode), word 2 is the same. However, in Gen2
 * mode (P10), the CAM line is slightly different as the VP space was
 * increased.
 */
#define TM2_W2_VALID               PPC_BIT32(0)
#define TM2_W2_HW                  PPC_BIT32(1)
#define TM2_QW0W2_VU               TM2_W2_VALID
#define TM2_QW0W2_LOGIC_SERV       PPC_BITMASK32(4, 31)
#define TM2_QW1W2_VO               TM2_W2_VALID
#define TM2_QW1W2_HO               TM2_W2_HW
#define TM2_QW1W2_OS_CAM           PPC_BITMASK32(4, 31)
#define TM2_QW2W2_VP               TM2_W2_VALID
#define TM2_QW2W2_HP               TM2_W2_HW
#define TM2_QW2W2_POOL_CAM         PPC_BITMASK32(4, 31)
#define TM2_QW3W2_VT               TM2_W2_VALID
#define TM2_QW3W2_HT               TM2_W2_HW
#define TM2_QW3W2_LP               PPC_BIT32(6)
#define TM2_QW3W2_LE               PPC_BIT32(7)
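
/*
 * Illustrative sketch only (not part of the hardware layout above):
 * matching a Gen2 OS CAM line. 'qw1w2' is assumed to be TIMA QW1 word 2
 * already converted to host byte order and 'cam' the CAM line of the
 * target NVP; the helper name is hypothetical.
 */
static inline bool xive2_tm_os_cam_match_example(uint32_t qw1w2, uint32_t cam)
{
    /* the OS context must be valid and its 28-bit CAM field must match */
    return (qw1w2 & TM2_QW1W2_VO) &&
           ((qw1w2 & TM2_QW1W2_OS_CAM) == (cam & TM2_QW1W2_OS_CAM));
}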

/*
 * Event Assignment Structure (EAS)
 */

typedef struct Xive2Eas {
    uint64_t        w;
#define EAS2_VALID            PPC_BIT(0)
#define EAS2_QOS              PPC_BITMASK(1, 2)   /* Quality of Service (unimp) */
#define EAS2_RESUME           PPC_BIT(3)          /* END Resume (unimp) */
#define EAS2_END_BLOCK        PPC_BITMASK(4, 7)   /* Destination EQ block# */
#define EAS2_END_INDEX        PPC_BITMASK(8, 31)  /* Destination EQ index */
#define EAS2_MASKED           PPC_BIT(32)         /* Masked */
#define EAS2_END_DATA         PPC_BITMASK(33, 63) /* written to the EQ */
} Xive2Eas;

#define xive2_eas_is_valid(eas)   (be64_to_cpu((eas)->w) & EAS2_VALID)
#define xive2_eas_is_masked(eas)  (be64_to_cpu((eas)->w) & EAS2_MASKED)
#define xive2_eas_is_resume(eas)  (be64_to_cpu((eas)->w) & EAS2_RESUME)

void xive2_eas_pic_print_info(Xive2Eas *eas, uint32_t lisn, GString *buf);
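
/*
 * Illustrative sketch only: extracting the destination END index from an
 * EAS. The shift is open coded to keep the example self-contained; QEMU
 * code would typically use the xive_get_field64() helper from
 * "hw/ppc/xive_regs.h". The helper name below is hypothetical.
 */
static inline uint32_t xive2_eas_end_index_example(Xive2Eas *eas)
{
    /* EAS2_END_INDEX covers PPC bits 8-31, i.e. bit 31 sits at shift 32 */
    return (be64_to_cpu(eas->w) & EAS2_END_INDEX) >> (63 - 31);
}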

/*
 * Event Notification Descriptor (END)
 */

typedef struct Xive2End {
    uint32_t        w0;
#define END2_W0_VALID              PPC_BIT32(0)  /* "v" bit */
#define END2_W0_ENQUEUE            PPC_BIT32(5)  /* "q" bit */
#define END2_W0_UCOND_NOTIFY       PPC_BIT32(6)  /* "n" bit */
#define END2_W0_SILENT_ESCALATE    PPC_BIT32(7)  /* "s" bit */
#define END2_W0_BACKLOG            PPC_BIT32(8)  /* "b" bit */
#define END2_W0_PRECL_ESC_CTL      PPC_BIT32(9)  /* "p" bit */
#define END2_W0_UNCOND_ESCALATE    PPC_BIT32(10) /* "u" bit */
#define END2_W0_ESCALATE_CTL       PPC_BIT32(11) /* "e" bit */
#define END2_W0_ADAPTIVE_ESC       PPC_BIT32(12) /* "a" bit */
#define END2_W0_ESCALATE_END       PPC_BIT32(13) /* "N" bit */
#define END2_W0_FIRMWARE1          PPC_BIT32(16) /* Owned by FW */
#define END2_W0_FIRMWARE2          PPC_BIT32(17) /* Owned by FW */
#define END2_W0_AEC_SIZE           PPC_BITMASK32(18, 19)
#define END2_W0_AEG_SIZE           PPC_BITMASK32(20, 23)
#define END2_W0_EQ_VG_PREDICT      PPC_BITMASK32(24, 31) /* Owned by HW */
    uint32_t        w1;
#define END2_W1_ESn                PPC_BITMASK32(0, 1)
#define END2_W1_ESn_P              PPC_BIT32(0)
#define END2_W1_ESn_Q              PPC_BIT32(1)
#define END2_W1_ESe                PPC_BITMASK32(2, 3)
#define END2_W1_ESe_P              PPC_BIT32(2)
#define END2_W1_ESe_Q              PPC_BIT32(3)
#define END2_W1_GEN_FLIPPED        PPC_BIT32(8)
#define END2_W1_GENERATION         PPC_BIT32(9)
#define END2_W1_PAGE_OFF           PPC_BITMASK32(10, 31)
    uint32_t        w2;
#define END2_W2_RESERVED           PPC_BITMASK32(4, 7)
#define END2_W2_EQ_ADDR_HI         PPC_BITMASK32(8, 31)
    uint32_t        w3;
#define END2_W3_EQ_ADDR_LO         PPC_BITMASK32(0, 24)
#define END2_W3_CL                 PPC_BIT32(27)
#define END2_W3_QSIZE              PPC_BITMASK32(28, 31)
    uint32_t        w4;
#define END2_W4_END_BLOCK          PPC_BITMASK32(4, 7)
#define END2_W4_ESC_END_INDEX      PPC_BITMASK32(8, 31)
#define END2_W4_ESB_BLOCK          PPC_BITMASK32(0, 3)
#define END2_W4_ESC_ESB_INDEX      PPC_BITMASK32(4, 31)
    uint32_t        w5;
#define END2_W5_ESC_END_DATA       PPC_BITMASK32(1, 31)
    uint32_t        w6;
#define END2_W6_FORMAT_BIT         PPC_BIT32(0)
#define END2_W6_IGNORE             PPC_BIT32(1)
#define END2_W6_CROWD              PPC_BIT32(2)
#define END2_W6_VP_BLOCK           PPC_BITMASK32(4, 7)
#define END2_W6_VP_OFFSET          PPC_BITMASK32(8, 31)
#define END2_W6_VP_OFFSET_GEN1     PPC_BITMASK32(13, 31)
    uint32_t        w7;
#define END2_W7_TOPO               PPC_BITMASK32(0, 3) /* Owned by HW */
#define END2_W7_F0_PRIORITY        PPC_BITMASK32(8, 15)
#define END2_W7_F1_LOG_SERVER_ID   PPC_BITMASK32(4, 31)
} Xive2End;

#define xive2_end_is_valid(end)    (be32_to_cpu((end)->w0) & END2_W0_VALID)
#define xive2_end_is_enqueue(end)  (be32_to_cpu((end)->w0) & END2_W0_ENQUEUE)
#define xive2_end_is_notify(end)                        \
    (be32_to_cpu((end)->w0) & END2_W0_UCOND_NOTIFY)
#define xive2_end_is_backlog(end)  (be32_to_cpu((end)->w0) & END2_W0_BACKLOG)
#define xive2_end_is_precluded_escalation(end)          \
    (be32_to_cpu((end)->w0) & END2_W0_PRECL_ESC_CTL)
#define xive2_end_is_escalate(end)                      \
    (be32_to_cpu((end)->w0) & END2_W0_ESCALATE_CTL)
#define xive2_end_is_uncond_escalation(end)             \
    (be32_to_cpu((end)->w0) & END2_W0_UNCOND_ESCALATE)
#define xive2_end_is_silent_escalation(end)             \
    (be32_to_cpu((end)->w0) & END2_W0_SILENT_ESCALATE)
#define xive2_end_is_escalate_end(end)                  \
    (be32_to_cpu((end)->w0) & END2_W0_ESCALATE_END)
#define xive2_end_is_firmware1(end)                     \
    (be32_to_cpu((end)->w0) & END2_W0_FIRMWARE1)
#define xive2_end_is_firmware2(end)                     \
    (be32_to_cpu((end)->w0) & END2_W0_FIRMWARE2)
#define xive2_end_is_ignore(end)                        \
    (be32_to_cpu((end)->w6) & END2_W6_IGNORE)
#define xive2_end_is_crowd(end)                         \
    (be32_to_cpu((end)->w6) & END2_W6_CROWD)
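
/*
 * Illustrative sketch only: extracting the notification target VP offset
 * from END word 6. END2_W6_VP_OFFSET ends at bit 31, so no shift is
 * needed; Gen1 mode would use END2_W6_VP_OFFSET_GEN1 instead. The helper
 * name is hypothetical.
 */
static inline uint32_t xive2_end_vp_offset_example(Xive2End *end)
{
    return be32_to_cpu(end->w6) & END2_W6_VP_OFFSET;
}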

static inline uint64_t xive2_end_qaddr(Xive2End *end)
{
    return ((uint64_t) be32_to_cpu(end->w2) & END2_W2_EQ_ADDR_HI) << 32 |
        (be32_to_cpu(end->w3) & END2_W3_EQ_ADDR_LO);
}
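
/*
 * Illustrative sketch only: computing the guest address of the current
 * 4-byte event queue entry from the END fields above. The queue is
 * assumed to hold 1 << (QSIZE + 10) four-byte entries, as in the QEMU
 * XIVE models; the helper name is hypothetical.
 */
static inline uint64_t xive2_end_qentry_addr_example(Xive2End *end)
{
    uint32_t qsize = be32_to_cpu(end->w3) & END2_W3_QSIZE;
    uint32_t qindex = be32_to_cpu(end->w1) & END2_W1_PAGE_OFF;
    uint32_t qentries = 1 << (qsize + 10);

    /* wrap the offset to the queue size and scale to 4-byte entries */
    return xive2_end_qaddr(end) + ((qindex & (qentries - 1)) << 2);
}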

void xive2_end_pic_print_info(Xive2End *end, uint32_t end_idx, GString *buf);
void xive2_end_queue_pic_print_info(Xive2End *end, uint32_t width,
                                    GString *buf);
void xive2_end_eas_pic_print_info(Xive2End *end, uint32_t end_idx,
                                  GString *buf);

/*
 * Notification Virtual Processor (NVP)
 */
typedef struct Xive2Nvp {
    uint32_t        w0;
#define NVP2_W0_VALID              PPC_BIT32(0)
#define NVP2_W0_HW                 PPC_BIT32(7)
#define NVP2_W0_L                  PPC_BIT32(8)
#define NVP2_W0_G                  PPC_BIT32(9)
#define NVP2_W0_T                  PPC_BIT32(10)
#define NVP2_W0_P                  PPC_BIT32(11)
#define NVP2_W0_ESC_END            PPC_BIT32(25) /* 'N' bit 0:ESB 1:END */
#define NVP2_W0_PGOFIRST           PPC_BITMASK32(26, 31)
    uint32_t        w1;
#define NVP2_W1_CO                 PPC_BIT32(13)
#define NVP2_W1_CO_PRIV            PPC_BITMASK32(14, 15)
#define NVP2_W1_CO_THRID_VALID     PPC_BIT32(16)
#define NVP2_W1_CO_THRID           PPC_BITMASK32(17, 31)
    uint32_t        w2;
#define NVP2_W2_CPPR               PPC_BITMASK32(0, 7)
#define NVP2_W2_IPB                PPC_BITMASK32(8, 15)
#define NVP2_W2_LSMFB              PPC_BITMASK32(16, 23)
#define NVP2_W2_T                  PPC_BIT32(27)
#define NVP2_W2_LGS                PPC_BITMASK32(28, 31)
    uint32_t        w3;
    uint32_t        w4;
#define NVP2_W4_ESC_ESB_BLOCK      PPC_BITMASK32(0, 3)  /* N:0 */
#define NVP2_W4_ESC_ESB_INDEX      PPC_BITMASK32(4, 31) /* N:0 */
#define NVP2_W4_ESC_END_BLOCK      PPC_BITMASK32(4, 7)  /* N:1 */
#define NVP2_W4_ESC_END_INDEX      PPC_BITMASK32(8, 31) /* N:1 */
    uint32_t        w5;
#define NVP2_W5_PSIZE              PPC_BITMASK32(0, 1)
#define NVP2_W5_VP_END_BLOCK       PPC_BITMASK32(4, 7)
#define NVP2_W5_VP_END_INDEX       PPC_BITMASK32(8, 31)
    uint32_t        w6;
#define NVP2_W6_REPORTING_LINE     PPC_BITMASK32(4, 31)
    uint32_t        w7;
#define NVP2_W7_REPORTING_LINE     PPC_BITMASK32(0, 23)
} Xive2Nvp;

#define xive2_nvp_is_valid(nvp)    (be32_to_cpu((nvp)->w0) & NVP2_W0_VALID)
#define xive2_nvp_is_hw(nvp)       (be32_to_cpu((nvp)->w0) & NVP2_W0_HW)
#define xive2_nvp_is_co(nvp)       (be32_to_cpu((nvp)->w1) & NVP2_W1_CO)
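
/*
 * Illustrative sketch only: reading the physical thread a VP is checked
 * out on. NVP2_W1_CO_THRID ends at bit 31, so no shift is needed. The
 * helper name and the -1 "not checked out" convention are hypothetical.
 */
static inline int xive2_nvp_co_thrid_example(Xive2Nvp *nvp)
{
    uint32_t w1 = be32_to_cpu(nvp->w1);

    if (!(w1 & NVP2_W1_CO) || !(w1 & NVP2_W1_CO_THRID_VALID)) {
        return -1;
    }
    return w1 & NVP2_W1_CO_THRID;
}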

/*
 * The VP number space in a block is defined by the END2_W6_VP_OFFSET
 * field of the XIVE END. When running in Gen1 mode (P9 compat mode),
 * the VP space is reduced to (1 << 19) VPs per block.
 */
#define XIVE2_NVP_SHIFT 24
#define XIVE2_NVP_COUNT (1 << XIVE2_NVP_SHIFT)

static inline uint32_t xive2_nvp_cam_line(uint8_t nvp_blk, uint32_t nvp_idx)
{
    return (nvp_blk << XIVE2_NVP_SHIFT) | nvp_idx;
}

static inline uint32_t xive2_nvp_idx(uint32_t cam_line)
{
    return cam_line & ((1 << XIVE2_NVP_SHIFT) - 1);
}

static inline uint8_t xive2_nvp_blk(uint32_t cam_line)
{
    return (cam_line >> XIVE2_NVP_SHIFT) & 0xf;
}
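
/*
 * Illustrative sketch only: the three helpers above are inverses of each
 * other. The block and index values used here are arbitrary examples.
 */
static inline bool xive2_nvp_cam_line_roundtrip_example(void)
{
    uint32_t cam = xive2_nvp_cam_line(0x1, 0x2345); /* -> 0x01002345 */

    return xive2_nvp_blk(cam) == 0x1 && xive2_nvp_idx(cam) == 0x2345;
}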

void xive2_nvp_pic_print_info(Xive2Nvp *nvp, uint32_t nvp_idx, GString *buf);

/*
 * Notification Virtual Group or Crowd (NVG/NVC)
 */
typedef struct Xive2Nvgc {
    uint32_t        w0;
#define NVGC2_W0_VALID             PPC_BIT32(0)
#define NVGC2_W0_PGONEXT           PPC_BITMASK32(26, 31)
    uint32_t        w1;
#define NVGC2_W1_PSIZE             PPC_BITMASK32(0, 1)
#define NVGC2_W1_END_BLK           PPC_BITMASK32(4, 7)
#define NVGC2_W1_END_IDX           PPC_BITMASK32(8, 31)
    uint32_t        w2;
    uint32_t        w3;
    uint32_t        w4;
    uint32_t        w5;
    uint32_t        w6;
    uint32_t        w7;
} Xive2Nvgc;

#define xive2_nvgc_is_valid(nvgc)  (be32_to_cpu((nvgc)->w0) & NVGC2_W0_VALID)

void xive2_nvgc_pic_print_info(Xive2Nvgc *nvgc, uint32_t nvgc_idx,
                               GString *buf);

#define NVx_BACKLOG_OP             PPC_BITMASK(52, 53)
#define NVx_BACKLOG_PRIO           PPC_BITMASK(57, 59)

/* split the 6-bit crowd/group level */
#define NVx_CROWD_LVL(level)       (((level) >> 4) & 0b11)
#define NVx_GROUP_LVL(level)       ((level) & 0b1111)
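
/*
 * Illustrative sketch only: for a 6-bit level value of 0b10_0101 (0x25),
 * the crowd level is 0b10 and the group level is 0b0101.
 */
static inline bool xive2_level_split_example(void)
{
    uint8_t level = 0x25;

    return NVx_CROWD_LVL(level) == 0x2 && NVx_GROUP_LVL(level) == 0x5;
}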

#endif /* PPC_XIVE2_REGS_H */