 * Copyright (c) 2019-2022, IBM Corporation.
 * COPYING file in the top-level directory.
#define TM2_QW0W2_LOGIC_SERV       PPC_BITMASK32(4, 31)
#define TM2_QW1W2_OS_CAM           PPC_BITMASK32(4, 31)
#define TM2_QW2W2_POOL_CAM         PPC_BITMASK32(4, 31)
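
These masks use the PowerPC MSB-0 convention: PPC_BIT32(0) is the most significant bit, so PPC_BITMASK32(4, 31) covers the low 28 bits of the word. Below is a minimal, self-contained sketch of how such a mask behaves and how the OS CAM field could be pulled out of the thread-context QW1 word 2; the mask helpers are rebuilt locally so the snippet stands alone, and get_field32() plus the sample word value are illustrative stand-ins, not QEMU's own accessors.

#include <stdint.h>
#include <stdio.h>

/* MSB-0 (IBM) numbering: bit 0 is the most significant bit of the word */
#define PPC_BIT32(bit)         (0x80000000U >> (bit))
#define PPC_BITMASK32(bs, be)  ((PPC_BIT32(bs) - PPC_BIT32(be)) | PPC_BIT32(bs))

#define TM2_QW1W2_OS_CAM       PPC_BITMASK32(4, 31)   /* low 28 bits */

/* Illustrative extractor: mask the field and shift it down to bit 0 */
static uint32_t get_field32(uint32_t mask, uint32_t word)
{
    return (word & mask) >> __builtin_ctz(mask);
}

int main(void)
{
    uint32_t qw1w2 = 0x82123456;   /* made-up thread-context QW1 word 2 */

    printf("OS CAM line: 0x%07x\n", get_field32(TM2_QW1W2_OS_CAM, qw1w2));
    return 0;
}
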
#define EAS2_END_BLOCK             PPC_BITMASK(4, 7)   /* Destination EQ block# */
#define EAS2_END_INDEX             PPC_BITMASK(8, 31)  /* Destination EQ index */

#define xive2_eas_is_valid(eas)    (be64_to_cpu((eas)->w) & EAS2_VALID)
#define xive2_eas_is_masked(eas)   (be64_to_cpu((eas)->w) & EAS2_MASKED)
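
An EAS (event assignment structure) routes a hardware interrupt source to an event queue: when the entry is valid and not masked, the END block and index select the destination queue. Below is a rough, self-contained sketch of that decode; the 64-bit masks are rebuilt locally, the VALID/MASKED bit positions are taken from the wider header rather than the lines above, and the w value is assumed to be in host byte order already (the macros above use be64_to_cpu() for that).

#include <stdint.h>
#include <stdio.h>

#define PPC_BIT(bit)          (0x8000000000000000ULL >> (bit))
#define PPC_BITMASK(bs, be)   ((PPC_BIT(bs) - PPC_BIT(be)) | PPC_BIT(bs))

#define EAS2_VALID            PPC_BIT(0)          /* from the full header */
#define EAS2_MASKED           PPC_BIT(32)         /* from the full header */
#define EAS2_END_BLOCK        PPC_BITMASK(4, 7)   /* Destination EQ block# */
#define EAS2_END_INDEX        PPC_BITMASK(8, 31)  /* Destination EQ index */

static uint64_t get_field64(uint64_t mask, uint64_t word)
{
    return (word & mask) >> __builtin_ctzll(mask);
}

/* Decide where one EAS word (host byte order) sends its event */
static void route_eas(uint64_t w)
{
    if (!(w & EAS2_VALID) || (w & EAS2_MASKED)) {
        printf("event dropped (invalid or masked)\n");
        return;
    }
    printf("notify END block %llu, index 0x%llx\n",
           (unsigned long long)get_field64(EAS2_END_BLOCK, w),
           (unsigned long long)get_field64(EAS2_END_INDEX, w));
}

int main(void)
{
    route_eas(PPC_BIT(0) | 0x0212345600000000ULL);   /* made-up EAS */
    return 0;
}
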
#define END2_W0_EQ_VG_PREDICT      PPC_BITMASK32(24, 31) /* Owned by HW */
#define END2_W1_PAGE_OFF           PPC_BITMASK32(10, 31)
#define END2_W2_RESERVED           PPC_BITMASK32(4, 7)
#define END2_W2_EQ_ADDR_HI         PPC_BITMASK32(8, 31)
#define END2_W3_QSIZE              PPC_BITMASK32(28, 31)
#define END2_W4_END_BLOCK          PPC_BITMASK32(4, 7)
#define END2_W4_ESC_END_INDEX      PPC_BITMASK32(8, 31)
#define END2_W4_ESC_ESB_INDEX      PPC_BITMASK32(4, 31)
#define END2_W5_ESC_END_DATA       PPC_BITMASK32(1, 31)
#define END2_W6_VP_BLOCK           PPC_BITMASK32(4, 7)
#define END2_W6_VP_OFFSET          PPC_BITMASK32(8, 31)
#define END2_W6_VP_OFFSET_GEN1     PPC_BITMASK32(13, 31)
#define END2_W7_F1_LOG_SERVER_ID   PPC_BITMASK32(4, 31)
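
Word 6 of the END names the notification target: a virtual processor (VP) block and offset, with a narrower offset field when the controller runs in Gen1 (POWER9 compatibility) mode. Below is a self-contained sketch of that decode; get_field32() is a stand-in for QEMU's own field accessors, the gen1 flag replaces the device's real mode state, and the sample word is made up.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PPC_BIT32(bit)          (0x80000000U >> (bit))
#define PPC_BITMASK32(bs, be)   ((PPC_BIT32(bs) - PPC_BIT32(be)) | PPC_BIT32(bs))

#define END2_W6_VP_BLOCK        PPC_BITMASK32(4, 7)
#define END2_W6_VP_OFFSET       PPC_BITMASK32(8, 31)
#define END2_W6_VP_OFFSET_GEN1  PPC_BITMASK32(13, 31)

static uint32_t get_field32(uint32_t mask, uint32_t word)
{
    return (word & mask) >> __builtin_ctz(mask);
}

/* Decode the notification target from END w6 (already in host order) */
static void end_target(uint32_t w6, bool gen1)
{
    uint32_t blk = get_field32(END2_W6_VP_BLOCK, w6);
    uint32_t off = gen1 ? get_field32(END2_W6_VP_OFFSET_GEN1, w6)
                        : get_field32(END2_W6_VP_OFFSET, w6);

    printf("notify VP block %u, offset 0x%x\n", blk, off);
}

int main(void)
{
    end_target(0x02001234, false);   /* made-up w6 */
    return 0;
}
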
#define xive2_end_is_valid(end)    (be32_to_cpu((end)->w0) & END2_W0_VALID)
#define xive2_end_is_enqueue(end)  (be32_to_cpu((end)->w0) & END2_W0_ENQUEUE)
#define xive2_end_is_notify(end)                 \
    (be32_to_cpu((end)->w0) & END2_W0_UCOND_NOTIFY)
#define xive2_end_is_backlog(end)  (be32_to_cpu((end)->w0) & END2_W0_BACKLOG)
#define xive2_end_is_precluded_escalation(end)   \
    (be32_to_cpu((end)->w0) & END2_W0_PRECL_ESC_CTL)
#define xive2_end_is_escalate(end)               \
    (be32_to_cpu((end)->w0) & END2_W0_ESCALATE_CTL)
#define xive2_end_is_uncond_escalation(end)      \
    (be32_to_cpu((end)->w0) & END2_W0_UNCOND_ESCALATE)
#define xive2_end_is_silent_escalation(end)      \
    (be32_to_cpu((end)->w0) & END2_W0_SILENT_ESCALATE)
#define xive2_end_is_escalate_end(end)           \
    (be32_to_cpu((end)->w0) & END2_W0_ESCALATE_END)
#define xive2_end_is_firmware1(end)              \
    (be32_to_cpu((end)->w0) & END2_W0_FIRMWARE1)
#define xive2_end_is_firmware2(end)              \
    (be32_to_cpu((end)->w0) & END2_W0_FIRMWARE2)
#define xive2_end_is_ignore(end)                 \
    (be32_to_cpu((end)->w6) & END2_W6_IGNORE)
#define xive2_end_is_crowd(end)                  \
    (be32_to_cpu((end)->w6) & END2_W6_CROWD)

static inline uint64_t xive2_end_qaddr(Xive2End *end)
{
    return ((uint64_t) be32_to_cpu(end->w2) & END2_W2_EQ_ADDR_HI) << 32 |
        (be32_to_cpu(end->w3) & END2_W3_EQ_ADDR_LO);
}
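
xive2_end_qaddr() rebuilds the guest-physical base of the event queue from the hi/lo halves kept in w2 and w3. Below is a small sketch of the queue geometry that goes with it, assuming the usual XIVE encoding in which END2_W3_QSIZE selects a (4KB << qsize) queue of 4-byte event entries; the numbers themselves are made up.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint64_t qaddr    = 0x12340000ULL;      /* made-up xive2_end_qaddr() result */
    uint32_t qsize    = 1;                  /* made-up END2_W3_QSIZE value      */
    uint32_t qentries = 1u << (qsize + 10); /* 4-byte slots: 4KB << qsize bytes */

    printf("EQ at 0x%llx, %u entries (%u bytes)\n",
           (unsigned long long)qaddr, qentries, qentries * 4u);
    return 0;
}
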
#define NVP2_W0_PGOFIRST           PPC_BITMASK32(26, 31)
#define NVP2_W1_CO_THRID           PPC_BITMASK32(17, 31)
#define NVP2_W4_ESC_ESB_INDEX      PPC_BITMASK32(4, 31)  /* N:0 */
#define NVP2_W4_ESC_END_BLOCK      PPC_BITMASK32(4, 7)   /* N:1 */
#define NVP2_W4_ESC_END_INDEX      PPC_BITMASK32(8, 31)  /* N:1 */
#define NVP2_W5_VP_END_BLOCK       PPC_BITMASK32(4, 7)
#define NVP2_W5_VP_END_INDEX       PPC_BITMASK32(8, 31)
#define NVP2_W6_REPORTING_LINE     PPC_BITMASK32(4, 31)

#define xive2_nvp_is_valid(nvp)    (be32_to_cpu((nvp)->w0) & NVP2_W0_VALID)
#define xive2_nvp_is_hw(nvp)       (be32_to_cpu((nvp)->w0) & NVP2_W0_HW)
#define xive2_nvp_is_co(nvp)       (be32_to_cpu((nvp)->w1) & NVP2_W1_CO)

static inline uint32_t xive2_nvp_idx(uint32_t cam_line)
{
    return cam_line & ((1 << XIVE2_NVP_SHIFT) - 1);
}
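
xive2_nvp_idx() is the unpacking half of the CAM line encoding: the block number sits above XIVE2_NVP_SHIFT and the index below it. Below is a small round-trip sketch with locally named helpers standing in for the header's own packing/unpacking routines; the shift value of 24 is assumed here, consistent with the 24-bit END2_W6_VP_OFFSET field above.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define XIVE2_NVP_SHIFT   24   /* assumed to match the header */

/* Pack a block/index pair into a CAM line (inverse of xive2_nvp_idx) */
static uint32_t nvp_cam_line(uint8_t blk, uint32_t idx)
{
    return ((uint32_t)blk << XIVE2_NVP_SHIFT) | idx;
}

static uint32_t nvp_idx(uint32_t cam_line)
{
    return cam_line & ((1 << XIVE2_NVP_SHIFT) - 1);
}

static uint32_t nvp_blk(uint32_t cam_line)
{
    return cam_line >> XIVE2_NVP_SHIFT;
}

int main(void)
{
    uint32_t cam = nvp_cam_line(0x3, 0x1234);   /* made-up block/index */

    assert(nvp_blk(cam) == 0x3 && nvp_idx(cam) == 0x1234);
    printf("CAM line 0x%08x -> blk %u, idx 0x%x\n",
           cam, nvp_blk(cam), nvp_idx(cam));
    return 0;
}
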
#define NVGC2_W0_PGONEXT           PPC_BITMASK32(26, 31)

#define xive2_nvgc_is_valid(nvgc)  (be32_to_cpu((nvgc)->w0) & NVGC2_W0_VALID)