#ifndef BLOCK_NVME_H
#define BLOCK_NVME_H

typedef struct QEMU_PACKED NvmeBar {
    uint64_t cap;
    uint32_t vs;
    uint32_t intms;
    uint32_t intmc;
    uint32_t cc;
    uint8_t rsvd24[4];
    uint32_t csts;
    uint32_t nssr;
    uint32_t aqa;
    uint64_t asq;
    uint64_t acq;
    uint32_t cmbloc;
    uint32_t cmbsz;
    uint32_t bpinfo;
    uint32_t bprsel;
    uint64_t bpmbl;
    uint64_t cmbmsc;
    uint32_t cmbsts;
    uint8_t rsvd92[3492];
    uint32_t pmrcap;
    uint32_t pmrctl;
    uint32_t pmrsts;
    uint32_t pmrebs;
    uint32_t pmrswtp;
    uint32_t pmrmscl;
    uint32_t pmrmscu;
    uint8_t css[484];
} NvmeBar;

enum NvmeBarRegs {
    NVME_REG_CAP = offsetof(NvmeBar, cap),
    NVME_REG_VS = offsetof(NvmeBar, vs),
    NVME_REG_INTMS = offsetof(NvmeBar, intms),
    NVME_REG_INTMC = offsetof(NvmeBar, intmc),
    NVME_REG_CC = offsetof(NvmeBar, cc),
    NVME_REG_CSTS = offsetof(NvmeBar, csts),
    NVME_REG_NSSR = offsetof(NvmeBar, nssr),
    NVME_REG_AQA = offsetof(NvmeBar, aqa),
    NVME_REG_ASQ = offsetof(NvmeBar, asq),
    NVME_REG_ACQ = offsetof(NvmeBar, acq),
    NVME_REG_CMBLOC = offsetof(NvmeBar, cmbloc),
    NVME_REG_CMBSZ = offsetof(NvmeBar, cmbsz),
    NVME_REG_BPINFO = offsetof(NvmeBar, bpinfo),
    NVME_REG_BPRSEL = offsetof(NvmeBar, bprsel),
    NVME_REG_BPMBL = offsetof(NvmeBar, bpmbl),
    NVME_REG_CMBMSC = offsetof(NvmeBar, cmbmsc),
    NVME_REG_CMBSTS = offsetof(NvmeBar, cmbsts),
    NVME_REG_PMRCAP = offsetof(NvmeBar, pmrcap),
    NVME_REG_PMRCTL = offsetof(NvmeBar, pmrctl),
    NVME_REG_PMRSTS = offsetof(NvmeBar, pmrsts),
    NVME_REG_PMREBS = offsetof(NvmeBar, pmrebs),
    NVME_REG_PMRSWTP = offsetof(NvmeBar, pmrswtp),
    NVME_REG_PMRMSCL = offsetof(NvmeBar, pmrmscl),
    NVME_REG_PMRMSCU = offsetof(NvmeBar, pmrmscu),
};

enum NvmeCapShift {
    CAP_MQES_SHIFT = 0,
    CAP_CQR_SHIFT = 16,
    CAP_AMS_SHIFT = 17,
    CAP_TO_SHIFT = 24,
    CAP_DSTRD_SHIFT = 32,
    CAP_NSSRS_SHIFT = 36,
    CAP_CSS_SHIFT = 37,
    CAP_MPSMIN_SHIFT = 48,
    CAP_MPSMAX_SHIFT = 52,
    CAP_PMRS_SHIFT = 56,
    CAP_CMBS_SHIFT = 57,
};

enum NvmeCapMask {
    CAP_MQES_MASK = 0xffff,
    CAP_CQR_MASK = 0x1,
    CAP_AMS_MASK = 0x3,
    CAP_TO_MASK = 0xff,
    CAP_DSTRD_MASK = 0xf,
    CAP_NSSRS_MASK = 0x1,
    CAP_CSS_MASK = 0xff,
    CAP_MPSMIN_MASK = 0xf,
    CAP_MPSMAX_MASK = 0xf,
    CAP_PMRS_MASK = 0x1,
    CAP_CMBS_MASK = 0x1,
};

#define NVME_CAP_MQES(cap) (((cap) >> CAP_MQES_SHIFT) & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap) (((cap) >> CAP_CQR_SHIFT) & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap) (((cap) >> CAP_AMS_SHIFT) & CAP_AMS_MASK)
#define NVME_CAP_TO(cap) (((cap) >> CAP_TO_SHIFT) & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap) (((cap) >> CAP_DSTRD_SHIFT) & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap) (((cap) >> CAP_NSSRS_SHIFT) & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap) (((cap) >> CAP_CSS_SHIFT) & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap) (((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap) (((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)
#define NVME_CAP_PMRS(cap) (((cap) >> CAP_PMRS_SHIFT) & CAP_PMRS_MASK)
#define NVME_CAP_CMBS(cap) (((cap) >> CAP_CMBS_SHIFT) & CAP_CMBS_MASK)

#define NVME_CAP_SET_MQES(cap, val) \
    (cap |= (uint64_t)(val & CAP_MQES_MASK) << CAP_MQES_SHIFT)
#define NVME_CAP_SET_CQR(cap, val) \
    (cap |= (uint64_t)(val & CAP_CQR_MASK) << CAP_CQR_SHIFT)
#define NVME_CAP_SET_AMS(cap, val) \
    (cap |= (uint64_t)(val & CAP_AMS_MASK) << CAP_AMS_SHIFT)
#define NVME_CAP_SET_TO(cap, val) \
    (cap |= (uint64_t)(val & CAP_TO_MASK) << CAP_TO_SHIFT)
#define NVME_CAP_SET_DSTRD(cap, val) \
    (cap |= (uint64_t)(val & CAP_DSTRD_MASK) << CAP_DSTRD_SHIFT)
#define NVME_CAP_SET_NSSRS(cap, val) \
    (cap |= (uint64_t)(val & CAP_NSSRS_MASK) << CAP_NSSRS_SHIFT)
#define NVME_CAP_SET_CSS(cap, val) \
    (cap |= (uint64_t)(val & CAP_CSS_MASK) << CAP_CSS_SHIFT)
#define NVME_CAP_SET_MPSMIN(cap, val) \
    (cap |= (uint64_t)(val & CAP_MPSMIN_MASK) << CAP_MPSMIN_SHIFT)
#define NVME_CAP_SET_MPSMAX(cap, val) \
    (cap |= (uint64_t)(val & CAP_MPSMAX_MASK) << CAP_MPSMAX_SHIFT)
#define NVME_CAP_SET_PMRS(cap, val) \
    (cap |= (uint64_t)(val & CAP_PMRS_MASK) << CAP_PMRS_SHIFT)
#define NVME_CAP_SET_CMBS(cap, val) \
    (cap |= (uint64_t)(val & CAP_CMBS_MASK) << CAP_CMBS_SHIFT)
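
/*
 * Usage sketch (hypothetical helper, illustration only): how the CAP setter
 * macros above can be used to compose a controller capabilities value, and
 * how the matching getter reads a field back. The field values here are
 * arbitrary example numbers, not requirements.
 */
static inline uint64_t nvme_example_build_cap(void)
{
    uint64_t cap = 0;

    NVME_CAP_SET_MQES(cap, 0x7ff); /* 2048 entries per queue (0's based) */
    NVME_CAP_SET_CQR(cap, 1);      /* physically contiguous queues required */
    NVME_CAP_SET_TO(cap, 0xf);     /* ready timeout, in 500 ms units */
    NVME_CAP_SET_CSS(cap, 0x1);    /* NVM command set; see enum NvmeCapCss below */
    NVME_CAP_SET_MPSMIN(cap, 0);   /* minimum page size 2^(12+0) = 4 KiB */
    NVME_CAP_SET_MPSMAX(cap, 4);   /* maximum page size 2^(12+4) = 64 KiB */

    return cap;
}

static inline unsigned nvme_example_cap_mqes(uint64_t cap)
{
    /* extract a field again with the matching getter */
    return NVME_CAP_MQES(cap);
}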

enum NvmeCapCss {
    NVME_CAP_CSS_NVM = 1 << 0,
    NVME_CAP_CSS_CSI_SUPP = 1 << 6,
    NVME_CAP_CSS_ADMIN_ONLY = 1 << 7,
};

enum NvmeCcShift {
    CC_EN_SHIFT = 0,
    CC_CSS_SHIFT = 4,
    CC_MPS_SHIFT = 7,
    CC_AMS_SHIFT = 11,
    CC_SHN_SHIFT = 14,
    CC_IOSQES_SHIFT = 16,
    CC_IOCQES_SHIFT = 20,
};

enum NvmeCcMask {
    CC_EN_MASK = 0x1,
    CC_CSS_MASK = 0x7,
    CC_MPS_MASK = 0xf,
    CC_AMS_MASK = 0x7,
    CC_SHN_MASK = 0x3,
    CC_IOSQES_MASK = 0xf,
    CC_IOCQES_MASK = 0xf,
};

#define NVME_CC_EN(cc) ((cc >> CC_EN_SHIFT) & CC_EN_MASK)
#define NVME_CC_CSS(cc) ((cc >> CC_CSS_SHIFT) & CC_CSS_MASK)
#define NVME_CC_MPS(cc) ((cc >> CC_MPS_SHIFT) & CC_MPS_MASK)
#define NVME_CC_AMS(cc) ((cc >> CC_AMS_SHIFT) & CC_AMS_MASK)
#define NVME_CC_SHN(cc) ((cc >> CC_SHN_SHIFT) & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)

enum NvmeCcCss {
    NVME_CC_CSS_NVM = 0x0,
    NVME_CC_CSS_CSI = 0x6,
    NVME_CC_CSS_ADMIN_ONLY = 0x7,
};

#define NVME_SET_CC_EN(cc, val) \
    (cc |= (uint32_t)((val) & CC_EN_MASK) << CC_EN_SHIFT)
#define NVME_SET_CC_CSS(cc, val) \
    (cc |= (uint32_t)((val) & CC_CSS_MASK) << CC_CSS_SHIFT)
#define NVME_SET_CC_MPS(cc, val) \
    (cc |= (uint32_t)((val) & CC_MPS_MASK) << CC_MPS_SHIFT)
#define NVME_SET_CC_AMS(cc, val) \
    (cc |= (uint32_t)((val) & CC_AMS_MASK) << CC_AMS_SHIFT)
#define NVME_SET_CC_SHN(cc, val) \
    (cc |= (uint32_t)((val) & CC_SHN_MASK) << CC_SHN_SHIFT)
#define NVME_SET_CC_IOSQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOSQES_MASK) << CC_IOSQES_SHIFT)
#define NVME_SET_CC_IOCQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOCQES_MASK) << CC_IOCQES_SHIFT)

enum NvmeCstsShift {
    CSTS_RDY_SHIFT = 0,
    CSTS_CFS_SHIFT = 1,
    CSTS_SHST_SHIFT = 2,
    CSTS_NSSRO_SHIFT = 4,
};

enum NvmeCstsMask {
    CSTS_RDY_MASK = 0x1,
    CSTS_CFS_MASK = 0x1,
    CSTS_SHST_MASK = 0x3,
    CSTS_NSSRO_MASK = 0x1,
};

enum NvmeCsts {
    NVME_CSTS_READY = 1 << CSTS_RDY_SHIFT,
    NVME_CSTS_FAILED = 1 << CSTS_CFS_SHIFT,
    NVME_CSTS_SHST_NORMAL = 0 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_PROGRESS = 1 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_COMPLETE = 2 << CSTS_SHST_SHIFT,
    NVME_CSTS_NSSRO = 1 << CSTS_NSSRO_SHIFT,
};

#define NVME_CSTS_RDY(csts) ((csts >> CSTS_RDY_SHIFT) & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts) ((csts >> CSTS_CFS_SHIFT) & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts) ((csts >> CSTS_SHST_SHIFT) & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
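
/*
 * Usage sketch (hypothetical helpers, illustration only): the CC value a host
 * would typically write to enable a controller, and the CSTS checks it would
 * poll afterwards. 4 KiB pages (MPS = 0, i.e. 2^(12+0) bytes), 64-byte SQ
 * entries (IOSQES = 6) and 16-byte CQ entries (IOCQES = 4) are the usual
 * values for the NVM command set.
 */
static inline uint32_t nvme_example_cc_enable(void)
{
    uint32_t cc = 0;

    NVME_SET_CC_CSS(cc, NVME_CC_CSS_NVM);
    NVME_SET_CC_MPS(cc, 0);
    NVME_SET_CC_IOSQES(cc, 6);
    NVME_SET_CC_IOCQES(cc, 4);
    NVME_SET_CC_EN(cc, 1);

    return cc;
}

static inline int nvme_example_ctrl_ready(uint32_t csts)
{
    /* ready once CSTS.RDY is set and no controller fatal status is flagged */
    return NVME_CSTS_RDY(csts) && !NVME_CSTS_CFS(csts);
}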

enum NvmeAqaShift {
    AQA_ASQS_SHIFT = 0,
    AQA_ACQS_SHIFT = 16,
};

enum NvmeAqaMask {
    AQA_ASQS_MASK = 0xfff,
    AQA_ACQS_MASK = 0xfff,
};

#define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)

enum NvmeCmblocShift {
    CMBLOC_BIR_SHIFT = 0,
    CMBLOC_CQMMS_SHIFT = 3,
    CMBLOC_CQPDS_SHIFT = 4,
    CMBLOC_CDPMLS_SHIFT = 5,
    CMBLOC_CDPCILS_SHIFT = 6,
    CMBLOC_CDMMMS_SHIFT = 7,
    CMBLOC_CQDA_SHIFT = 8,
    CMBLOC_OFST_SHIFT = 12,
};

enum NvmeCmblocMask {
    CMBLOC_BIR_MASK = 0x7,
    CMBLOC_CQMMS_MASK = 0x1,
    CMBLOC_CQPDS_MASK = 0x1,
    CMBLOC_CDPMLS_MASK = 0x1,
    CMBLOC_CDPCILS_MASK = 0x1,
    CMBLOC_CDMMMS_MASK = 0x1,
    CMBLOC_CQDA_MASK = 0x1,
    CMBLOC_OFST_MASK = 0xfffff,
};

#define NVME_CMBLOC_BIR(cmbloc) \
    ((cmbloc >> CMBLOC_BIR_SHIFT) & CMBLOC_BIR_MASK)
#define NVME_CMBLOC_CQMMS(cmbloc) \
    ((cmbloc >> CMBLOC_CQMMS_SHIFT) & CMBLOC_CQMMS_MASK)
#define NVME_CMBLOC_CQPDS(cmbloc) \
    ((cmbloc >> CMBLOC_CQPDS_SHIFT) & CMBLOC_CQPDS_MASK)
#define NVME_CMBLOC_CDPMLS(cmbloc) \
    ((cmbloc >> CMBLOC_CDPMLS_SHIFT) & CMBLOC_CDPMLS_MASK)
#define NVME_CMBLOC_CDPCILS(cmbloc) \
    ((cmbloc >> CMBLOC_CDPCILS_SHIFT) & CMBLOC_CDPCILS_MASK)
#define NVME_CMBLOC_CDMMMS(cmbloc) \
    ((cmbloc >> CMBLOC_CDMMMS_SHIFT) & CMBLOC_CDMMMS_MASK)
#define NVME_CMBLOC_CQDA(cmbloc) \
    ((cmbloc >> CMBLOC_CQDA_SHIFT) & CMBLOC_CQDA_MASK)
#define NVME_CMBLOC_OFST(cmbloc) \
    ((cmbloc >> CMBLOC_OFST_SHIFT) & CMBLOC_OFST_MASK)

#define NVME_CMBLOC_SET_BIR(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_CQMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQMMS_MASK) << CMBLOC_CQMMS_SHIFT)
#define NVME_CMBLOC_SET_CQPDS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQPDS_MASK) << CMBLOC_CQPDS_SHIFT)
#define NVME_CMBLOC_SET_CDPMLS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPMLS_MASK) << CMBLOC_CDPMLS_SHIFT)
#define NVME_CMBLOC_SET_CDPCILS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPCILS_MASK) << CMBLOC_CDPCILS_SHIFT)
#define NVME_CMBLOC_SET_CDMMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDMMMS_MASK) << CMBLOC_CDMMMS_SHIFT)
#define NVME_CMBLOC_SET_CQDA(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQDA_MASK) << CMBLOC_CQDA_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)

enum NvmeCmbszShift {
    CMBSZ_SQS_SHIFT = 0,
    CMBSZ_CQS_SHIFT = 1,
    CMBSZ_LISTS_SHIFT = 2,
    CMBSZ_RDS_SHIFT = 3,
    CMBSZ_WDS_SHIFT = 4,
    CMBSZ_SZU_SHIFT = 8,
    CMBSZ_SZ_SHIFT = 12,
};

enum NvmeCmbszMask {
    CMBSZ_SQS_MASK = 0x1,
    CMBSZ_CQS_MASK = 0x1,
    CMBSZ_LISTS_MASK = 0x1,
    CMBSZ_RDS_MASK = 0x1,
    CMBSZ_WDS_MASK = 0x1,
    CMBSZ_SZU_MASK = 0xf,
    CMBSZ_SZ_MASK = 0xfffff,
};

#define NVME_CMBSZ_SQS(cmbsz) ((cmbsz >> CMBSZ_SQS_SHIFT) & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz) ((cmbsz >> CMBSZ_CQS_SHIFT) & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz) ((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz) ((cmbsz >> CMBSZ_RDS_SHIFT) & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz) ((cmbsz >> CMBSZ_WDS_SHIFT) & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz) ((cmbsz >> CMBSZ_SZU_SHIFT) & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz) ((cmbsz >> CMBSZ_SZ_SHIFT) & CMBSZ_SZ_MASK)

#define NVME_CMBSZ_SET_SQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)

#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
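
/*
 * Worked example (illustration only): NVME_CMBSZ_GETSIZE() combines the SZ
 * and SZU fields, where SZU selects a size unit of 2^(12 + 4 * SZU) bytes
 * (SZU = 0 -> 4 KiB, 1 -> 64 KiB, 2 -> 1 MiB, ...). A CMBSZ with SZU = 1 and
 * SZ = 16 therefore describes a 16 * 64 KiB = 1 MiB controller memory buffer.
 */
static inline uint64_t nvme_example_cmb_size(void)
{
    uint32_t cmbsz = 0;

    NVME_CMBSZ_SET_SZU(cmbsz, 1); /* 64 KiB granularity */
    NVME_CMBSZ_SET_SZ(cmbsz, 16); /* 16 units */

    return NVME_CMBSZ_GETSIZE(cmbsz); /* 1048576 bytes */
}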

enum NvmeCmbmscShift {
    CMBMSC_CRE_SHIFT = 0,
    CMBMSC_CMSE_SHIFT = 1,
    CMBMSC_CBA_SHIFT = 12,
};

enum NvmeCmbmscMask {
    CMBMSC_CRE_MASK = 0x1,
    CMBMSC_CMSE_MASK = 0x1,
    CMBMSC_CBA_MASK = ((1ULL << 52) - 1),
};

#define NVME_CMBMSC_CRE(cmbmsc) \
    ((cmbmsc >> CMBMSC_CRE_SHIFT) & CMBMSC_CRE_MASK)
#define NVME_CMBMSC_CMSE(cmbmsc) \
    ((cmbmsc >> CMBMSC_CMSE_SHIFT) & CMBMSC_CMSE_MASK)
#define NVME_CMBMSC_CBA(cmbmsc) \
    ((cmbmsc >> CMBMSC_CBA_SHIFT) & CMBMSC_CBA_MASK)

#define NVME_CMBMSC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
#define NVME_CMBMSC_SET_CMSE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CMSE_MASK) << CMBMSC_CMSE_SHIFT)
#define NVME_CMBMSC_SET_CBA(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CBA_MASK) << CMBMSC_CBA_SHIFT)

enum NvmeCmbstsShift {
    CMBSTS_CBAI_SHIFT = 0,
};

enum NvmeCmbstsMask {
    CMBSTS_CBAI_MASK = 0x1,
};

#define NVME_CMBSTS_CBAI(cmbsts) \
    ((cmbsts >> CMBSTS_CBAI_SHIFT) & CMBSTS_CBAI_MASK)

#define NVME_CMBSTS_SET_CBAI(cmbsts, val) \
    (cmbsts |= (uint64_t)(val & CMBSTS_CBAI_MASK) << CMBSTS_CBAI_SHIFT)

enum NvmePmrcapShift {
    PMRCAP_RDS_SHIFT = 3,
    PMRCAP_WDS_SHIFT = 4,
    PMRCAP_BIR_SHIFT = 5,
    PMRCAP_PMRTU_SHIFT = 8,
    PMRCAP_PMRWBM_SHIFT = 10,
    PMRCAP_PMRTO_SHIFT = 16,
    PMRCAP_CMSS_SHIFT = 24,
};

enum NvmePmrcapMask {
    PMRCAP_RDS_MASK = 0x1,
    PMRCAP_WDS_MASK = 0x1,
    PMRCAP_BIR_MASK = 0x7,
    PMRCAP_PMRTU_MASK = 0x3,
    PMRCAP_PMRWBM_MASK = 0xf,
    PMRCAP_PMRTO_MASK = 0xff,
    PMRCAP_CMSS_MASK = 0x1,
};

#define NVME_PMRCAP_RDS(pmrcap) \
    ((pmrcap >> PMRCAP_RDS_SHIFT) & PMRCAP_RDS_MASK)
#define NVME_PMRCAP_WDS(pmrcap) \
    ((pmrcap >> PMRCAP_WDS_SHIFT) & PMRCAP_WDS_MASK)
#define NVME_PMRCAP_BIR(pmrcap) \
    ((pmrcap >> PMRCAP_BIR_SHIFT) & PMRCAP_BIR_MASK)
#define NVME_PMRCAP_PMRTU(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTU_SHIFT) & PMRCAP_PMRTU_MASK)
#define NVME_PMRCAP_PMRWBM(pmrcap) \
    ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
#define NVME_PMRCAP_PMRTO(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTO_SHIFT) & PMRCAP_PMRTO_MASK)
#define NVME_PMRCAP_CMSS(pmrcap) \
    ((pmrcap >> PMRCAP_CMSS_SHIFT) & PMRCAP_CMSS_MASK)

#define NVME_PMRCAP_SET_RDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)

#define NVME_PMRCAP_SET_WDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
#define NVME_PMRCAP_SET_BIR(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
#define NVME_PMRCAP_SET_PMRTU(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
#define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
#define NVME_PMRCAP_SET_PMRTO(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
#define NVME_PMRCAP_SET_CMSS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)

enum NvmePmrctlShift {
    PMRCTL_EN_SHIFT = 0,
};

enum NvmePmrctlMask {
    PMRCTL_EN_MASK = 0x1,
};

#define NVME_PMRCTL_EN(pmrctl) ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)

#define NVME_PMRCTL_SET_EN(pmrctl, val) \
    (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)

enum NvmePmrstsShift {
    PMRSTS_ERR_SHIFT = 0,
    PMRSTS_NRDY_SHIFT = 8,
    PMRSTS_HSTS_SHIFT = 9,
    PMRSTS_CBAI_SHIFT = 12,
};

enum NvmePmrstsMask {
    PMRSTS_ERR_MASK = 0xff,
    PMRSTS_NRDY_MASK = 0x1,
    PMRSTS_HSTS_MASK = 0x7,
    PMRSTS_CBAI_MASK = 0x1,
};

#define NVME_PMRSTS_ERR(pmrsts) \
    ((pmrsts >> PMRSTS_ERR_SHIFT) & PMRSTS_ERR_MASK)
#define NVME_PMRSTS_NRDY(pmrsts) \
    ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
#define NVME_PMRSTS_HSTS(pmrsts) \
    ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
#define NVME_PMRSTS_CBAI(pmrsts) \
    ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)

#define NVME_PMRSTS_SET_ERR(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
#define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
#define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
#define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)

enum NvmePmrebsShift {
    PMREBS_PMRSZU_SHIFT = 0,
    PMREBS_RBB_SHIFT = 4,
    PMREBS_PMRWBZ_SHIFT = 8,
};

enum NvmePmrebsMask {
    PMREBS_PMRSZU_MASK = 0xf,
    PMREBS_RBB_MASK = 0x1,
    PMREBS_PMRWBZ_MASK = 0xffffff,
};

#define NVME_PMREBS_PMRSZU(pmrebs) \
    ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
#define NVME_PMREBS_RBB(pmrebs) \
    ((pmrebs >> PMREBS_RBB_SHIFT) & PMREBS_RBB_MASK)
#define NVME_PMREBS_PMRWBZ(pmrebs) \
    ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)

#define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
#define NVME_PMREBS_SET_RBB(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
#define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)

enum NvmePmrswtpShift {
    PMRSWTP_PMRSWTU_SHIFT = 0,
    PMRSWTP_PMRSWTV_SHIFT = 8,
};

enum NvmePmrswtpMask {
    PMRSWTP_PMRSWTU_MASK = 0xf,
    PMRSWTP_PMRSWTV_MASK = 0xffffff,
};

#define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
#define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)

#define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
#define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)

enum NvmePmrmsclShift {
    PMRMSCL_CMSE_SHIFT = 1,
    PMRMSCL_CBA_SHIFT = 12,
};

enum NvmePmrmsclMask {
    PMRMSCL_CMSE_MASK = 0x1,
    PMRMSCL_CBA_MASK = 0xfffff,
};

#define NVME_PMRMSCL_CMSE(pmrmscl) \
    ((pmrmscl >> PMRMSCL_CMSE_SHIFT) & PMRMSCL_CMSE_MASK)
#define NVME_PMRMSCL_CBA(pmrmscl) \
    ((pmrmscl >> PMRMSCL_CBA_SHIFT) & PMRMSCL_CBA_MASK)

#define NVME_PMRMSCL_SET_CMSE(pmrmscl, val) \
    (pmrmscl |= (uint32_t)(val & PMRMSCL_CMSE_MASK) << PMRMSCL_CMSE_SHIFT)
#define NVME_PMRMSCL_SET_CBA(pmrmscl, val) \
    (pmrmscl |= (uint32_t)(val & PMRMSCL_CBA_MASK) << PMRMSCL_CBA_SHIFT)

enum NvmeSglDescriptorType {
    NVME_SGL_DESCR_TYPE_DATA_BLOCK = 0x0,
    NVME_SGL_DESCR_TYPE_BIT_BUCKET = 0x1,
    NVME_SGL_DESCR_TYPE_SEGMENT = 0x2,
    NVME_SGL_DESCR_TYPE_LAST_SEGMENT = 0x3,
    NVME_SGL_DESCR_TYPE_KEYED_DATA_BLOCK = 0x4,

    NVME_SGL_DESCR_TYPE_VENDOR_SPECIFIC = 0xf,
};

enum NvmeSglDescriptorSubtype {
    NVME_SGL_DESCR_SUBTYPE_ADDRESS = 0x0,
};

typedef struct QEMU_PACKED NvmeSglDescriptor {
    uint64_t addr;
    uint32_t len;
    uint8_t rsvd[3];
    uint8_t type;
} NvmeSglDescriptor;

#define NVME_SGL_TYPE(type) ((type >> 4) & 0xf)
#define NVME_SGL_SUBTYPE(type) (type & 0xf)

typedef union NvmeCmdDptr {
    struct {
        uint64_t prp1;
        uint64_t prp2;
    };

    NvmeSglDescriptor sgl;
} NvmeCmdDptr;

enum NvmePsdt {
    NVME_PSDT_PRP = 0x0,
    NVME_PSDT_SGL_MPTR_CONTIGUOUS = 0x1,
    NVME_PSDT_SGL_MPTR_SGL = 0x2,
};

typedef struct QEMU_PACKED NvmeCmd {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t nsid;
    uint64_t res1;
    uint64_t mptr;
    NvmeCmdDptr dptr;
    uint32_t cdw10;
    uint32_t cdw11;
    uint32_t cdw12;
    uint32_t cdw13;
    uint32_t cdw14;
    uint32_t cdw15;
} NvmeCmd;

#define NVME_CMD_FLAGS_FUSE(flags) (flags & 0x3)
#define NVME_CMD_FLAGS_PSDT(flags) ((flags >> 6) & 0x3)

enum NvmeAdminCommands {
    NVME_ADM_CMD_DELETE_SQ = 0x00,
    NVME_ADM_CMD_CREATE_SQ = 0x01,
    NVME_ADM_CMD_GET_LOG_PAGE = 0x02,
    NVME_ADM_CMD_DELETE_CQ = 0x04,
    NVME_ADM_CMD_CREATE_CQ = 0x05,
    NVME_ADM_CMD_IDENTIFY = 0x06,
    NVME_ADM_CMD_ABORT = 0x08,
    NVME_ADM_CMD_SET_FEATURES = 0x09,
    NVME_ADM_CMD_GET_FEATURES = 0x0a,
    NVME_ADM_CMD_ASYNC_EV_REQ = 0x0c,
    NVME_ADM_CMD_ACTIVATE_FW = 0x10,
    NVME_ADM_CMD_DOWNLOAD_FW = 0x11,
    NVME_ADM_CMD_NS_ATTACHMENT = 0x15,
    NVME_ADM_CMD_FORMAT_NVM = 0x80,
    NVME_ADM_CMD_SECURITY_SEND = 0x81,
    NVME_ADM_CMD_SECURITY_RECV = 0x82,
};

enum NvmeIoCommands {
    NVME_CMD_FLUSH = 0x00,
    NVME_CMD_WRITE = 0x01,
    NVME_CMD_READ = 0x02,
    NVME_CMD_WRITE_UNCOR = 0x04,
    NVME_CMD_COMPARE = 0x05,
    NVME_CMD_WRITE_ZEROES = 0x08,
    NVME_CMD_DSM = 0x09,
    NVME_CMD_VERIFY = 0x0c,
    NVME_CMD_COPY = 0x19,
    NVME_CMD_ZONE_MGMT_SEND = 0x79,
    NVME_CMD_ZONE_MGMT_RECV = 0x7a,
    NVME_CMD_ZONE_APPEND = 0x7d,
};

typedef struct QEMU_PACKED NvmeDeleteQ {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t rsvd1[9];
    uint16_t qid;
    uint16_t rsvd10;
    uint32_t rsvd11[5];
} NvmeDeleteQ;
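
/*
 * Usage sketch (hypothetical helpers, illustration only): filling the common
 * part of an NvmeCmd for an I/O read, once with PRPs and once with a single
 * SGL data block descriptor. The data pointer interpretation is selected by
 * the PSDT bits in 'flags' (bits 7:6, cf. NVME_CMD_FLAGS_PSDT above). QEMU's
 * usual memset()/cpu_to_le*() helpers are assumed to be available.
 */
static inline void nvme_example_fill_read_cmd(NvmeCmd *cmd, uint16_t cid,
                                              uint32_t nsid, uint64_t prp1,
                                              uint64_t prp2)
{
    memset(cmd, 0, sizeof(*cmd));

    cmd->opcode = NVME_CMD_READ;
    cmd->cid = cpu_to_le16(cid);
    cmd->nsid = cpu_to_le32(nsid);

    /* PRP variant: the PSDT field in flags is left at NVME_PSDT_PRP (0) */
    cmd->dptr.prp1 = cpu_to_le64(prp1);
    cmd->dptr.prp2 = cpu_to_le64(prp2);
}

static inline void nvme_example_use_sgl(NvmeCmd *cmd, uint64_t addr,
                                        uint32_t len)
{
    /* SGL variant: one data block descriptor, metadata pointer contiguous */
    cmd->flags |= NVME_PSDT_SGL_MPTR_CONTIGUOUS << 6;
    cmd->dptr.sgl.addr = cpu_to_le64(addr);
    cmd->dptr.sgl.len = cpu_to_le32(len);
    cmd->dptr.sgl.type = NVME_SGL_DESCR_TYPE_DATA_BLOCK << 4;
}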

typedef struct QEMU_PACKED NvmeCreateCq {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t rsvd1[5];
    uint64_t prp1;
    uint64_t rsvd8;
    uint16_t cqid;
    uint16_t qsize;
    uint16_t cq_flags;
    uint16_t irq_vector;
    uint32_t rsvd12[4];
} NvmeCreateCq;

#define NVME_CQ_FLAGS_PC(cq_flags) (cq_flags & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) ((cq_flags >> 1) & 0x1)

enum NvmeFlagsCq {
    NVME_CQ_PC = 1,
    NVME_CQ_IEN = 2,
};

typedef struct QEMU_PACKED NvmeCreateSq {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t rsvd1[5];
    uint64_t prp1;
    uint64_t rsvd8;
    uint16_t sqid;
    uint16_t qsize;
    uint16_t sq_flags;
    uint16_t cqid;
    uint32_t rsvd12[4];
} NvmeCreateSq;

#define NVME_SQ_FLAGS_PC(sq_flags) (sq_flags & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags) ((sq_flags >> 1) & 0x3)

enum NvmeFlagsSq {
    NVME_SQ_PC = 1,

    NVME_SQ_PRIO_URGENT = 0,
    NVME_SQ_PRIO_HIGH = 1,
    NVME_SQ_PRIO_NORMAL = 2,
    NVME_SQ_PRIO_LOW = 3,
};

typedef struct QEMU_PACKED NvmeIdentify {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t nsid;
    uint64_t rsvd2[2];
    uint64_t prp1;
    uint64_t prp2;
    uint8_t cns;
    uint8_t rsvd10;
    uint16_t ctrlid;
    uint16_t nvmsetid;
    uint8_t rsvd11;
    uint8_t csi;
    uint32_t rsvd12[4];
} NvmeIdentify;

typedef struct QEMU_PACKED NvmeRwCmd {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t nsid;
    uint64_t rsvd2;
    uint64_t mptr;
    NvmeCmdDptr dptr;
    uint64_t slba;
    uint16_t nlb;
    uint16_t control;
    uint32_t dsmgmt;
    uint32_t reftag;
    uint16_t apptag;
    uint16_t appmask;
} NvmeRwCmd;

enum {
    NVME_RW_LR = 1 << 15,
    NVME_RW_FUA = 1 << 14,
    NVME_RW_DSM_FREQ_UNSPEC = 0,
    NVME_RW_DSM_FREQ_TYPICAL = 1,
    NVME_RW_DSM_FREQ_RARE = 2,
    NVME_RW_DSM_FREQ_READS = 3,
    NVME_RW_DSM_FREQ_WRITES = 4,
    NVME_RW_DSM_FREQ_RW = 5,
    NVME_RW_DSM_FREQ_ONCE = 6,
    NVME_RW_DSM_FREQ_PREFETCH = 7,
    NVME_RW_DSM_FREQ_TEMP = 8,
    NVME_RW_DSM_LATENCY_NONE = 0 << 4,
    NVME_RW_DSM_LATENCY_IDLE = 1 << 4,
    NVME_RW_DSM_LATENCY_NORM = 2 << 4,
    NVME_RW_DSM_LATENCY_LOW = 3 << 4,
    NVME_RW_DSM_SEQ_REQ = 1 << 6,
    NVME_RW_DSM_COMPRESSED = 1 << 7,
    NVME_RW_PIREMAP = 1 << 9,
    NVME_RW_PRINFO_PRACT = 1 << 13,
    NVME_RW_PRINFO_PRCHK_GUARD = 1 << 12,
    NVME_RW_PRINFO_PRCHK_APP = 1 << 11,
    NVME_RW_PRINFO_PRCHK_REF = 1 << 10,
    NVME_RW_PRINFO_PRCHK_MASK = 7 << 10,
};

#define NVME_RW_PRINFO(control) ((control >> 10) & 0xf)

enum {
    NVME_PRINFO_PRACT = 1 << 3,
    NVME_PRINFO_PRCHK_GUARD = 1 << 2,
    NVME_PRINFO_PRCHK_APP = 1 << 1,
    NVME_PRINFO_PRCHK_REF = 1 << 0,
    NVME_PRINFO_PRCHK_MASK = 7 << 0,
};

typedef struct QEMU_PACKED NvmeDsmCmd {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t nsid;
    uint64_t rsvd2[2];
    NvmeCmdDptr dptr;
    uint32_t nr;
    uint32_t attributes;
    uint32_t rsvd12[4];
} NvmeDsmCmd;

enum {
    NVME_DSMGMT_IDR = 1 << 0,
    NVME_DSMGMT_IDW = 1 << 1,
    NVME_DSMGMT_AD = 1 << 2,
};

typedef struct QEMU_PACKED NvmeDsmRange {
    uint32_t cattr;
    uint32_t nlb;
    uint64_t slba;
} NvmeDsmRange;

enum {
    NVME_COPY_FORMAT_0 = 0x0,
};

typedef struct QEMU_PACKED NvmeCopyCmd {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t nsid;
    uint32_t rsvd2[4];
    NvmeCmdDptr dptr;
    uint64_t sdlba;
    uint8_t nr;
    uint8_t control[3];
    uint16_t rsvd13;
    uint16_t dspec;
    uint32_t reftag;
    uint16_t apptag;
    uint16_t appmask;
} NvmeCopyCmd;

typedef struct QEMU_PACKED NvmeCopySourceRange {
    uint8_t rsvd0[8];
    uint64_t slba;
    uint16_t nlb;
    uint8_t rsvd18[6];
    uint32_t reftag;
    uint16_t apptag;
    uint16_t appmask;
} NvmeCopySourceRange;
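
/*
 * Usage sketch (hypothetical helper, illustration only): populating an
 * NvmeRwCmd for a read of 'count' logical blocks (count >= 1) starting at
 * 'slba'. NLB is 0's based, so a single block is encoded as 0. QEMU's usual
 * cpu_to_le*() helpers are assumed to be available.
 */
static inline void nvme_example_fill_rw(NvmeRwCmd *rw, uint32_t nsid,
                                        uint64_t slba, uint32_t count,
                                        int fua)
{
    memset(rw, 0, sizeof(*rw));

    rw->opcode = NVME_CMD_READ;
    rw->nsid = cpu_to_le32(nsid);
    rw->slba = cpu_to_le64(slba);
    rw->nlb = cpu_to_le16(count - 1);           /* 0's based block count */
    if (fua) {
        rw->control = cpu_to_le16(NVME_RW_FUA); /* force unit access */
    }
}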

enum NvmeAsyncEventRequest {
    NVME_AER_TYPE_ERROR = 0,
    NVME_AER_TYPE_SMART = 1,
    NVME_AER_TYPE_NOTICE = 2,
    NVME_AER_TYPE_IO_SPECIFIC = 6,
    NVME_AER_TYPE_VENDOR_SPECIFIC = 7,
    NVME_AER_INFO_ERR_INVALID_DB_REGISTER = 0,
    NVME_AER_INFO_ERR_INVALID_DB_VALUE = 1,
    NVME_AER_INFO_ERR_DIAG_FAIL = 2,
    NVME_AER_INFO_ERR_PERS_INTERNAL_ERR = 3,
    NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR = 4,
    NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR = 5,
    NVME_AER_INFO_SMART_RELIABILITY = 0,
    NVME_AER_INFO_SMART_TEMP_THRESH = 1,
    NVME_AER_INFO_SMART_SPARE_THRESH = 2,
    NVME_AER_INFO_NOTICE_NS_ATTR_CHANGED = 0,
};

typedef struct QEMU_PACKED NvmeAerResult {
    uint8_t event_type;
    uint8_t event_info;
    uint8_t log_page;
    uint8_t resv;
} NvmeAerResult;

typedef struct QEMU_PACKED NvmeZonedResult {
    uint64_t slba;
} NvmeZonedResult;

typedef struct QEMU_PACKED NvmeCqe {
    uint32_t result;
    uint32_t dw1;
    uint16_t sq_head;
    uint16_t sq_id;
    uint16_t cid;
    uint16_t status;
} NvmeCqe;

enum NvmeStatusCodes {
    NVME_SUCCESS = 0x0000,
    NVME_INVALID_OPCODE = 0x0001,
    NVME_INVALID_FIELD = 0x0002,
    NVME_CID_CONFLICT = 0x0003,
    NVME_DATA_TRAS_ERROR = 0x0004,
    NVME_POWER_LOSS_ABORT = 0x0005,
    NVME_INTERNAL_DEV_ERROR = 0x0006,
    NVME_CMD_ABORT_REQ = 0x0007,
    NVME_CMD_ABORT_SQ_DEL = 0x0008,
    NVME_CMD_ABORT_FAILED_FUSE = 0x0009,
    NVME_CMD_ABORT_MISSING_FUSE = 0x000a,
    NVME_INVALID_NSID = 0x000b,
    NVME_CMD_SEQ_ERROR = 0x000c,
    NVME_INVALID_SGL_SEG_DESCR = 0x000d,
    NVME_INVALID_NUM_SGL_DESCRS = 0x000e,
    NVME_DATA_SGL_LEN_INVALID = 0x000f,
    NVME_MD_SGL_LEN_INVALID = 0x0010,
    NVME_SGL_DESCR_TYPE_INVALID = 0x0011,
    NVME_INVALID_USE_OF_CMB = 0x0012,
    NVME_INVALID_PRP_OFFSET = 0x0013,
    NVME_CMD_SET_CMB_REJECTED = 0x002b,
    NVME_INVALID_CMD_SET = 0x002c,
    NVME_LBA_RANGE = 0x0080,
    NVME_CAP_EXCEEDED = 0x0081,
    NVME_NS_NOT_READY = 0x0082,
    NVME_NS_RESV_CONFLICT = 0x0083,
    NVME_FORMAT_IN_PROGRESS = 0x0084,
    NVME_INVALID_CQID = 0x0100,
    NVME_INVALID_QID = 0x0101,
    NVME_MAX_QSIZE_EXCEEDED = 0x0102,
    NVME_ACL_EXCEEDED = 0x0103,
    NVME_RESERVED = 0x0104,
    NVME_AER_LIMIT_EXCEEDED = 0x0105,
    NVME_INVALID_FW_SLOT = 0x0106,
    NVME_INVALID_FW_IMAGE = 0x0107,
    NVME_INVALID_IRQ_VECTOR = 0x0108,
    NVME_INVALID_LOG_ID = 0x0109,
    NVME_INVALID_FORMAT = 0x010a,
    NVME_FW_REQ_RESET = 0x010b,
    NVME_INVALID_QUEUE_DEL = 0x010c,
    NVME_FID_NOT_SAVEABLE = 0x010d,
    NVME_FEAT_NOT_CHANGEABLE = 0x010e,
    NVME_FEAT_NOT_NS_SPEC = 0x010f,
    NVME_FW_REQ_SUSYSTEM_RESET = 0x0110,
    NVME_NS_ALREADY_ATTACHED = 0x0118,
    NVME_NS_PRIVATE = 0x0119,
    NVME_NS_NOT_ATTACHED = 0x011a,
    NVME_NS_CTRL_LIST_INVALID = 0x011c,
    NVME_CONFLICTING_ATTRS = 0x0180,
    NVME_INVALID_PROT_INFO = 0x0181,
    NVME_WRITE_TO_RO = 0x0182,
    NVME_CMD_SIZE_LIMIT = 0x0183,
    NVME_INVALID_ZONE_OP = 0x01b6,
    NVME_NOZRWA = 0x01b7,
    NVME_ZONE_BOUNDARY_ERROR = 0x01b8,
    NVME_ZONE_FULL = 0x01b9,
    NVME_ZONE_READ_ONLY = 0x01ba,
    NVME_ZONE_OFFLINE = 0x01bb,
    NVME_ZONE_INVALID_WRITE = 0x01bc,
    NVME_ZONE_TOO_MANY_ACTIVE = 0x01bd,
    NVME_ZONE_TOO_MANY_OPEN = 0x01be,
    NVME_ZONE_INVAL_TRANSITION = 0x01bf,
    NVME_WRITE_FAULT = 0x0280,
    NVME_UNRECOVERED_READ = 0x0281,
    NVME_E2E_GUARD_ERROR = 0x0282,
    NVME_E2E_APP_ERROR = 0x0283,
    NVME_E2E_REF_ERROR = 0x0284,
    NVME_CMP_FAILURE = 0x0285,
    NVME_ACCESS_DENIED = 0x0286,
    NVME_DULB = 0x0287,
    NVME_MORE = 0x2000,
    NVME_DNR = 0x4000,
    NVME_NO_COMPLETE = 0xffff,
};
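
/*
 * Usage sketch (illustration only): NVME_MORE and NVME_DNR are modifier bits
 * that are OR'ed into a status code rather than status codes of their own.
 * For example, an unrecoverable parameter error that the host should not
 * retry can be returned as below.
 */
static inline uint16_t nvme_example_invalid_field_status(void)
{
    return NVME_INVALID_FIELD | NVME_DNR;
}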

typedef struct QEMU_PACKED NvmeFwSlotInfoLog {
    uint8_t afi;
    uint8_t reserved1[7];
    uint8_t frs1[8];
    uint8_t frs2[8];
    uint8_t frs3[8];
    uint8_t frs4[8];
    uint8_t frs5[8];
    uint8_t frs6[8];
    uint8_t frs7[8];
    uint8_t reserved2[448];
} NvmeFwSlotInfoLog;

typedef struct QEMU_PACKED NvmeErrorLog {
    uint64_t error_count;
    uint16_t sqid;
    uint16_t cid;
    uint16_t status_field;
    uint16_t param_error_location;
    uint64_t lba;
    uint32_t nsid;
    uint8_t vs;
    uint8_t resv[35];
} NvmeErrorLog;

typedef struct QEMU_PACKED NvmeSmartLog {
    uint8_t critical_warning;
    uint16_t temperature;
    uint8_t available_spare;
    uint8_t available_spare_threshold;
    uint8_t percentage_used;
    uint8_t reserved1[26];
    uint64_t data_units_read[2];
    uint64_t data_units_written[2];
    uint64_t host_read_commands[2];
    uint64_t host_write_commands[2];
    uint64_t controller_busy_time[2];
    uint64_t power_cycles[2];
    uint64_t power_on_hours[2];
    uint64_t unsafe_shutdowns[2];
    uint64_t media_errors[2];
    uint64_t number_of_error_log_entries[2];
    uint8_t reserved2[320];
} NvmeSmartLog;

#define NVME_SMART_WARN_MAX 6
enum NvmeSmartWarn {
    NVME_SMART_SPARE = 1 << 0,
    NVME_SMART_TEMPERATURE = 1 << 1,
    NVME_SMART_RELIABILITY = 1 << 2,
    NVME_SMART_MEDIA_READ_ONLY = 1 << 3,
    NVME_SMART_FAILED_VOLATILE_MEDIA = 1 << 4,
    NVME_SMART_PMR_UNRELIABLE = 1 << 5,
};

typedef struct NvmeEffectsLog {
    uint32_t acs[256];
    uint32_t iocs[256];
    uint8_t resv[2048];
} NvmeEffectsLog;

enum {
    NVME_CMD_EFF_CSUPP = 1 << 0,
    NVME_CMD_EFF_LBCC = 1 << 1,
    NVME_CMD_EFF_NCC = 1 << 2,
    NVME_CMD_EFF_NIC = 1 << 3,
    NVME_CMD_EFF_CCC = 1 << 4,
    NVME_CMD_EFF_CSE_MASK = 3 << 16,
    NVME_CMD_EFF_UUID_SEL = 1 << 19,
};

enum NvmeLogIdentifier {
    NVME_LOG_ERROR_INFO = 0x01,
    NVME_LOG_SMART_INFO = 0x02,
    NVME_LOG_FW_SLOT_INFO = 0x03,
    NVME_LOG_CHANGED_NSLIST = 0x04,
    NVME_LOG_CMD_EFFECTS = 0x05,
};

typedef struct QEMU_PACKED NvmePSD {
    uint16_t mp;
    uint16_t reserved;
    uint32_t enlat;
    uint32_t exlat;
    uint8_t rrt;
    uint8_t rrl;
    uint8_t rwt;
    uint8_t rwl;
    uint8_t resv[16];
} NvmePSD;

#define NVME_CONTROLLER_LIST_SIZE 2048
#define NVME_IDENTIFY_DATA_SIZE 4096

enum NvmeIdCns {
    NVME_ID_CNS_NS = 0x00,
    NVME_ID_CNS_CTRL = 0x01,
    NVME_ID_CNS_NS_ACTIVE_LIST = 0x02,
    NVME_ID_CNS_NS_DESCR_LIST = 0x03,
    NVME_ID_CNS_CS_NS = 0x05,
    NVME_ID_CNS_CS_CTRL = 0x06,
    NVME_ID_CNS_CS_NS_ACTIVE_LIST = 0x07,
    NVME_ID_CNS_NS_PRESENT_LIST = 0x10,
    NVME_ID_CNS_NS_PRESENT = 0x11,
    NVME_ID_CNS_NS_ATTACHED_CTRL_LIST = 0x12,
    NVME_ID_CNS_CTRL_LIST = 0x13,
    NVME_ID_CNS_CS_NS_PRESENT_LIST = 0x1a,
    NVME_ID_CNS_CS_NS_PRESENT = 0x1b,
    NVME_ID_CNS_IO_COMMAND_SET = 0x1c,
};
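
/*
 * Usage sketch (hypothetical helper, illustration only): an Identify
 * Controller command built on the NvmeIdentify layout defined earlier. The
 * CNS field selects what the returned data buffer contains; for the
 * command-set specific variants (NVME_ID_CNS_CS_*), 'csi' selects the
 * command set.
 */
static inline void nvme_example_identify_ctrl(NvmeIdentify *id, uint64_t prp1)
{
    memset(id, 0, sizeof(*id));

    id->opcode = NVME_ADM_CMD_IDENTIFY;
    id->cns = NVME_ID_CNS_CTRL;   /* return the NvmeIdCtrl structure */
    id->prp1 = cpu_to_le64(prp1); /* buffer of NVME_IDENTIFY_DATA_SIZE bytes */
}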

typedef struct QEMU_PACKED NvmeIdCtrl {
    uint16_t vid;
    uint16_t ssvid;
    uint8_t sn[20];
    uint8_t mn[40];
    uint8_t fr[8];
    uint8_t rab;
    uint8_t ieee[3];
    uint8_t cmic;
    uint8_t mdts;
    uint16_t cntlid;
    uint32_t ver;
    uint32_t rtd3r;
    uint32_t rtd3e;
    uint32_t oaes;
    uint32_t ctratt;
    uint8_t rsvd100[11];
    uint8_t cntrltype;
    uint8_t fguid[16];
    uint8_t rsvd128[128];
    uint16_t oacs;
    uint8_t acl;
    uint8_t aerl;
    uint8_t frmw;
    uint8_t lpa;
    uint8_t elpe;
    uint8_t npss;
    uint8_t avscc;
    uint8_t apsta;
    uint16_t wctemp;
    uint16_t cctemp;
    uint16_t mtfa;
    uint32_t hmpre;
    uint32_t hmmin;
    uint8_t tnvmcap[16];
    uint8_t unvmcap[16];
    uint32_t rpmbs;
    uint16_t edstt;
    uint8_t dsto;
    uint8_t fwug;
    uint16_t kas;
    uint16_t hctma;
    uint16_t mntmt;
    uint16_t mxtmt;
    uint32_t sanicap;
    uint8_t rsvd332[180];
    uint8_t sqes;
    uint8_t cqes;
    uint16_t maxcmd;
    uint32_t nn;
    uint16_t oncs;
    uint16_t fuses;
    uint8_t fna;
    uint8_t vwc;
    uint16_t awun;
    uint16_t awupf;
    uint8_t nvscc;
    uint8_t rsvd531;
    uint16_t acwu;
    uint16_t ocfs;
    uint32_t sgls;
    uint8_t rsvd540[228];
    uint8_t subnqn[256];
    uint8_t rsvd1024[1024];
    NvmePSD psd[32];
    uint8_t vs[1024];
} NvmeIdCtrl;

typedef struct NvmeIdCtrlZoned {
    uint8_t zasl;
    uint8_t rsvd1[4095];
} NvmeIdCtrlZoned;

typedef struct NvmeIdCtrlNvm {
    uint8_t vsl;
    uint8_t wzsl;
    uint8_t wusl;
    uint8_t dmrl;
    uint32_t dmrsl;
    uint64_t dmsl;
    uint8_t rsvd16[4080];
} NvmeIdCtrlNvm;

enum NvmeIdCtrlOaes {
    NVME_OAES_NS_ATTR = 1 << 8,
};

enum NvmeIdCtrlOacs {
    NVME_OACS_SECURITY = 1 << 0,
    NVME_OACS_FORMAT = 1 << 1,
    NVME_OACS_FW = 1 << 2,
    NVME_OACS_NS_MGMT = 1 << 3,
};

enum NvmeIdCtrlOncs {
    NVME_ONCS_COMPARE = 1 << 0,
    NVME_ONCS_WRITE_UNCORR = 1 << 1,
    NVME_ONCS_DSM = 1 << 2,
    NVME_ONCS_WRITE_ZEROES = 1 << 3,
    NVME_ONCS_FEATURES = 1 << 4,
    NVME_ONCS_RESRVATIONS = 1 << 5,
    NVME_ONCS_TIMESTAMP = 1 << 6,
    NVME_ONCS_VERIFY = 1 << 7,
    NVME_ONCS_COPY = 1 << 8,
};

enum NvmeIdCtrlOcfs {
    NVME_OCFS_COPY_FORMAT_0 = 1 << 0,
};

enum NvmeIdctrlVwc {
    NVME_VWC_PRESENT = 1 << 0,
    NVME_VWC_NSID_BROADCAST_NO_SUPPORT = 0 << 1,
    NVME_VWC_NSID_BROADCAST_RESERVED = 1 << 1,
    NVME_VWC_NSID_BROADCAST_CTRL_SPEC = 2 << 1,
    NVME_VWC_NSID_BROADCAST_SUPPORT = 3 << 1,
};

enum NvmeIdCtrlFrmw {
    NVME_FRMW_SLOT1_RO = 1 << 0,
};

enum NvmeIdCtrlLpa {
    NVME_LPA_NS_SMART = 1 << 0,
    NVME_LPA_CSE = 1 << 1,
    NVME_LPA_EXTENDED = 1 << 2,
};

enum NvmeIdCtrlCmic {
    NVME_CMIC_MULTI_CTRL = 1 << 1,
};

enum NvmeNsAttachmentOperation {
    NVME_NS_ATTACHMENT_ATTACH = 0x0,
    NVME_NS_ATTACHMENT_DETACH = 0x1,
};

#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)

#define NVME_CTRL_SGLS_SUPPORT_MASK (0x3 << 0)
#define NVME_CTRL_SGLS_SUPPORT_NO_ALIGN (0x1 << 0)
#define NVME_CTRL_SGLS_SUPPORT_DWORD_ALIGN (0x1 << 1)
#define NVME_CTRL_SGLS_KEYED (0x1 << 2)
#define NVME_CTRL_SGLS_BITBUCKET (0x1 << 16)
#define NVME_CTRL_SGLS_MPTR_CONTIGUOUS (0x1 << 17)
#define NVME_CTRL_SGLS_EXCESS_LENGTH (0x1 << 18)
#define NVME_CTRL_SGLS_MPTR_SGL (0x1 << 19)
#define NVME_CTRL_SGLS_ADDR_OFFSET (0x1 << 20)
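
/*
 * Worked example (illustration only): SQES and CQES hold log2 entry sizes,
 * so the minimum supported submission queue entry size in bytes is
 * 1 << NVME_CTRL_SQES_MIN(id->sqes). For the NVM command set this is
 * expected to be 6 (64-byte SQ entries), with CQES reporting 4 (16-byte CQ
 * entries).
 */
static inline uint32_t nvme_example_sqe_min_size(const NvmeIdCtrl *id)
{
    return 1u << NVME_CTRL_SQES_MIN(id->sqes);
}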

#define NVME_ARB_AB(arb) (arb & 0x7)
#define NVME_ARB_AB_NOLIMIT 0x7
#define NVME_ARB_LPW(arb) ((arb >> 8) & 0xff)
#define NVME_ARB_MPW(arb) ((arb >> 16) & 0xff)
#define NVME_ARB_HPW(arb) ((arb >> 24) & 0xff)

#define NVME_INTC_THR(intc) (intc & 0xff)
#define NVME_INTC_TIME(intc) ((intc >> 8) & 0xff)

#define NVME_INTVC_NOCOALESCING (0x1 << 16)

#define NVME_TEMP_THSEL(temp) ((temp >> 20) & 0x3)
#define NVME_TEMP_THSEL_OVER 0x0
#define NVME_TEMP_THSEL_UNDER 0x1

#define NVME_TEMP_TMPSEL(temp) ((temp >> 16) & 0xf)
#define NVME_TEMP_TMPSEL_COMPOSITE 0x0

#define NVME_TEMP_TMPTH(temp) (temp & 0xffff)

#define NVME_AEC_SMART(aec) (aec & 0xff)
#define NVME_AEC_NS_ATTR(aec) ((aec >> 8) & 0x1)
#define NVME_AEC_FW_ACTIVATION(aec) ((aec >> 9) & 0x1)

#define NVME_ERR_REC_TLER(err_rec) (err_rec & 0xffff)
#define NVME_ERR_REC_DULBE(err_rec) (err_rec & 0x10000)

enum NvmeFeatureIds {
    NVME_ARBITRATION = 0x1,
    NVME_POWER_MANAGEMENT = 0x2,
    NVME_LBA_RANGE_TYPE = 0x3,
    NVME_TEMPERATURE_THRESHOLD = 0x4,
    NVME_ERROR_RECOVERY = 0x5,
    NVME_VOLATILE_WRITE_CACHE = 0x6,
    NVME_NUMBER_OF_QUEUES = 0x7,
    NVME_INTERRUPT_COALESCING = 0x8,
    NVME_INTERRUPT_VECTOR_CONF = 0x9,
    NVME_WRITE_ATOMICITY = 0xa,
    NVME_ASYNCHRONOUS_EVENT_CONF = 0xb,
    NVME_TIMESTAMP = 0xe,
    NVME_COMMAND_SET_PROFILE = 0x19,
    NVME_SOFTWARE_PROGRESS_MARKER = 0x80,
    NVME_FID_MAX = 0x100,
};

typedef enum NvmeFeatureCap {
    NVME_FEAT_CAP_SAVE = 1 << 0,
    NVME_FEAT_CAP_NS = 1 << 1,
    NVME_FEAT_CAP_CHANGE = 1 << 2,
} NvmeFeatureCap;

typedef enum NvmeGetFeatureSelect {
    NVME_GETFEAT_SELECT_CURRENT = 0x0,
    NVME_GETFEAT_SELECT_DEFAULT = 0x1,
    NVME_GETFEAT_SELECT_SAVED = 0x2,
    NVME_GETFEAT_SELECT_CAP = 0x3,
} NvmeGetFeatureSelect;

#define NVME_GETSETFEAT_FID_MASK 0xff
#define NVME_GETSETFEAT_FID(dw10) (dw10 & NVME_GETSETFEAT_FID_MASK)

#define NVME_GETFEAT_SELECT_SHIFT 8
#define NVME_GETFEAT_SELECT_MASK 0x7
#define NVME_GETFEAT_SELECT(dw10) \
    ((dw10 >> NVME_GETFEAT_SELECT_SHIFT) & NVME_GETFEAT_SELECT_MASK)

#define NVME_SETFEAT_SAVE_SHIFT 31
#define NVME_SETFEAT_SAVE_MASK 0x1
#define NVME_SETFEAT_SAVE(dw10) \
    ((dw10 >> NVME_SETFEAT_SAVE_SHIFT) & NVME_SETFEAT_SAVE_MASK)

typedef struct QEMU_PACKED NvmeRangeType {
    uint8_t type;
    uint8_t attributes;
    uint8_t rsvd2[14];
    uint64_t slba;
    uint64_t nlb;
    uint8_t guid[16];
    uint8_t rsvd48[16];
} NvmeRangeType;

typedef struct QEMU_PACKED NvmeLBAF {
    uint16_t ms;
    uint8_t ds;
    uint8_t rp;
} NvmeLBAF;

typedef struct QEMU_PACKED NvmeLBAFE {
    uint64_t zsze;
    uint8_t zdes;
    uint8_t rsvd9[7];
} NvmeLBAFE;

#define NVME_NSID_BROADCAST 0xffffffff

typedef struct QEMU_PACKED NvmeIdNs {
    uint64_t nsze;
    uint64_t ncap;
    uint64_t nuse;
    uint8_t nsfeat;
    uint8_t nlbaf;
    uint8_t flbas;
    uint8_t mc;
    uint8_t dpc;
    uint8_t dps;
    uint8_t nmic;
    uint8_t rescap;
    uint8_t fpi;
    uint8_t dlfeat;
    uint16_t nawun;
    uint16_t nawupf;
    uint16_t nacwu;
    uint16_t nabsn;
    uint16_t nabo;
    uint16_t nabspf;
    uint16_t noiob;
    uint8_t nvmcap[16];
    uint16_t npwg;
    uint16_t npwa;
    uint16_t npdg;
    uint16_t npda;
    uint16_t nows;
    uint16_t mssrl;
    uint32_t mcl;
    uint8_t msrc;
    uint8_t rsvd81[23];
    uint8_t nguid[16];
    uint64_t eui64;
    NvmeLBAF lbaf[16];
    uint8_t rsvd192[192];
    uint8_t vs[3712];
} NvmeIdNs;
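
/*
 * Usage sketch (hypothetical helper, illustration only): the logical block
 * size of a namespace is not stored directly; the low four bits of FLBAS
 * (cf. NVME_ID_NS_FLBAS_INDEX below) index the lbaf[] table, and LBADS is a
 * log2 value, so the block size in bytes is 1 << lbaf[i].ds.
 */
static inline uint32_t nvme_example_lba_size(const NvmeIdNs *id_ns)
{
    const NvmeLBAF *lbaf = &id_ns->lbaf[id_ns->flbas & 0xf];

    return 1u << lbaf->ds;
}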

typedef struct QEMU_PACKED NvmeIdNsDescr {
    uint8_t nidt;
    uint8_t nidl;
    uint8_t rsvd2[2];
} NvmeIdNsDescr;

enum NvmeNsIdentifierLength {
    NVME_NIDL_EUI64 = 8,
    NVME_NIDL_NGUID = 16,
    NVME_NIDL_UUID = 16,
    NVME_NIDL_CSI = 1,
};

enum NvmeNsIdentifierType {
    NVME_NIDT_EUI64 = 0x01,
    NVME_NIDT_NGUID = 0x02,
    NVME_NIDT_UUID = 0x03,
    NVME_NIDT_CSI = 0x04,
};

enum NvmeIdNsNmic {
    NVME_NMIC_NS_SHARED = 1 << 0,
};

enum NvmeCsi {
    NVME_CSI_NVM = 0x00,
    NVME_CSI_ZONED = 0x02,
};

#define NVME_SET_CSI(vec, csi) (vec |= (uint8_t)(1 << (csi)))

typedef struct QEMU_PACKED NvmeIdNsZoned {
    uint16_t zoc;
    uint16_t ozcs;
    uint32_t mar;
    uint32_t mor;
    uint32_t rrl;
    uint32_t frl;
    uint8_t rsvd12[24];
    uint32_t numzrwa;
    uint16_t zrwafg;
    uint16_t zrwas;
    uint8_t zrwacap;
    uint8_t rsvd53[2763];
    NvmeLBAFE lbafe[16];
    uint8_t rsvd3072[768];
    uint8_t vs[256];
} NvmeIdNsZoned;

enum NvmeIdNsZonedOzcs {
    NVME_ID_NS_ZONED_OZCS_RAZB = 1 << 0,
    NVME_ID_NS_ZONED_OZCS_ZRWASUP = 1 << 1,
};

enum NvmeIdNsZonedZrwacap {
    NVME_ID_NS_ZONED_ZRWACAP_EXPFLUSHSUP = 1 << 0,
};

/* Deallocate Logical Block Features */
#define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat) ((dlfeat) & 0x10)
#define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)

#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat) ((dlfeat) & 0x7)
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES 1
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES 2

#define NVME_ID_NS_NSFEAT_THIN(nsfeat) ((nsfeat & 0x1))
#define NVME_ID_NS_NSFEAT_DULBE(nsfeat) ((nsfeat >> 2) & 0x1)
#define NVME_ID_NS_FLBAS_EXTENDED(flbas) ((flbas >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas) ((flbas & 0xf))
#define NVME_ID_NS_MC_SEPARATE(mc) ((mc >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc) ((mc & 0x1))
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc) ((dpc >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc) ((dpc >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc) ((dpc >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc) ((dpc >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc) ((dpc & 0x1))
#define NVME_ID_NS_DPC_TYPE_MASK 0x7

enum NvmeIdNsDps {
    NVME_ID_NS_DPS_TYPE_NONE = 0,
    NVME_ID_NS_DPS_TYPE_1 = 1,
    NVME_ID_NS_DPS_TYPE_2 = 2,
    NVME_ID_NS_DPS_TYPE_3 = 3,
    NVME_ID_NS_DPS_TYPE_MASK = 0x7,
    NVME_ID_NS_DPS_FIRST_EIGHT = 8,
};

enum NvmeIdNsFlbas {
    NVME_ID_NS_FLBAS_EXTENDED = 1 << 4,
};

enum NvmeIdNsMc {
    NVME_ID_NS_MC_EXTENDED = 1 << 0,
    NVME_ID_NS_MC_SEPARATE = 1 << 1,
};

#define NVME_ID_NS_DPS_TYPE(dps) (dps & NVME_ID_NS_DPS_TYPE_MASK)

typedef struct NvmeDifTuple {
    uint16_t guard;
    uint16_t apptag;
    uint32_t reftag;
} NvmeDifTuple;

enum NvmeZoneAttr {
    NVME_ZA_FINISHED_BY_CTLR = 1 << 0,
    NVME_ZA_FINISH_RECOMMENDED = 1 << 1,
    NVME_ZA_RESET_RECOMMENDED = 1 << 2,
    NVME_ZA_ZRWA_VALID = 1 << 3,
    NVME_ZA_ZD_EXT_VALID = 1 << 7,
};

typedef struct QEMU_PACKED NvmeZoneReportHeader {
    uint64_t nr_zones;
    uint8_t rsvd[56];
} NvmeZoneReportHeader;

enum NvmeZoneReceiveAction {
    NVME_ZONE_REPORT = 0,
    NVME_ZONE_REPORT_EXTENDED = 1,
};

enum NvmeZoneReportType {
    NVME_ZONE_REPORT_ALL = 0,
    NVME_ZONE_REPORT_EMPTY = 1,
    NVME_ZONE_REPORT_IMPLICITLY_OPEN = 2,
    NVME_ZONE_REPORT_EXPLICITLY_OPEN = 3,
    NVME_ZONE_REPORT_CLOSED = 4,
    NVME_ZONE_REPORT_FULL = 5,
    NVME_ZONE_REPORT_READ_ONLY = 6,
    NVME_ZONE_REPORT_OFFLINE = 7,
};

enum NvmeZoneType {
    NVME_ZONE_TYPE_RESERVED = 0x00,
    NVME_ZONE_TYPE_SEQ_WRITE = 0x02,
};

typedef struct QEMU_PACKED NvmeZoneSendCmd {
    uint8_t opcode;
    uint8_t flags;
    uint16_t cid;
    uint32_t nsid;
    uint32_t rsvd8[4];
    NvmeCmdDptr dptr;
    uint64_t slba;
    uint32_t rsvd48;
    uint8_t zsa;
    uint8_t zsflags;
    uint8_t rsvd54[2];
    uint32_t rsvd56[2];
} NvmeZoneSendCmd;

enum NvmeZoneSendAction {
    NVME_ZONE_ACTION_RSD = 0x00,
    NVME_ZONE_ACTION_CLOSE = 0x01,
    NVME_ZONE_ACTION_FINISH = 0x02,
    NVME_ZONE_ACTION_OPEN = 0x03,
    NVME_ZONE_ACTION_RESET = 0x04,
    NVME_ZONE_ACTION_OFFLINE = 0x05,
    NVME_ZONE_ACTION_SET_ZD_EXT = 0x10,
    NVME_ZONE_ACTION_ZRWA_FLUSH = 0x11,
};

enum {
    NVME_ZSFLAG_SELECT_ALL = 1 << 0,
    NVME_ZSFLAG_ZRWA_ALLOC = 1 << 1,
};

typedef struct QEMU_PACKED NvmeZoneDescr {
    uint8_t zt;
    uint8_t zs;
    uint8_t za;
    uint8_t rsvd3[5];
    uint64_t zcap;
    uint64_t zslba;
    uint64_t wp;
    uint8_t rsvd32[32];
} NvmeZoneDescr;

typedef enum NvmeZoneState {
    NVME_ZONE_STATE_RESERVED = 0x00,
    NVME_ZONE_STATE_EMPTY = 0x01,
    NVME_ZONE_STATE_IMPLICITLY_OPEN = 0x02,
    NVME_ZONE_STATE_EXPLICITLY_OPEN = 0x03,
    NVME_ZONE_STATE_CLOSED = 0x04,
    NVME_ZONE_STATE_READ_ONLY = 0x0d,
    NVME_ZONE_STATE_FULL = 0x0e,
    NVME_ZONE_STATE_OFFLINE = 0x0f,
} NvmeZoneState;
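
/*
 * Usage sketch (hypothetical helper, illustration only): a Zone Management
 * Send command that resets either the zone starting at 'zslba' or, with the
 * select-all flag, every zone in the namespace (in which case the SLBA field
 * is ignored).
 */
static inline void nvme_example_zone_reset(NvmeZoneSendCmd *cmd, uint32_t nsid,
                                           uint64_t zslba, int all)
{
    memset(cmd, 0, sizeof(*cmd));

    cmd->opcode = NVME_CMD_ZONE_MGMT_SEND;
    cmd->nsid = cpu_to_le32(nsid);
    cmd->slba = cpu_to_le64(zslba);
    cmd->zsa = NVME_ZONE_ACTION_RESET;
    if (all) {
        cmd->zsflags |= NVME_ZSFLAG_SELECT_ALL;
    }
}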

static inline void _nvme_check_size(void)
{
    QEMU_BUILD_BUG_ON(sizeof(NvmeBar) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeZonedResult) != 8);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCqe) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopySourceRange) != 32);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopyCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeEffectsLog) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlZoned) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlNvm) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeLBAF) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeLBAFE) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsZoned) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSglDescriptor) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsDescr) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeZoneDescr) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDifTuple) != 8);
}
#endif