#ifndef BLOCK_NVME_H
#define BLOCK_NVME_H

typedef struct QEMU_PACKED NvmeBar {
    uint64_t    cap;
    uint32_t    vs;
    uint32_t    intms;
    uint32_t    intmc;
    uint32_t    cc;
    uint32_t    rsvd1;
    uint32_t    csts;
    uint32_t    nssrc;
    uint32_t    aqa;
    uint64_t    asq;
    uint64_t    acq;
    uint32_t    cmbloc;
    uint32_t    cmbsz;
    uint8_t     padding[3520]; /* not used by QEMU */
    uint32_t    pmrcap;
    uint32_t    pmrctl;
    uint32_t    pmrsts;
    uint32_t    pmrebs;
    uint32_t    pmrswtp;
    uint64_t    pmrmsc;
    uint8_t     reserved[484];
} NvmeBar;

enum NvmeCapShift {
    CAP_MQES_SHIFT     = 0,
    CAP_CQR_SHIFT      = 16,
    CAP_AMS_SHIFT      = 17,
    CAP_TO_SHIFT       = 24,
    CAP_DSTRD_SHIFT    = 32,
    CAP_NSSRS_SHIFT    = 36,
    CAP_CSS_SHIFT      = 37,
    CAP_MPSMIN_SHIFT   = 48,
    CAP_MPSMAX_SHIFT   = 52,
    CAP_PMR_SHIFT      = 56,
};

enum NvmeCapMask {
    CAP_MQES_MASK      = 0xffff,
    CAP_CQR_MASK       = 0x1,
    CAP_AMS_MASK       = 0x3,
    CAP_TO_MASK        = 0xff,
    CAP_DSTRD_MASK     = 0xf,
    CAP_NSSRS_MASK     = 0x1,
    CAP_CSS_MASK       = 0xff,
    CAP_MPSMIN_MASK    = 0xf,
    CAP_MPSMAX_MASK    = 0xf,
    CAP_PMR_MASK       = 0x1,
};

#define NVME_CAP_MQES(cap)  (((cap) >> CAP_MQES_SHIFT)   & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap)   (((cap) >> CAP_CQR_SHIFT)    & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap)   (((cap) >> CAP_AMS_SHIFT)    & CAP_AMS_MASK)
#define NVME_CAP_TO(cap)    (((cap) >> CAP_TO_SHIFT)     & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap) (((cap) >> CAP_DSTRD_SHIFT)  & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap) (((cap) >> CAP_NSSRS_SHIFT)  & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap)   (((cap) >> CAP_CSS_SHIFT)    & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap)(((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap)(((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)

#define NVME_CAP_SET_MQES(cap, val)   (cap |= (uint64_t)(val & CAP_MQES_MASK)  \
                                                          << CAP_MQES_SHIFT)
#define NVME_CAP_SET_CQR(cap, val)    (cap |= (uint64_t)(val & CAP_CQR_MASK)   \
                                                          << CAP_CQR_SHIFT)
#define NVME_CAP_SET_AMS(cap, val)    (cap |= (uint64_t)(val & CAP_AMS_MASK)   \
                                                          << CAP_AMS_SHIFT)
#define NVME_CAP_SET_TO(cap, val)     (cap |= (uint64_t)(val & CAP_TO_MASK)    \
                                                          << CAP_TO_SHIFT)
#define NVME_CAP_SET_DSTRD(cap, val)  (cap |= (uint64_t)(val & CAP_DSTRD_MASK) \
                                                          << CAP_DSTRD_SHIFT)
#define NVME_CAP_SET_NSSRS(cap, val)  (cap |= (uint64_t)(val & CAP_NSSRS_MASK) \
                                                          << CAP_NSSRS_SHIFT)
#define NVME_CAP_SET_CSS(cap, val)    (cap |= (uint64_t)(val & CAP_CSS_MASK)   \
                                                          << CAP_CSS_SHIFT)
#define NVME_CAP_SET_MPSMIN(cap, val) (cap |= (uint64_t)(val & CAP_MPSMIN_MASK)\
                                                          << CAP_MPSMIN_SHIFT)
#define NVME_CAP_SET_MPSMAX(cap, val) (cap |= (uint64_t)(val & CAP_MPSMAX_MASK)\
                                                          << CAP_MPSMAX_SHIFT)
#define NVME_CAP_SET_PMRS(cap, val)   (cap |= (uint64_t)(val & CAP_PMR_MASK)   \
                                                          << CAP_PMR_SHIFT)

enum NvmeCcShift {
    CC_EN_SHIFT     = 0,
    CC_CSS_SHIFT    = 4,
    CC_MPS_SHIFT    = 7,
    CC_AMS_SHIFT    = 11,
    CC_SHN_SHIFT    = 14,
    CC_IOSQES_SHIFT = 16,
    CC_IOCQES_SHIFT = 20,
};

enum NvmeCcMask {
    CC_EN_MASK      = 0x1,
    CC_CSS_MASK     = 0x7,
    CC_MPS_MASK     = 0xf,
    CC_AMS_MASK     = 0x7,
    CC_SHN_MASK     = 0x3,
    CC_IOSQES_MASK  = 0xf,
    CC_IOCQES_MASK  = 0xf,
};

#define NVME_CC_EN(cc)     ((cc >> CC_EN_SHIFT)     & CC_EN_MASK)
#define NVME_CC_CSS(cc)    ((cc >> CC_CSS_SHIFT)    & CC_CSS_MASK)
#define NVME_CC_MPS(cc)    ((cc >> CC_MPS_SHIFT)    & CC_MPS_MASK)
#define NVME_CC_AMS(cc)    ((cc >> CC_AMS_SHIFT)    & CC_AMS_MASK)
#define NVME_CC_SHN(cc)    ((cc >> CC_SHN_SHIFT)    & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)
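/*
 * Illustrative sketch (not part of the original header): how a device model
 * might compose a CAP value with the SET macros above and read it back with
 * the corresponding accessors.  The field values are arbitrary examples, not
 * QEMU defaults.
 *
 *     uint64_t cap = 0;
 *
 *     NVME_CAP_SET_MQES(cap, 0x7ff);  // 2048 entries per queue (0's based)
 *     NVME_CAP_SET_CQR(cap, 1);       // queues must be physically contiguous
 *     NVME_CAP_SET_TO(cap, 0xf);      // ready timeout, in 500 ms units
 *     NVME_CAP_SET_CSS(cap, 1);       // NVM command set supported
 *     NVME_CAP_SET_MPSMAX(cap, 4);    // max page size = 2^(12 + 4) = 64 KiB
 *
 *     assert(NVME_CAP_MQES(cap) == 0x7ff);
 *     assert(NVME_CAP_MPSMAX(cap) == 4);
 */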
enum NvmeCstsShift {
    CSTS_RDY_SHIFT      = 0,
    CSTS_CFS_SHIFT      = 1,
    CSTS_SHST_SHIFT     = 2,
    CSTS_NSSRO_SHIFT    = 4,
};

enum NvmeCstsMask {
    CSTS_RDY_MASK   = 0x1,
    CSTS_CFS_MASK   = 0x1,
    CSTS_SHST_MASK  = 0x3,
    CSTS_NSSRO_MASK = 0x1,
};

enum NvmeCsts {
    NVME_CSTS_READY         = 1 << CSTS_RDY_SHIFT,
    NVME_CSTS_FAILED        = 1 << CSTS_CFS_SHIFT,
    NVME_CSTS_SHST_NORMAL   = 0 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_PROGRESS = 1 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_COMPLETE = 2 << CSTS_SHST_SHIFT,
    NVME_CSTS_NSSRO         = 1 << CSTS_NSSRO_SHIFT,
};

#define NVME_CSTS_RDY(csts)   ((csts >> CSTS_RDY_SHIFT)   & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts)   ((csts >> CSTS_CFS_SHIFT)   & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts)  ((csts >> CSTS_SHST_SHIFT)  & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)

enum NvmeAqaShift {
    AQA_ASQS_SHIFT  = 0,
    AQA_ACQS_SHIFT  = 16,
};

enum NvmeAqaMask {
    AQA_ASQS_MASK   = 0xfff,
    AQA_ACQS_MASK   = 0xfff,
};

#define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)

enum NvmeCmblocShift {
    CMBLOC_BIR_SHIFT  = 0,
    CMBLOC_OFST_SHIFT = 12,
};

enum NvmeCmblocMask {
    CMBLOC_BIR_MASK  = 0x7,
    CMBLOC_OFST_MASK = 0xfffff,
};

#define NVME_CMBLOC_BIR(cmbloc) ((cmbloc >> CMBLOC_BIR_SHIFT)  & \
                                 CMBLOC_BIR_MASK)
#define NVME_CMBLOC_OFST(cmbloc)((cmbloc >> CMBLOC_OFST_SHIFT) & \
                                 CMBLOC_OFST_MASK)

#define NVME_CMBLOC_SET_BIR(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)

enum NvmeCmbszShift {
    CMBSZ_SQS_SHIFT   = 0,
    CMBSZ_CQS_SHIFT   = 1,
    CMBSZ_LISTS_SHIFT = 2,
    CMBSZ_RDS_SHIFT   = 3,
    CMBSZ_WDS_SHIFT   = 4,
    CMBSZ_SZU_SHIFT   = 8,
    CMBSZ_SZ_SHIFT    = 12,
};

enum NvmeCmbszMask {
    CMBSZ_SQS_MASK   = 0x1,
    CMBSZ_CQS_MASK   = 0x1,
    CMBSZ_LISTS_MASK = 0x1,
    CMBSZ_RDS_MASK   = 0x1,
    CMBSZ_WDS_MASK   = 0x1,
    CMBSZ_SZU_MASK   = 0xf,
    CMBSZ_SZ_MASK    = 0xfffff,
};

#define NVME_CMBSZ_SQS(cmbsz)  ((cmbsz >> CMBSZ_SQS_SHIFT)   & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz)  ((cmbsz >> CMBSZ_CQS_SHIFT)   & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz)((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz)  ((cmbsz >> CMBSZ_RDS_SHIFT)   & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz)  ((cmbsz >> CMBSZ_WDS_SHIFT)   & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz)  ((cmbsz >> CMBSZ_SZU_SHIFT)   & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz)   ((cmbsz >> CMBSZ_SZ_SHIFT)    & CMBSZ_SZ_MASK)

#define NVME_CMBSZ_SET_SQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)

#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
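/*
 * Worked example (illustrative, not part of the original header): the CMB
 * size granularity is 2^(12 + 4 * SZU) bytes, so a CMBSZ value with SZU = 2
 * (1 MiB units) and SZ = 2 describes a 2 MiB controller memory buffer:
 *
 *     uint32_t cmbsz = 0;
 *
 *     NVME_CMBSZ_SET_SQS(cmbsz, 1);   // allow submission queues in the CMB
 *     NVME_CMBSZ_SET_SZU(cmbsz, 2);   // size unit: 1 << (12 + 4 * 2) = 1 MiB
 *     NVME_CMBSZ_SET_SZ(cmbsz, 2);    // two units
 *
 *     // NVME_CMBSZ_GETSIZE(cmbsz) == 2 * (1 << 20) == 2 MiB
 */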
enum NvmePmrcapShift {
    PMRCAP_RDS_SHIFT    = 3,
    PMRCAP_WDS_SHIFT    = 4,
    PMRCAP_BIR_SHIFT    = 5,
    PMRCAP_PMRTU_SHIFT  = 8,
    PMRCAP_PMRWBM_SHIFT = 10,
    PMRCAP_PMRTO_SHIFT  = 16,
    PMRCAP_CMSS_SHIFT   = 24,
};

enum NvmePmrcapMask {
    PMRCAP_RDS_MASK     = 0x1,
    PMRCAP_WDS_MASK     = 0x1,
    PMRCAP_BIR_MASK     = 0x7,
    PMRCAP_PMRTU_MASK   = 0x3,
    PMRCAP_PMRWBM_MASK  = 0xf,
    PMRCAP_PMRTO_MASK   = 0xff,
    PMRCAP_CMSS_MASK    = 0x1,
};

#define NVME_PMRCAP_RDS(pmrcap)    \
    ((pmrcap >> PMRCAP_RDS_SHIFT)    & PMRCAP_RDS_MASK)
#define NVME_PMRCAP_WDS(pmrcap)    \
    ((pmrcap >> PMRCAP_WDS_SHIFT)    & PMRCAP_WDS_MASK)
#define NVME_PMRCAP_BIR(pmrcap)    \
    ((pmrcap >> PMRCAP_BIR_SHIFT)    & PMRCAP_BIR_MASK)
#define NVME_PMRCAP_PMRTU(pmrcap)  \
    ((pmrcap >> PMRCAP_PMRTU_SHIFT)  & PMRCAP_PMRTU_MASK)
#define NVME_PMRCAP_PMRWBM(pmrcap) \
    ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
#define NVME_PMRCAP_PMRTO(pmrcap)  \
    ((pmrcap >> PMRCAP_PMRTO_SHIFT)  & PMRCAP_PMRTO_MASK)
#define NVME_PMRCAP_CMSS(pmrcap)   \
    ((pmrcap >> PMRCAP_CMSS_SHIFT)   & PMRCAP_CMSS_MASK)

#define NVME_PMRCAP_SET_RDS(pmrcap, val)    \
    (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK)    << PMRCAP_RDS_SHIFT)
#define NVME_PMRCAP_SET_WDS(pmrcap, val)    \
    (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK)    << PMRCAP_WDS_SHIFT)
#define NVME_PMRCAP_SET_BIR(pmrcap, val)    \
    (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK)    << PMRCAP_BIR_SHIFT)
#define NVME_PMRCAP_SET_PMRTU(pmrcap, val)  \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK)  << PMRCAP_PMRTU_SHIFT)
#define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
#define NVME_PMRCAP_SET_PMRTO(pmrcap, val)  \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK)  << PMRCAP_PMRTO_SHIFT)
#define NVME_PMRCAP_SET_CMSS(pmrcap, val)   \
    (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK)   << PMRCAP_CMSS_SHIFT)

enum NvmePmrctlShift {
    PMRCTL_EN_SHIFT = 0,
};

enum NvmePmrctlMask {
    PMRCTL_EN_MASK  = 0x1,
};

#define NVME_PMRCTL_EN(pmrctl)  ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)

#define NVME_PMRCTL_SET_EN(pmrctl, val) \
    (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)

enum NvmePmrstsShift {
    PMRSTS_ERR_SHIFT  = 0,
    PMRSTS_NRDY_SHIFT = 8,
    PMRSTS_HSTS_SHIFT = 9,
    PMRSTS_CBAI_SHIFT = 12,
};

enum NvmePmrstsMask {
    PMRSTS_ERR_MASK   = 0xff,
    PMRSTS_NRDY_MASK  = 0x1,
    PMRSTS_HSTS_MASK  = 0x7,
    PMRSTS_CBAI_MASK  = 0x1,
};

#define NVME_PMRSTS_ERR(pmrsts)  \
    ((pmrsts >> PMRSTS_ERR_SHIFT)  & PMRSTS_ERR_MASK)
#define NVME_PMRSTS_NRDY(pmrsts) \
    ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
#define NVME_PMRSTS_HSTS(pmrsts) \
    ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
#define NVME_PMRSTS_CBAI(pmrsts) \
    ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)

#define NVME_PMRSTS_SET_ERR(pmrsts, val)  \
    (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK)  << PMRSTS_ERR_SHIFT)
#define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
#define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
#define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)

enum NvmePmrebsShift {
    PMREBS_PMRSZU_SHIFT = 0,
    PMREBS_RBB_SHIFT    = 4,
    PMREBS_PMRWBZ_SHIFT = 8,
};

enum NvmePmrebsMask {
    PMREBS_PMRSZU_MASK  = 0xf,
    PMREBS_RBB_MASK     = 0x1,
    PMREBS_PMRWBZ_MASK  = 0xffffff,
};

#define NVME_PMREBS_PMRSZU(pmrebs) \
    ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
#define NVME_PMREBS_RBB(pmrebs)    \
    ((pmrebs >> PMREBS_RBB_SHIFT)    & PMREBS_RBB_MASK)
#define NVME_PMREBS_PMRWBZ(pmrebs) \
    ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)

#define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
#define NVME_PMREBS_SET_RBB(pmrebs, val)    \
    (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK)    << PMREBS_RBB_SHIFT)
#define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)

enum NvmePmrswtpShift {
    PMRSWTP_PMRSWTU_SHIFT = 0,
    PMRSWTP_PMRSWTV_SHIFT = 8,
};

enum NvmePmrswtpMask {
    PMRSWTP_PMRSWTU_MASK  = 0xf,
    PMRSWTP_PMRSWTV_MASK  = 0xffffff,
};

#define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
#define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)

#define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
#define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)

enum NvmePmrmscShift {
    PMRMSC_CMSE_SHIFT = 1,
    PMRMSC_CBA_SHIFT  = 12,
};

enum NvmePmrmscMask {
    PMRMSC_CMSE_MASK  = 0x1,
    PMRMSC_CBA_MASK   = 0xfffffffffffff,
};

#define NVME_PMRMSC_CMSE(pmrmsc) \
    ((pmrmsc >> PMRMSC_CMSE_SHIFT) & PMRMSC_CMSE_MASK)
#define NVME_PMRMSC_CBA(pmrmsc)  \
    ((pmrmsc >> PMRMSC_CBA_SHIFT)  & PMRMSC_CBA_MASK)

#define NVME_PMRMSC_SET_CMSE(pmrmsc, val) \
    (pmrmsc |= (uint64_t)(val & PMRMSC_CMSE_MASK) << PMRMSC_CMSE_SHIFT)
#define NVME_PMRMSC_SET_CBA(pmrmsc, val)  \
    (pmrmsc |= (uint64_t)(val & PMRMSC_CBA_MASK)  << PMRMSC_CBA_SHIFT)

enum NvmeSglDescriptorType {
    NVME_SGL_DESCR_TYPE_DATA_BLOCK       = 0x0,
    NVME_SGL_DESCR_TYPE_BIT_BUCKET       = 0x1,
    NVME_SGL_DESCR_TYPE_SEGMENT          = 0x2,
    NVME_SGL_DESCR_TYPE_LAST_SEGMENT     = 0x3,
    NVME_SGL_DESCR_TYPE_KEYED_DATA_BLOCK = 0x4,

    NVME_SGL_DESCR_TYPE_VENDOR_SPECIFIC  = 0xf,
};

enum NvmeSglDescriptorSubtype {
    NVME_SGL_DESCR_SUBTYPE_ADDRESS = 0x0,
};

typedef struct QEMU_PACKED NvmeSglDescriptor {
    uint64_t addr;
    uint32_t len;
    uint8_t  rsvd[3];
    uint8_t  type;
} NvmeSglDescriptor;

#define NVME_SGL_TYPE(type)     ((type >> 4) & 0xf)
#define NVME_SGL_SUBTYPE(type)  (type & 0xf)

typedef union NvmeCmdDptr {
    struct {
        uint64_t    prp1;
        uint64_t    prp2;
    };

    NvmeSglDescriptor sgl;
} NvmeCmdDptr;

enum NvmePsdt {
    PSDT_PRP                 = 0x0,
    PSDT_SGL_MPTR_CONTIGUOUS = 0x1,
    PSDT_SGL_MPTR_SGL        = 0x2,
};

typedef struct QEMU_PACKED NvmeCmd {
    uint8_t     opcode;
    uint8_t     flags;
    uint16_t    cid;
    uint32_t    nsid;
    uint64_t    res1;
    uint64_t    mptr;
    NvmeCmdDptr dptr;
    uint32_t    cdw10;
    uint32_t    cdw11;
    uint32_t    cdw12;
    uint32_t    cdw13;
    uint32_t    cdw14;
    uint32_t    cdw15;
} NvmeCmd;

#define NVME_CMD_FLAGS_FUSE(flags) (flags & 0x3)
#define NVME_CMD_FLAGS_PSDT(flags) ((flags >> 6) & 0x3)
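/*
 * Illustrative sketch (not part of the original header): the PSDT field in
 * NvmeCmd.flags selects how the data pointer union is interpreted, so a
 * command parser would typically branch on NVME_CMD_FLAGS_PSDT() before
 * touching dptr.  Assumes the command has already been copied out of guest
 * memory.
 *
 *     NvmeCmd cmd;
 *
 *     switch (NVME_CMD_FLAGS_PSDT(cmd.flags)) {
 *     case PSDT_PRP:
 *         // use cmd.dptr.prp1 / cmd.dptr.prp2
 *         break;
 *     case PSDT_SGL_MPTR_CONTIGUOUS:
 *     case PSDT_SGL_MPTR_SGL:
 *         // use cmd.dptr.sgl (an NvmeSglDescriptor)
 *         break;
 *     }
 */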
enum NvmeAdminCommands {
    NVME_ADM_CMD_DELETE_SQ      = 0x00,
    NVME_ADM_CMD_CREATE_SQ      = 0x01,
    NVME_ADM_CMD_GET_LOG_PAGE   = 0x02,
    NVME_ADM_CMD_DELETE_CQ      = 0x04,
    NVME_ADM_CMD_CREATE_CQ      = 0x05,
    NVME_ADM_CMD_IDENTIFY       = 0x06,
    NVME_ADM_CMD_ABORT          = 0x08,
    NVME_ADM_CMD_SET_FEATURES   = 0x09,
    NVME_ADM_CMD_GET_FEATURES   = 0x0a,
    NVME_ADM_CMD_ASYNC_EV_REQ   = 0x0c,
    NVME_ADM_CMD_ACTIVATE_FW    = 0x10,
    NVME_ADM_CMD_DOWNLOAD_FW    = 0x11,
    NVME_ADM_CMD_FORMAT_NVM     = 0x80,
    NVME_ADM_CMD_SECURITY_SEND  = 0x81,
    NVME_ADM_CMD_SECURITY_RECV  = 0x82,
};

enum NvmeIoCommands {
    NVME_CMD_FLUSH          = 0x00,
    NVME_CMD_WRITE          = 0x01,
    NVME_CMD_READ           = 0x02,
    NVME_CMD_WRITE_UNCOR    = 0x04,
    NVME_CMD_COMPARE        = 0x05,
    NVME_CMD_WRITE_ZEROES   = 0x08,
    NVME_CMD_DSM            = 0x09,
};

typedef struct QEMU_PACKED NvmeDeleteQ {
    uint8_t     opcode;
    uint8_t     flags;
    uint16_t    cid;
    uint32_t    rsvd1[9];
    uint16_t    qid;
    uint16_t    rsvd10;
    uint32_t    rsvd11[5];
} NvmeDeleteQ;

typedef struct QEMU_PACKED NvmeCreateCq {
    uint8_t     opcode;
    uint8_t     flags;
    uint16_t    cid;
    uint32_t    rsvd1[5];
    uint64_t    prp1;
    uint64_t    rsvd8;
    uint16_t    cqid;
    uint16_t    qsize;
    uint16_t    cq_flags;
    uint16_t    irq_vector;
    uint32_t    rsvd12[4];
} NvmeCreateCq;

#define NVME_CQ_FLAGS_PC(cq_flags)  (cq_flags & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) ((cq_flags >> 1) & 0x1)

typedef struct QEMU_PACKED NvmeCreateSq {
    uint8_t     opcode;
    uint8_t     flags;
    uint16_t    cid;
    uint32_t    rsvd1[5];
    uint64_t    prp1;
    uint64_t    rsvd8;
    uint16_t    sqid;
    uint16_t    qsize;
    uint16_t    sq_flags;
    uint16_t    cqid;
    uint32_t    rsvd12[4];
} NvmeCreateSq;

#define NVME_SQ_FLAGS_PC(sq_flags)      (sq_flags & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags)   ((sq_flags >> 1) & 0x3)

enum NvmeQueueFlags {
    NVME_Q_PC           = 1,
    NVME_Q_PRIO_URGENT  = 0,
    NVME_Q_PRIO_HIGH    = 1,
    NVME_Q_PRIO_NORMAL  = 2,
    NVME_Q_PRIO_LOW     = 3,
};

typedef struct QEMU_PACKED NvmeIdentify {
    uint8_t     opcode;
    uint8_t     flags;
    uint16_t    cid;
    uint32_t    nsid;
    uint64_t    rsvd2[2];
    uint64_t    prp1;
    uint64_t    prp2;
    uint32_t    cns;
    uint32_t    rsvd11[5];
} NvmeIdentify;

typedef struct QEMU_PACKED NvmeRwCmd {
    uint8_t     opcode;
    uint8_t     flags;
    uint16_t    cid;
    uint32_t    nsid;
    uint64_t    rsvd2;
    uint64_t    mptr;
    NvmeCmdDptr dptr;
    uint64_t    slba;
    uint16_t    nlb;
    uint16_t    control;
    uint32_t    dsmgmt;
    uint32_t    reftag;
    uint16_t    apptag;
    uint16_t    appmask;
} NvmeRwCmd;

enum {
    NVME_RW_LR                  = 1 << 15,
    NVME_RW_FUA                 = 1 << 14,
    NVME_RW_DSM_FREQ_UNSPEC     = 0,
    NVME_RW_DSM_FREQ_TYPICAL    = 1,
    NVME_RW_DSM_FREQ_RARE       = 2,
    NVME_RW_DSM_FREQ_READS      = 3,
    NVME_RW_DSM_FREQ_WRITES     = 4,
    NVME_RW_DSM_FREQ_RW         = 5,
    NVME_RW_DSM_FREQ_ONCE       = 6,
    NVME_RW_DSM_FREQ_PREFETCH   = 7,
    NVME_RW_DSM_FREQ_TEMP       = 8,
    NVME_RW_DSM_LATENCY_NONE    = 0 << 4,
    NVME_RW_DSM_LATENCY_IDLE    = 1 << 4,
    NVME_RW_DSM_LATENCY_NORM    = 2 << 4,
    NVME_RW_DSM_LATENCY_LOW     = 3 << 4,
    NVME_RW_DSM_SEQ_REQ         = 1 << 6,
    NVME_RW_DSM_COMPRESSED      = 1 << 7,
    NVME_RW_PRINFO_PRACT        = 1 << 13,
    NVME_RW_PRINFO_PRCHK_GUARD  = 1 << 12,
    NVME_RW_PRINFO_PRCHK_APP    = 1 << 11,
    NVME_RW_PRINFO_PRCHK_REF    = 1 << 10,
};
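/*
 * Illustrative sketch (not part of the original header): mapping an NvmeRwCmd
 * onto a byte range.  NLB is a 0's based block count, and the command fields
 * are little-endian on the wire, so a device model built on QEMU's bswap
 * helpers would do something like:
 *
 *     NvmeRwCmd *rw = ...;
 *     unsigned lba_shift = ...;                     // log2 of the LBA data size
 *
 *     uint64_t slba = le64_to_cpu(rw->slba);
 *     uint32_t nlb  = (uint32_t)le16_to_cpu(rw->nlb) + 1;
 *
 *     uint64_t offset = slba << lba_shift;
 *     uint64_t length = (uint64_t)nlb << lba_shift;
 */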
typedef struct QEMU_PACKED NvmeDsmCmd {
    uint8_t     opcode;
    uint8_t     flags;
    uint16_t    cid;
    uint32_t    nsid;
    uint64_t    rsvd2[2];
    NvmeCmdDptr dptr;
    uint32_t    nr;
    uint32_t    attributes;
    uint32_t    rsvd12[4];
} NvmeDsmCmd;

enum {
    NVME_DSMGMT_IDR = 1 << 0,
    NVME_DSMGMT_IDW = 1 << 1,
    NVME_DSMGMT_AD  = 1 << 2,
};

typedef struct QEMU_PACKED NvmeDsmRange {
    uint32_t    cattr;
    uint32_t    nlb;
    uint64_t    slba;
} NvmeDsmRange;

enum NvmeAsyncEventRequest {
    NVME_AER_TYPE_ERROR                   = 0,
    NVME_AER_TYPE_SMART                   = 1,
    NVME_AER_TYPE_IO_SPECIFIC             = 6,
    NVME_AER_TYPE_VENDOR_SPECIFIC         = 7,
    NVME_AER_INFO_ERR_INVALID_DB_REGISTER = 0,
    NVME_AER_INFO_ERR_INVALID_DB_VALUE    = 1,
    NVME_AER_INFO_ERR_DIAG_FAIL           = 2,
    NVME_AER_INFO_ERR_PERS_INTERNAL_ERR   = 3,
    NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR  = 4,
    NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR     = 5,
    NVME_AER_INFO_SMART_RELIABILITY       = 0,
    NVME_AER_INFO_SMART_TEMP_THRESH       = 1,
    NVME_AER_INFO_SMART_SPARE_THRESH      = 2,
};

typedef struct QEMU_PACKED NvmeAerResult {
    uint8_t event_type;
    uint8_t event_info;
    uint8_t log_page;
    uint8_t resv;
} NvmeAerResult;

typedef struct QEMU_PACKED NvmeCqe {
    uint32_t    result;
    uint32_t    rsvd;
    uint16_t    sq_head;
    uint16_t    sq_id;
    uint16_t    cid;
    uint16_t    status;
} NvmeCqe;

enum NvmeStatusCodes {
    NVME_SUCCESS                = 0x0000,
    NVME_INVALID_OPCODE         = 0x0001,
    NVME_INVALID_FIELD          = 0x0002,
    NVME_CID_CONFLICT           = 0x0003,
    NVME_DATA_TRAS_ERROR        = 0x0004,
    NVME_POWER_LOSS_ABORT       = 0x0005,
    NVME_INTERNAL_DEV_ERROR     = 0x0006,
    NVME_CMD_ABORT_REQ          = 0x0007,
    NVME_CMD_ABORT_SQ_DEL       = 0x0008,
    NVME_CMD_ABORT_FAILED_FUSE  = 0x0009,
    NVME_CMD_ABORT_MISSING_FUSE = 0x000a,
    NVME_INVALID_NSID           = 0x000b,
    NVME_CMD_SEQ_ERROR          = 0x000c,
    NVME_INVALID_SGL_SEG_DESCR  = 0x000d,
    NVME_INVALID_NUM_SGL_DESCRS = 0x000e,
    NVME_DATA_SGL_LEN_INVALID   = 0x000f,
    NVME_MD_SGL_LEN_INVALID     = 0x0010,
    NVME_SGL_DESCR_TYPE_INVALID = 0x0011,
    NVME_INVALID_USE_OF_CMB     = 0x0012,
    NVME_LBA_RANGE              = 0x0080,
    NVME_CAP_EXCEEDED           = 0x0081,
    NVME_NS_NOT_READY           = 0x0082,
    NVME_NS_RESV_CONFLICT       = 0x0083,
    NVME_INVALID_CQID           = 0x0100,
    NVME_INVALID_QID            = 0x0101,
    NVME_MAX_QSIZE_EXCEEDED     = 0x0102,
    NVME_ACL_EXCEEDED           = 0x0103,
    NVME_RESERVED               = 0x0104,
    NVME_AER_LIMIT_EXCEEDED     = 0x0105,
    NVME_INVALID_FW_SLOT        = 0x0106,
    NVME_INVALID_FW_IMAGE       = 0x0107,
    NVME_INVALID_IRQ_VECTOR     = 0x0108,
    NVME_INVALID_LOG_ID         = 0x0109,
    NVME_INVALID_FORMAT         = 0x010a,
    NVME_FW_REQ_RESET           = 0x010b,
    NVME_INVALID_QUEUE_DEL      = 0x010c,
    NVME_FID_NOT_SAVEABLE       = 0x010d,
    NVME_FEAT_NOT_CHANGEABLE    = 0x010e,
    NVME_FEAT_NOT_NS_SPEC       = 0x010f,
    NVME_FW_REQ_SUSYSTEM_RESET  = 0x0110,
    NVME_CONFLICTING_ATTRS      = 0x0180,
    NVME_INVALID_PROT_INFO      = 0x0181,
    NVME_WRITE_TO_RO            = 0x0182,
    NVME_WRITE_FAULT            = 0x0280,
    NVME_UNRECOVERED_READ       = 0x0281,
    NVME_E2E_GUARD_ERROR        = 0x0282,
    NVME_E2E_APP_ERROR          = 0x0283,
    NVME_E2E_REF_ERROR          = 0x0284,
    NVME_CMP_FAILURE            = 0x0285,
    NVME_ACCESS_DENIED          = 0x0286,
    NVME_MORE                   = 0x2000,
    NVME_DNR                    = 0x4000,
    NVME_NO_COMPLETE            = 0xffff,
};
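/*
 * Illustrative sketch (not part of the original header): NVME_MORE and
 * NVME_DNR are modifier bits rather than status codes, so a command handler
 * typically ORs them into the base code when building a completion status,
 * e.g.
 *
 *     uint16_t status = NVME_INVALID_FIELD | NVME_DNR;   // do not retry
 *
 * NVME_NO_COMPLETE (0xffff) is not a spec-defined status; it is used as an
 * internal sentinel meaning that no completion entry should be posted yet.
 */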
typedef struct QEMU_PACKED NvmeFwSlotInfoLog {
    uint8_t     afi;
    uint8_t     reserved1[7];
    uint8_t     frs1[8];
    uint8_t     frs2[8];
    uint8_t     frs3[8];
    uint8_t     frs4[8];
    uint8_t     frs5[8];
    uint8_t     frs6[8];
    uint8_t     frs7[8];
    uint8_t     reserved2[448];
} NvmeFwSlotInfoLog;

typedef struct QEMU_PACKED NvmeErrorLog {
    uint64_t    error_count;
    uint16_t    sqid;
    uint16_t    cid;
    uint16_t    status_field;
    uint16_t    param_error_location;
    uint64_t    lba;
    uint32_t    nsid;
    uint8_t     vs;
    uint8_t     resv[35];
} NvmeErrorLog;

typedef struct QEMU_PACKED NvmeSmartLog {
    uint8_t     critical_warning;
    uint16_t    temperature;
    uint8_t     available_spare;
    uint8_t     available_spare_threshold;
    uint8_t     percentage_used;
    uint8_t     reserved1[26];
    uint64_t    data_units_read[2];
    uint64_t    data_units_written[2];
    uint64_t    host_read_commands[2];
    uint64_t    host_write_commands[2];
    uint64_t    controller_busy_time[2];
    uint64_t    power_cycles[2];
    uint64_t    power_on_hours[2];
    uint64_t    unsafe_shutdowns[2];
    uint64_t    media_errors[2];
    uint64_t    number_of_error_log_entries[2];
    uint8_t     reserved2[320];
} NvmeSmartLog;

enum NvmeSmartWarn {
    NVME_SMART_SPARE                 = 1 << 0,
    NVME_SMART_TEMPERATURE           = 1 << 1,
    NVME_SMART_RELIABILITY           = 1 << 2,
    NVME_SMART_MEDIA_READ_ONLY       = 1 << 3,
    NVME_SMART_FAILED_VOLATILE_MEDIA = 1 << 4,
};

enum NvmeLogIdentifier {
    NVME_LOG_ERROR_INFO   = 0x01,
    NVME_LOG_SMART_INFO   = 0x02,
    NVME_LOG_FW_SLOT_INFO = 0x03,
};

typedef struct QEMU_PACKED NvmePSD {
    uint16_t    mp;
    uint16_t    reserved;
    uint32_t    enlat;
    uint32_t    exlat;
    uint8_t     rrt;
    uint8_t     rrl;
    uint8_t     rwt;
    uint8_t     rwl;
    uint8_t     resv[16];
} NvmePSD;

#define NVME_IDENTIFY_DATA_SIZE 4096

enum {
    NVME_ID_CNS_NS             = 0x0,
    NVME_ID_CNS_CTRL           = 0x1,
    NVME_ID_CNS_NS_ACTIVE_LIST = 0x2,
    NVME_ID_CNS_NS_DESCR_LIST  = 0x3,
};

typedef struct QEMU_PACKED NvmeIdCtrl {
    uint16_t    vid;
    uint16_t    ssvid;
    uint8_t     sn[20];
    uint8_t     mn[40];
    uint8_t     fr[8];
    uint8_t     rab;
    uint8_t     ieee[3];
    uint8_t     cmic;
    uint8_t     mdts;
    uint16_t    cntlid;
    uint32_t    ver;
    uint32_t    rtd3r;
    uint32_t    rtd3e;
    uint32_t    oaes;
    uint32_t    ctratt;
    uint8_t     rsvd100[12];
    uint8_t     fguid[16];
    uint8_t     rsvd128[128];
    uint16_t    oacs;
    uint8_t     acl;
    uint8_t     aerl;
    uint8_t     frmw;
    uint8_t     lpa;
    uint8_t     elpe;
    uint8_t     npss;
    uint8_t     avscc;
    uint8_t     apsta;
    uint16_t    wctemp;
    uint16_t    cctemp;
    uint16_t    mtfa;
    uint32_t    hmpre;
    uint32_t    hmmin;
    uint8_t     tnvmcap[16];
    uint8_t     unvmcap[16];
    uint32_t    rpmbs;
    uint16_t    edstt;
    uint8_t     dsto;
    uint8_t     fwug;
    uint16_t    kas;
    uint16_t    hctma;
    uint16_t    mntmt;
    uint16_t    mxtmt;
    uint32_t    sanicap;
    uint8_t     rsvd332[180];
    uint8_t     sqes;
    uint8_t     cqes;
    uint16_t    maxcmd;
    uint32_t    nn;
    uint16_t    oncs;
    uint16_t    fuses;
    uint8_t     fna;
    uint8_t     vwc;
    uint16_t    awun;
    uint16_t    awupf;
    uint8_t     nvscc;
    uint8_t     rsvd531;
    uint16_t    acwu;
    uint8_t     rsvd534[2];
    uint32_t    sgls;
    uint8_t     rsvd540[228];
    uint8_t     subnqn[256];
    uint8_t     rsvd1024[1024];
    NvmePSD     psd[32];
    uint8_t     vs[1024];
} NvmeIdCtrl;

enum NvmeIdCtrlOacs {
    NVME_OACS_SECURITY  = 1 << 0,
    NVME_OACS_FORMAT    = 1 << 1,
    NVME_OACS_FW        = 1 << 2,
};

enum NvmeIdCtrlOncs {
    NVME_ONCS_COMPARE       = 1 << 0,
    NVME_ONCS_WRITE_UNCORR  = 1 << 1,
    NVME_ONCS_DSM           = 1 << 2,
    NVME_ONCS_WRITE_ZEROES  = 1 << 3,
    NVME_ONCS_FEATURES      = 1 << 4,
    NVME_ONCS_RESRVATIONS   = 1 << 5,
    NVME_ONCS_TIMESTAMP     = 1 << 6,
};
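/*
 * Illustrative sketch (not part of the original header): multi-byte Identify
 * Controller fields are little-endian, so a host-side user of this structure
 * would typically check optional command support along these lines
 * (le16_to_cpu() is QEMU's byte-swap helper; substitute the equivalent in
 * other codebases):
 *
 *     NvmeIdCtrl *id = ...;
 *
 *     if (le16_to_cpu(id->oncs) & NVME_ONCS_WRITE_ZEROES) {
 *         // Write Zeroes may be issued to this controller
 *     }
 */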
enum NvmeIdCtrlFrmw {
    NVME_FRMW_SLOT1_RO = 1 << 0,
};

enum NvmeIdCtrlLpa {
    NVME_LPA_EXTENDED = 1 << 2,
};

#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)

#define NVME_CTRL_SGLS_SUPPORT_MASK        (0x3 <<  0)
#define NVME_CTRL_SGLS_SUPPORT_NO_ALIGN    (0x1 <<  0)
#define NVME_CTRL_SGLS_SUPPORT_DWORD_ALIGN (0x1 <<  1)
#define NVME_CTRL_SGLS_KEYED               (0x1 <<  2)
#define NVME_CTRL_SGLS_BITBUCKET           (0x1 << 16)
#define NVME_CTRL_SGLS_MPTR_CONTIGUOUS     (0x1 << 17)
#define NVME_CTRL_SGLS_EXCESS_LENGTH       (0x1 << 18)
#define NVME_CTRL_SGLS_MPTR_SGL            (0x1 << 19)
#define NVME_CTRL_SGLS_ADDR_OFFSET         (0x1 << 20)

#define NVME_ARB_AB(arb)    (arb & 0x7)
#define NVME_ARB_AB_NOLIMIT 0x7
#define NVME_ARB_LPW(arb)   ((arb >> 8) & 0xff)
#define NVME_ARB_MPW(arb)   ((arb >> 16) & 0xff)
#define NVME_ARB_HPW(arb)   ((arb >> 24) & 0xff)

#define NVME_INTC_THR(intc)     (intc & 0xff)
#define NVME_INTC_TIME(intc)    ((intc >> 8) & 0xff)

#define NVME_INTVC_NOCOALESCING (0x1 << 16)

#define NVME_TEMP_THSEL(temp)  ((temp >> 20) & 0x3)
#define NVME_TEMP_THSEL_OVER   0x0
#define NVME_TEMP_THSEL_UNDER  0x1

#define NVME_TEMP_TMPSEL(temp)     ((temp >> 16) & 0xf)
#define NVME_TEMP_TMPSEL_COMPOSITE 0x0

#define NVME_TEMP_TMPTH(temp) (temp & 0xffff)

#define NVME_AEC_SMART(aec)         (aec & 0xff)
#define NVME_AEC_NS_ATTR(aec)       ((aec >> 8) & 0x1)
#define NVME_AEC_FW_ACTIVATION(aec) ((aec >> 9) & 0x1)

enum NvmeFeatureIds {
    NVME_ARBITRATION              = 0x1,
    NVME_POWER_MANAGEMENT         = 0x2,
    NVME_LBA_RANGE_TYPE           = 0x3,
    NVME_TEMPERATURE_THRESHOLD    = 0x4,
    NVME_ERROR_RECOVERY           = 0x5,
    NVME_VOLATILE_WRITE_CACHE     = 0x6,
    NVME_NUMBER_OF_QUEUES         = 0x7,
    NVME_INTERRUPT_COALESCING     = 0x8,
    NVME_INTERRUPT_VECTOR_CONF    = 0x9,
    NVME_WRITE_ATOMICITY          = 0xa,
    NVME_ASYNCHRONOUS_EVENT_CONF  = 0xb,
    NVME_TIMESTAMP                = 0xe,
    NVME_SOFTWARE_PROGRESS_MARKER = 0x80,
    NVME_FID_MAX                  = 0x100,
};

typedef enum NvmeFeatureCap {
    NVME_FEAT_CAP_SAVE   = 1 << 0,
    NVME_FEAT_CAP_NS     = 1 << 1,
    NVME_FEAT_CAP_CHANGE = 1 << 2,
} NvmeFeatureCap;

typedef enum NvmeGetFeatureSelect {
    NVME_GETFEAT_SELECT_CURRENT = 0x0,
    NVME_GETFEAT_SELECT_DEFAULT = 0x1,
    NVME_GETFEAT_SELECT_SAVED   = 0x2,
    NVME_GETFEAT_SELECT_CAP     = 0x3,
} NvmeGetFeatureSelect;

#define NVME_GETSETFEAT_FID_MASK 0xff
#define NVME_GETSETFEAT_FID(dw10) (dw10 & NVME_GETSETFEAT_FID_MASK)

#define NVME_GETFEAT_SELECT_SHIFT 8
#define NVME_GETFEAT_SELECT_MASK  0x7
#define NVME_GETFEAT_SELECT(dw10) \
    ((dw10 >> NVME_GETFEAT_SELECT_SHIFT) & NVME_GETFEAT_SELECT_MASK)

#define NVME_SETFEAT_SAVE_SHIFT 31
#define NVME_SETFEAT_SAVE_MASK  0x1
#define NVME_SETFEAT_SAVE(dw10) \
    ((dw10 >> NVME_SETFEAT_SAVE_SHIFT) & NVME_SETFEAT_SAVE_MASK)
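/*
 * Illustrative sketch (not part of the original header): decoding CDW10 of a
 * Get/Set Features command with the macros above.  CDW10 is little-endian on
 * the wire, so convert it to host order first (le32_to_cpu() is QEMU's
 * helper):
 *
 *     uint32_t dw10 = le32_to_cpu(cmd->cdw10);
 *
 *     uint8_t fid  = NVME_GETSETFEAT_FID(dw10);   // e.g. NVME_VOLATILE_WRITE_CACHE
 *     uint8_t sel  = NVME_GETFEAT_SELECT(dw10);   // e.g. NVME_GETFEAT_SELECT_CURRENT
 *     bool    save = NVME_SETFEAT_SAVE(dw10);     // Set Features "save" bit
 */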
typedef struct QEMU_PACKED NvmeRangeType {
    uint8_t     type;
    uint8_t     attributes;
    uint8_t     rsvd2[14];
    uint64_t    slba;
    uint64_t    nlb;
    uint8_t     guid[16];
    uint8_t     rsvd48[16];
} NvmeRangeType;

typedef struct QEMU_PACKED NvmeLBAF {
    uint16_t    ms;
    uint8_t     ds;
    uint8_t     rp;
} NvmeLBAF;

#define NVME_NSID_BROADCAST 0xffffffff

typedef struct QEMU_PACKED NvmeIdNs {
    uint64_t    nsze;
    uint64_t    ncap;
    uint64_t    nuse;
    uint8_t     nsfeat;
    uint8_t     nlbaf;
    uint8_t     flbas;
    uint8_t     mc;
    uint8_t     dpc;
    uint8_t     dps;
    uint8_t     nmic;
    uint8_t     rescap;
    uint8_t     fpi;
    uint8_t     dlfeat;
    uint16_t    nawun;
    uint16_t    nawupf;
    uint16_t    nacwu;
    uint16_t    nabsn;
    uint16_t    nabo;
    uint16_t    nabspf;
    uint16_t    noiob;
    uint8_t     nvmcap[16];
    uint8_t     rsvd64[40];
    uint8_t     nguid[16];
    uint64_t    eui64;
    NvmeLBAF    lbaf[16];
    uint8_t     rsvd192[192];
    uint8_t     vs[3712];
} NvmeIdNs;

typedef struct QEMU_PACKED NvmeIdNsDescr {
    uint8_t nidt;
    uint8_t nidl;
    uint8_t rsvd2[2];
} NvmeIdNsDescr;

enum {
    NVME_NIDT_EUI64_LEN =  8,
    NVME_NIDT_NGUID_LEN = 16,
    NVME_NIDT_UUID_LEN  = 16,
};

enum NvmeNsIdentifierType {
    NVME_NIDT_EUI64 = 0x1,
    NVME_NIDT_NGUID = 0x2,
    NVME_NIDT_UUID  = 0x3,
};

/* Deallocate Logical Block Features */
#define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat)    ((dlfeat) & 0x10)
#define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)

#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat)   ((dlfeat) & 0x7)
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES    1
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES      2

#define NVME_ID_NS_NSFEAT_THIN(nsfeat)      ((nsfeat & 0x1))
#define NVME_ID_NS_FLBAS_EXTENDED(flbas)    ((flbas >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas)       ((flbas & 0xf))
#define NVME_ID_NS_MC_SEPARATE(mc)          ((mc >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc)          ((mc & 0x1))
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc)      ((dpc >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc)     ((dpc >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc)          ((dpc >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc)          ((dpc >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc)          ((dpc & 0x1))
#define NVME_ID_NS_DPC_TYPE_MASK            0x7

enum NvmeIdNsDps {
    DPS_TYPE_NONE   = 0,
    DPS_TYPE_1      = 1,
    DPS_TYPE_2      = 2,
    DPS_TYPE_3      = 3,
    DPS_TYPE_MASK   = 0x7,
    DPS_FIRST_EIGHT = 8,
};

static inline void _nvme_check_size(void)
{
    QEMU_BUILD_BUG_ON(sizeof(NvmeBar) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCqe) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSglDescriptor) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsDescr) != 4);
}
#endif