#!/usr/bin/env python3
#
# Migration Stream Analyzer
#
# Copyright (c) 2015 Alexander Graf <agraf@suse.de>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see <http://www.gnu.org/licenses/>.

import json
import os
import argparse
import collections
import struct
import sys


def mkdir_p(path):
    try:
        os.makedirs(path)
    except OSError:
        pass


class MigrationFile(object):
    def __init__(self, filename):
        self.filename = filename
        self.file = open(self.filename, "rb")

    def read64(self):
        return int.from_bytes(self.file.read(8), byteorder='big', signed=False)

    def read32(self):
        return int.from_bytes(self.file.read(4), byteorder='big', signed=False)

    def read16(self):
        return int.from_bytes(self.file.read(2), byteorder='big', signed=False)

    def read8(self):
        return int.from_bytes(self.file.read(1), byteorder='big', signed=True)

    def readstr(self, len = None):
        return self.readvar(len).decode('utf-8')

    def readvar(self, size = None):
        if size is None:
            size = self.read8()
        if size == 0:
            # return bytes, so callers can decode() consistently
            return b""
        value = self.file.read(size)
        if len(value) != size:
            raise Exception("Unexpected end of %s at 0x%x" % (self.filename, self.file.tell()))
        return value

    def tell(self):
        return self.file.tell()

    def seek(self, a, b):
        return self.file.seek(a, b)

    # The VMSD description is at the end of the file, after EOF. Look for
    # the last NULL byte, then for the beginning brace of JSON.
    def read_migration_debug_json(self):
        QEMU_VM_VMDESCRIPTION = 0x06

        # Remember the offset in the file when we started
        entrypos = self.file.tell()

        # Read the last 10MB
        self.file.seek(0, os.SEEK_END)
        endpos = self.file.tell()
        self.file.seek(max(-endpos, -10 * 1024 * 1024), os.SEEK_END)
        datapos = self.file.tell()
        data = self.file.read()
        # The full file read closed the file as well, reopen it
        self.file = open(self.filename, "rb")

        # Find the last NULL byte, then the first brace after that. This should
        # be the beginning of our JSON data.
        nulpos = data.rfind(b'\0')
        jsonpos = data.find(b'{', nulpos)

        # Check backwards from there and see whether we guessed right
        self.file.seek(datapos + jsonpos - 5, 0)
        if self.read8() != QEMU_VM_VMDESCRIPTION:
            raise Exception("No Debug Migration device found")

        jsonlen = self.read32()

        # Seek back to where we were at the beginning
        self.file.seek(entrypos, 0)

        # explicit decode() needed for Python 3.5 compatibility
        return data[jsonpos:jsonpos + jsonlen].decode("utf-8")

    def close(self):
        self.file.close()

class RamSection(object):
    RAM_SAVE_FLAG_COMPRESS = 0x02
    RAM_SAVE_FLAG_MEM_SIZE = 0x04
    RAM_SAVE_FLAG_PAGE     = 0x08
    RAM_SAVE_FLAG_EOS      = 0x10
    RAM_SAVE_FLAG_CONTINUE = 0x20
    RAM_SAVE_FLAG_XBZRLE   = 0x40
    RAM_SAVE_FLAG_HOOK     = 0x80
    RAM_SAVE_FLAG_COMPRESS_PAGE = 0x100
    RAM_SAVE_FLAG_MULTIFD_FLUSH = 0x200

    def __init__(self, file, version_id, ramargs, section_key):
        if version_id != 4:
            raise Exception("Unknown RAM version %d" % version_id)

        self.file = file
        self.section_key = section_key
        self.TARGET_PAGE_SIZE = ramargs['page_size']
        self.dump_memory = ramargs['dump_memory']
        self.write_memory = ramargs['write_memory']
        self.ignore_shared = ramargs['ignore_shared']
        self.sizeinfo = collections.OrderedDict()
        self.data = collections.OrderedDict()
        self.data['section sizes'] = self.sizeinfo
        self.name = ''
        if self.write_memory:
            self.files = { }
        if self.dump_memory:
            self.memory = collections.OrderedDict()
            self.data['memory'] = self.memory

    def __repr__(self):
        return self.data.__repr__()

    def __str__(self):
        return self.data.__str__()

    def getDict(self):
        return self.data

    def read(self):
        # Read all RAM sections
        while True:
            addr = self.file.read64()
            flags = addr & (self.TARGET_PAGE_SIZE - 1)
            addr &= ~(self.TARGET_PAGE_SIZE - 1)

            if flags & self.RAM_SAVE_FLAG_MEM_SIZE:
                total_length = addr
                while total_length > 0:
                    namelen = self.file.read8()
                    self.name = self.file.readstr(len = namelen)
                    len = self.file.read64()
                    total_length -= len
                    self.sizeinfo[self.name] = '0x%016x' % len
                    if self.write_memory:
                        print(self.name)
                        mkdir_p('./' + os.path.dirname(self.name))
                        f = open('./' + self.name, "wb")
                        f.truncate(0)
                        f.truncate(len)
                        self.files[self.name] = f
                    if self.ignore_shared:
                        mr_addr = self.file.read64()
                flags &= ~self.RAM_SAVE_FLAG_MEM_SIZE

            if flags & self.RAM_SAVE_FLAG_COMPRESS:
                if flags & self.RAM_SAVE_FLAG_CONTINUE:
                    flags &= ~self.RAM_SAVE_FLAG_CONTINUE
                else:
                    self.name = self.file.readstr()
                fill_char = self.file.read8()
                # The page in question is filled with fill_char now
                if self.write_memory and fill_char != 0:
                    self.files[self.name].seek(addr, os.SEEK_SET)
                    # the file is opened in binary mode, so write bytes
                    # (mask the signed read8() value back to a byte)
                    self.files[self.name].write(bytes([fill_char & 0xff]) * self.TARGET_PAGE_SIZE)
                if self.dump_memory:
                    self.memory['%s (0x%016x)' % (self.name, addr)] = 'Filled with 0x%02x' % fill_char
                flags &= ~self.RAM_SAVE_FLAG_COMPRESS
            elif flags & self.RAM_SAVE_FLAG_PAGE:
                if flags & self.RAM_SAVE_FLAG_CONTINUE:
                    flags &= ~self.RAM_SAVE_FLAG_CONTINUE
                else:
                    self.name = self.file.readstr()

                if self.write_memory or self.dump_memory:
                    data = self.file.readvar(size = self.TARGET_PAGE_SIZE)
                else: # Just skip RAM data
                    self.file.file.seek(self.TARGET_PAGE_SIZE, 1)

                if self.write_memory:
                    self.files[self.name].seek(addr, os.SEEK_SET)
                    self.files[self.name].write(data)
                if self.dump_memory:
                    # data is bytes; iterating yields ints in Python 3
                    hexdata = " ".join("{0:02x}".format(c) for c in data)
                    self.memory['%s (0x%016x)' % (self.name, addr)] = hexdata

                flags &= ~self.RAM_SAVE_FLAG_PAGE
            elif flags & self.RAM_SAVE_FLAG_XBZRLE:
                raise Exception("XBZRLE RAM compression is not supported yet")
            elif flags & self.RAM_SAVE_FLAG_HOOK:
                raise Exception("RAM hooks don't make sense with files")
            if flags & self.RAM_SAVE_FLAG_MULTIFD_FLUSH:
                continue

            # End of RAM section
            if flags & self.RAM_SAVE_FLAG_EOS:
                break

            if flags != 0:
                raise Exception("Unknown RAM flags: %x" % flags)

    def __del__(self):
        if self.write_memory:
            for key in self.files:
                self.files[key].close()


class HTABSection(object):
    HASH_PTE_SIZE_64 = 16

    def __init__(self, file, version_id, device, section_key):
        if version_id != 1:
            raise Exception("Unknown HTAB version %d" % version_id)

        self.file = file
        self.section_key = section_key

    def read(self):

        header = self.file.read32()

        if (header == 0xffffffff):
            # "no HPT" encoding (-1 written as an unsigned 32-bit value)
            return

        if (header > 0):
            # First section, just the hash shift
            return

        # Read until end marker
        while True:
            index = self.file.read32()
            n_valid = self.file.read16()
            n_invalid = self.file.read16()

            if index == 0 and n_valid == 0 and n_invalid == 0:
                break

            self.file.readvar(n_valid * self.HASH_PTE_SIZE_64)

    def getDict(self):
        return ""


class S390StorageAttributes(object):
    STATTR_FLAG_EOS = 0x01
    STATTR_FLAG_MORE = 0x02
    STATTR_FLAG_ERROR = 0x04
    STATTR_FLAG_DONE = 0x08

    def __init__(self, file, version_id, device, section_key):
        if version_id != 0:
            raise Exception("Unknown storage_attributes version %d" % version_id)

        self.file = file
        self.section_key = section_key

    def read(self):
        pos = 0
        while True:
            addr_flags = self.file.read64()
            flags = addr_flags & 0xfff

            if flags & self.STATTR_FLAG_DONE:
                pos = self.file.tell()
                continue
            elif flags & self.STATTR_FLAG_EOS:
                return
            else:
                # No EOS came after DONE, that's OK, but rewind the
                # stream because this is not our data.
                if pos:
                    self.file.seek(pos, os.SEEK_SET)
                    return
                raise Exception("Unknown flags %x" % flags)

            if (flags & self.STATTR_FLAG_ERROR):
                raise Exception("Error in migration stream")
            count = self.file.read64()
            self.file.readvar(count)

    def getDict(self):
        return ""


class ConfigurationSection(object):
    def __init__(self, file, desc):
        self.file = file
        self.desc = desc
        self.caps = []

    def parse_capabilities(self, vmsd_caps):
        if not vmsd_caps:
            return

        ncaps = vmsd_caps.data['caps_count'].data
        self.caps = vmsd_caps.data['capabilities']

        if type(self.caps) != list:
            self.caps = [self.caps]

        if len(self.caps) != ncaps:
            raise Exception("Number of capabilities doesn't match "
                            "caps_count field")

    def has_capability(self, cap):
        return any([str(c) == cap for c in self.caps])

    def read(self):
        if self.desc:
            version_id = self.desc['version']
            section = VMSDSection(self.file, version_id, self.desc,
                                  'configuration')
            section.read()
            self.parse_capabilities(
                section.data.get("configuration/capabilities"))
        else:
            # backward compatibility for older streams that don't have
            # the configuration section in the json
            name_len = self.file.read32()
            name = self.file.readstr(len = name_len)

class VMSDFieldGeneric(object):
    def __init__(self, desc, file):
        self.file = file
        self.desc = desc
        self.data = ""

    def __repr__(self):
        return str(self.__str__())

    def __str__(self):
        return " ".join("{0:02x}".format(c) for c in self.data)

    def getDict(self):
        return self.__str__()

    def read(self):
        size = int(self.desc['size'])
        self.data = self.file.readvar(size)
        return self.data

class VMSDFieldCap(object):
    def __init__(self, desc, file):
        self.file = file
        self.desc = desc
        self.data = ""

    def __repr__(self):
        return self.data

    def __str__(self):
        return self.data

    def read(self):
        len = self.file.read8()
        self.data = self.file.readstr(len)


class VMSDFieldInt(VMSDFieldGeneric):
    def __init__(self, desc, file):
        super(VMSDFieldInt, self).__init__(desc, file)
        self.size = int(desc['size'])
        self.format = '0x%%0%dx' % (self.size * 2)
        self.sdtype = '>i%d' % self.size
        self.udtype = '>u%d' % self.size

    def __repr__(self):
        if self.data < 0:
            return ('%s (%d)' % ((self.format % self.udata), self.data))
        else:
            return self.format % self.data

    def __str__(self):
        return self.__repr__()

    def getDict(self):
        return self.__str__()

    def read(self):
        super(VMSDFieldInt, self).read()
        self.sdata = int.from_bytes(self.data, byteorder='big', signed=True)
        self.udata = int.from_bytes(self.data, byteorder='big', signed=False)
        self.data = self.sdata
        return self.data

class VMSDFieldUInt(VMSDFieldInt):
    def __init__(self, desc, file):
        super(VMSDFieldUInt, self).__init__(desc, file)

    def read(self):
        super(VMSDFieldUInt, self).read()
        self.data = self.udata
        return self.data

class VMSDFieldIntLE(VMSDFieldInt):
    def __init__(self, desc, file):
        super(VMSDFieldIntLE, self).__init__(desc, file)
        self.dtype = '<i%d' % self.size

class VMSDFieldNull(VMSDFieldGeneric):
    NULL_PTR_MARKER = b'0'

    def __init__(self, desc, file):
        super(VMSDFieldNull, self).__init__(desc, file)

    def __repr__(self):
        # A NULL pointer is encoded in the stream as a '0' to
        # disambiguate from a mere 0x0 value and avoid consumers
        # trying to follow the NULL pointer. Displaying '0', 0x30 or
        # 0x0 when analyzing the JSON debug stream could become
        # confusing, so use an explicit term instead.
        return "nullptr"

    def __str__(self):
        return self.__repr__()

    def read(self):
        super(VMSDFieldNull, self).read()
        assert(self.data == self.NULL_PTR_MARKER)
        return self.data

class VMSDFieldBool(VMSDFieldGeneric):
    def __init__(self, desc, file):
        super(VMSDFieldBool, self).__init__(desc, file)

    def __repr__(self):
        return self.data.__repr__()

    def __str__(self):
        return self.data.__str__()

    def getDict(self):
        return self.data

    def read(self):
        super(VMSDFieldBool, self).read()
        if self.data[0] == 0:
            self.data = False
        else:
            self.data = True
        return self.data

class VMSDFieldStruct(VMSDFieldGeneric):
    QEMU_VM_SUBSECTION = 0x05

    def __init__(self, desc, file):
        super(VMSDFieldStruct, self).__init__(desc, file)
        self.data = collections.OrderedDict()

        if 'fields' not in self.desc['struct']:
            raise Exception("No fields in struct. VMSD:\n%s" % self.desc)

        # When we see compressed array elements, unfold them here
        new_fields = []
        for field in self.desc['struct']['fields']:
            if not 'array_len' in field:
                new_fields.append(field)
                continue
            array_len = field.pop('array_len')
            field['index'] = 0
            new_fields.append(field)
            for i in range(1, array_len):
                c = field.copy()
                c['index'] = i
                new_fields.append(c)

        self.desc['struct']['fields'] = new_fields

    def __repr__(self):
        return self.data.__repr__()

    def __str__(self):
        return self.data.__str__()

    def read(self):
        for field in self.desc['struct']['fields']:
            try:
                reader = vmsd_field_readers[field['type']]
            except:
                reader = VMSDFieldGeneric

            field['data'] = reader(field, self.file)
            field['data'].read()

            fname = field['name']
            fdata = field['data']

            # The field could be:
            # i) a single data entry, e.g. uint64
            # ii) an array, indicated by it containing the 'index' key
            #
            # However, the overall data after parsing the whole
            # stream, could be a mix of arrays and single data fields,
            # all sharing the same field name due to how QEMU breaks
            # up arrays with NULL pointers into multiple compressed
            # array segments.
            if fname not in self.data:
                self.data[fname] = fdata
            elif type(self.data[fname]) == list:
                self.data[fname].append(fdata)
            else:
                tmp = self.data[fname]
                self.data[fname] = [tmp, fdata]

        if 'subsections' in self.desc['struct']:
            for subsection in self.desc['struct']['subsections']:
                if self.file.read8() != self.QEMU_VM_SUBSECTION:
                    raise Exception("Subsection %s not found at offset %x" % (
                        subsection['vmsd_name'], self.file.tell()))
                name = self.file.readstr()
                version_id = self.file.read32()

                if not subsection:
                    raise Exception("Empty description for subsection: %s" % name)

                self.data[name] = VMSDSection(self.file, version_id, subsection, (name, 0))
                self.data[name].read()

    def getDictItem(self, value):
        # Strings would fall into the array category, treat
        # them specially
        if value.__class__ is ''.__class__:
            return value

        try:
            return self.getDictOrderedDict(value)
        except:
            try:
                return self.getDictArray(value)
            except:
                try:
                    return value.getDict()
                except:
                    return value

    def getDictArray(self, array):
        r = []
        for value in array:
            r.append(self.getDictItem(value))
        return r

    def getDictOrderedDict(self, dict):
        r = collections.OrderedDict()
        for (key, value) in dict.items():
            r[key] = self.getDictItem(value)
        return r

    def getDict(self):
        return self.getDictOrderedDict(self.data)

vmsd_field_readers = {
    "bool" : VMSDFieldBool,
    "int8" : VMSDFieldInt,
    "int16" : VMSDFieldInt,
    "int32" : VMSDFieldInt,
    "int32 equal" : VMSDFieldInt,
    "int32 le" : VMSDFieldIntLE,
    "int64" : VMSDFieldInt,
    "uint8" : VMSDFieldUInt,
    "uint16" : VMSDFieldUInt,
    "uint32" : VMSDFieldUInt,
    "uint32 equal" : VMSDFieldUInt,
    "uint64" : VMSDFieldUInt,
    "int64 equal" : VMSDFieldInt,
    "uint8 equal" : VMSDFieldInt,
    "uint16 equal" : VMSDFieldInt,
    "float64" : VMSDFieldGeneric,
    "timer" : VMSDFieldGeneric,
    "buffer" : VMSDFieldGeneric,
    "unused_buffer" : VMSDFieldGeneric,
    "bitmap" : VMSDFieldGeneric,
    "struct" : VMSDFieldStruct,
    "capability": VMSDFieldCap,
    "nullptr": VMSDFieldNull,
    "unknown" : VMSDFieldGeneric,
}

class VMSDSection(VMSDFieldStruct):
    def __init__(self, file, version_id, device, section_key):
        self.file = file
        self.data = ""
        self.vmsd_name = ""
        self.section_key = section_key
        desc = device
        if 'vmsd_name' in device:
            self.vmsd_name = device['vmsd_name']

        # A section really is nothing but a FieldStruct :)
        super(VMSDSection, self).__init__({ 'struct' : desc }, file)

###############################################################################

class MigrationDump(object):
    QEMU_VM_FILE_MAGIC     = 0x5145564d
    QEMU_VM_FILE_VERSION   = 0x00000003
    QEMU_VM_EOF            = 0x00
    QEMU_VM_SECTION_START  = 0x01
    QEMU_VM_SECTION_PART   = 0x02
    QEMU_VM_SECTION_END    = 0x03
    QEMU_VM_SECTION_FULL   = 0x04
    QEMU_VM_SUBSECTION     = 0x05
    QEMU_VM_VMDESCRIPTION  = 0x06
    QEMU_VM_CONFIGURATION  = 0x07
    QEMU_VM_SECTION_FOOTER = 0x7e

    def __init__(self, filename):
        self.section_classes = {
            ( 'ram', 0 ) : [ RamSection, None ],
            ( 's390-storage_attributes', 0 ) : [ S390StorageAttributes, None ],
            ( 'spapr/htab', 0) : ( HTABSection, None )
        }
        self.filename = filename
        self.vmsd_desc = None
        self.vmsd_json = ""

    def read(self, desc_only = False, dump_memory = False,
             write_memory = False):
        # Read in the whole file
        file = MigrationFile(self.filename)
        self.vmsd_json = file.read_migration_debug_json()

        # File magic
        data = file.read32()
        if data != self.QEMU_VM_FILE_MAGIC:
            raise Exception("Invalid file magic %x" % data)

        # Version (has to be v3)
        data = file.read32()
        if data != self.QEMU_VM_FILE_VERSION:
            raise Exception("Invalid version number %d" % data)

        self.load_vmsd_json(file)

        # Read sections
        self.sections = collections.OrderedDict()

        if desc_only:
            return

        ramargs = {}
        ramargs['page_size'] = self.vmsd_desc['page_size']
        ramargs['dump_memory'] = dump_memory
        ramargs['write_memory'] = write_memory
        ramargs['ignore_shared'] = False
        self.section_classes[('ram', 0)][1] = ramargs

        while True:
            section_type = file.read8()
            if section_type == self.QEMU_VM_EOF:
                break
            elif section_type == self.QEMU_VM_CONFIGURATION:
                config_desc = self.vmsd_desc.get('configuration')
                section = ConfigurationSection(file, config_desc)
                section.read()
                ramargs['ignore_shared'] = section.has_capability('x-ignore-shared')
            elif section_type == self.QEMU_VM_SECTION_START or section_type == self.QEMU_VM_SECTION_FULL:
                section_id = file.read32()
                name = file.readstr()
                instance_id = file.read32()
                version_id = file.read32()
                section_key = (name, instance_id)
                classdesc = self.section_classes[section_key]
                section = classdesc[0](file, version_id, classdesc[1], section_key)
                self.sections[section_id] = section
                section.read()
            elif section_type == self.QEMU_VM_SECTION_PART or section_type == self.QEMU_VM_SECTION_END:
                section_id = file.read32()
                self.sections[section_id].read()
            elif section_type == self.QEMU_VM_SECTION_FOOTER:
                read_section_id = file.read32()
                if read_section_id != section_id:
                    raise Exception("Mismatched section footer: %x vs %x" % (read_section_id, section_id))
            else:
                raise Exception("Unknown section type: %d" % section_type)
        file.close()

    def load_vmsd_json(self, file):
        self.vmsd_desc = json.loads(self.vmsd_json,
                                    object_pairs_hook=collections.OrderedDict)
        for device in self.vmsd_desc['devices']:
            if 'fields' not in device:
                raise Exception("vmstate for device %s has no fields" % device['name'])
            key = (device['name'], device['instance_id'])
            value = ( VMSDSection, device )
            self.section_classes[key] = value

    def getDict(self):
        r = collections.OrderedDict()
        for (key, value) in self.sections.items():
            key = "%s (%d)" % ( value.section_key[0], key )
            r[key] = value.getDict()
        return r

###############################################################################

class JSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, VMSDFieldGeneric):
            return str(o)
        return json.JSONEncoder.default(self, o)

parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file", help='migration dump to read from', required=True)
parser.add_argument("-m", "--memory", help='dump RAM contents as well', action='store_true')
parser.add_argument("-d", "--dump", help='what to dump ("state" or "desc")', default='state')
parser.add_argument("-x", "--extract", help='extract contents into individual files', action='store_true')
args = parser.parse_args()

jsonenc = JSONEncoder(indent=4, separators=(',', ': '))

if not any([args.extract, args.dump == "state", args.dump == "desc"]):
== "desc"]): 731 raise Exception("Please specify either -x, -d state or -d desc") 732 733try: 734 dump = MigrationDump(args.file) 735 736 if args.extract: 737 dump.read(desc_only = True) 738 739 print("desc.json") 740 f = open("desc.json", "w") 741 f.truncate() 742 f.write(jsonenc.encode(dump.vmsd_desc)) 743 f.close() 744 745 dump.read(write_memory = True) 746 dict = dump.getDict() 747 print("state.json") 748 f = open("state.json", "w") 749 f.truncate() 750 f.write(jsonenc.encode(dict)) 751 f.close() 752 elif args.dump == "state": 753 dump.read(dump_memory = args.memory) 754 dict = dump.getDict() 755 print(jsonenc.encode(dict)) 756 elif args.dump == "desc": 757 dump.read(desc_only = True) 758 print(jsonenc.encode(dump.vmsd_desc)) 759except Exception: 760 raise Exception("Full JSON dump:\n%s", dump.vmsd_json) 761