def read_file(filename, config=None):
    """Read an .SC2Replay file and return a fully-processed Replay object.

    :param filename: path to the replay file (must end in .SC2Replay).
    :param config: parser configuration; defaults to a fresh DefaultConfig().
    :raises TypeError: if the file does not have the SC2Replay extension.
    :raises NotYetImplementedError: if no registered reader accepts the build.
    """
    # BUG FIX: the default was `config=DefaultConfig()`, which is evaluated
    # once at definition time and shared between every call (mutable-default
    # pitfall). Create a fresh config per call instead.
    if config is None:
        config = DefaultConfig()

    if os.path.splitext(filename)[1].lower() != '.sc2replay':
        raise TypeError("Target file must be of the SC2Replay file extension")

    # Replay files are binary; open in 'rb' so the header bytes are not
    # mangled on platforms that translate line endings in text mode.
    with open(filename, 'rb') as replay_file:
        release, frames = read_header(replay_file)

    replay = config.ReplayClass(filename, release, frames)
    archive = MPQArchive(filename, listfile=False)

    # Extract and parse the relevant files. For each archived file, the
    # first reader that claims support for this build wins; the for/else
    # fires only when no reader matched.
    for file, readers in config.readers.iteritems():
        for reader in readers:
            if reader.reads(replay.build):
                reader.read(archive.read_file(file), replay)
                break
        else:
            raise NotYetImplementedError(
                "No parser was found that accepted the replay file; check configuration")

    # Do cleanup and post processing
    for processor in config.processors:
        replay = processor.process(replay)

    return replay
def __init__(self, map_file, filename=None, gateway=None, map_hash=None, **options):
    """Load a map resource: minimap plus enUS-localized metadata.

    :param map_file: the map archive (passed through to the Resource base).
    :param filename: optional display filename for the resource.
    :param gateway: bnet gateway the map was posted to (used for the URL).
    :param map_hash: depot hash identifying the map.
    """
    super(Map, self).__init__(map_file, filename, **options)
    self.hash = map_hash
    self.gateway = gateway
    self.url = Map.get_url(gateway, map_hash)
    self.archive = MPQArchive(map_file)
    self.minimap = self.archive.read_file('Minimap.tga')

    # This will only populate the fields for maps with enUS localizations.
    # Clearly this isn't a great solution but we can't be throwing exceptions
    # just because US English wasn't a concern of the map author.
    # TODO: Make this work regardless of the localizations available.
    # Raw string: the MPQ path contains literal backslashes.
    game_strings = self.archive.read_file(
        r'enUS.SC2Data\LocalizedData\GameStrings.txt')
    if game_strings:
        for line in game_strings.split('\r\n'):
            # BUG FIX: split on the first '=' only, so values that
            # themselves contain '=' (e.g. long descriptions) survive.
            parts = line.split('=', 1)
            if parts[0] == 'DocInfo/Name':
                self.name = parts[1]
            elif parts[0] == 'DocInfo/Author':
                self.author = parts[1]
            elif parts[0] == 'DocInfo/DescLong':
                self.description = parts[1]
class Map(Resource):
    """A map resource backed by an MPQ archive downloaded from bnet depots."""

    url_template = 'http://{0}.depot.battle.net:1119/{1}.s2ma'

    def __init__(self, map_file, filename=None, gateway=None, map_hash=None, **options):
        super(Map, self).__init__(map_file, filename, **options)
        self.hash = map_hash
        self.gateway = gateway
        self.url = Map.get_url(gateway, map_hash)
        # NOTE(review): relies on self.file being populated by the Resource
        # base constructor — confirm against the base class.
        self.archive = MPQArchive(StringIO(self.file))
        self.minimap = self.archive.read_file('Minimap.tga')

    @classmethod
    def get_url(cls, gateway, map_hash):
        """Build the depot download URL, or None if a component is missing.

        BUG FIX: the method was declared @classmethod but lacked the `cls`
        parameter, so the class landed in `gateway` and the real gateway in
        `map_hash`, producing a garbage URL.
        """
        if gateway and map_hash:
            return cls.url_template.format(gateway, map_hash)
        else:
            return None

    def load(self):
        """Deferred-load entry point: parse the localized game strings."""
        self.read_game_strings()

    def read_game_strings(self):
        """Populate name/author/description from the enUS GameStrings file."""
        self.game_strings = self.archive.read_file(
            r'enUS.SC2Data\LocalizedData\GameStrings.txt')
        for line in self.game_strings.split('\r\n'):
            # Split on the first '=' only so values containing '=' are kept.
            parts = line.split('=', 1)
            if parts[0] == 'DocInfo/Name':
                self.name = parts[1]
            elif parts[0] == 'DocInfo/Author':
                self.author = parts[1]
            elif parts[0] == 'DocInfo/DescLong':
                self.description = parts[1]
def extract_mmr_apm(path, your_id, opponent_id):
    """Extract MMR and APM for two players from a replay's metadata JSON.

    :param path: path to the .SC2Replay file.
    :param your_id: index of your entry in the metadata Players list.
    :param opponent_id: index of the opponent's entry.
    :returns: dict of stringified MMR/APM values; "Unknown" on any failure.
    """
    try:
        archive = MPQArchive(path)
        players = json.loads(
            archive.extract()[b'replay.gamemetadata.json'])['Players']
        # AI/offline games may omit MMR entirely; substitute a placeholder.
        if "MMR" not in players[opponent_id]:
            players[opponent_id]["MMR"] = "Unknown"
        if "MMR" not in players[your_id]:
            # BUG FIX: was misspelled "Unkown", so callers comparing against
            # the "Unknown" sentinel would never match for this player.
            players[your_id]["MMR"] = "Unknown"
        return {
            "yourMMR": str(players[your_id]['MMR']),
            "opponentMMR": str(players[opponent_id]['MMR']),
            "yourAPM": str(players[your_id]['APM']),
            "opponentAPM": str(players[opponent_id]['APM'])
        }
    except Exception:
        # Deliberate best-effort: a malformed replay should not abort the
        # caller's batch run, so log the traceback and return placeholders.
        traceback.print_exc()
        print("failed to parse mmr/apm for path {}".format(path))
        return {
            "yourMMR": "Unknown",
            "opponentMMR": "Unknown",
            "yourAPM": "Unknown",
            "opponentAPM": "Unknown"
        }
def __init__(self, replay_file):
    """Parse a StarCraft II replay archive and build the team/player model.

    Extracts the MPQ archive, wires up per-file parsers, then groups the
    parsed players into teams based on the game-type attribute (2001).
    """
    self.teams = []
    self.replay_file = replay_file
    self.parsers = {}
    try:
        archive = MPQArchive(self.replay_file)
        files = archive.extract()
        # bootstrap the right parsers, expand here for different version parsing too
        self.parsers['header'] = DetailsParser(archive.header['user_data_header']['content'])
        for file_name, data in files.iteritems():
            if(file_name == self.FILES['attributes']):
                self.parsers[file_name] = AttributesParser(data)
            if(file_name == self.FILES['details']):
                self.parsers[file_name] = DetailsParser(data)
        # Attribute 2001 encodes the game type (1v1, 2v2, ..., FFA).
        teams = self.attribute(2001)
        num_teams = 2
        if teams == '1v1':
            teams_lookup_attribute = 2002
        elif teams == '2v2':
            teams_lookup_attribute = 2003
        elif teams == '3v3':
            teams_lookup_attribute = 2004
        elif teams == '4v4':
            teams_lookup_attribute = 2005
        elif teams == 'FFA':
            teams_lookup_attribute = 2006
            num_teams = 10
        elif teams == '6v6':
            # NOTE(review): num_teams stays 2 here — confirm that is intended
            # for 6v6 rather than an oversight.
            teams_lookup_attribute = 2008
        # NOTE(review): if attribute 2001 matches none of the branches above,
        # teams_lookup_attribute is unbound and the player loop below raises
        # NameError — confirm all possible values are covered.
        # create the teams before the players
        for i in range(num_teams):
            self.teams.append(Team(i+1))
        # bootstrap the player object with some raw data
        for i, player_details in enumerate(self.parsers[self.FILES['details']].parse()[0]):
            player = Player(player_details, self.player_attributes(i+1))
            # team: the attribute value's second char is the 1-based team number
            players_team = player.attribute(teams_lookup_attribute)
            self.teams[int(players_team[1])-1].players.append(player)
    except IOError as (errno, strerror):
        print strerror
def winners(filepath):
    """Return a mapping of PlayerID -> True when that player won the game.

    Reads the replay.gamemetadata.json member of the replay's MPQ archive.
    """
    extracted = MPQArchive(str(filepath)).extract()
    metadata = json.loads(extracted[b"replay.gamemetadata.json"])
    return {
        entry["PlayerID"]: entry["Result"] == "Win"
        for entry in metadata["Players"]
    }
class Map(Resource):
    """A bnet map resource: minimap image plus enUS-localized metadata."""

    url_template = 'http://{0}.depot.battle.net:1119/{1}.s2ma'

    #: The unique hash used to identify this map on bnet's depots.
    hash = str()

    #: The gateway this map was posted to.
    #: Maps must be posted individually to each gateway.
    gateway = str()

    #: A URL reference to the location of this map on bnet's depots.
    url = str()

    #: The localized (only enUS supported right now) map name
    name = str()

    #: The map's author
    author = str()

    #: The map description as written by author
    description = str()

    #: A byte string representing the minimap in tga format.
    minimap = str()

    def __init__(self, map_file, filename=None, gateway=None, map_hash=None, **options):
        super(Map, self).__init__(map_file, filename, **options)
        self.hash = map_hash
        self.gateway = gateway
        self.url = Map.get_url(gateway, map_hash)
        self.archive = MPQArchive(map_file)
        self.minimap = self.archive.read_file('Minimap.tga')

        # This will only populate the fields for maps with enUS localizations.
        # Clearly this isn't a great solution but we can't be throwing exceptions
        # just because US English wasn't a concern of the map author.
        # TODO: Make this work regardless of the localizations available.
        # Raw string: the MPQ member path contains literal backslashes.
        game_strings = self.archive.read_file(r'enUS.SC2Data\LocalizedData\GameStrings.txt')
        if game_strings:
            for line in game_strings.split('\r\n'):
                # BUG FIX: split at the first '=' only so descriptions that
                # contain '=' are no longer truncated.
                parts = line.split('=', 1)
                if parts[0] == 'DocInfo/Name':
                    self.name = parts[1]
                elif parts[0] == 'DocInfo/Author':
                    self.author = parts[1]
                elif parts[0] == 'DocInfo/DescLong':
                    self.description = parts[1]

    @classmethod
    def get_url(cls, gateway, map_hash):
        """Builds a download URL for the map from its components."""
        if gateway and map_hash:
            # it seems like sea maps are stored on us depots.
            gateway = 'us' if gateway == 'sea' else gateway
            return cls.url_template.format(gateway, map_hash)
        else:
            return None
def load(self): print "Fetching map: {0}".format(self.url); self.file = urllib2.urlopen(self.url).read() print "Map Received" self.archive = MPQArchive(StringIO(self.file)) self.minimap = self.archive.read_file('Minimap.tga') self.game_strings = self.archive.read_file('enUS.SC2Data\LocalizedData\GameStrings.txt') for line in self.game_strings.split('\r\n'): parts = line.split('=') if parts[0] == 'DocInfo/Name': self.name = parts[1] elif parts[0] == 'DocInfo/Author': self.author = parts[1] elif parts[0] == 'DocInfo/DescLong': self.description = parts[1]
def extractResult(filepath):
    """Return {PlayerID: True if that player won} for the given replay."""
    archive = MPQArchive(str(filepath))
    metadata = json.loads(archive.extract()[b"replay.gamemetadata.json"])
    outcome = {}
    for player in metadata["Players"]:
        outcome[player["PlayerID"]] = (player["Result"] == "Win")
    return outcome
def __init__(self, map_file, filename=None, gateway=None, map_hash=None, **options):
    """Initialize a map resource and eagerly read its minimap.

    :param map_file: the map archive, handed to the Resource base class.
    :param filename: optional display filename for the resource.
    :param gateway: bnet gateway used to build the depot URL.
    :param map_hash: depot hash used to build the depot URL.
    """
    super(Map, self).__init__(map_file, filename, **options)
    self.hash = map_hash
    self.gateway = gateway
    self.url = Map.get_url(gateway, map_hash)
    # NOTE(review): assumes the Resource base constructor stored the raw
    # bytes in self.file — confirm against the base class.
    self.archive = MPQArchive(StringIO(self.file))
    self.minimap = self.archive.read_file('Minimap.tga')
class SC2Remix(object): def __init__(self, replay): self.replay = replay # self.file = self._open(replay) self._get_mpq() self.archive = None def _get_mpq(self): try: self.mpq = MPQArchive(self.replay) except IOError: return None def _open(self, replay): try: self.file = open(replay, 'rb') except: print "Error opening replay." def zip(self, path=''): self.archive = zipfile.ZipFile(self.mpq.file.name+'.zip','w') files = self.mpq.extract() for f in self.mpq.files: self.archive.writestr(f, files[f]) self.archive.close()
def get_commands(filename):
    """Extract chat-command registrations from a Warcraft III map script.

    :param filename: path to the map archive.
    :returns: dict mapping command string -> list of player expressions that
        registered it.
    :raises ValueError: if the map contains no war3map.j script.
    """
    archive = MPQArchive(filename, listfile=False)
    script = archive.read_file('war3map.j')
    if not script:
        # Newer maps store the script under a Scripts\ prefix.
        script = archive.read_file(r'Scripts\war3map.j')
    if script is None:
        # BUG FIX: previously fell through to None.decode and raised an
        # opaque AttributeError; fail with a clear message instead.
        raise ValueError('war3map.j not found in %s' % filename)
    script = script.decode('utf-8', errors='replace')
    re_str = r'call TriggerRegisterPlayerChatEvent\([^,]*,([^,]*),[ ]*"([^"]*)"[ ]*,[^,]*\)'
    cmds = {}
    for player, cmd in re.findall(re_str, script):
        # Group players by command; setdefault replaces the manual
        # membership-check-then-append pattern.
        cmds.setdefault(cmd, []).append(player.strip())
    return cmds
def __init__(self, replay: str):
    """Open the replay archive and resolve the matching decode protocol.

    :param replay: path to the .SC2Replay file.
    """
    self.archive = MPQArchive(replay)
    self.protocol = self.read_protocol(self.archive)
    # Lazily-populated caches for the decoded replay sections; each stays
    # None until the corresponding accessor first decodes it.
    self._initdata = None
    self._details = None
    self._events = None
    self._attributeevents = None
    self._trackerevents = None
    self._metadata = None
def __init__(self, base_path: Text, *archives: Text):
    """Create a new multi-archive.

    Args:
        base_path: The path to the WoW client.
        *archives: A list of MPQ archive names to load from the client.
    """
    # Hoist the invariant Data directory out of the per-archive join.
    data_dir = os.path.join(base_path, 'Data')
    self.archives = [MPQArchive(os.path.join(data_dir, name))
                     for name in archives]
def read_file(filename, config=None):
    """Parse an .SC2Replay file into a Replay object using *config*.

    :raises TypeError: when the extension is not .SC2Replay.
    :raises NotYetImplementedError: when no reader supports the build.
    """
    # BUG FIX: `config=DefaultConfig()` built one shared instance at function
    # definition time; every call then mutated the same config object.
    if config is None:
        config = DefaultConfig()

    if os.path.splitext(filename)[1].lower() != '.sc2replay':
        raise TypeError("Target file must be of the SC2Replay file extension")

    # Binary mode: the header is raw bytes, not text.
    with open(filename, 'rb') as replay_file:
        release, frames = read_header(replay_file)

    replay = config.ReplayClass(filename, release, frames)
    archive = MPQArchive(filename, listfile=False)

    # Extract and parse the relevant files; the first reader supporting this
    # build wins, and for/else raises when none matched.
    for file, readers in config.readers.iteritems():
        for reader in readers:
            if reader.reads(replay.build):
                reader.read(archive.read_file(file), replay)
                break
        else:
            raise NotYetImplementedError(
                "No parser was found that accepted the replay file; check configuration")

    # Do cleanup and post processing
    for processor in config.processors:
        replay = processor.process(replay)

    return replay
class Map(object): url_template = 'http://{0}.depot.battle.net:1119/{1}.s2ma' def __init__(self, gateway, map_hash, map_name=''): self.hash = map_hash.encode('hex') self.gateway = gateway self.url = Map.url_template.format(self.gateway, self.hash) self.name = map_name def load(self): print "Fetching map: {0}".format(self.url); self.file = urllib2.urlopen(self.url).read() print "Map Received" self.archive = MPQArchive(StringIO(self.file)) self.minimap = self.archive.read_file('Minimap.tga') self.game_strings = self.archive.read_file('enUS.SC2Data\LocalizedData\GameStrings.txt') for line in self.game_strings.split('\r\n'): parts = line.split('=') if parts[0] == 'DocInfo/Name': self.name = parts[1] elif parts[0] == 'DocInfo/Author': self.author = parts[1] elif parts[0] == 'DocInfo/DescLong': self.description = parts[1]
def parse_replay(self, filename):
    """
    Given a Heroes of the Storm replay file (mpq archive), parses the
    file and returns a Replay object, or None on any failure.
    """
    try:
        logging.info("Attempting to parse replay: {}".format(filename))
        mpq = MPQArchive(filename)
        replay = Replay()
        return self._parse_replay(replay, mpq)
    except Exception:
        # logging.exception records the full traceback; the previous
        # str(e)/repr(e) pair lost the stack, which made failures on
        # malformed replays hard to diagnose.
        logging.exception(
            "Something went wrong with replay {}".format(filename))
        return None
def filter(self, replay):
    """Apply filter to a particular SC2Replay

    Args:
        replay (str): Absolute file path to a SC2Replay

    Returns:
        Replay metadata if matching filter else None
    """
    # Extract JSON archive data from replay
    archive_files = MPQArchive(replay).extract()
    metadata = json.loads(
        archive_files[b"replay.gamemetadata.json"].decode("utf-8"))

    # If game_version provided and non-matching then return None
    replay_game_version = int(metadata['DataBuild'])
    if self.game_version and self.game_version != replay_game_version:
        return None

    # If map_title provided and non-matching then return None
    if self.map_title and self.map_title != metadata['Title']:
        return None

    # Evaluate players against APM, MMR, races, and winning_races thresholds
    race_found = False
    for player in metadata["Players"]:
        try:
            if player['APM'] < self.apm_threshold or player['MMR'] < self.mmr_threshold:
                return None
            player_race = player['AssignedRace']
            if player_race in self.races:
                race_found = True
            # Winner must use one of the requested winning races.
            if player['Result'] == 'Win' and player_race not in self.winning_races:
                return None
        except KeyError:
            # Missing stat fields (e.g. AI players) disqualify the replay.
            return None
    if not race_found:
        return None

    # Replay has met all criteria so return its metadata
    return metadata
def __init__(self, map_file, filename=None, gateway=None, map_hash=None, **options):
    """Initialize the map resource: minimap plus enUS-localized fields.

    :param map_file: the map archive, forwarded to the Resource base class.
    :param filename: optional display filename.
    :param gateway: bnet gateway for URL construction.
    :param map_hash: depot hash for URL construction.
    """
    super(Map, self).__init__(map_file, filename, **options)
    self.hash = map_hash
    self.gateway = gateway
    self.url = Map.get_url(gateway, map_hash)
    self.archive = MPQArchive(map_file)
    self.minimap = self.archive.read_file('Minimap.tga')

    # This will only populate the fields for maps with enUS localizations.
    # Clearly this isn't a great solution but we can't be throwing exceptions
    # just because US English wasn't a concern of the map author.
    # TODO: Make this work regardless of the localizations available.
    game_strings = self.archive.read_file(r'enUS.SC2Data\LocalizedData\GameStrings.txt')
    if game_strings:
        for line in game_strings.split('\r\n'):
            # BUG FIX: split on the first '=' only so descriptions that
            # contain '=' are not truncated.
            parts = line.split('=', 1)
            if parts[0] == 'DocInfo/Name':
                self.name = parts[1]
            elif parts[0] == 'DocInfo/Author':
                self.author = parts[1]
            elif parts[0] == 'DocInfo/DescLong':
                self.description = parts[1]
def read_metadata(protocol, archive: MPQArchive) -> bytes:
    """Return the raw replay.gamemetadata.json payload from the archive.

    Unlike the sibling readers, no protocol decoding is applied here — the
    metadata file is plain JSON and is returned as raw bytes (annotation
    corrected from `dict`). `protocol` is unused and kept only for signature
    symmetry with the other read_* helpers.
    # NOTE(review): read_file may return None when the member is absent —
    # callers should handle that; confirm against the archive API.
    """
    return archive.read_file('replay.gamemetadata.json')
class TestMPQArchive(unittest.TestCase):
    """Unit tests for MPQArchive against a checked-in test.SC2Replay fixture.

    NOTE(review): the print_* tests read sys.stdout.getvalue(), which only
    works when the runner replaces stdout (e.g. unittest buffer mode) —
    confirm how the suite is invoked.
    """

    def setUp(self):
        # Fresh archive per test from the fixture path.
        self.archive = MPQArchive(TEST_DIR + 'test.SC2Replay')

    def test_init_with_file(self):
        # MPQArchive must also accept an already-open binary file object.
        self.archive = MPQArchive(open(TEST_DIR + 'test.SC2Replay', 'rb'))

    def test_header(self):
        # Every parsed header field is pinned to the fixture's known values.
        self.assertEqual(self.archive.header['magic'], b'MPQ\x1a')
        self.assertEqual(self.archive.header['header_size'], 44)
        self.assertEqual(self.archive.header['archive_size'], 205044)
        self.assertEqual(self.archive.header['format_version'], 1)
        self.assertEqual(self.archive.header['sector_size_shift'], 3)
        self.assertEqual(self.archive.header['hash_table_offset'], 204628)
        self.assertEqual(self.archive.header['block_table_offset'], 204884)
        self.assertEqual(self.archive.header['hash_table_entries'], 16)
        self.assertEqual(self.archive.header['block_table_entries'], 10)
        self.assertEqual(self.archive.header['extended_block_table_offset'], 0)
        self.assertEqual(self.archive.header['hash_table_offset_high'], 0)
        self.assertEqual(self.archive.header['block_table_offset_high'], 0)
        self.assertEqual(self.archive.header['offset'], 1024)

    def test_files(self):
        # The fixture's file list, sorted as the archive reports it.
        self.assertEqual(self.archive.files, [
            b'replay.attributes.events',
            b'replay.details',
            b'replay.game.events',
            b'replay.initData',
            b'replay.load.info',
            b'replay.message.events',
            b'replay.smartcam.events',
            b'replay.sync.events'
        ])

    def test_print_hash_table(self):
        # Golden-output test of the hash table dump.
        self.archive.print_hash_table()
        self.assertEqual(
            sys.stdout.getvalue(),
            "MPQ archive hash table\n"
            "----------------------\n"
            " Hash A Hash B Locl Plat BlockIdx\n"
            "D38437CB 07DFEAEC 0000 0000 00000009\n"
            "AAC2A54B F4762B95 0000 0000 00000002\n"
            "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
            "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
            "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
            "C9E5B770 3B18F6B6 0000 0000 00000005\n"
            "343C087B 278E3682 0000 0000 00000004\n"
            "3B2B1EA0 B72EF057 0000 0000 00000006\n"
            "5A7E8BDC FF253F5C 0000 0000 00000001\n"
            "FD657910 4E9B98A7 0000 0000 00000008\n"
            "D383C29C EF402E92 0000 0000 00000000\n"
            "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
            "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
            "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
            "1DA8B0CF A2CEFF28 0000 0000 00000007\n"
            "31952289 6A5FFAA3 0000 0000 00000003\n"
            "\n")

    def test_print_block_table(self):
        # Golden-output test of the block table dump.
        self.archive.print_block_table()
        self.assertEqual(
            sys.stdout.getvalue(),
            "MPQ archive block table\n"
            "-----------------------\n"
            " Offset ArchSize RealSize Flags\n"
            "0000002C 727 890 81000200\n"
            "00000303 801 1257 81000200\n"
            "00000624 194096 479869 81000200\n"
            "0002FC54 226 334 81000200\n"
            "0002FD36 97 97 81000200\n"
            "0002FD97 1323 1970 81000200\n"
            "000302C2 6407 12431 81000200\n"
            "00031BC9 533 2400 81000200\n"
            "00031DDE 120 164 81000200\n"
            "00031E56 254 288 81000200\n"
            "\n")
def __init__(self, replay, partial_parse=True, full_parse=True):
    """Open and (optionally) parse an .SC2Replay file.

    :param replay: path to the replay file on disk.
    :param partial_parse: parse the quick files (initdata, details,
        attributes, messages) when True.
    :param full_parse: additionally parse the (slow) event stream when True.
    :raises ValueError: if the file is missing or unreadable.
    """
    self.filename = replay
    self.speed = ""
    self.release_string = ""
    self.build = ""
    self.type = ""
    self.category = ""
    self.is_ladder = False
    self.is_private = False
    self.map = ""
    self.realm = ""
    self.events = list()
    self.results = dict()
    self.teams = defaultdict(list)
    self.players = list()  #Unordered list of Player
    self.player = PlayerDict()  #Maps pid to Player
    self.events_by_type = dict()
    self.attributes = list()
    self.length = None  # (minutes, seconds) tuple
    self.messages = list()
    self.seconds = None  # Length of the game in seconds
    self.versions = None  # (number,number,number,number) tuple
    self.recorder = None  # Player object
    self.frames = None  # Integer representing FPS
    self.winner_known = False  # Set in parsers.DetailParser.load, should we hide this?
    self.file_time = None  # Probably number milliseconds since EPOCH

    # Marked as private in case people want raw file access
    self._files = dict()  # Files extracted from mpyq

    #Used internally to ensure parse ordering
    self.__parsed = dict(details=False, attributes=False, messages=False, events=False, initdata=False)

    # TODO: Change to something better
    # http://en.wikipedia.org/wiki/Epoch_(reference_date)
    # Notice that Windows and Mac have different EPOCHs, I wonder whether
    # this is different depending on the OS on which the replay was played.
    self.date = ""  # Date when the game was played

    #Make sure the file exists and is readable, first and foremost
    if not os.access(self.filename, os.F_OK):
        raise ValueError("File at '%s' cannot be found" % self.filename)
    elif not os.access(self.filename, os.R_OK):
        raise ValueError("File at '%s' cannot be read" % self.filename)

    #Always parse the header first, then extract the files
    self._parse_header()

    #Manually extract the contents of SC2Replay file (bypass the listfile)
    archive = MPQArchive(replay, listfile=False)
    self._files['replay.initData'] = archive.read_file('replay.initData')
    self._files['replay.details'] = archive.read_file('replay.details')
    self._files['replay.attributes.events'] = archive.read_file('replay.attributes.events')
    self._files['replay.message.events'] = archive.read_file('replay.message.events')
    self._files['replay.game.events'] = archive.read_file('replay.game.events')

    #These are quickly parsed files that contain most of the game information
    #The order is important, I need some way to reinforce it in the future
    if partial_parse or full_parse:
        self._parse_initdata()
        self._parse_details()
        self._parse_attributes()
        self._parse_messages()

    #Parsing events takes forever, so only do this on request
    if full_parse:
        self._parse_events()
def read_game_events(protocol, archive: MPQArchive) -> dict:
    """Decode the replay.game.events stream using the given protocol."""
    return protocol.decode_replay_game_events(
        archive.read_file('replay.game.events'))
def read_initdata(protocol, archive: MPQArchive) -> dict:
    """Decode the replay.initData section using the given protocol."""
    raw_initdata = archive.read_file('replay.initData')
    return protocol.decode_replay_initdata(raw_initdata)
def test_init_with_file(self):
    """MPQArchive should accept an already-open binary file object."""
    replay_path = TEST_DIR + 'test.SC2Replay'
    self.archive = MPQArchive(open(replay_path, 'rb'))
def read_tracker_events(protocol, archive: MPQArchive) -> dict:
    """Decode the replay.tracker.events stream using the given protocol."""
    return protocol.decode_replay_tracker_events(
        archive.read_file('replay.tracker.events'))
local_regex = re.compile(r'^[a-z][a-z][A-Z][A-Z]\.SC2Data\\LocalizedData\\GameStrings\.txt$') for filename in mpq.files: if filename == thumbnail_name: data = mpq.read_file(filename) f = open(filename, 'wb') f.write(data) f.close() elif local_regex.match(filename): data = mpq.read_file(filename) f = open(filename, 'wb') if data != None: f.write(data) f.close() if len(sys.argv) < 2: print 'syntax: extract_map.py <map> [thumbnail-name] [/output/dir/]' sys.exit(1) mapdir = os.path.dirname(sys.argv[1]) if mapdir == "": mapdir = "." os.chdir(mapdir) archive = MPQArchive(sys.argv[1]) outdir = None thumbnail_name = None if len(sys.argv) >= 3: outdir = sys.argv[2] if len(sys.argv) >= 4: outdir = sys.argv[3] extract_map(archive, outdir)
def _get_mpq(self):
    """Open self.replay as an MPQ archive into self.mpq.

    On I/O failure self.mpq is set to None instead of being left unbound,
    so later attribute access fails predictably rather than with
    AttributeError.
    """
    try:
        self.mpq = MPQArchive(self.replay)
    except IOError:
        self.mpq = None
        return None
def setUp(self):
    """Open a fresh archive from the test fixture before each test."""
    fixture_path = TEST_DIR + 'test.SC2Replay'
    self.archive = MPQArchive(fixture_path)
class TestMPQArchive(unittest.TestCase):
    """Unit tests for MPQArchive against a checked-in test.SC2Replay fixture.

    The print_* tests capture output by patching sys.stdout with a StringIO
    via mock, then compare against golden text.
    """

    def setUp(self):
        # Fresh archive per test from the fixture path.
        self.archive = MPQArchive(TEST_DIR + 'test.SC2Replay')

    def tearDown(self):
        # Explicitly release the underlying file handle between tests.
        self.archive.close()
        self.archive = None

    def test_init_with_file(self):
        # MPQArchive must also accept an already-open binary file object.
        self.archive = MPQArchive(open(TEST_DIR + 'test.SC2Replay', 'rb'))

    def test_header(self):
        # Every parsed header field is pinned to the fixture's known values.
        self.assertEqual(self.archive.header['magic'], b'MPQ\x1a')
        self.assertEqual(self.archive.header['header_size'], 44)
        self.assertEqual(self.archive.header['archive_size'], 205044)
        self.assertEqual(self.archive.header['format_version'], 1)
        self.assertEqual(self.archive.header['sector_size_shift'], 3)
        self.assertEqual(self.archive.header['hash_table_offset'], 204628)
        self.assertEqual(self.archive.header['block_table_offset'], 204884)
        self.assertEqual(self.archive.header['hash_table_entries'], 16)
        self.assertEqual(self.archive.header['block_table_entries'], 10)
        self.assertEqual(self.archive.header['extended_block_table_offset'], 0)
        self.assertEqual(self.archive.header['hash_table_offset_high'], 0)
        self.assertEqual(self.archive.header['block_table_offset_high'], 0)
        self.assertEqual(self.archive.header['offset'], 1024)

    def test_files(self):
        # The fixture's file list as the archive reports it.
        self.assertEqual(self.archive.files,
                         [b'replay.attributes.events',
                          b'replay.details',
                          b'replay.game.events',
                          b'replay.initData',
                          b'replay.load.info',
                          b'replay.message.events',
                          b'replay.smartcam.events',
                          b'replay.sync.events'])

    @mock.patch('sys.stdout', new_callable=six.StringIO)
    def test_print_hash_table(self, mock_stdout):
        # Golden-output test of the hash table dump.
        self.archive.print_hash_table()
        self.assertEqual(mock_stdout.getvalue(),
                         "MPQ archive hash table\n"
                         "----------------------\n"
                         " Hash A Hash B Locl Plat BlockIdx\n"
                         "D38437CB 07DFEAEC 0000 0000 00000009\n"
                         "AAC2A54B F4762B95 0000 0000 00000002\n"
                         "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
                         "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
                         "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
                         "C9E5B770 3B18F6B6 0000 0000 00000005\n"
                         "343C087B 278E3682 0000 0000 00000004\n"
                         "3B2B1EA0 B72EF057 0000 0000 00000006\n"
                         "5A7E8BDC FF253F5C 0000 0000 00000001\n"
                         "FD657910 4E9B98A7 0000 0000 00000008\n"
                         "D383C29C EF402E92 0000 0000 00000000\n"
                         "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
                         "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
                         "FFFFFFFF FFFFFFFF FFFF FFFF FFFFFFFF\n"
                         "1DA8B0CF A2CEFF28 0000 0000 00000007\n"
                         "31952289 6A5FFAA3 0000 0000 00000003\n"
                         "\n")

    @mock.patch('sys.stdout', new_callable=six.StringIO)
    def test_print_block_table(self, mock_stdout):
        # Golden-output test of the block table dump.
        self.archive.print_block_table()
        self.assertEqual(mock_stdout.getvalue(),
                         "MPQ archive block table\n"
                         "-----------------------\n"
                         " Offset ArchSize RealSize Flags\n"
                         "0000002C 727 890 81000200\n"
                         "00000303 801 1257 81000200\n"
                         "00000624 194096 479869 81000200\n"
                         "0002FC54 226 334 81000200\n"
                         "0002FD36 97 97 81000200\n"
                         "0002FD97 1323 1970 81000200\n"
                         "000302C2 6407 12431 81000200\n"
                         "00031BC9 533 2400 81000200\n"
                         "00031DDE 120 164 81000200\n"
                         "00031E56 254 288 81000200\n"
                         "\n")
def read_details(protocol, archive: MPQArchive) -> dict:
    """Decode and return the replay.details structure via the protocol."""
    raw_details = archive.read_file('replay.details')
    return protocol.decode_replay_details(raw_details)
def main():
    """ Get command line arguments and invoke the command line functionality. """
    filters = []
    parser = argparse.ArgumentParser()
    parser.add_argument('replay_file', help='.SC2Replay file to load', nargs='?')
    parser.add_argument("--gameevents", help="print game events", action="store_true")
    parser.add_argument("--messageevents", help="print message events", action="store_true")
    parser.add_argument("--trackerevents", help="print tracker events", action="store_true")
    parser.add_argument("--attributeevents", help="print attributes events", action="store_true")
    parser.add_argument("--attributeparse", help="parse attributes events", action="store_true")
    parser.add_argument("--header", help="print protocol header", action="store_true")
    parser.add_argument("--metadata", help="print game metadata", action="store_true")
    parser.add_argument("--details", help="print protocol details", action="store_true")
    parser.add_argument("--details_backup", help="print protocol anoynmized details", action="store_true")
    parser.add_argument("--initdata", help="print protocol initdata", action="store_true")
    parser.add_argument("--all", help="print all data", action="store_true")
    parser.add_argument("--quiet", help="disable printing", action="store_true")
    parser.add_argument("--stats", help="print stats", action="store_true")
    parser.add_argument("--diff", help="diff two protocols", default=None, action="store")
    parser.add_argument("--versions", help="show all protocol versions", action="store_true")
    parser.add_argument("--types", help="show type information in event output", action="store_true")
    parser.add_argument("--json", help="print output as json", action="store_true")
    parser.add_argument("--ndjson", help="print output as ndjson (newline delimited)", action="store_true")
    parser.add_argument("--profile", help="Whether to profile or not", action="store_true")
    args = parser.parse_args()

    if args.profile:
        pr = cProfile.Profile()
        pr.enable()

    # TODO: clean up the command line arguments to allow cleaner
    # sub-command style commands

    # List all protocol versions, printed in batches of eight.
    if args.versions:
        files = list_all()
        pattern = re.compile('^protocol([0-9]+).py$')
        captured = []
        for f in files:
            m = pattern.match(f)
            if m is None:
                # Robustness: skip non-protocol files instead of crashing
                # on None.group(1).
                continue
            captured.append(m.group(1))
            if len(captured) == 8:
                print(captured[0:8])
                captured = []
        print(captured)
        return

    # Diff two protocols (`args.diff and args.diff is not None` was redundant).
    if args.diff:
        version_list = args.diff.split(',')
        if len(version_list) < 2:
            print(
                "--diff requires two versions separated by comma e.g. --diff=1,2",
                file=sys.stderr)
            sys.exit(1)
        diff(version_list[0], version_list[1])
        return

    # Check/test the replay file
    if args.replay_file is None:
        print(".S2Replay file not specified", file=sys.stderr)
        sys.exit(1)

    archive = MPQArchive(args.replay_file)

    # Build the output filter chain (innermost filter first).
    filters = []
    if args.json:
        filters.insert(0, JSONOutputFilter(sys.stdout))
    elif args.ndjson:
        filters.insert(0, NDJSONOutputFilter(sys.stdout))
    elif not args.quiet:
        filters.insert(0, PrettyPrintFilter(sys.stdout))
    if args.types:
        filters.insert(0, TypeDumpFilter())
    if args.stats:
        filters.insert(0, StatCollectionFilter())

    def process_event(event):
        # Thread each event through every filter in order.
        for f in filters:
            event = f.process(event)

    # Read the protocol header, this can be read with any protocol
    contents = archive.header['user_data_header']['content']
    header = latest().decode_replay_header(contents)
    if args.header:
        process_event(header)

    # The header's baseBuild determines which protocol to use
    baseBuild = header['m_version']['m_baseBuild']
    try:
        protocol = build(baseBuild)
    except Exception as e:
        print('Unsupported base build: {0} ({1!s})'.format(baseBuild, e),
              file=sys.stderr)
        sys.exit(1)

    # Process game metadata
    if args.all or args.metadata:
        contents = read_contents(archive, 'replay.gamemetadata.json')
        process_event(json.loads(contents))

    # Print protocol details
    if args.all or args.details:
        contents = read_contents(archive, 'replay.details')
        details = protocol.decode_replay_details(contents)
        details = process_details_data(details)
        process_event(details)

    # Print anonymized protocol details
    if args.all or args.details_backup:
        contents = read_contents(archive, 'replay.details.backup')
        details_backup = protocol.decode_replay_details(contents)
        details_backup = process_details_data(details_backup)
        process_event(details_backup)

    # Print protocol init data
    if args.all or args.initdata:
        contents = read_contents(archive, 'replay.initData')
        initdata = protocol.decode_replay_initdata(contents)
        initdata = process_init_data(initdata)
        process_event(initdata)

    # BUG FIX (three sites below): `map(process_event, ...)` is lazy in
    # Python 3, so the generator was never consumed and no events were ever
    # processed. Explicit for-loops consume the decoders eagerly.

    # Print game events and/or game events stats
    if args.all or args.gameevents:
        contents = read_contents(archive, 'replay.game.events')
        for event in protocol.decode_replay_game_events(contents):
            process_event(event)

    # Print message events
    if args.all or args.messageevents:
        contents = read_contents(archive, 'replay.message.events')
        for event in protocol.decode_replay_message_events(contents):
            process_event(event)

    # Print tracker events (not present in very old protocols)
    if args.all or args.trackerevents:
        if hasattr(protocol, 'decode_replay_tracker_events'):
            contents = read_contents(archive, 'replay.tracker.events')
            for event in protocol.decode_replay_tracker_events(contents):
                process_event(event)

    # Print attributes events
    if args.all or args.attributeevents or args.attributeparse:
        contents = read_contents(archive, 'replay.attributes.events')
        attributes = protocol.decode_replay_attributes_events(contents)
        # Process raw attribute events structure
        if args.attributeevents:
            process_event(attributes)
        # Convert attributes to higher level requested data, will
        # call process_event for each new event that it creates
        if args.attributeparse:
            process_scope_attributes(attributes['scopes'], process_event)

    for f in filters:
        f.finish()

    if args.profile:
        pr.disable()
        print("Profiler Results")
        print("----------------")
        s = get_stream()
        sortby = 'cumulative'
        ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
        ps.print_stats()
        print(s.getvalue())
def read_attribute_events(protocol, archive: MPQArchive) -> dict:
    """Decode the replay.attributes.events section via the protocol."""
    return protocol.decode_replay_attributes_events(
        archive.read_file('replay.attributes.events'))