def setup(filename):
    """Open a .SC2Replay archive and prepare its tracker events and per-player state.

    :param filename: path to a .SC2Replay file
    :return: tuple of (tracker event generator, collection-rate dict keyed by
        player id, race dict keyed by player id, game duration)
    """
    replay_archive = mpyq.MPQArchive(filename)
    # Decode the header with the latest protocol just to learn which base
    # build wrote this replay, then load the matching protocol module.
    header_bytes = replay_archive.header['user_data_header']['content']
    replay_header = versions.latest().decode_replay_header(header_bytes)
    protocol = versions.build(replay_header['m_version']['m_baseBuild'])
    metadata = json.loads(replay_archive.read_file('replay.gamemetadata.json'))
    # Pull the tracker event stream and decode it lazily.
    tracker_bytes = replay_archive.read_file('replay.tracker.events')
    trackerEvents = protocol.decode_replay_tracker_events(tracker_bytes)
    players = metadata['Players']
    game_length = metadata['Duration']
    # Seed every player with an initial (time, amount) sample of zero.
    collection_rates = {
        p['PlayerID']: {'mineral': [(0, 0)], 'gas': [(0, 0)]}
        for p in players
    }
    races = {p['PlayerID']: p['SelectedRace'] for p in players}
    return trackerEvents, collection_rates, races, game_length
def get_events(replay_file):
    """
    :param replay_file: path to a sc2replay file
    :return: events: a list of upgrade, unit born, and unit death events
    """
    archive = mpyq.MPQArchive(replay_file)
    raw_header = archive.header['user_data_header']['content']
    header = versions.latest().decode_replay_header(raw_header)
    decoder = versions.build(header['m_version']['m_baseBuild'])

    # Only membership is tested, so a set is sufficient here.
    wanted_tracker_types = {
        "NNet.Replay.Tracker.SUnitDiedEvent",
        "NNet.Replay.Tracker.SUnitBornEvent",
        "NNet.Replay.Tracker.SUnitTypeChangeEvent",
        "NNet.Replay.Tracker.SUnitInitEvent",
        "NNet.Replay.Tracker.SPlayerSetupEvent",
    }

    tracker_events = []
    for event in decoder.decode_replay_tracker_events(
            archive.read_file('replay.tracker.events')):
        if event['_event'] in wanted_tracker_types:
            tracker_events.append(event)

    game_events = []
    for event in decoder.decode_replay_game_events(
            archive.read_file('replay.game.events')):
        if event['_event'] == "NNet.Game.SGameUserLeaveEvent":
            game_events.append(event)

    # Tracker events first, then the leave events, as callers expect.
    return tracker_events + game_events
def _get_map_info(player_info, game_map, creep_flag=True):
    """Return a dict describing the map, optionally resolving its dimensions.

    :param player_info: decoded replay details; only ``m_cacheHandles`` is read
    :param game_map: map name, used as cache key and in log messages
    :param creep_flag: when False, skip the dimension lookup entirely
    :return: ``{'name': game_map}`` plus a ``'dimensions'`` Map when available
    """
    game_map_info = {
        'name': game_map,
    }
    if not creep_flag:
        return game_map_info
    # `maps` is a module-level cache, persisted to gamedata/map_info.py below.
    if game_map not in maps:
        # The last cache handle points at the map file on Blizzard's depot:
        # bytes 0-3 are the file type, 4-7 the server region, 8+ the hash.
        map_bytes = player_info['m_cacheHandles'][-1]
        server = map_bytes[4:8].decode('utf8').strip('\x00 ').lower()
        file_hash = binascii.b2a_hex(map_bytes[8:]).decode('utf8')
        file_type = map_bytes[0:4].decode('utf8')
        map_file = None
        # Up to 5 download attempts; the warning fires after every failed
        # attempt, including the last one.
        for i in range(0, 5):
            map_response = requests.get(f'http://{server}.depot.battle.net:1119/{file_hash}.{file_type}')
            if map_response.status_code == 200:
                map_file = BytesIO(map_response.content)
                break
            logger.warning(f'Could not fetch {game_map} map file. Retrying')
        if not map_file:
            logger.error(f'Failed to fetch {game_map} map file')
            return game_map_info
        # The map itself is an MPQ archive; its MapInfo member stores the
        # dimensions as little-endian 32-bit unsigned integers.
        map_archive = mpyq.MPQArchive(map_file)
        map_data = BytesIO(map_archive.read_file('MapInfo'))
        map_data.seek(4)
        # returns tuple of 32 byte unsigned integer
        unpack_int = struct.Struct('<I').unpack
        version = unpack_int(map_data.read(4))[0]
        if version >= 0x18:
            # trash bytes (newer MapInfo layouts insert two extra fields here)
            unpack_int(map_data.read(4))
            unpack_int(map_data.read(4))
        map_width = unpack_int(map_data.read(4))[0]
        map_height = unpack_int(map_data.read(4))[0]
        maps.update({
            game_map: {
                'width': map_width,
                'height': map_height,
            },
        })
        # Best-effort persistence of the cache so later runs skip the download.
        try:
            map_info_path = Path(__file__).resolve().parent / 'gamedata' / 'map_info.py'
            with open(map_info_path, 'w', encoding='utf-8') as map_info:
                map_info.write(f'maps = {maps}')
        except OSError:
            logger.warning('Could not write map details to file')
    game_map_info.update({
        'dimensions': Map(
            maps[game_map]['width'],
            maps[game_map]['height'],
        ),
    })
    return game_map_info
def process_sing_file(self, replay_file_name):
    """Decode one replay end-to-end and assert its core archive members exist.

    Fails the test when the replay's base build has no matching protocol
    module, or when any required internal file is missing from the archive.

    :param replay_file_name: path to a .SC2Replay file
    """
    archive = mpyq.MPQArchive(replay_file_name)
    contents = archive.header['user_data_header']['content']
    header = versions.latest().decode_replay_header(contents)
    base_build = header['m_version']['m_baseBuild']
    try:
        protocol = versions.build(base_build)
    except Exception as e:
        self.fail('Unsupported base build: {0} ({1!s})'.format(base_build, e))
    required_internal_files = ['replay.details',
                               'replay.details.backup',
                               'replay.initData',
                               'replay.game.events',
                               'replay.message.events',
                               'replay.tracker.events'
                               ]
    for internal_file_name in required_internal_files:
        contents = archive.read_file(internal_file_name)
        self.assertIsNotNone(contents)
        # just decode init data
        # BUG FIX: the original used `is` to compare strings, which tests
        # object identity (not guaranteed for equal string literals); use ==.
        if internal_file_name == 'replay.initData':
            result = protocol.decode_replay_initdata(contents)
            self.assertIsNotNone(result)
    print('processed {}, base_build: {}'.format(replay_file_name, base_build))
def __init__(self, path, debug):
    """Open and pre-parse a replay archive.

    Any failure to open the archive or read its metadata marks the instance
    as ``invalid`` and returns early instead of raising, so callers can
    filter out broken replays cheaply.

    :param path: path to the .SC2Replay file
    :param debug: stored on ``self.DEBUG`` for callers to inspect
    """
    self.DEBUG = debug
    self.invalid = False
    try:
        self.archive = mpyq.MPQArchive(path)
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; Exception keeps the best-effort behavior intact.
    except Exception:
        self.invalid = True
        return
    self.archive_contents = self.archive.header['user_data_header'][
        'content']
    _header = versions.latest().decode_replay_header(self.archive_contents)
    self.build = _header['m_version']['m_baseBuild']
    try:
        self.protocol = versions.build(self.build)
    except Exception:
        # No exact protocol for this build: fall back to the closest lower one.
        next_lower = closest_version(self.build, versions)[0]
        self.protocol = versions.build(next_lower)
    try:
        self.meta = json.loads(
            self.archive.read_file('replay.gamemetadata.json').decode(
                'utf-8'))
    except Exception:
        self.invalid = True
        return
    # Lazily-populated caches for the decoded event streams.
    self._events = None
    self._tracker_events = None
    self._init_data = None
    self._details = None
    self._attribute_events = None
    self.tracked_units = TrackedUnits()
    self.game_ended = False
    self.winner = None
    self._phase = "Build"
    self.players = []
    # Per-game accumulators filled in while replaying events.
    self._game = {"wave": 0, "builders": {}}
    self.towers = []
    self.towerList = {}
    self.sends = []
    self.buildersByWave = []
    self.buildersOnWave = {}
    self.workers = []
    self.speedUps = []
    self.kills = []
    self.gasNumber = {}
    self.upgradeNumber = {}
    self.filename = path
    self._db = None
def get_game_version(replay):
    """Return the game version string (major.minor.patch) stored in a replay blob."""
    # BytesIO(initial_bytes) starts positioned at 0, equivalent to write+seek.
    buffer = six.BytesIO(replay)
    extracted = mpyq.MPQArchive(buffer).extract()
    metadata = json.loads(extracted[b'replay.gamemetadata.json'].decode('utf-8'))
    # Drop the trailing build number, keeping only major.minor.patch.
    version_parts = metadata['GameVersion'].split('.')[:-1]
    return '.'.join(version_parts)
def get_game_version(replay_data):
    """Extract the major.minor.patch game version from raw replay bytes."""
    stream = six.BytesIO(replay_data)  # constructor arg == write + seek(0)
    archive_files = mpyq.MPQArchive(stream).extract()
    meta_json = archive_files[b"replay.gamemetadata.json"].decode("utf-8")
    full_version = json.loads(meta_json)["GameVersion"]
    # The last dotted component is the build number; strip it off.
    return ".".join(full_version.split(".")[:-1])
def extract_replay_info(loc):
    """
    Replays are stored as MPQArchives and need to be extracted using the
    scprotocol provided by blizzard. This function takes that protocol and
    uses it to extract the data inside the replay. Items returned are player
    names, map name, player races, official blizzard map, the patch version,
    and the location of the starcraft2 replay.

    :param loc: File path location where a single StarCraft2 replay is
    :rtype: Dictionary
    :return: Returns data from the replay, or {} when it cannot be parsed
    """
    if os.path.isfile(loc):
        archive = mpyq.MPQArchive(loc)
        contents = archive.header['user_data_header']['content']
        header = versions.latest().decode_replay_header(contents)
        base_build = header['m_version']['m_baseBuild']
        try:
            protocol = versions.build(base_build)
        except ImportError:
            print("Replay too old, protocol does not exist: " + loc)
            return {}
        # Get patch version from replay header
        patch = str(header['m_version']['m_major']) + "." + \
            str(header['m_version']['m_minor']) + "." + \
            str(header['m_version']['m_revision'])
        contents = archive.read_file('replay.details')
        details = protocol.decode_replay_details(contents)
        try:
            map_name = details['m_title']
            player1_name = details['m_playerList'][0]['m_name'].decode('utf-8')
            player1_race = details['m_playerList'][0]['m_race'].decode('utf-8')
            player2_name = details['m_playerList'][1]['m_name'].decode('utf-8')
            player2_race = details['m_playerList'][1]['m_race'].decode('utf-8')
            blizz_map = details['m_isBlizzardMap']
            # Removes clan tag of the player
            if r"><sp/>" in player1_name:
                player1_name = player1_name.split("><sp/>")[1]
            if r"><sp/>" in player2_name:
                player2_name = player2_name.split("><sp/>")[1]
            return {
                "map": map_name,
                "blizz_map": blizz_map,
                "player1_name": str(player1_name),
                "player1_race": str(player1_race),
                "player2_name": str(player2_name),
                "player2_race": str(player2_race),
                "patch": patch,
                "file": loc
            }
        # BUG FIX: was a bare `except:`; narrowed to Exception so
        # SystemExit/KeyboardInterrupt still propagate. The best-effort
        # `return {}` (e.g. for 1-player replays) is preserved.
        except Exception:
            return {}
def _get_map_info(player_info: Dict, map_name: str, network_flag: bool) -> Map:
    """Build a Map for *map_name*, filling in dimensions when resolvable.

    :param player_info: decoded replay details; only ``m_cacheHandles`` is read
    :param map_name: map name, used as cache key and in log messages
    :param network_flag: when False, never hit the network for dimensions
    :return: a Map; width/height are set only if the map is (or becomes) cached
    """
    game_map = Map(map_name)
    # `maps` is a module-level cache, persisted to gamedata/map_info.py below.
    if map_name not in maps and network_flag:
        # The last cache handle points at the map file on Blizzard's depot:
        # bytes 0-3 are the file type, 4-7 the server region, 8+ the hash.
        map_bytes = player_info['m_cacheHandles'][-1]
        server = map_bytes[4:8].decode('utf8').strip('\x00 ').lower()
        file_hash = binascii.b2a_hex(map_bytes[8:]).decode('utf8')
        file_type = map_bytes[0:4].decode('utf8')
        map_file = None
        # Up to 5 attempts; warning fires after every failure, incl. the last.
        for i in range(0, 5):
            map_response = requests.get(
                f'http://{server}.depot.battle.net:1119/{file_hash}.{file_type}'
            )
            if map_response.status_code == 200:
                map_file = BytesIO(map_response.content)
                break
            logger.warning(f'Could not fetch {map_name} map file. Retrying')
        if not map_file:
            logger.error(f'Failed to fetch {map_name} map file')
            return game_map
        # The map itself is an MPQ archive; its MapInfo member stores the
        # dimensions as little-endian 32-bit unsigned integers.
        map_archive = mpyq.MPQArchive(map_file)
        map_data = BytesIO(map_archive.read_file('MapInfo'))
        map_data.seek(4)
        # returns tuple of 32 byte unsigned integer
        unpack_int = struct.Struct('<I').unpack
        version = unpack_int(map_data.read(4))[0]
        if version >= 0x18:
            # trash bytes (newer MapInfo layouts insert two extra fields here)
            unpack_int(map_data.read(4))
            unpack_int(map_data.read(4))
        map_width = unpack_int(map_data.read(4))[0]
        map_height = unpack_int(map_data.read(4))[0]
        maps.update({
            map_name: {
                'width': map_width,
                'height': map_height,
            },
        })
        # Best-effort persistence of the cache so later runs skip the download.
        try:
            map_info_path = Path(
                __file__).resolve().parent / 'gamedata' / 'map_info.py'
            with open(map_info_path, 'w', encoding='utf-8') as map_info:
                map_info.write(f'maps = {maps}')
        except OSError:
            logger.warning('Could not write map details to file')
    # something may have gone wrong when trying to get map details
    # or network_flag may be False
    if map_name in maps:
        game_map.width = maps[map_name]['width']
        game_map.height = maps[map_name]['height']
    return game_map
def get_replay_version(replay_path):
    """Return (BaseBuild, DataVersion) read from a replay file's metadata."""
    with open(replay_path, "rb") as handle:
        stream = six.BytesIO(handle.read())  # positioned at 0 by constructor
    files = mpyq.MPQArchive(stream).extract()
    meta = json.loads(files[b"replay.gamemetadata.json"].decode("utf-8"))
    return meta["BaseBuild"], meta["DataVersion"]
def get_replay_version(replay_path: Union[str, Path]) -> Tuple[str, str]:
    """Read a replay file and return its (BaseBuild, DataVersion) pair."""
    with open(replay_path, 'rb') as f:
        stream = BytesIO(f.read())  # BytesIO(data) starts at offset 0
    extracted = mpyq.MPQArchive(stream).extract()
    meta = json.loads(extracted[b"replay.gamemetadata.json"].decode("utf-8"))
    return meta["BaseBuild"], meta["DataVersion"]
def __init__(self, map_file, filename=None, gateway=None, map_hash=None, **options):
    """Load a StarCraft II map package (an MPQ archive) and extract its
    minimap, localized strings, MapInfo, and document metadata.

    :param map_file: file-like object or path for the map MPQ archive
    :param filename: optional original filename, passed to the base class
    :param gateway: bnet gateway the map was posted to
    :param map_hash: unique depot hash identifying the map
    """
    super(Map, self).__init__(map_file, filename, **options)

    #: The unique hash used to identify this map on bnet's depots.
    self.hash = map_hash

    #: The gateway this map was posted to. Maps must be posted individually to each gateway.
    self.gateway = gateway

    #: A URL reference to the location of this map on bnet's depots.
    self.url = Map.get_url(gateway, map_hash)

    #: The opened MPQArchive for this map
    self.archive = mpyq.MPQArchive(map_file)

    #: A byte string representing the minimap in tga format.
    self.minimap = self.archive.read_file('Minimap.tga')

    # This will only populate the fields for maps with enUS localizations.
    # Clearly this isn't a great solution but we can't be throwing exceptions
    # just because US English wasn't a concern of the map author.
    # TODO: Make this work regardless of the localizations available.
    # BUG FIX: the path must be a raw string -- '\L' and '\G' are invalid
    # escape sequences and raise a SyntaxWarning on modern Python.
    game_strings = self.archive.read_file(r'enUS.SC2Data\LocalizedData\GameStrings.txt').decode('utf8')
    if game_strings:
        for line in game_strings.split('\r\n'):
            if len(line) == 0:
                continue
            key, value = line.split('=', 1)
            if key == 'DocInfo/Name':
                self.name = value
            elif key == 'DocInfo/Author':
                self.author = value
            elif key == 'DocInfo/DescLong':
                self.description = value
            elif key == 'DocInfo/Website':
                self.website = value

    #: A reference to the map's :class:`~sc2reader.objects.MapInfo` object
    self.map_info = MapInfo(self.archive.read_file('MapInfo'))

    doc_info = ElementTree.fromstring(self.archive.read_file('DocumentInfo').decode('utf8'))
    icon_path_node = doc_info.find('Icon/Value')

    #: (Optional) The path to the icon for the map, relative to the archive root
    self.icon_path = icon_path_node.text if icon_path_node is not None else None

    #: (Optional) The icon image for the map in tga format
    self.icon = self.archive.read_file(self.icon_path) if self.icon_path is not None else None

    #: A list of module names this map depends on
    self.dependencies = [
        dependency_node.text
        for dependency_node in doc_info.findall('Dependencies/Value')
    ]
def process_sing_file(self, replay_file_name):
    """Decode one replay's header and fail the test when its base build
    has no matching protocol module.

    :param replay_file_name: path to a .SC2Replay file
    """
    archive = mpyq.MPQArchive(replay_file_name)
    contents = archive.header['user_data_header']['content']
    header = versions.latest().decode_replay_header(contents)
    base_build = header['m_version']['m_baseBuild']
    try:
        protocol = versions.build(base_build)
    # BUG FIX: `except Exception, e` is Python 2-only syntax and is a
    # SyntaxError on Python 3; use the `as` form.
    except Exception as e:
        self.fail('Unsupported base build: {0} ({1})'.format(
            base_build, str(e)))
def initProtocols(self, replayPath: str) -> None:
    """
    Initializes the protocols needed to parse the passed in replay

    Parameters
    ----------
    replayPath: str
        Path to the replay.
    """
    # Open the MPQ archive first; protocol selection reads its header.
    self.archive = mpyq.MPQArchive(replayPath)
    self.protocol = self._getProtocol()
def get_replay_version(replay_data):
    """Build a run_configs Version object from a replay's embedded metadata."""
    stream = six.BytesIO(replay_data)  # constructor arg == write + seek(0)
    files = mpyq.MPQArchive(stream).extract()
    meta = json.loads(files[b"replay.gamemetadata.json"].decode("utf-8"))
    # Game version drops the trailing build number; BaseBuild looks like
    # "Base12345" so the numeric part starts at index 4.
    game_version = ".".join(meta["GameVersion"].split(".")[:-1])
    return run_configs_lib.Version(
        game_version=game_version,
        build_version=int(meta["BaseBuild"][4:]),
        data_version=meta.get("DataVersion"),  # Only in replays version 4.1+.
        binary=None)
def parse_replay(filename):
    """Combine sc2reader's parsed replay with the raw gamemetadata JSON.

    :param filename: path to a .SC2Replay file
    :return: dict with timestamp, map info, duration, and a per-player list
        (team, name, race, result, APM, MMR, color, AI/random flags)
    """
    replay = sc2reader.load_replay(filename, debug=True)
    archive = mpyq.MPQArchive(filename)
    replay_data = json.loads(
        archive.read_file("replay.gamemetadata.json").decode("utf-8"))
    player_meta = {}
    for player in replay_data['Players']:
        player_meta[player["PlayerID"]] = player
    players = list()
    for team in replay.teams:
        for player in team.players:
            # Hoist the metadata lookup: the original repeated
            # player_meta[getattr(player, "pid", None)] five times and raised
            # KeyError when the pid was missing from the metadata; an empty
            # default keeps the documented 0 fallbacks instead.
            meta = player_meta.get(getattr(player, "pid", None), {})
            players.append({
                "team_id": team.number,
                "clan": getattr(player, "clan_tag", None),
                "name": getattr(player, "name", None),
                "race": getattr(player, "play_race", None),
                "result": meta.get('Result', 0),
                "profile_url": getattr(player, "url", None),
                "apm": meta.get('APM', 0),
                "mmr": meta.get('MMR', 0),
                "color": player.color.__dict__ if hasattr(player, "color") else None,
                "is_ai": str(player.__class__) == "<class 'sc2reader.objects.Computer'>",
                "is_random": getattr(player, "pick_race", None) == "Random"
            })
    # todo: should be the minimap image - but doesn't appear in any replays i have?
    # "map_image": archive.read_file('Minimap.tga')
    return {
        "timeUTC": getattr(replay, "unix_timestamp", None),
        "map": getattr(replay, "map_name", None),
        "map_hash": getattr(replay, "map_hash", None),
        "duration": duration(replay.real_length.seconds),
        "players": players
    }
def process_uploaded_replay(replayFiles):
    """Decode each uploaded replay and run sentiment analysis over it."""
    print("Task Running...")
    for file in replayFiles:
        archive = mpyq.MPQArchive(file)
        header_blob = archive.header['user_data_header']['content']
        replay_header = versions.latest().decode_replay_header(header_blob)
        baseBuild = replay_header['m_version']['m_baseBuild']
        try:
            protocol = versions.build(baseBuild)
            analyze_sentiments(archive, protocol)
        except ImportError as err:
            # No protocol module for this build; report and move on.
            print(err.args)
def __init__(self, replay_path):
    """Open a replay archive and select the protocol matching its base build.

    Exits the process with status 1 when no protocol module exists for the
    replay's base build.

    :param replay_path: path to a .SC2Replay file
    """
    self.__archive = mpyq.MPQArchive(replay_path)
    # Read the protocol header, this can be read with any protocol
    contents = self.__archive.header['user_data_header']['content']
    self.__header = latest().decode_replay_header(contents)
    # The header's baseBuild determines which protocol to use
    self.__base_build = self.__header['m_version']['m_baseBuild']
    try:
        self.__protocol = build(self.__base_build)
    # BUG FIX: was a bare `except:`, which also caught SystemExit and
    # KeyboardInterrupt; Exception preserves the intended behavior.
    except Exception:
        print('Unsupported base build: ' + str(self.__base_build))
        sys.exit(1)
def __init__(self, path):
    """Open a Zone Control replay, pick a protocol, and validate the game.

    Raises NotZCReplay when the metadata title is not a Zone Control game,
    and IncompleteReplay when no player has a recorded result.

    :param path: path to the .SC2Replay file
    """
    self.archive = mpyq.MPQArchive(path)
    self.fallback_versions = None
    _header_contents = self.archive.header['user_data_header']['content']
    protocol = versions.latest()
    _header = protocol.decode_replay_header(_header_contents)
    self.base_build = _header['m_version']['m_baseBuild']
    try:
        self.protocol = versions.build(self.base_build)
    except ImportError:
        # fall back version: no module for this build, use the closest lower.
        next_lower = closest_version(self.base_build, versions)[0]
        log.info(
            f"Missing Protocol {self.base_build}. Using Next Lower {next_lower}"
        )
        self.protocol = versions.build(next_lower)
        self.fallback_versions = closest_version(self.base_build, versions)
    self._header = _header
    # Lazily-populated caches for the decoded streams.
    self._events = None
    self._init_data = None
    self._details = None
    self._players = []
    self._teams = None
    self._attribute_events = None
    self._tracker_events = None
    # Accumulators filled while walking the match events.
    self.match_events = []
    self.snapshots: typing.List[Snapshot] = []
    self.tracked_units = TrackedUnits()
    self._time = None
    self.units = []
    self._message_lookup = {}
    self._message_keys = []
    # Per-phase segment buckets for match analysis.
    self.segments = {
        'early': {},
        'three_teams': {},
        'two_teams': {},
        'final': {}
    }
    meta = json.loads(
        self.archive.read_file('replay.gamemetadata.json').decode('utf-8'))
    self.meta = meta
    if meta['Title'] not in ['Zone Control CE', 'Zone Control CE Dev']:
        raise NotZCReplay("Not a valid replay for game")
    # Check to see if the replay is complete or not
    if max([p['m_result'] for p in self.details.get('m_playerList', [])]) == 0:
        raise IncompleteReplay("Replay is incomplete")
def build_replay(self, path):
    """Open the archive at *path*, cache its key members on self, and
    return a (raw header blob, protocol module) pair.

    Raises Exception when no protocol exists for the replay's base build.
    """
    self.archive = mpyq.MPQArchive(path)
    replay = self.archive.header['user_data_header']['content']
    header = versions.latest().decode_replay_header(replay)
    # Cache every archive member the parser will need later.
    read = self.archive.read_file
    self.contents = read('replay.tracker.events')
    self.details = read('replay.details')
    self.game_events = read('replay.game.events')
    self.init_data = read('replay.initData')
    self.metadata = json.loads(read('replay.gamemetadata.json'))
    base_build = header['m_version']['m_baseBuild']
    try:
        return (replay, versions.build(base_build))
    except Exception as e:
        raise Exception('Unsupported base build: {0} ({1!s})'.format(
            base_build, e))
def race_winrate(directory):
    """Scan a directory of .SC2Replay files and decode each game's players.

    :param directory: directory containing replay files
    """
    # Using mypq, load the replay file
    matcher = re.compile(r'\.SC2Replay$', re.IGNORECASE)
    replays = [file for file in os.listdir(directory) if matcher.search(file)]
    print("Found %d replays to scan" % len(replays))
    # Maps localized race names (incl. zh-CN) to single-letter codes.
    race_dictionary = {
        "Protoss": "P",
        "Zerg": "Z",
        "Terran": "T",
        "异虫": "Z",
        "星灵": "P",
        "人类": "T"
    }
    matchup_dictionary = {"PvZ": 0, "PvT": 0, "ZvT": 0}
    for replay in replays:
        try:
            # necessary stuff from s2protocol
            archive = mpyq.MPQArchive(os.path.join(directory, replay))
            contents = archive.header['user_data_header']['content']
            header = versions.latest().decode_replay_header(contents)
            base_build = header['m_version']['m_baseBuild']
            protocol = versions.build(base_build)
            # get the general info about the replay
            contents = archive.read_file('replay.details')
            result = protocol.decode_replay_details(contents)
            player_list = result['m_playerList']
            # player result is 1 if won, 2 if not.
            player_result = [
                player_list[0]['m_result'] == 1,
                player_list[1]['m_result'] == 1
            ]
            # ex: [P, Z]
            player_races = [
                player_list[0]['m_race'].decode('UTF-8'),
                player_list[1]['m_race'].decode('UTF-8')
            ]
            player_races = [race_dictionary[race] for race in player_races]
            # TODO: player_result / player_races are computed but
            # matchup_dictionary is never updated -- looks unfinished.
        # BUG FIX: was a bare `except:` printing only "error", which hid the
        # failing replay and the cause; report both, and let
        # SystemExit/KeyboardInterrupt propagate.
        except Exception as e:
            print("error processing {}: {}".format(replay, e))
def load_value(replay_filename, value):
    """Gets values from replay file

    Args:
        replay_filename (Replay): Replay
        value (String): Key to get from replay. (I.e MMR)

    Returns:
        TYPE: Description
    """
    archive = mpyq.MPQArchive(replay_filename)
    raw_json = archive.read_file("replay.gamemetadata.json").decode("utf-8")
    players = json.loads(raw_json)['Players']
    # One entry per player slot; a missing key defaults to 0.
    return [players[slot].get(value, 0) for slot in (0, 1)]
def __init__(self, replayFilename, game="sc2"):
    """Open a replay archive and import the protocol module matching its
    base build.

    Raises UnknownGameException for an unsupported game key and
    UnknownBaseBuildException when no protocol module exists for the build.
    """
    if game not in self.GAME_PROTOCOLS:
        raise UnknownGameException(game)
    self.game = game
    self.replayFilename = replayFilename
    self.mpqArchive = mpyq.MPQArchive(self.replayFilename)
    # The header's baseBuild determines which protocol to use (this works with every version)
    baseBuild = self.getHeader()['m_version']['m_baseBuild']
    packageName = self.GAME_PROTOCOLS[game]["protocol"]
    if __package__ is not None:
        # Qualify the protocol package when running inside a package.
        packageName = '{0}.{1}'.format(__package__, packageName)
    moduleName = '{0}.protocol{1}'.format(packageName, baseBuild)
    try:
        # Will raise an ImportError-exception if the basebuild is unknown
        self.protocol = __import__(moduleName, fromlist=[packageName])
    except ImportError:
        raise UnknownBaseBuildException(baseBuild)
def update_battle_net_cache(replays, bnet_base):
    """Download the battle.net cache files needed by replays."""
    test_looks_like_battle_net(bnet_base)
    downloaded = 0
    # URLs that already failed once are skipped for the rest of the run.
    failed = set()
    for replay_path in replays:
        try:
            archive = mpyq.MPQArchive(replay_path)
        except ValueError:
            print("Failed to parse replay:", replay_path)
            continue
        extracted = archive.extract()
        contents = archive.header["user_data_header"]["content"]
        header = s2versions.latest().decode_replay_header(contents)
        base_build = header["m_version"]["m_baseBuild"]
        prot = s2versions.build(base_build)
        # Prefer replay.details; fall back to the backup copy when absent.
        details_bytes = (extracted.get(b"replay.details") or
                         extracted.get(b"replay.details.backup"))
        details = prot.decode_replay_details(details_bytes)
        for map_handle in details["m_cacheHandles"]:
            # Handle layout: bytes 0-3 file type, 4-7 server, 8+ hash.
            # server = map_handle[4:8].decode("utf-8").strip("\x00 ")
            map_hash = binascii.b2a_hex(map_handle[8:]).decode("utf8")
            file_type = map_handle[0:4].decode("utf8")
            # Cache layout mirrors battle.net: Cache/<h0h1>/<h2h3>/<hash>.<type>
            cache_path = os.path.join(bnet_base, "Cache", map_hash[0:2],
                                      map_hash[2:4],
                                      "%s.%s" % (map_hash, file_type))
            url = DEPOT_URL_TEMPLATE.format(hash=map_hash, type=file_type)
            if not os.path.exists(cache_path) and url not in failed:
                mkdirs(os.path.dirname(cache_path))
                print(url)
                try:
                    urllib.request.urlretrieve(url, cache_path)
                except urllib.error.HTTPError as e:
                    print("Download failed:", e)
                    failed.add(url)
                else:
                    downloaded += 1
    return downloaded
def insertIntoGoogleSheet(): sheet = getGoogleSheetObject() # iterating through all the directories specified in config for directory in config.replaysDirectories: replays = getListOfReplayNames(directory) # iterating through all the replays in specified directories for replay in replays: archive = mpyq.MPQArchive(replay) replayInfo = ReplayInfo(archive) playerIndex = replayInfo.getPlayerIndex() oppIndex = replayInfo.getOpponentIndex() date, time = replayInfo.getDateAndTime() archive = '' if(replayInfo.getPlayerCount() == 2 and replayInfo.getPlayerHighestLeague(oppIndex) != "VSAI"): print("INSERTING " + replay) sheet.append_row([ resultAsString(replayInfo.didPlayerWin(playerIndex)), replayInfo.getMatchup(), replayInfo.getMapName(), replayInfo.getDuration(), replayInfo.getPlayerMMR(playerIndex), str(replayInfo.getPlayerMMR(oppIndex)), replayInfo.getPlayerName(playerIndex), replayInfo.getPlayerName(oppIndex), replayInfo.getPlayerHighestLeague(oppIndex), date, time, ]) renameAndMoveReplays(directory, replay, replayInfo.didPlayerWin(playerIndex), replayInfo.getMatchup(), str(replayInfo.getPlayerMMR(oppIndex)), replayInfo.getMapName(), date, time, replayInfo.getDuration())
def get_already_played(self):
    """Walk the replay folder and tally playtime for today's and this
    week's replays via per-replay worker processes.

    :return: the shared replay_stats dict after all workers have joined
    """
    rs = replay_stats()
    processes = []
    for files in listdir(self.replay_folder):
        if (path.splitext(files)[1] == '.SC2Replay'):
            archive = mpyq.MPQArchive(self.replay_folder + '\\' + files)
            # NOTE(review): str() on the header bytes yields "b'...'" -- the
            # decoder presumably expects the raw bytes; confirm this works.
            contents = str(archive.header['user_data_header']['content'])
            #figure out build version of replay
            header = versions.latest().decode_replay_header(contents)
            baseBuild = header['m_version']['m_baseBuild']
            protocol = versions.build(baseBuild)
            #decode game events
            contents = archive.read_file('replay.details')
            details = protocol.decode_replay_details(contents)
            # m_timeUTC is in Windows FILETIME units (100 ns ticks since
            # 1601); 11644473600 s converts to the Unix epoch, and the
            # local offset shifts the result into local time.
            datetime_of_replay = datetime.utcfromtimestamp(
                ((details['m_timeUTC']) / (10000000) - 11644473600 +
                 ((details['m_timeLocalOffset']) / 10000000)))
            # Default is a no-op Process for replays outside today/this week.
            p1 = Process()
            if (datetime_of_replay.date() == datetime.today().date()):
                p1 = Process(target=self.minutes_in_replay,
                             args=((self.replay_folder + '\\' + files),
                                   rs.replay_stats_dict, True))
            #start of week credit https://stackoverflow.com/questions/39441639/getting-the-date-of-the-first-day-of-the-week?rq=1
            elif (datetime_of_replay.date() >= (datetime.today() - timedelta(
                    days=datetime.today().isoweekday() % 7)).date()):
                p1 = Process(target=self.minutes_in_replay,
                             args=((self.replay_folder + '\\' + files),
                                   rs.replay_stats_dict))
            processes.append(p1)
            p1.start()
    # Wait for every worker before reading the shared stats.
    for process in processes:
        process.join()
    return rs.replay_stats_dict
def main():
    """Parse CLI arguments, open the given replay, and decode its game events."""
    parser = argparse.ArgumentParser()
    parser.add_argument('replay_file',
                        help='.SC2Replay file to load',
                        nargs='?')
    args = parser.parse_args()
    # Check/test the replay file
    if args.replay_file is None:
        # BUG FIX: `print(sys.stderr, msg)` printed the stderr object to
        # stdout; `file=` actually routes the message to stderr.
        print(".S2Replay file not specified", file=sys.stderr)
        sys.exit(1)
    archive = mpyq.MPQArchive(args.replay_file)
    # HEADER
    # contents = archive.header['user_data_header']['content']
    # header = versions.latest().decode_replay_header(contents)
    contents = read_contents(archive, 'replay.game.events')
    # NOTE(review): decode_replay_game_events yields events; indexing the
    # result with 'm_playerList' looks suspect -- confirm against s2protocol.
    details = versions.latest().decode_replay_game_events(contents)
    for x in details['m_playerList']:
        print('hello')
if 'crdownload' not in file_name: break loopCount += 1 if loopCount == 16: browser.get(curMapLink + str(j)) continue fileNameList.append(file_name) # browser.get(curMapLink+str(j)) print(fileNameList) for fileName in fileNameList: archive = mpyq.MPQArchive(getcwd() + '\\TempReplays\\' + fileName) print(archive.files) contents = archive.header['user_data_header']['content'] header = versions.latest().decode_replay_header(contents) baseBuild = header['m_version']['m_baseBuild'] try: protocol = versions.build(baseBuild) analyze_sentiments(archive, protocol, races) except ImportError as err: print(err.args) # contents = archive.read_file('replay.initData') # lobbyDetails = protocol.decode_replay_initdata(contents)
def __init__(self, replay_file, filename=None, load_level=4, engine=sc2reader.engine, **options):
    """Construct a Replay, loading progressively more data as load_level rises.

    Levels: 0 = header only, 1 = details/initData/attributes, 2 = messages
    and players, 3 = tracker events, 4 = game events. When an engine is
    supplied the fully loaded replay is run through it at the end.

    :param replay_file: file-like object for the .SC2Replay archive
    :param filename: optional original filename, passed to the base class
    :param load_level: how much of the replay to decode (0-4)
    :param engine: event engine to run the replay through, or falsy to skip
    """
    super(Replay, self).__init__(replay_file, filename, **options)
    self.datapack = None
    self.raw_data = dict()

    # The current load level of the replay
    self.load_level = None

    #default values, filled in during file read
    self.player_names = list()
    self.other_people = set()
    self.speed = ""
    self.type = ""
    self.game_type = ""
    self.real_type = ""
    self.category = ""
    self.is_ladder = False
    self.is_private = False
    self.map = None
    self.map_hash = ""
    self.gateway = ""
    self.events = list()
    self.events_by_type = defaultdict(list)
    self.teams, self.team = list(), dict()
    self.player = utils.PersonDict()
    self.observer = utils.PersonDict()
    self.human = utils.PersonDict()
    self.computer = utils.PersonDict()
    self.entity = utils.PersonDict()
    self.players = list()
    self.observers = list()  # Unordered list of Observer
    self.humans = list()
    self.computers = list()
    self.entities = list()
    self.attributes = defaultdict(dict)
    self.messages = list()
    self.recorder = None  # Player object
    self.packets = list()
    self.objects = {}
    self.active_units = {}
    self.game_fps = 16.0
    self.tracker_events = list()
    self.game_events = list()

    # Bootstrap the readers.
    self.registered_readers = defaultdict(list)
    self.register_default_readers()

    # Bootstrap the datapacks.
    self.registered_datapacks = list()
    self.register_default_datapacks()

    # Unpack the MPQ and read header data if requested
    # Since the underlying traceback isn't important to most people, don't expose it in python2 anymore
    if load_level >= 0:
        self.load_level = 0
        try:
            self.archive = mpyq.MPQArchive(replay_file, listfile=False)
        except Exception as e:
            raise exceptions.MPQError("Unable to construct the MPQArchive", e)
        header_content = self.archive.header['user_data_header']['content']
        header_data = BitPackedDecoder(header_content).read_struct()
        # header_data[1] holds the version fields; [3] the frame count.
        self.versions = list(header_data[1].values())
        self.frames = header_data[3]
        self.build = self.versions[4]
        self.base_build = self.versions[5]
        self.release_string = "{0}.{1}.{2}.{3}".format(*self.versions[1:5])
        self.game_length = utils.Length(seconds=self.frames / 16)
        self.length = self.real_length = utils.Length(
            seconds=int(self.frames / self.game_fps))

    # Load basic details if requested
    if load_level >= 1:
        self.load_level = 1
        for data_file in [
                'replay.initData', 'replay.details',
                'replay.attributes.events'
        ]:
            self._read_data(data_file, self._get_reader(data_file))
        self.load_details()
        self.datapack = self._get_datapack()

        # Can only be effective if map data has been loaded
        if options.get('load_map', False):
            self.load_map()

    # Load players if requested
    if load_level >= 2:
        self.load_level = 2
        for data_file in ['replay.message.events']:
            self._read_data(data_file, self._get_reader(data_file))
        self.load_message_events()
        self.load_players()

    # Load tracker events if requested
    if load_level >= 3:
        self.load_level = 3
        for data_file in ['replay.tracker.events']:
            self._read_data(data_file, self._get_reader(data_file))
        self.load_tracker_events()

    # Load events if requested
    if load_level >= 4:
        self.load_level = 4
        for data_file in ['replay.game.events']:
            self._read_data(data_file, self._get_reader(data_file))
        self.load_game_events()

    # Run this replay through the engine as indicated
    if engine:
        engine.run(self)
def download(key, secret, version, replays_dir, download_dir, extract=False,
             remove=False, filter_version='keep'):
    """Download Blizzard replay packs for a client version and optionally
    extract them and filter the extracted replays by game version.

    :param key: bnet API OAuth client key
    :param secret: bnet API OAuth client secret
    :param version: client version string used to search and filter
    :param replays_dir: directory replays are extracted into
    :param download_dir: directory the zip archives are downloaded into
    :param extract: unzip each archive after download
    :param remove: delete the archive after extraction
    :param filter_version: 'keep' (no filtering), 'sort' (move replays into
        per-version subdirectories), or 'delete' (remove non-matching ones)
    """
    # Get OAuth token from us region
    api = BnetAPI(key, secret)
    # Get meta file infos for the give client version
    print('Searching replay packs with client version:', version)
    meta_file_urls = api.search_by_client_version(version)
    if len(meta_file_urls) == 0:
        sys.exit('No matching replay packs found for the client version!')
    # Download replay packs.
    download_base_url = api.get_base_url()
    print('Found {} replay packs'.format(len(meta_file_urls)))
    print('Downloading to:', download_dir)
    print('Extracting to:', replays_dir)
    mkdirs(download_dir)
    for i, meta_file_url in enumerate(sorted(meta_file_urls), 1):
        # Construct full url to download replay packs
        meta_file_info = api.get(meta_file_url)
        archive_url = requests.compat.urljoin(download_base_url,
                                              meta_file_info['path'])
        print_part('{}/{}: {} ... '.format(i, len(meta_file_urls),
                                           archive_url))
        file_name = archive_url.split('/')[-1]
        file_path = os.path.join(download_dir, file_name)
        with requests.get(archive_url, stream=True) as response:
            print_part(
                int(response.headers['Content-Length']) // 1024**2,
                'Mb ... ')
            # Skip the download when a complete copy already exists locally.
            if (not os.path.exists(file_path) or
                    os.path.getsize(file_path) != int(
                        response.headers['Content-Length'])):
                with open(file_path, 'wb') as f:
                    shutil.copyfileobj(response.raw, f)
                print_part('downloaded')
            else:
                print_part('found')
        if extract:
            print_part(' ... extracting')
            if os.path.getsize(file_path) <= 22:  # Size of an empty zip file.
                print_part(' ... zip file is empty')
            else:
                # The replay packs are password-protected with the EULA string.
                subprocess.call([
                    'unzip', '-P', 'iagreetotheeula', '-u', '-o', '-q', '-d',
                    replays_dir, file_path
                ])
            if remove:
                os.remove(file_path)
        print()
    # Optional post-pass: classify every extracted replay by game version.
    if mpyq is not None and filter_version != 'keep':
        print('Filtering replays.')
        found_versions = collections.defaultdict(int)
        found_str = lambda: ', '.join(
            '%s: %s' % (v, c) for v, c in sorted(found_versions.items()))
        all_replays = [
            f for f in os.listdir(replays_dir) if f.endswith('.SC2Replay')
        ]
        for i, file_name in enumerate(all_replays):
            if i % 100 == 0:
                # Progress line, rewritten in place via carriage return.
                print_part('\r%s/%s: %d%%, found: %s' %
                           (i, len(all_replays), 100 * i / len(all_replays),
                            found_str()))
            file_path = os.path.join(replays_dir, file_name)
            with open(file_path, "rb") as fd:
                try:
                    archive = mpyq.MPQArchive(fd).extract()
                except KeyboardInterrupt:
                    return
                except:
                    # Unreadable archive: count and delete it.
                    found_versions['corrupt'] += 1
                    os.remove(file_path)
                    continue
            metadata = json.loads(
                archive[b'replay.gamemetadata.json'].decode('utf-8'))
            game_version = '.'.join(metadata['GameVersion'].split('.')[:-1])
            found_versions[game_version] += 1
            if filter_version == 'sort':
                version_dir = os.path.join(replays_dir, game_version)
                if found_versions[
                        game_version] == 1:  # First one of this version.
                    mkdirs(version_dir)
                os.rename(file_path, os.path.join(version_dir, file_name))
            elif filter_version == 'delete':
                if game_version != version:
                    os.remove(file_path)
        print('\nFound replays:', found_str())