def replay():
    match_id = request.args.get('match_id')
    profile_id = request.args.get('profile_id')
    url = 'https://aoe.ms/replay/?gameId={}&profileId={}'.format(match_id, profile_id)
    try:
        filename = aoeapi.download_rec(url, 'recs')
    except (aoeapi.AoeApiError, RuntimeError):
        print('could not download valid rec:', match_id, '-', profile_id)
        abort(404)  # raises HTTPException, so no return is needed
    print('recs/' + filename)
    start = time.time()
    with open('recs/' + filename, 'rb') as handle:
        data = handle.read()
    handle = io.BytesIO(data)
    summary = mgz.summary.Summary(handle, None)
    end = time.time()
    print('process', end - start, 's')
    os.remove('recs/' + filename)
    map_data = summary.get_map()  # renamed from `map` to avoid shadowing the builtin
    del map_data['tiles']  # tiles are too large to return inline (~2MB)
    return json.dumps({
        "players": summary.get_players(),
        "completed": summary.get_completed(),
        "chat": summary.get_chat(),
        "dataset": summary.get_dataset(),
        "diplomacy": summary.get_diplomacy(),
        "duration": summary.get_duration(),
        "encoding": summary.get_encoding(),
        "file_hash": summary.get_file_hash(),
        "language": summary.get_language(),
        "mirror": summary.get_mirror(),
        "owner": summary.get_owner(),
        "platform": summary.get_platform(),
        "postgame": summary.get_postgame(),  # null
        "teams": summary.get_teams(),
        "start_time": summary.get_start_time(),  # 0
        "restored": summary.get_restored(),
        "ratings": summary.get_ratings(),  # empty
        "profile_ids": summary.get_profile_ids(),
        "map": map_data,
    }, default=str)
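# Example (sketch, not part of the app): fetching a parsed replay from this
# handler. It assumes the handler is registered at a /replay route on a local
# dev server; the route path, host, port, and IDs below are illustrative
# assumptions.
#
#   import requests
#
#   resp = requests.get(
#       'http://localhost:5000/replay',
#       params={'match_id': '12345', 'profile_id': '67890'})
#   resp.raise_for_status()
#   data = resp.json()
#   print(data['duration'], data['diplomacy'])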
def won():
    match_id = request.args.get('match_id')
    try:
        match = aoeapi.get_match(match_id)
    except aoeapi.AoeApiError:
        raise RuntimeError('could not get match')
    players = match['players']
    for player in players:
        if not player['url']:
            continue
        try:
            start = time.time()
            filename = aoeapi.download_rec(player['url'], 'recs')
            end = time.time()
            # print('download', end - start, 's')
        except aoeapi.AoeApiError:
            print('could not download valid rec:', match_id)
            continue
        except RuntimeError:
            raise RuntimeError('could not download valid rec: %s' % match_id)
        print('recs/' + filename)
        start = time.time()
        with open('recs/' + filename, 'rb') as handle:
            data = handle.read()
        handle = io.BytesIO(data)
        summary = mgz.summary.Summary(handle, None)
        end = time.time()
        print('process', end - start, 's')
        # Each entry from get_players() includes a 'winner' flag:
        # for player2 in summary.get_players():
        #     print(player2['name'] + ': ' + str(player2['winner']))
        os.remove('recs/' + filename)
        return jsonify({
            "players": summary.get_players(),
            "completed": summary.get_completed(),
            "chat": summary.get_chat(),
            "dataset": summary.get_dataset(),
            "diplomacy": summary.get_diplomacy(),
            "duration": summary.get_duration(),
            "encoding": summary.get_encoding(),
            "file_hash": summary.get_file_hash(),
            "language": summary.get_language(),
            "mirror": summary.get_mirror(),
            "owner": summary.get_owner(),
            "platform": summary.get_platform(),
            "postgame": summary.get_postgame(),  # null
            "teams": summary.get_teams(),
            "start_time": summary.get_start_time(),  # 0
            "restored": summary.get_restored(),
            "ratings": summary.get_ratings(),  # empty
            "profile_ids": summary.get_profile_ids(),
        })
        # Excluded fields: get_hash(), get_header(), and get_version() are not
        # JSON-serializable; get_objects() (~80KB) and get_map() (~2.1MB) are
        # too large to return inline.
    abort(404)
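# Sketch: extracting winner names from the parsed summary, assuming each dict
# returned by get_players() carries 'name' and 'winner' keys, as the
# commented-out debug code above suggests:
#
#   winners = [p['name'] for p in summary.get_players() if p.get('winner')]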
map_data = summary.get_map()  # renamed from `map` to avoid shadowing the builtin
del map_data['tiles']  # tiles are too large to emit (~2MB)
sys.stdout = old_stdout  # restore stdout (assumes it was redirected earlier to silence parser output)
print(
    json.dumps(
        {
            "players": summary.get_players(),
            "completed": summary.get_completed(),
            "chat": summary.get_chat(),
            "dataset": summary.get_dataset(),
            "diplomacy": summary.get_diplomacy(),
            "duration": summary.get_duration(),
            "encoding": summary.get_encoding(),
            "file_hash": summary.get_file_hash(),
            "language": summary.get_language(),
            "mirror": summary.get_mirror(),
            "owner": summary.get_owner(),
            "platform": summary.get_platform(),
            "postgame": summary.get_postgame(),  # null
            "teams": summary.get_teams(),
            "start_time": summary.get_start_time(),  # 0
            "restored": summary.get_restored(),
            "ratings": summary.get_ratings(),  # empty
            "profile_ids": summary.get_profile_ids(),
            # "operations": summary.get_operations(),
            "map": map_data,
        },
        default=str))
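# Minimal standalone parsing sketch, assuming a local rec file; the filename
# is illustrative, and the second Summary argument (playback) is left as None,
# matching the calls above:
#
#   import io
#   import mgz.summary
#
#   with open('example.aoe2record', 'rb') as f:
#       summary = mgz.summary.Summary(io.BytesIO(f.read()), None)
#   print(summary.get_duration())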
def add_file(  # pylint: disable=too-many-return-statements, too-many-branches
        self, rec_path, reference, series_name=None, series_id=None,
        platform_id=None, platform_match_id=None, platform_metadata=None,
        played=None, ladder=None, user_data=None
):
    """Add a single mgz file."""
    start = time.time()
    if not os.path.isfile(rec_path):
        LOGGER.error("%s is not a file", rec_path)
        return False, 'Not a file'
    original_filename = os.path.basename(rec_path)
    modified = datetime.fromtimestamp(os.path.getmtime(rec_path))
    with open(rec_path, 'rb') as handle:
        data = handle.read()
    try:
        handle = io.BytesIO(data)
        playback = self.playback
        if rec_path.endswith('aoe2record') and os.path.exists(
                rec_path.replace('.aoe2record', '.json')):
            playback = open(rec_path.replace('.aoe2record', '.json'))
        summary = mgz.summary.Summary(handle, playback)
        # Hash against body only because header can vary based on compression
        file_hash = summary.get_file_hash()
        log_id = file_hash[:LOG_ID_LENGTH]
        LOGGER.info("[f:%s] add started", log_id)
    except RuntimeError as error:
        LOGGER.error("[f] invalid mgz file: %s", str(error))
        return False, 'Invalid mgz file'
    except LookupError:
        LOGGER.error("[f] unknown encoding")
        return False, 'Unknown text encoding'
    except ValueError as error:
        LOGGER.error("[f] error: %s", error)
        return False, error
    existing_match_id = file_exists(self.session, file_hash, series_name, series_id, modified)
    if existing_match_id:
        LOGGER.warning("[f:%s] file already exists (%d)", log_id, existing_match_id)
        #self._handle_file(file_hash, data, Version(summary.get_version()[0]))
        return None, existing_match_id
    try:
        encoding = summary.get_encoding()
    except ValueError as error:
        LOGGER.error("[f] error: %s", error)
        return False, error
    match_hash_obj = summary.get_hash()
    if not match_hash_obj:
        LOGGER.error("[f:%s] not enough data to calculate safe match hash", log_id)
        return False, 'Not enough data to calculate safe match hash'
    match_hash = match_hash_obj.hexdigest()
    build = None
    try:
        if not platform_match_id and summary.get_platform()['platform_match_id']:
            platform_match_id = summary.get_platform()['platform_match_id']
        where = (Match.hash == match_hash)
        if platform_match_id:
            where |= (Match.platform_match_id == platform_match_id)
        match = self.session.query(Match).filter(where).one()
        LOGGER.info("[f:%s] match already exists (%d); appending", log_id, match.id)
        series_match_exists(self.session, match, series_id, series_name)
    except MultipleResultsFound:
        LOGGER.error("[f:%s] mismatched hash and match id: %s, %s",
                     log_id, match_hash, platform_match_id)
        return False, 'Mismatched hash and match id'
    except NoResultFound:
        LOGGER.info("[f:%s] adding match", log_id)
        parsed_played, build = parse_filename(original_filename)
        if not played:
            played = parsed_played
        try:
            match, message = self._add_match(
                summary, played, match_hash, user_data, series_name, series_id,
                platform_id, platform_match_id, platform_metadata, ladder, build)
            if not match:
                return False, message
            self._update_match_users(platform_id, match.id, user_data)
            self._update_match_hd(match)
            self._update_match_dashboard(match, user_data)
        except IntegrityError:
            LOGGER.error("[f:%s] constraint violation: could not add match", log_id)
            return False, 'Failed to add match'
        except MemoryError:
            LOGGER.error("[f:%s] compression failed", log_id)
            return False, 'Compression failed'
    try:
        compressed_filename, compressed_size = self._handle_file(
            file_hash, data, Version(match.version_id))
    except MemoryError:
        LOGGER.error("[f:%s] compression failed", log_id)
        return False, 'Compression failed'
    try:
        new_file = get_unique(
            self.session, File, ['hash'],
            filename=compressed_filename,
            original_filename=original_filename,
            hash=file_hash,
            size=summary.size,
            modified=modified,
            compressed_size=compressed_size,
            encoding=encoding,
            language=summary.get_language(),
            reference=reference,
            match=match,
            owner_number=summary.get_owner(),
            parser_version=pkg_resources.get_distribution('mgz').version)
        self.session.add(new_file)
        self.session.commit()
    except RuntimeError:
        LOGGER.error("[f:%s] unable to add file, likely hash collision", log_id)
        return False, 'File hash collision'
    LOGGER.info(
        "[f:%s] add finished in %.2f seconds, file id: %d, match id: %d",
        log_id, time.time() - start, new_file.id, match.id)
    return file_hash, match.id
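# Example (sketch): invoking add_file, assuming it is a method on an API-style
# class with a SQLAlchemy session (self.session) and a playback attribute, as
# the body above implies; the class name, constructor arguments, and paths are
# illustrative assumptions, not the actual API:
#
#   api = API('sqlite:///mgz.db', 'store/')
#   result, detail = api.add_file('recs/example.aoe2record', reference='manual')
#   if result is False:
#       print('failed:', detail)   # detail is an error message
#   elif result is None:
#       print('duplicate of match', detail)
#   else:
#       print('added file', result, 'as match', detail)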