def import_data(self):
    """Prompt the user for a replay file and load it into the viewer.

    Accepts raw Rocket League ``.replay`` files (header parsed
    immediately, netstream decoding offered via a dialog) and
    pre-parsed ``.pyrope`` pickle files.
    """
    home = path.expanduser('~')
    # Default to the game's own replay folder; fall back to the home
    # directory when it does not exist on this machine.
    # NOTE: the original string had a stray backslash ('\\\Rocket') that
    # produced a doubled separator; path.join builds the path correctly.
    replay_folder = path.join(home, 'Documents', 'My Games',
                              'Rocket League', 'TAGame', 'Demos')
    if not path.isdir(replay_folder):
        replay_folder = home
    ext = 'Replay (*.pyrope *.replay)'
    fname = QFileDialog.getOpenFileName(self, 'Load Replay',
                                        replay_folder, ext)
    if fname[0]:
        ext = fname[0].split('.')[-1]
        if ext == 'replay':
            self.replay = Replay(path=fname[0])
            logger.info('Rocket League Replay File loaded and Validated')
            msg = ('Header Parsed. Decode Netstream now?\n'
                   '(This might take a while)')
            question = QMessageBox().question(self, 'Proceed', msg,
                                              QMessageBox.Yes,
                                              QMessageBox.No)
            if question == QMessageBox.Yes:
                self.show_progress()
            else:
                # logger.warn() is deprecated; warning() is the
                # supported API.
                logger.warning(
                    'Netstream not Parsed. Only Metadata for view available'
                )
        elif ext == 'pyrope':
            # Use a context manager so the file handle is closed even
            # if unpickling fails.
            with open(fname[0], 'rb') as pickle_file:
                self.replay = pickle.load(pickle_file)
            logger.info('pyrain Parsed Replay File sucessfully loaded')
            self.netstream_loaded()
        self.meta_tab.set_replay(self.replay)
def __init__(self, file_path=None):
    """Load a replay (cached pickle if available, else parse fresh)
    and collate actor/goal/match data into ``self.json``.

    Args:
        file_path: Path to the ``.replay`` file.  Defaults to the first
            command-line argument when omitted.
    """
    if not file_path:
        file_path = sys.argv[1]
    pickle_path = file_path + '.pickle'
    try:
        # Use context managers so file handles are always closed.
        with open(pickle_path, 'rb') as cached:
            self.replay = pickle.load(cached)
        self.replay_id = self.replay.header['Id']
    except (OSError, pickle.UnpicklingError, EOFError, KeyError):
        # No usable cache (missing/corrupt pickle, or header lacked
        # 'Id'): parse from scratch and cache the result.  The original
        # bare `except:` hid real errors; this catches only the
        # cache-miss cases.
        self.replay = Replay(path=file_path)
        self.replay_id = self.replay.header['Id']
        self.replay.parse_netstream()
        with open(pickle_path, 'wb') as cached:
            pickle.dump(self.replay, cached)

    # Extract the goal information.
    if 'Goals' in self.replay.header:
        for goal in self.replay.header['Goals']:
            self.extract_goal_data(goal['frame'])

    self.get_actors()

    # Iterate over a copy because ball actors are deleted in-place.
    for player in self.actors.copy():
        # Get their position data.
        if self.actors[player]['type'] == 'player':
            self.actors[player]['position_data'] = \
                self.get_player_position_data(player)
        elif self.actors[player]['type'] == 'ball':
            # Merge all ball actors into one synthetic 'ball' entry.
            if 'ball' not in self.actors:
                self.actors['ball'] = {'position_data': {}}
            ball_data = self.get_player_position_data(player)
            self.actors['ball']['position_data'] = {
                **self.actors['ball']['position_data'],
                **ball_data
            }
            del self.actors[player]

    collated_data = {
        'actors': self.actors,
        'goal_metadata': self.goal_metadata,
        'actor_metadata': self.actor_metadata,
        'match_metadata': self.match_metadata,
    }
    self.json = json.dumps(collated_data, indent=2)
    # NOTE(review): this chunk starts mid-branch — the `if` these three
    # statements belong to is outside the visible source.  Downloaded
    # content is spooled into a temp file so the parser can read it by
    # name.
    f.write(r.content)
    f.seek(0)
    file_path = f.name
elif sys.argv[1].startswith('http'):
    # Argument is a URL: download the replay into a temporary file.
    r = requests.get(sys.argv[1], headers=headers)
    f = tempfile.TemporaryFile('wb')
    f.write(r.content)
    f.seek(0)
    file_path = f.name
else:
    # Argument is a local file path.
    file_path = sys.argv[1]

# Parse the replay header and decode the netstream up front.
replay = Replay(path=file_path)
replay_id = replay.header['Id']
replay.parse_netstream()


def _extract_data(replay, replay_analyser, player):
    # Build plot datasets from a single player's position data.
    # `slicing` is hard-coded off, so the whole match is analysed.
    datasets = {}
    lst_plots = []
    slicing = False
    data = replay_analyser.get_actor_pos(player, slicing)
    new_datasets = analyser.AnalyserUtils.filter_coords(
        data, True, True, False)
    for entry in new_datasets:
        if entry['title_short'] in datasets:
            print("Dataset already in Plotlist")
            # NOTE(review): the loop body continues past the visible
            # end of this chunk.
def __init__(self):
    """Parse the replay named on the command line and dump per-frame
    actor position data to ``<file_path>.json``.
    """
    file_path = sys.argv[1]
    pickle_path = file_path + '.pickle'
    try:
        # Use context managers so file handles are always closed.
        with open(pickle_path, 'rb') as cached:
            self.replay = pickle.load(cached)
        self.replay_id = self.replay.header['Id']
    except (OSError, pickle.UnpicklingError, EOFError, KeyError):
        # No usable cache (missing/corrupt pickle, or header lacked
        # 'Id'): parse from scratch and cache the result.  The original
        # bare `except:` hid real errors; this catches only the
        # cache-miss cases.
        self.replay = Replay(path=file_path)
        self.replay_id = self.replay.header['Id']
        self.replay.parse_netstream()
        with open(pickle_path, 'wb') as cached:
            pickle.dump(self.replay, cached)

    self.players = self.get_players()
    self.ballData = self.get_ball_data()

    for player in self.players:
        # Get their position data.
        self.players[player]['position_data'] = \
            self.get_player_position_data(player)

    # Restructure the data so that it's chunkable: one dict per frame,
    # each listing every actor with position data for that frame.
    frame_data = []
    for frame in range(self.replay.header['NumFrames']):
        frame_dict = {
            'time': self.replay.netstream[frame].current,
            'actors': []
        }
        for player in self.players:
            position_data = self.players[player]['position_data']
            if frame in position_data:
                frame_dict['actors'].append({
                    'id': player,
                    'type': 'car',
                    **position_data[frame]
                })
        if frame in self.ballData:
            frame_dict['actors'].append({
                'id': 'ball',
                'type': 'ball',
                **self.ballData[frame]
            })
        frame_data.append(frame_dict)

    # Sanity check: one output entry per frame in the header.
    assert len(frame_data) == self.replay.header['NumFrames'], \
        "Missing {} frames from data output.".format(
            self.replay.header['NumFrames'] - len(frame_data))

    # Report the coordinate bounds for each axis (debug aid).
    for axis in ['x', 'y', 'z']:
        values = [
            actor[axis]
            for frame in frame_data
            for actor in frame['actors']
        ]
        print(axis, min(values), max(values))

    # Write via a context manager so the output file is flushed/closed.
    with open(file_path + '.json', 'w') as out:
        json.dump(frame_data, out, indent=2)
def __init__(self, file_path, parse_netstream=False, obj=None):
    """Parse a Rocket League replay and persist derived JSON data.

    Parses the header (and optionally the netstream), extracts goal /
    actor / team metadata, writes a compressed per-player heatmap JSON
    to ``default_storage``, and — when the replay is eligible — a full
    per-frame location JSON as well.

    Args:
        file_path: Value handed straight to ``Replay`` (presumably a
            path or file contents — TODO confirm against ``Replay``).
        parse_netstream: When True, decode the netstream (result cached
            as a pickle in storage) and produce the JSON outputs.
        obj: Object providing ``eligble_for_analysis()``; consulted
            only on the netstream path, so it may be None otherwise.
    """
    self.replay = Replay(file_path)
    self.replay_id = self.replay.header['Id']
    self.actor_metadata = {}
    self.goal_metadata = {}
    self.match_metadata = {}
    self.team_metadata = {}
    self.actors = {}
    self.cars = {}
    self.boost_data = {}
    self.heatmap_json_filename = None
    # Sanity check (trivially true right after initialisation above).
    assert len(self.team_metadata) == 0
    pickle_filename = 'uploads/pickle_files/{}.pickle'.format(
        self.replay_id)
    heatmap_json_filename = 'uploads/replay_json_files/{}.json'.format(
        self.replay_id)
    location_json_filename = 'uploads/replay_location_json_files/{}.json'.format(
        self.replay_id)
    if parse_netstream:
        try:
            # Prefer the cached, already-parsed pickle from storage.
            self.replay = pickle.loads(
                default_storage.open(pickle_filename).read())
        except (FileNotFoundError, OSError, S3ResponseError):
            try:
                # Cache miss: parse the netstream and cache the result.
                self.replay.parse_netstream()
                default_storage.save(
                    pickle_filename, ContentFile(pickle.dumps(self.replay)))
            except FrameParsingError:
                # Bail us out of here early, just provide an 'old school' parse.
                parse_netstream = False
                traceback.print_exc()
    if not parse_netstream:
        return
    self._get_actors()
    # If the number of goals in the header doesn't match the number of goals
    # in the game, try to get the missing goal data from the netstream.
    # Example of a netstream actor entry carrying a team score update:
    """
    ('3e_Team1', {'actor_id': 3, 'actor_type': 'Archetypes.Teams.Team1',
     'data': {'Engine.TeamInfo:Score': 1}, 'new': False,
     'startpos': 2053839}),
    """
    if len(self.replay.header.get('Goals', [])) < self.replay.header.get(
            'Team0Score', 0) + self.replay.header.get('Team1Score', 0):
        for index, frame in self.replay.netstream.items():
            for _, actor in frame.actors.items():
                if 'data' not in actor:
                    continue
                # A team-score change without a game event looks like a
                # goal the header missed (e.g. an own goal).
                if ('Engine.TeamInfo:Score' in actor['data'] and
                        'TAGame.Team_TA:GameEvent' not in actor['data'] and
                        actor['actor_type'].startswith(
                            'Archetypes.Teams.Team')):
                    if 'Goals' not in self.replay.header:
                        self.replay.header['Goals'] = []
                    self.replay.header['Goals'].append({
                        'PlayerName': 'Unknown player (own goal?)',
                        'PlayerTeam': actor['actor_type'].replace(
                            'Archetypes.Teams.Team', ''),
                        'frame': index
                    })
    # Extract the goal information.
    if 'Goals' in self.replay.header:
        for goal in self.replay.header['Goals']:
            self._extract_goal_data(goal['frame'])
    if 'NumFrames' in self.replay.header:
        # Both teams must have been discovered by now.
        assert len(self.team_metadata) == 2
    # Iterate over a copy because ball actors are deleted in-place.
    for player in self.actors.copy():
        # Get their position data.
        if 'type' not in self.actors[player]:
            continue
        if self.actors[player]['type'] == 'player':
            self.actors[player][
                'position_data'] = self._get_player_position_data(player)
        elif self.actors[player]['type'] == 'ball':
            # Merge all ball actors into one synthetic 'ball' entry.
            if 'ball' not in self.actors:
                self.actors['ball'] = {'position_data': {}}
            ball_data = self._get_player_position_data(player)
            self.actors['ball']['position_data'] = {
                **self.actors['ball']['position_data'],
                **ball_data
            }
            del self.actors[player]
    # Compress the location data per (player) actor.
    # Output maps "x,y" strings to the number of frames spent there.
    compressed_data = {}
    for actor in self.actors:
        if 'type' not in self.actors[actor]:
            continue
        if self.actors[actor]['type'] == 'player':
            compressed_data[actor] = {}
            current_key = ''
            key = ''
            keys = self.actors[actor]['position_data'].keys()
            if len(keys) == 0:
                continue
            for frame in range(min(keys), max(keys)):
                if frame in self.actors[actor]['position_data']:
                    data = self.actors[actor]['position_data'][frame]
                    key = '{},{}'.format(data['x'], data['y'])
                # NOTE(review): current_key is never updated, so this
                # branch never fires; the else still counts exactly one
                # frame per iteration, which the assert below relies on.
                if key == current_key:
                    compressed_data[actor][key] += 1
                else:
                    if key not in compressed_data[actor]:
                        compressed_data[actor][key] = 1
                    else:
                        compressed_data[actor][key] += 1
            # Total counted frames must equal the frame span covered.
            assert sum([
                i[1] for i in compressed_data[actor].items()
            ]) == max(self.actors[actor]['position_data'],
                      key=int) - min(
                          self.actors[actor]['position_data'], key=int)
    # Replace any stale heatmap JSON in storage.
    if default_storage.exists(heatmap_json_filename):
        default_storage.delete(heatmap_json_filename)
    heatmap_json_filename = default_storage.save(
        heatmap_json_filename,
        ContentFile(json.dumps(compressed_data, separators=(',', ':'))))
    self.heatmap_json_filename = heatmap_json_filename
    if obj.eligble_for_analysis():
        # Advanced replay parsing.
        # Restructure the data so that it's chunkable: one dict per
        # frame listing every actor with position data for that frame.
        frame_data = []
        for frame in range(self.replay.header['NumFrames']):
            frame_dict = {
                'time': self.replay.netstream[frame].current,
                'actors': []
            }
            for player in self.actors:
                position_data = self.actors[player]['position_data']
                if frame in position_data:
                    frame_dict['actors'].append({
                        'id': player,
                        'type': self.actors[player].get('type', 'ball'),
                        **position_data[frame]
                    })
            frame_data.append(frame_dict)
        # Replace any stale location JSON in storage.
        if default_storage.exists(location_json_filename):
            default_storage.delete(location_json_filename)
        self._get_boost_data()
        self._get_seconds_remaining()
        # pprint(self.boost_data)
        # Strip the bulky per-frame position data from the actor dicts
        # before serialising.  NOTE(review): the del mutates the shared
        # value objects, so self.actors loses 'position_data' too.
        small_actors = {}
        for key, value in self.actors.items():
            small_actors[key] = value
            del small_actors[key]['position_data']
        final_data = {
            'frame_data': frame_data,
            'goals': self.replay.header.get('Goals', []),
            'boost': self.boost_data,
            'seconds_mapping': self.seconds_mapping,
            'actors': self.actors,
            'teams': self.team_metadata
        }
        location_json_filename = default_storage.save(
            location_json_filename,
            ContentFile(json.dumps(final_data, separators=(',', ':'))))
        self.location_json_filename = location_json_filename
def handle(self, *args, **options):
    """Re-link replay rows whose files were uploaded under non-UUID
    names.

    Scans the S3 bucket for replay files whose names are not UUIDs,
    parses each local copy's header to discover its replay ID, then
    prints the ORM statements needed to repair the listed rows.
    """
    # Primary keys of Replay rows whose file references need fixing.
    ids = [
        79, 328, 329, 383, 407, 516, 598, 602, 679, 680, 736, 817, 841,
        842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 857,
        1025, 2138, 2405, 2597, 2713, 2885, 3344, 3987, 4110, 4252, 4653,
        4742, 4743, 4744, 4745, 4746, 5019, 5110, 5216, 5217, 5218, 6007,
        6009, 6026, 6802, 6803, 6804, 6809, 6810, 6811, 6818, 6819, 6820,
        6821, 6822, 6839, 6840, 6842, 6874, 6875, 6876, 6877, 6878, 6879,
        6880, 6881, 6882, 7013, 7209, 7211, 7369, 7630, 8727, 11008,
        11009, 11010, 11011, 11012, 11013, 11014, 11015, 11016, 11017,
        11018, 11019, 11020, 11021, 11022, 11023, 11024, 11025, 11026,
        11027, 11028, 11029, 11030, 11031, 11032, 11033, 11034, 11035,
        11036, 11037, 11038, 11039, 11040, 11041, 11042, 11043, 11044,
        11045, 11046, 11047, 11048, 11049, 11050, 11051, 11052, 11053,
        11054, 11055, 11056, 11057, 11058, 11059, 11060, 11061, 11062,
        11063, 12173, 14799, 16364, 16976, 17166, 17274, 17275, 17276,
        17278, 17281, 21851, 21864, 21919, 22009, 22038, 23336, 23337,
        25799, 27277, 29572, 30730, 31180, 31181, 31182, 31183, 31185,
        31186, 31189, 31190, 31191, 31193, 31194, 31195, 31196, 31197,
        31198, 31199, 31200, 31201, 31202, 31204, 31205, 31206, 31207,
        31208, 31209, 31210, 31211, 31212, 31213, 31214, 31215, 31217,
        31218, 31219, 31220, 31221, 31222, 31223, 31224, 31225, 31226,
        31227, 31228, 32260, 32262, 34471, 36447, 36448, 36676, 36678,
        36679, 36680, 36681
    ]
    # Match the files which aren't UUIDs.  Names matching this pattern
    # ARE UUID-like (32 hex chars with the version nibble '4'); the
    # listing below keeps only names that do NOT match.
    exp = re.compile(
        r'^uploads\/replay_files\/[A-F0-9]{8}4[A-F0-9]{23}\.replay$')
    conn = boto.s3.connect_to_region(
        'eu-west-1',
        aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY'),
        calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket('media.rocketleaguereplays.com')
    s3_files = [
        # Use the compiled pattern's own match() instead of re.match(exp, ...).
        key.name.replace('uploads/replay_files/', '')
        for key in bucket.list(prefix='uploads/replay_files')
        if exp.match(key.name) is None
        and key.name != 'uploads/replay_files/'
    ]

    # Now we need to parse the header for each of these files and determine
    # what their replay ID is.
    replay_mappings = {}
    for s3_file in s3_files:
        local_file_path = os.path.join(settings.MEDIA_ROOT,
                                       'uploads/replay_files', s3_file)
        try:
            # open() is inside the try so a missing local copy is
            # skipped instead of crashing the whole command; the bare
            # `except:` is narrowed to Exception (still best-effort).
            with open(local_file_path, 'rb') as f:
                replay = Replay(f.read())
            replay_mappings[replay.header['Id']] = s3_file
        except Exception:
            continue

    for i in ids:
        r = ReplayModel.objects.get(pk=i)
        if r.replay_id in replay_mappings:
            print(
                'Replay.objects.get(pk={}).update(file="uploads/replay_files/{}")'
                .format(i, replay_mappings[r.replay_id]))
        else:
            print('#', i, 'not found :(')