def parse_replay(
        self,
        replay_to_parse_path,
        preserve_upload_date: bool = False,
        # url parameters
        # NOTE(review): `any` here is the builtin function, not typing.Any —
        # it "works" only because annotations are never checked at runtime;
        # likely meant Dict[str, Any]. Left unchanged (typing.Any may not be imported).
        query_params: Dict[str, any] = None,
        # test parameters
        force_reparse: bool = False) -> str:
    """
    Parse a single replay file into protobuf data and hand it off for processing/saving.

    :param self: Task/worker instance (unused directly here; presumably a bound task — TODO confirm).
    :param replay_to_parse_path: the path to the replay that is being parsed.
    :param query_params: The arguments from the url
    :param preserve_upload_date: If true the upload date is retained
    :param force_reparse: if true parsing will happen even if a file already exists.
    :return: The replay ID, or None when parsing is skipped or fails
             (despite the declared ``-> str`` — callers must tolerate None).
    """
    # The parsed output lives in the default parse folder under the replay's own filename.
    parsed_data_path = os.path.join(FileManager.get_default_parse_folder(),
                                    os.path.basename(replay_to_parse_path))
    # Failed replays are quarantined in a sibling 'failed' directory next to the parse folder.
    failed_dir = os.path.join(
        os.path.dirname(FileManager.get_default_parse_folder()), 'failed')
    # Todo preparse replay ID here to save on extra parsing and on locks. (remember to delete locks older than 1 day)
    # Skip work entirely if this replay was already parsed, unless a reparse is forced.
    if os.path.isfile(parsed_data_path) and not force_reparse:
        return
    analysis_manager = parse_replay_wrapper(replay_to_parse_path,
                                            parsed_data_path,
                                            failed_dir,
                                            force_reparse,
                                            logger,
                                            query_params)
    # The wrapper returns None on parse failure (it handles moving the file to failed_dir).
    if analysis_manager is None:
        return
    # success!
    proto_game = analysis_manager.protobuf_game
    # Older replays may lack a match GUID; fall back to the replay's own id so
    # downstream processing always has a non-empty match_guid.
    if proto_game.game_metadata.match_guid is None or proto_game.game_metadata.match_guid == '':
        proto_game.game_metadata.match_guid = proto_game.game_metadata.id
    parsed_replay_processing(proto_game, query_params, preserve_upload_date=preserve_upload_date)
    # NOTE(review): parsed_data_path is passed for both the third and fourth
    # arguments of save_replay — confirm this duplication is intentional.
    return save_replay(proto_game, replay_to_parse_path, parsed_data_path, parsed_data_path)
def api_v1_get_stats(session=None):
    """Return basic stats: game rows in the DB and parsed replay files on disk.

    :param session: DB session (presumably injected by a decorator — a call
                    with the default ``None`` would raise; TODO confirm).
    :return: Flask JSON response ``{'db_count': ..., 'count': ...}``.
    """
    # TODO: stats?
    db_count = session.query(Game).count()
    parse_folder = FileManager.get_default_parse_folder()
    # Count parsed ('pts'-suffixed) files lazily instead of building a
    # throwaway list just to take its len().
    file_count = sum(1 for f in os.listdir(parse_folder) if f.endswith('pts'))
    return jsonify({'db_count': db_count, 'count': file_count})
def api_v1_download_parsed(fn):
    """Serve a single parsed replay file from the parse folder as an attachment.

    :param fn: Filename within the parse folder (bound from the URL rule).
    :return: Flask file-download response.
    """
    # send_from_directory performs its own path-traversal safety checks on fn.
    parse_folder = FileManager.get_default_parse_folder()
    return send_from_directory(parse_folder, fn, as_attachment=True)
def api_v1_list_parsed_replays():
    """Return a JSON array of every filename in the default parse folder.

    :return: Flask JSON response listing directory entries (unordered, unfiltered).
    """
    return jsonify(os.listdir(FileManager.get_default_parse_folder()))