def fetch_replays(replayfilters=None, tagfilters=None, playerfilters=None, groupfilters=None, queue=None):
    """Fetch replay rows from the database and stream them onto *queue*.

    When any filter dict is non-empty, ``DB_Manager.filter_replays`` is used;
    otherwise every replay is fetched, newest first.  Each queued item is a
    tuple ``(replay_row, is_in_demo_folder)``, terminated by a ``QueueOp.STOP``
    sentinel.  A tracked replay file that has gone missing is restored from
    the backup folder before being queued.

    NOTE(review): the original signature used mutable ``{}`` defaults, which
    are shared across calls — replaced with ``None`` sentinels (backward
    compatible).  ``queue`` still defaults to ``None`` for interface
    compatibility, but a real queue must be supplied or ``queue.put`` raises.
    """
    replayfilters = replayfilters or {}
    tagfilters = tagfilters or {}
    playerfilters = playerfilters or {}
    groupfilters = groupfilters or {}
    logger.info("Fetching replays")
    with DB_Manager() as mann:
        if replayfilters or tagfilters or playerfilters or groupfilters:
            replays = mann.filter_replays(replayfilters, tagfilters, playerfilters, groupfilters)
            logger.debug("Fetched replays from database with parameters %s %s %s %s",
                         replayfilters, tagfilters, playerfilters, groupfilters)
        else:
            replays = mann.get_all("replays", "date_time desc")
            logger.debug("Fetched all replays (paramless)")
    for replay in replays:
        # replay[1] is the replay's filename stem; restore a missing tracked
        # copy from backup before reporting it.
        if not os.path.isfile(rl_paths.tracked_folder(replay[1])) and os.path.isfile(rl_paths.backup_folder(replay[1])):
            shutil.copy2(rl_paths.backup_folder(replay[1]), rl_paths.tracked_folder(replay[1]))
            logger.info("Restored missing replay %s from backup", replay[1])
        queue.put((replay, os.path.isfile(rl_paths.demo_folder(replay[1]))))
    logger.info("Inserted replays into tracked_replay_list")
    print("Fetch replays done")
    queue.put(QueueOp.STOP)
def copy_to_staging(variables, queue):
    """Make sure the replay named by ``variables[1]`` exists in the demo,
    tracked and backup folders, copying it from whichever location already
    holds a copy, then signal completion with ``QueueOp.STOP``."""
    name = variables[1]
    demo = rl_paths.demo_folder(name)
    tracked = rl_paths.tracked_folder(name)
    backup = rl_paths.backup_folder(name)
    if not os.path.isfile(demo):
        shutil.copy2(tracked, demo)
        logger.info("Copied %s to demo_folder", name)
    if not os.path.isfile(tracked):
        shutil.copy2(demo, tracked)
        logger.info("Copied %s to tracked folder", name)
    if not os.path.isfile(backup):
        shutil.copy2(demo, backup)
        logger.info("Copied %s to backup folder", name)
    queue.put(QueueOp.STOP)
def dump_to_zip(jsondata):
    """Archive *jsondata* (a list of replay dicts) into a timestamped zip.

    The archive, written to the working directory as
    ``RL_YYYY-MM-DD_HH-MM-SS.zip``, contains ``data.json`` (the compact,
    sorted JSON dump) plus each referenced ``<filename>.replay`` file taken
    from the tracked folder.

    Raises:
        ValueError: if *jsondata* is not a list.
    """
    # Validate BEFORE opening the archive: the original raised only after
    # creating the zip and writing data.json, leaving a partial archive on
    # disk for bad input.  Also use isinstance rather than type() == list.
    if not isinstance(jsondata, list):
        raise ValueError("jsondata must be list of dicts")
    archive_name = "RL_" + time.strftime("%Y-%m-%d_%H-%M-%S") + ".zip"
    with zipfile.ZipFile(archive_name, "w") as fzip:
        fzip.writestr("data.json", json.dumps(jsondata, separators=(",", ":"), sort_keys=True, indent=1))
        for replay in jsondata:
            fzip.write(rl_paths.tracked_folder(replay['filename']), replay['filename'] + ".replay")
    print("Created zipfile")
def delete_tracked_replay(self):
    """Delete the currently selected replay everywhere it is recorded:
    the UI list, the database, and the tracked folder on disk.  Clears the
    info panel when the list becomes empty."""
    row = self.tracked_replays.get_variables(self.tracked_replays.selected_item)
    self.tracked_replays.delete_selected()
    logger.debug("DELETING: %s", row)
    with DB_Manager() as dmann:
        dmann.delete_replay(row[0])
    os.remove(rl_paths.tracked_folder(row[1]))
    if not self.tracked_replays.size():
        self.info.clear()
    logger.info("Deleted replay")
def restore(var):
    """Restore the demo folder to its pre-tracking state.

    Copies every file in the backup folder back into the demo folder (by
    filename stem), deletes the tracked and untracked folders, and reports
    completion through *var* (a UI string variable)."""
    print("Restoring demo folder")
    logger.info("Restoring demo folder to its original state")
    logger.info("Path to backups: %s", rl_paths.backup_folder())
    logger.debug("Files there: %s", os.listdir(rl_paths.backup_folder()))
    for entry in os.listdir(rl_paths.backup_folder()):
        stem, _ext = os.path.splitext(entry)
        source = rl_paths.backup_folder(stem)
        target = rl_paths.demo_folder(stem)
        shutil.copy2(source, target)
        logger.debug("Copied from %s to %s", source, target)
    shutil.rmtree(rl_paths.tracked_folder())
    shutil.rmtree(rl_paths.untracked_folder())
    logger.info("Removed tracked and untracked folder")
    logger.info("Restore Complete!")
    var.set("Demo folder restored!")
def import_zip(path_to_zip, progressQueue=None):
    """Import replays from an archive produced by ``dump_to_zip``.

    Reads ``data.json`` from the zip, extracts each referenced
    ``<filename>.replay`` into the tracked folder, and registers each entry
    in the database via ``DB_Manager.add_import``.  Progress strings are
    pushed onto *progressQueue* when one is supplied.

    NOTE(review): removed leftover debug output (``print d`` / ``print
    "derp"``) and the dead ``data = {}`` initializer the original carried.
    """
    with zipfile.ZipFile(path_to_zip, "r") as fzip:
        with fzip.open("data.json", "r") as fdata:
            data = json.loads(fdata.read())
        if progressQueue:
            progressQueue.put("Loaded data")
        for d in data:
            fzip.extract(d['filename'] + ".replay", rl_paths.tracked_folder())
            if progressQueue:
                progressQueue.put("Extracted:" + d['filename'])
            with DB_Manager() as dmann:
                dmann.add_import(d)
            if progressQueue:
                progressQueue.put("Loaded data from:" + d['filename'])