def cleanup_data(opt):
    """Delete training-data files older than ``keep_time`` minutes.

    Spins on the shared 'data_all' lock, removes every expired
    'train_data<epoch>' file together with its matching
    'timestamp_<name>' marker from ``<save_dir>/data``, releases the
    lock, and reports how many files were erased.

    Args:
        opt: options dict; reads 'keep_time' (age limit, minutes) and
            'save_dir' (directory containing the data/ subdirectory).
    """
    import os  # local import: this chunk's file-level imports are not in view

    now = time.time()
    keep_time = int(opt['keep_time'])
    # Filenames look like 'train_data<epoch-float>'; the suffix after the
    # fixed prefix is the creation timestamp.
    prefix_len = len('train_data')

    # Spin until the shared 'data_all' lock is acquired
    # (lock() presumably returns truthy while the lock is held elsewhere).
    while lock(opt, 'data_all'):
        pass
    try:
        train_data_files = get_train_data_files(opt)
        if not train_data_files:
            return  # nothing to do; lock released by the finally block
        cutoff = now - 60 * keep_time
        files_to_erase = [
            f for f in train_data_files if float(f[prefix_len:]) < cutoff
        ]
        for f in files_to_erase:
            # os.remove instead of shelling out to `rm`: no shell-injection
            # surface and no dependency on a Unix shell.
            for path in ('{}/data/{}'.format(opt['save_dir'], f),
                         '{}/data/timestamp_{}'.format(opt['save_dir'], f)):
                try:
                    os.remove(path)
                except OSError:
                    # Best effort, matching the old `rm` behavior which
                    # silently ignored failures.
                    pass
    finally:
        # Always release the lock, even if listing or deletion raises;
        # the original leaked the lock on any exception.
        unlock(opt, 'data_all')
    print('[controller] erased {}/{} old training data'.format(
        len(files_to_erase), len(train_data_files)), flush=True)
import sys
import os
import time
import glob

from model_updater import gen_graph, has_best, lock, unlock
from lib.lib import IndependentSetLib

if __name__ == '__main__':
    # Generator worker: loads the current best model (if any), samples a
    # fresh graph, and emits one new training-data file plus a timestamp
    # marker that the cleanup job uses to age data out.
    api = IndependentSetLib(sys.argv)

    # Parse "-key value" pairs from argv into an options dict.
    opt = {}
    for i in range(1, len(sys.argv), 2):
        opt[sys.argv[i][1:]] = sys.argv[i + 1]
    save_dir = opt['save_dir']

    # Load the best model under the shared 'best' lock; spin while the
    # lock is held elsewhere.
    if has_best(opt):
        while lock(opt, 'best'):
            pass
        api.LoadModel('best')
        unlock(opt, 'best')
    else:
        print('[generator] best model not found', flush=True)

    api.SetCurrentGraph(gen_graph(opt))
    # Name the data file by the current epoch time so cleanup_data can
    # recover its age from the filename suffix.
    filename = 'train_data{}'.format(time.time())
    api.GenerateTrainData(filename)
    # Create the timestamp marker natively instead of shelling out to
    # `touch` via os.system: avoids shell injection on save_dir and a
    # Unix-only dependency. Append mode creates without truncating.
    with open('{}/data/timestamp_{}'.format(save_dir, filename), 'a'):
        pass
    print('[generator] generated new data', flush=True)
from lib.lib import IndependentSetLib if __name__ == '__main__': api = IndependentSetLib(sys.argv) opt = {} for i in range(1, len(sys.argv), 2): opt[sys.argv[i][1:]] = sys.argv[i + 1] save_dir = opt['save_dir'] if has_best(opt): while lock(opt, 'best'): pass api.LoadModel('best') unlock(opt, 'best') else: print('[learner] best model not found', flush=True) for _ in range(int(opt['save_interval'])): api.ClearTrainData() train_data_files = [] while 1: while lock(opt, 'data_all'): pass train_data_files = get_train_data_files(opt) if len(train_data_files) < int(opt['batch_num']): unlock(opt, 'data_all') print('[learner] training data not found', flush=True) time.sleep(np.random.randint(30, 120))