def run_testers(tester_dict, framework, test_data_loader, test_dir):
    """Run every registered tester, giving each its own output directory.

    Each tester in ``tester_dict`` is run against ``framework`` using
    ``test_data_loader`` and writes results under ``test_dir/<key>``.
    """
    lib_util.make_dir(test_dir)
    for name, tester in tester_dict.items():
        out_dir = os.path.join(test_dir, name)
        tester.run(framework, test_data_loader, out_dir)
        print('[TEST] %s: %s' % (name, out_dir))
    print('')
def save_snapshot(network, optimizer, save_dir):
    """Persist the network weights and optimizer state under ``save_dir``.

    Writes 'network.pth' via the network's own save method and
    'optimizer.pth' via ``torch.save`` of the optimizer state dict.
    """
    lib_util.make_dir(save_dir)
    net_path = os.path.join(save_dir, 'network.pth')
    opt_path = os.path.join(save_dir, 'optimizer.pth')
    network.save(net_path)
    torch.save(optimizer.state_dict(), opt_path)
    print('[OPTIMIZER] save: %s' % opt_path)
    print('')
def create_result_dir(result_dir, names=None):
    """Create ``result_dir`` and one sub-directory per name.

    Args:
        result_dir: root directory to create.
        names: iterable of sub-directory names; ``None`` means no
            sub-directories (previously this default crashed).

    Returns:
        dict mapping each name to the path of its created sub-directory.
    """
    result_dir_dict = dict()
    lib_util.make_dir(result_dir)
    # Bug fix: iterating the default names=None raised TypeError;
    # treat None as "create no sub-directories".
    for name in (names or ()):
        dir_path = os.path.join(result_dir, name)
        lib_util.make_dir(dir_path)
        result_dir_dict[name] = dir_path
    return result_dir_dict
def register_picture(object_name: str) -> None:
    """Append a picture file name to the per-day JSON index for its camera.

    ``object_name`` looks like
    'tank_id-sample/sample_camera_id/2020/01/01/00_00_00.jpg'; the camera
    id, date parts and file name are taken by position from the path.
    """
    parts = object_name.split('/')
    camera_id = parts[1]   # e.g. sample_camera_id
    year = parts[2]        # e.g. 2020
    month = parts[3]       # e.g. 01
    day = parts[4]         # e.g. 01
    file_name = parts[5]   # e.g. 00_00_00.jpg

    target_dir_path = f'{AMS_ROOT_PATH}/pictures/{camera_id}/{year}/{month}'
    target_file_path = f'{target_dir_path}/{day}.json'

    # Create the month directory on first use and log when it is new.
    if make_dir(target_dir_path):
        logger(INFO, f'make new directory: {target_dir_path}', True)

    # Load the existing day index if present, else start a fresh one.
    if is_exist_file(target_file_path):
        day_index = get_json_file(target_file_path)
    else:
        day_index = {'pictures': []}

    # Register the new picture and write the index back.
    day_index['pictures'].append(file_name)
    set_json_file(
        file_path=target_file_path,
        data=day_index
    )
    return
def create_workdir(self, wd):
    """Create (or interactively reuse) working directory ``wd``.

    Returns a ``(wd, state)`` tuple; ``state`` is the result of
    ``State.read_from_dir`` when the user chooses to continue learning
    from an existing directory, otherwise ``None``.
    """
    state = None
    if os.path.exists(wd):
        while True:
            # NOTE: raw_input implies this file is Python 2.
            ans = raw_input("%s already exists. Continue learning? (y/n): " % (wd))
            if ans in ["Y","y"]:
                # Resume: restore previous learning state from the directory.
                state = State.read_from_dir(wd)
                break
            elif ans in ["N", "n"]:
                # Start over: wipe the old directory and recreate it empty.
                logging.warning("Deleting {}".format(wd))
                shutil.rmtree(wd)
                make_dir(wd)
                break
            else:
                # Re-prompt until the answer is y/n.
                logging.warning("incomprehensible answer")
    else:
        make_dir(wd)
    return wd, state
def create_workdir(self, wd):
    """Prepare working directory ``wd``, optionally resuming from it.

    If the directory already exists, the user is asked interactively
    whether to continue learning (restore saved state) or delete it and
    start fresh.  Returns ``(wd, state)``; ``state`` is None unless
    resuming.
    """
    resumed_state = None
    if not os.path.exists(wd):
        # Fresh start: nothing to ask, just create the directory.
        make_dir(wd)
        return wd, resumed_state
    while True:
        answer = raw_input(
            "%s already exists. Continue learning? (y/n): " % (wd))
        if answer in ["Y", "y"]:
            resumed_state = State.read_from_dir(wd)
            return wd, resumed_state
        if answer in ["N", "n"]:
            logging.warning("Deleting {}".format(wd))
            shutil.rmtree(wd)
            make_dir(wd)
            return wd, resumed_state
        logging.warning("incomprehensible answer")
def runner(x, vars, working_dir, wait=False, id=None, min=0.0, max=1.0):
    """Launch one simulation run with parameter vector ``x``.

    Args:
        x: sequence of parameter values, one per name in ``vars``.
        vars: variable names matching entries of GlobalConfig.VarSpecsFile.
        working_dir: parent directory; a per-run sub-directory named after
            ``id`` is created inside it.
        wait: when True, block and return ``communicate(p)``; otherwise
            return the Popen handle.
        id: unique run id; a fresh uuid1 is generated when None.
        min, max: bounds forwarded to ``proc_vars`` for value scaling.

    Raises:
        Exception: if the per-run directory already exists.
    """
    if id is None:
        id = uuid.uuid1()
    working_dir = pj(working_dir, str(id))
    if os.path.exists(working_dir):
        # Fixed message grammar ("is already exists" -> "already exists").
        raise Exception("Working dir already exists {}!".format(working_dir))
    make_dir(working_dir)
    const_json = pj(working_dir, os.path.basename(GlobalConfig.ConstFilename))
    specs = read_json(GlobalConfig.VarSpecsFile)
    with open(const_json, "w") as fptr:
        fptr.write(
            proc_vars(
                const=read_json(GlobalConfig.ConstFilename),
                var_specs=specs,
                vars=dict(zip(vars, x)),
                min=min,
                max=max,
            )
        )
    cmd = [
        RUN_SIM_PY,
        "--working-dir", working_dir,
        "--epochs", str(GlobalConfig.Epochs),
        "--const", const_json,
        "--slave",
        "--jobs", str(GlobalConfig.SimJobs),
    ] + GlobalConfig.AddOptions
    for v in vars:
        # Fixed: the second spec element shadowed the builtin ``range``
        # and was never used; discard it explicitly.
        path, _ = specs[v]
        if "prepare_data" in path:
            cmd += ["--prepare-data"]
            break
    logging.info(" ".join(cmd))
    if GlobalConfig.Mock:
        # Mock mode: fake a one-second run that reports a score of 1.0.
        p = sub.Popen("sleep 1.0 && echo 1.0", shell=True,
                      stdout=sub.PIPE, stderr=sub.PIPE)
        if wait:
            return communicate(p)
        return p
    p = sub.Popen(cmd, stdout=sub.PIPE, stderr=sub.PIPE)
    if wait:
        return communicate(p)
    return p
def __call__(self, vars, tag=None):
    """Read a parameter vector from stdin and run one tagged simulation.

    Requires ``tag`` and ``self.id`` to be set; runs blocking
    (``wait=True``) and prints the result.
    NOTE: Python 2 code (print statement).
    """
    if tag is None:
        raise Exception("SimpleRunner need a tag")
    if self.id is None:
        raise Exception("id must be set for runner")
    wd = make_dir(pj(env.runs_dir, tag))
    # Parameter vector is piped in on stdin, one value per line.
    d = np.loadtxt(sys.stdin)
    p = runner(d, vars, wd, wait=True, id=self.id, min=self.min_bound, max=self.max_bound)
    print p
def runner(x, vars, working_dir, wait=False, id=None, min=0.0, max=1.0):
    """Launch one simulation run with parameter vector ``x``.

    Args:
        x: sequence of parameter values, one per name in ``vars``.
        vars: variable names matching entries of GlobalConfig.VarSpecsFile.
        working_dir: parent directory; a per-run sub-directory named after
            ``id`` is created inside it.
        wait: when True, block and return ``communicate(p)``; otherwise
            return the Popen handle.
        id: unique run id; a fresh uuid1 is generated when None.
        min, max: bounds forwarded to ``proc_vars`` for value scaling.

    Raises:
        Exception: if the per-run directory already exists.
    """
    if id is None:
        id = uuid.uuid1()
    working_dir = pj(working_dir, str(id))
    if os.path.exists(working_dir):
        # Fixed message grammar ("is already exists" -> "already exists").
        raise Exception(
            "Working dir already exists {}!".format(working_dir))
    make_dir(working_dir)
    const_json = pj(working_dir, os.path.basename(GlobalConfig.ConstFilename))
    specs = read_json(GlobalConfig.VarSpecsFile)
    with open(const_json, "w") as fptr:
        fptr.write(
            proc_vars(const=read_json(GlobalConfig.ConstFilename),
                      var_specs=specs,
                      vars=dict(zip(vars, x)),
                      min=min,
                      max=max))
    cmd = [
        RUN_SIM_PY,
        "--working-dir", working_dir,
        "--epochs", str(GlobalConfig.Epochs),
        "--const", const_json,
        "--slave",
        "--jobs", str(GlobalConfig.SimJobs)
    ] + GlobalConfig.AddOptions
    for v in vars:
        # Fixed: the second spec element shadowed the builtin ``range``
        # and was never used; discard it explicitly.
        path, _ = specs[v]
        if "prepare_data" in path:
            cmd += ["--prepare-data"]
            break
    logging.info(" ".join(cmd))
    if GlobalConfig.Mock:
        # Mock mode: fake a one-second run that reports a score of 1.0.
        p = sub.Popen("sleep 1.0 && echo 1.0", shell=True,
                      stdout=sub.PIPE, stderr=sub.PIPE)
        if wait:
            return communicate(p)
        return p
    p = sub.Popen(cmd, stdout=sub.PIPE, stderr=sub.PIPE)
    if wait:
        return communicate(p)
    return p
def run(self, framework, data_loader, result_dir):
    """Render ground-truth and predicted detection boxes for a few samples.

    For each of the first ``self.n_samples`` batches (batch size must be 1),
    draws ground-truth boxes and predicted boxes on the image and saves
    '<i>_0_gt.png' and '<i>_1_pred.png' under ``result_dir``.
    """
    assert data_loader.batch_size == 1
    pre_proc = data_loader.dataset.pre_proc
    # Maps class number -> human-readable class name for box labels.
    class_map = data_loader.dataset.get_number2name_map()
    util.make_dir(result_dir)
    for i, data_dict in enumerate(data_loader):
        if i >= self.n_samples:
            break
        output_dict, result_dict = framework.infer_forward(data_dict)
        # [0]: batch size is 1, so take the single sample from each batch.
        pred_boxes_s = util.cvt_torch2numpy(result_dict['boxes_l'])[0]
        pred_confs_s = util.cvt_torch2numpy(result_dict['confs_l'])[0]
        pred_labels_s = util.cvt_torch2numpy(result_dict['labels_l'])[0]
        # Undo pre-processing so boxes are drawn on the original image.
        data_dict = pre_proc.inv_transform_batch(data_dict)
        img_s = data_dict['img'][0]
        gt_boxes_s = data_dict['boxes'][0]
        gt_labels_s = data_dict['labels'][0]
        sort_idx = 0
        gt_img_path = os.path.join(result_dir, '%03d_%d_%s.png' % (i, sort_idx, 'gt'))
        # Ground truth has no confidence scores, hence confs=None.
        gt_img_s = tester_util.draw_boxes(img_s, gt_boxes_s, None, gt_labels_s,
                                          class_map, self.conf_thresh, self.max_boxes)
        # NOTE(review): scipy.misc.imsave was removed in SciPy 1.2 —
        # pin scipy<1.2 or migrate to imageio.imwrite; confirm environment.
        scipy.misc.imsave(gt_img_path, gt_img_s)
        sort_idx += 1
        # draw_boxes
        pred_img_path = os.path.join(
            result_dir, '%03d_%d_%s.png' % (i, sort_idx, 'pred'))
        pred_img_s = tester_util.draw_boxes(img_s, pred_boxes_s, pred_confs_s,
                                            pred_labels_s, class_map,
                                            self.conf_thresh, self.max_boxes)
        scipy.misc.imsave(pred_img_path, pred_img_s)
def __call__(self, vars, tag=None): if tag is None: raise Exception("SimpleRunner need a tag") if self.id is None: raise Exception("id must be set for runner") wd = make_dir(pj(env.runs_dir, tag)) d = np.loadtxt(sys.stdin) p = runner(d, vars, wd, wait=True, id=self.id, min=self.min_bound, max=self.max_bound) print p
re.GlobalConfig.ConstFilename = pj(cases_dir, "lrule", "lrule_find.json") re.GlobalConfig.Epochs = 5 re.GlobalConfig.AddOptions = [ "--spike-input", pj(spikes_dir, "timed_pattern_spikes.pb"), "--evaluation-data", pj(spikes_dir, "timed_pattern_spikes_test.pb") ] re.GlobalConfig.SimJobs = 8 re.GlobalConfig.Jobs = 1 work_dir = make_dir(pj(runs_dir, "bo")) vars = read_json(re.GlobalConfig.VarSpecsFile).keys() #id = 0 class ConcreteContinuousGaussModel(ContinuousGaussModel): def __init__(self, ndim, params): assert "Gaussian" in params["surr_name"] ContinuousGaussModel.__init__(self, ndim, params) def evaluateSample(self, Xin):
re.GlobalConfig.ConstFilename = pj(cases_dir, "hedonistic_synapse_research.json") re.GlobalConfig.Epochs = 10 re.GlobalConfig.AddOptions = [ "--spike-input", pj(spikes_dir, "timed_pattern_spikes.pb"), "--evaluation-data", pj(spikes_dir, "timed_pattern_spikes_test.pb") ] re.GlobalConfig.SimJobs = 1 re.GlobalConfig.Jobs = 1 work_dir = make_dir(pj(runs_dir, "bo_gpyopt")) vars = read_json(re.GlobalConfig.VarSpecsFile).keys() n_cores = 4 def evaluateSample(Xin): Xin = np.ndarray.tolist(Xin) procs = [] answers = [] for x in Xin: logging.info("Running with input {}".format(x)) p = re.runner(x, vars, work_dir, wait=False) procs.append(p) if len(procs) >= n_cores: for p in procs:
re.GlobalConfig.ConstFilename = pj(cases_dir, "lrule", "lrule_find.json") re.GlobalConfig.Epochs = 5 re.GlobalConfig.AddOptions = [ "--spike-input", pj(spikes_dir, "timed_pattern_spikes.pb"), "--evaluation-data", pj(spikes_dir, "timed_pattern_spikes_test.pb") ] re.GlobalConfig.SimJobs = 1 re.GlobalConfig.Jobs = 1 work_dir = make_dir(pj(runs_dir, "bo_gpyopt")) vars = read_json(re.GlobalConfig.VarSpecsFile).keys() n_cores = 8 def evaluateSample(Xin): Xin = np.ndarray.tolist(Xin) procs = [] answers = [] for x in Xin: logging.info("Running with input {}".format(x)) p = re.runner(x, vars, work_dir, wait=False) procs.append(p) if len(procs) >= n_cores: for p in procs:
# --- Bayesian-optimization case configuration ---------------------------
# NOTE(review): ``re`` here appears to be a project module alias providing
# GlobalConfig and runner — it shadows the stdlib ``re``; confirm import.
re.GlobalConfig.VarSpecsFile = pj(cases_dir, "lrule", "lrule_find_var_specs.json")
re.GlobalConfig.ConstFilename = pj(cases_dir, "lrule", "lrule_find.json")
re.GlobalConfig.Epochs = 5
re.GlobalConfig.AddOptions = [
    "--spike-input", pj(spikes_dir, "timed_pattern_spikes.pb"),
    "--evaluation-data", pj(spikes_dir, "timed_pattern_spikes_test.pb")
]
re.GlobalConfig.SimJobs = 8  # jobs inside a single simulation run
re.GlobalConfig.Jobs = 1     # concurrent simulation runs

# All runs for this optimization are written under runs_dir/bo.
work_dir = make_dir(pj(runs_dir, "bo"))
# Variable names to optimize, taken from the var-specs file keys.
vars = read_json(re.GlobalConfig.VarSpecsFile).keys()
#id = 0


class ConcreteContinuousGaussModel(ContinuousGaussModel):
    """Continuous Gaussian surrogate model whose objective is one simulation."""

    def __init__(self, ndim, params):
        # The configured surrogate must be from the Gaussian family.
        assert "Gaussian" in params["surr_name"]
        ContinuousGaussModel.__init__(self, ndim, params)

    def evaluateSample(self, Xin):
        # Run one blocking simulation for sample Xin; the optimizer
        # minimizes -log|score|.
        ans = re.runner(Xin, vars, work_dir, wait=True)
        return -np.log(np.abs(ans))