import json
import os

import h5py
import numpy as np

# Project-level helpers assumed importable from the surrounding codebase:
# generation, LowShotDataset, training_loop, eval_loop, get_test_loader.


def main(params):
    # Load the low-shot benchmark metadata (base / novel class splits).
    with open(params.lowshotmeta, 'r') as f:
        lowshotmeta = json.load(f)
    accs = np.zeros(6)

    # Load the experiment definition and keep the first lowshotn examples per class.
    with open(params.experimentpath.format(params.experimentid), 'r') as f:
        exp = json.load(f)
    novel_idx = np.array(exp)[:, :params.lowshotn]

    if params.testsetup:
        novel_classes = lowshotmeta['novel_classes_2']
        base_classes = lowshotmeta['base_classes_2']
    else:
        novel_classes = lowshotmeta['novel_classes_1']
        base_classes = lowshotmeta['base_classes_1']
    novel_idx = np.sort(novel_idx[novel_classes, :].reshape(-1))

    # Optionally load a feature generator used to hallucinate extra novel-class examples.
    generator = None
    generator_fn = None
    if params.generator_name != '':
        generator_fn, generator = generation.get_generator(
            params.generator_name, params.generator_file)

    # Train the classifier on base features plus the (possibly augmented) novel features.
    with h5py.File(params.trainfile, 'r') as f:
        lowshot_dataset = LowShotDataset(f, base_classes, novel_classes,
                                         novel_idx, params.max_per_label,
                                         generator_fn, generator)
        model = training_loop(lowshot_dataset, params.numclasses, params,
                              params.batchsize, params.maxiters)
        print('trained')

    # Evaluate on the test features.
    with h5py.File(params.testfile, 'r') as f:
        test_loader = get_test_loader(f)
        accs, results = eval_loop(test_loader, model, base_classes,
                                  novel_classes)

    # Write the accuracies to a JSON file whose name encodes the hyperparameters.
    modelrootdir = os.path.basename(os.path.dirname(params.trainfile))
    outpath = os.path.join(
        params.outdir,
        modelrootdir +
        '_lr_{:.3f}_wd_{:.3f}_expid_{:d}_lowshotn_{:d}_maxgen_{:d}.json'.format(
            params.lr, params.wd, params.experimentid, params.lowshotn,
            params.max_per_label))
    with open(outpath, 'w+') as f:
        json.dump(
            dict(lr=params.lr,
                 wd=params.wd,
                 expid=params.experimentid,
                 lowshotn=params.lowshotn,
                 accs=accs.tolist()), f)
    return results
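# A minimal sketch (not part of the original file) of how `params` might be
# built from the command line. The flag names mirror the attributes read by
# main() above; the defaults and help strings are placeholder assumptions.
import argparse


def parse_args():
    parser = argparse.ArgumentParser(description='Low-shot benchmark runner (illustrative)')
    parser.add_argument('--lowshotmeta', required=True, help='JSON with base/novel class splits')
    parser.add_argument('--experimentpath', required=True,
                        help='format string for the experiment JSON, filled with --experimentid')
    parser.add_argument('--experimentid', type=int, default=1)
    parser.add_argument('--lowshotn', type=int, default=1, help='novel examples per class')
    parser.add_argument('--testsetup', action='store_true', help='use the *_2 class split')
    parser.add_argument('--generator_name', default='', help='empty string disables generation')
    parser.add_argument('--generator_file', default='')
    parser.add_argument('--trainfile', required=True, help='HDF5 file with training features')
    parser.add_argument('--testfile', required=True, help='HDF5 file with test features')
    parser.add_argument('--numclasses', type=int, default=1000)
    parser.add_argument('--batchsize', type=int, default=1000)
    parser.add_argument('--maxiters', type=int, default=10000)
    parser.add_argument('--max_per_label', type=int, default=0)
    parser.add_argument('--lr', type=float, default=0.1)
    parser.add_argument('--wd', type=float, default=0.001)
    parser.add_argument('--outdir', required=True)
    return parser.parse_args()


if __name__ == '__main__':
    main(parse_args())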
# Variant of main() above that keeps only the first n_way novel classes.
exp = json.load(f)
novel_idx = np.array(exp)[:, :params.lowshotn]
if params.testsetup:
    novel_classes = lowshotmeta['novel_classes_2'][:params.n_way]
    base_classes = lowshotmeta['base_classes_2']
else:
    novel_classes = lowshotmeta['novel_classes_1'][:params.n_way]
    base_classes = lowshotmeta['base_classes_1']
novel_idx = np.sort(novel_idx[novel_classes, :].reshape(-1))

generator = None
generator_fn = None
if params.generator_name != '':
    generator_fn, generator = generation.get_generator(
        params.generator_name, params.generator_file)

with h5py.File(params.trainfile, 'r') as f:
    lowshot_dataset = LowShotDataset(f, base_classes, novel_classes,
                                     novel_idx, params.max_per_label,
                                     generator_fn, generator)
    model = training_loop(lowshot_dataset, params.numclasses, params,
                          params.batchsize, params.maxiters)
    print('trained')

with h5py.File(params.testfile, 'r') as f:
    test_loader = get_test_loader(f)
    accs = eval_loop(test_loader, model, base_classes, novel_classes)

modelrootdir = os.path.basename(os.path.dirname(params.trainfile))
outpath = os.path.join(
    params.outdir,
    modelrootdir +
    '_lr_{:.3f}_wd_{:.3f}_expid_{:d}_lowshotn_{:d}_maxgen_{:d}.json'.format(
        params.lr, params.wd, params.experimentid, params.lowshotn,
        params.max_per_label))
import os
import shutil
from collections import defaultdict

from tqdm import tqdm

# Project-level helpers assumed importable from the surrounding codebase:
# get_generator, eval_meshes, visualize_data.


def test(test_loader, test_dataset, model, cfg):
    model.eval()
    generator = get_generator(model)
    model_counter = defaultdict(int)

    if cfg['mode'] == 'eval':
        eval_meshes(test_loader, test_dataset, model, cfg)
        return 0

    for it, data in enumerate(tqdm(test_loader)):
        # Output folders
        mesh_dir = os.path.join(cfg['out']['out_dir'], 'meshes')
        pointcloud_dir = os.path.join(cfg['out']['out_dir'], 'pointcloud')
        in_dir = os.path.join(cfg['out']['out_dir'], 'input')
        generation_vis_dir = os.path.join(cfg['out']['out_dir'], 'vis')

        # Get index etc.
        idx = data['idx'].item()
        try:
            model_dict = test_dataset.get_model_dict(idx)
        except AttributeError:
            model_dict = {'model': str(idx), 'category': 'n/a'}
        modelname = model_dict['model']
        category_id = model_dict.get('category', 'n/a')
        try:
            category_name = test_dataset.metadata[category_id].get('name', 'n/a')
        except AttributeError:
            category_name = 'n/a'

        if category_id != 'n/a':
            mesh_dir = os.path.join(mesh_dir, str(category_id))
            pointcloud_dir = os.path.join(pointcloud_dir, str(category_id))
            in_dir = os.path.join(in_dir, str(category_id))

            folder_name = str(category_id)
            if category_name != 'n/a':
                folder_name = str(folder_name) + '_' + category_name.split(',')[0]
            generation_vis_dir = os.path.join(generation_vis_dir, folder_name)

        if not os.path.exists(generation_vis_dir):
            os.makedirs(generation_vis_dir)
        if not os.path.exists(mesh_dir):
            os.makedirs(mesh_dir)
        if not os.path.exists(pointcloud_dir):
            os.makedirs(pointcloud_dir)
        if not os.path.exists(in_dir):
            os.makedirs(in_dir)

        # Generate outputs
        out_file_dict = {}

        # Also copy ground truth
        out = generator.generate_mesh(data)

        # Get statistics
        try:
            mesh, stats_dict = out
        except TypeError:
            mesh, stats_dict = out, {}
        # time_dict.update(stats_dict)

        # Write output
        mesh_out_file = os.path.join(mesh_dir, '%s.off' % modelname)
        mesh.export(mesh_out_file)
        out_file_dict['mesh'] = mesh_out_file

        # Save inputs
        inputs_path = os.path.join(in_dir, '%s.jpg' % modelname)
        inputs = data['inputs'].squeeze(0).cpu()
        visualize_data(inputs, 'img', inputs_path)
        out_file_dict['in'] = inputs_path

        # Copy to visualization directory for first vis_n_output samples
        c_it = model_counter[category_id]
        if c_it < cfg['test']['vis_n_outputs']:
            # Save output files
            img_name = '%02d.off' % c_it
            for k, filepath in out_file_dict.items():
                ext = os.path.splitext(filepath)[1]
                out_file = os.path.join(generation_vis_dir,
                                        '%02d_%s%s' % (c_it, k, ext))
                shutil.copyfile(filepath, out_file)
        model_counter[category_id] += 1
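# A minimal sketch (not from the original file) of the configuration this
# function reads. Only the keys accessed above are shown; the values are
# placeholder assumptions.
cfg_example = {
    'mode': 'generate',             # anything other than 'eval' runs mesh generation
    'out': {'out_dir': 'out/demo'},  # root folder for meshes/, pointcloud/, input/, vis/
    'test': {'vis_n_outputs': 2},    # copy the first 2 samples per category into vis/
}
# test(test_loader, test_dataset, model, cfg_example)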
# Same pipeline as main() above, without the n_way restriction.
exp = json.load(f)
novel_idx = np.array(exp)[:, :params.lowshotn]
if params.testsetup:
    novel_classes = lowshotmeta['novel_classes_2']
    base_classes = lowshotmeta['base_classes_2']
else:
    novel_classes = lowshotmeta['novel_classes_1']
    base_classes = lowshotmeta['base_classes_1']
novel_idx = np.sort(novel_idx[novel_classes, :].reshape(-1))

generator = None
generator_fn = None
if params.generator_name != '':
    generator_fn, generator = generation.get_generator(
        params.generator_name, params.generator_file)

with h5py.File(params.trainfile, 'r') as f:
    lowshot_dataset = LowShotDataset(f, base_classes, novel_classes,
                                     novel_idx, params.max_per_label,
                                     generator_fn, generator)
    model = training_loop(lowshot_dataset, params.numclasses, params,
                          params.batchsize, params.maxiters)
    print('trained')

with h5py.File(params.testfile, 'r') as f:
    test_loader = get_test_loader(f)
    accs = eval_loop(test_loader, model, base_classes, novel_classes)

modelrootdir = os.path.basename(os.path.dirname(params.trainfile))
outpath = os.path.join(
    params.outdir,
    modelrootdir +
    '_lr_{:.3f}_wd_{:.3f}_expid_{:d}_lowshotn_{:d}_maxgen_{:d}.json'.format(
        params.lr, params.wd, params.experimentid, params.lowshotn,
        params.max_per_label))
with open(outpath, 'w') as f:
    json.dump(
        dict(lr=params.lr,
             wd=params.wd,
             expid=params.experimentid,
             lowshotn=params.lowshotn,
             accs=accs.tolist()), f)
# Variant with a second generator path selected via params.generator_type.
exp = json.load(f)
novel_idx = np.array(exp)[:, :params.lowshotn]
if params.testsetup:
    novel_classes = lowshotmeta['novel_classes_2']
    base_classes = lowshotmeta['base_classes_2']
else:
    novel_classes = lowshotmeta['novel_classes_1']
    base_classes = lowshotmeta['base_classes_1']
novel_idx = np.sort(novel_idx[novel_classes, :].reshape(-1))

generator = None
generator_fn = None
if params.generator_name != '':
    generator_fn, generator, trial_gen = generation.get_generator(
        params.generator_name, params.generator_file)

with h5py.File(params.trainfile, 'r') as f:
    if params.generator_type == 1:
        lowshot_dataset = LowShotDataset(
            f, base_classes, novel_classes, novel_idx,
            params.max_per_label, trial_gen, generator,
            centroid_file=params.centroid_file,
            generator_index=params.generator_type)
    else:
        # Else-branch arguments assumed to mirror the default construction above.
        lowshot_dataset = LowShotDataset(
            f, base_classes, novel_classes, novel_idx,
            params.max_per_label, generator_fn, generator)