def __call__(self, epoch):
    """ Saves the training state.

    Args:
        epoch (int): Epoch number.
    """
    if self._model:
        ms_path_new = \
            hlp.jn(self._path_dir, 'model_params_ep{}.h5'.format(epoch))
        self.save_model_params(self._model, ms_path_new)
        if self._verbose:
            print('Saved model params in {}'.format(ms_path_new))
        # Remove the previously saved params file (keep only the newest one).
        if self._ms_path and self._ms_path != ms_path_new:
            os.remove(self._ms_path)
        self._ms_path = ms_path_new

    if self._opt:
        os_path_new = \
            hlp.jn(self._path_dir, 'optim_params_ep{}.pkl'.format(epoch))
        self.save_optim_params(self._opt, os_path_new)
        if self._verbose:
            print('Saved optimizer params in {}'.format(os_path_new))
        # Guard against deleting the file just written (mirrors the model branch).
        if self._os_path and self._os_path != os_path_new:
            os.remove(self._os_path)
        self._os_path = os_path_new
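# Hedged usage sketch (not in the original code): driving a saver object that
# exposes the __call__ above from a training loop. The class name
# `TrainStateSaver`, its constructor arguments, the config key 'epochs' and
# `train_one_epoch` are assumptions used only for illustration; the
# once-per-epoch call and the "keep only the newest params files" behavior
# follow from the method above.
saver = TrainStateSaver(path_dir=args.output, model=model, opt=opt,
                        verbose=True)
for ep in range(1, conf['epochs'] + 1):
    train_one_epoch(model, opt, dl_tr)  # hypothetical helper
    saver(ep)  # writes *_ep{ep}.h5/.pkl, removes the previous epoch's files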
def get_file_list(path_root, seqs, exts=None):
    """ Gets the list of paths to files contained in dirs listed in `seqs`.
    The paths are relative w.r.t. the `path_root`.

    Args:
        path_root (str): Path to root dir containing sequences (directories)
            of data samples.
        seqs (list of str): Names of sequences (dirs) to load.
        exts (list of str): Supported file name extensions.

    Returns:
        list of str: List of paths to files.
    """
    return [
        hlp.jn(s, f) for s in seqs
        for f in hlp.ls(hlp.jn(path_root, s), exts=exts)
    ]
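# Hedged usage sketch (not in the original code): assumes `hlp.jn` wraps
# os.path.join and `hlp.ls` lists a directory filtered by file extension; the
# root path and sequence names below are placeholders for illustration only.
path_root = './data/dataset_root'  # hypothetical dataset root
files = get_file_list(path_root, ['seq_01', 'seq_02'], exts=['npz'])
# Returned paths are relative to `path_root`, e.g. 'seq_01/000123.npz', so a
# sample would be loaded as np.load(os.path.join(path_root, files[i])).
print('{} files found'.format(len(files)))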
def __getitem__(self, idx):
    d = np.load(hlp.jn(self._path_root, self._files[idx]))[self._key].\
        astype(self._dtype)
    if self._transform is not None:
        d = self._transform(d)
    return d
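# Hedged usage sketch (not in the original code): wrapping the dataset whose
# __getitem__ is shown above in a standard PyTorch DataLoader. The class name
# `DatasetNpz` and its constructor arguments are assumptions.
from torch.utils.data import DataLoader

ds = DatasetNpz(path_root=path_root, files=files, key='pcloud',
                dtype=np.float32, transform=None)  # hypothetical ctor
dl = DataLoader(ds, batch_size=8, shuffle=True, num_workers=4)
for batch in dl:
    # `batch` stacks the per-sample arrays returned by __getitem__ into a
    # (B, ...) tensor via the default collate function.
    pass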
def process_sequence(obj):
    # Get list of paths in obj. category in AtlasNet.
    pth_files_an = jn(pth_root_an, obj, 'ply')
    files = ls(pth_files_an, exts='txt')

    # Process files.
    for f in files:
        # Extract file name.
        fn_base = f.split('.')[0]

        # Load .obj mesh from ShapeNet.
        pth_f_sn = jn(
            pth_root_sn, obj, fn_base, 'models', 'model_normalized.obj')
        assert os.path.exists(pth_f_sn)
        verts, faces = load_obj(pth_f_sn)

        # Load the transformation (translation, scale) and apply it.
        pth_f_an = jn(pth_files_an, f)
        T, s = load_tf(pth_f_an)
        verts = (verts - T) / s

        # Compute area.
        area = mesh_area(verts, faces)

        # Append the area to the file (add a newline first if it is missing).
        with open(pth_f_an, 'r') as fobj:
            txt = fobj.read()
        assert len(txt.splitlines()) == 2
        has_nl = txt.endswith('\n')
        with open(pth_f_an, 'a') as fobj:
            fobj.write('{}{:.6f}'.format(('\n', '')[has_nl], area))

        with num_samples_done.get_lock():
            num_samples_done.value += 1
    with finished_seqs.get_lock():
        finished_seqs.value += 1
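# Hedged driver sketch (not in the original code): the snippet above only
# shows the worker, so the shared counters and the pool setup below are
# assumptions consistent with the names it uses (num_samples_done,
# finished_seqs, pth_root_an, lsd). It relies on the fork start method so the
# module-level counters are inherited by the worker processes.
import multiprocessing as mp

num_samples_done = mp.Value('i', 0)
finished_seqs = mp.Value('i', 0)

if __name__ == '__main__':
    seqs = lsd(pth_root_an)  # one directory per object category
    with mp.Pool(processes=8) as pool:
        pool.map(process_sequence, seqs)
    print('Processed {} samples in {} sequences.'.format(
        num_samples_done.value, finished_seqs.value))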
                    default='./data')
parser.add_argument('--resume', help='Resume training from the given path',
                    default=False)
args = parser.parse_args()
args.resume = False  # NOTE: resuming is disabled here regardless of the CLI flag.

# Load the config file, prepare paths.
conf = helpers.load_conf(args.conf)

# Model type, color mode.
model_type = 'atlasnet_orig'

# Prepare TB writers.
writer_tr = SummaryWriter(helpers.jn(args.output, 'tr'))
writer_va = SummaryWriter(helpers.jn(args.output, 'va'))

# Build a model.
model = AtlasNetReimplEncImg(
    M=conf['M'], code=conf['code'], num_patches=conf['num_patches'],
    normalize_cw=conf['normalize_cw'],
    freeze_encoder=conf['enc_freeze'],
    enc_load_weights=conf['enc_weights'],
    dec_activ_fns=conf['dec_activ_fns'],
    dec_use_tanh=conf['dec_use_tanh'],
    dec_batch_norm=conf['dec_batch_norm'],
    loss_scaled_isometry=conf['loss_scaled_isometry'],
    loss_patch_areas=conf['loss_patch_area'],  # zhantao
def __init__(self, rootimg, rootpc, path_category_file, class_choice='chair',
             train=True, npoints=2500, balanced=False, gen_view=False,
             SVR=False, idx=0, load_area=False):
    super(ShapeNet, self).__init__()
    self.balanced = balanced
    self.train = train
    self.rootimg = rootimg
    self.rootpc = rootpc
    self.npoints = npoints
    self.datapath = []
    self.catfile = path_category_file
    self.cat = {}
    self.meta = {}
    self.SVR = SVR
    self.gen_view = gen_view
    self.idx = idx
    self._load_area = load_area
    self._zero = torch.tensor(0., dtype=torch.float32)

    # Find all category names in the dataset.
    with open(self.catfile, 'r') as f:
        for line in f:
            ls = line.strip().split()
            self.cat[ls[0]] = ls[1]

    # Keep only the selected object categories.
    if class_choice is not None:
        self.cat = {k: v for k, v in self.cat.items() if k in class_choice}
    print(self.cat)

    empty = []
    for item in self.cat:
        # Get image file names.
        dir_img = os.path.join(self.rootimg, self.cat[item])
        fns_img = sorted(os.listdir(dir_img))

        # Get pts file names.
        try:
            dir_point = os.path.join(self.rootpc, self.cat[item], 'ply')
            fns_pc = sorted(os.listdir(dir_point))
        except OSError:
            fns_pc = []

        # Keep only the file names present both in imgs and pts dirs.
        fns = [val for val in fns_img if val + '.points.ply' in fns_pc]
        print('category ', self.cat[item], 'files ' + str(len(fns)),
              100. * len(fns) / float(len(fns_img)), '%')

        # Select the train/valid subset.
        if train:
            fns = fns[:int(len(fns) * 0.8)]
        else:
            fns = fns[int(len(fns) * 0.8):]

        if len(fns) != 0:
            self.meta[item] = []
            for fn in fns:
                objpath = jn(
                    '/cvlabdata2/cvlab/datasets_jan/shapenet/'
                    'ShapeNetCore.v2', self.cat[item], fn,
                    'models/model_normalized.obj')
                self.meta[item].append(
                    (os.path.join(dir_img, fn, 'rendering'),
                     os.path.join(dir_point, fn + '.points.ply'),
                     os.path.join(dir_point, fn + '.points.ply2.txt'),
                     item, objpath, fn))
        else:
            empty.append(item)
    for item in empty:
        del self.cat[item]

    self.idx2cat = {}
    self.size = {}
    i = 0
    for item in self.cat:
        self.idx2cat[i] = item
        self.size[i] = len(self.meta[item])
        i = i + 1
        for fn in self.meta[item]:
            self.datapath.append(fn)

    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    self.transforms = transforms.Compose([
        transforms.Resize(size=224, interpolation=2),
        transforms.ToTensor(),
        # normalize,
    ])

    # RandomResizedCrop or RandomCrop
    self.dataAugmentation = transforms.Compose([
        transforms.RandomCrop(127),
        transforms.RandomHorizontalFlip(),
    ])
    self.validating = transforms.Compose([
        transforms.CenterCrop(127),
    ])

    self.perCatValueMeter = {}
    for item in self.cat:
        self.perCatValueMeter[item] = AverageValueMeter()
    self.perCatValueMeter_metro = {}
    for item in self.cat:
        self.perCatValueMeter_metro[item] = AverageValueMeter()
    self.transformsb = transforms.Compose([
        transforms.Resize(size=224, interpolation=2),
    ])
def __init__(self, rootimg, rootpc, path_category_file, class_choice='chair',
             train=True, test=False, npoints=2500, gen_view=False, SVR=False,
             idx=0, load_area=False):
    super(ShapeNet, self).__init__()
    self.train = train
    self.test = test
    self.rootimg = rootimg
    self.rootpc = rootpc
    self.npoints = npoints
    self.datapath = []
    self.catfile = path_category_file
    self.cat = {}
    self.meta = {}
    self.SVR = SVR
    self.gen_view = gen_view
    self.idx = idx
    self._load_area = load_area
    self._zero = torch.tensor(0., dtype=torch.float32)

    # Find all category names in the dataset.
    with open(self.catfile, 'r') as f:
        for line in f:
            ls = line.strip().split()
            self.cat[ls[0]] = ls[1]

    # Keep only the selected object categories.
    if class_choice is not None:
        self.cat = {k: v for k, v in self.cat.items() if k in class_choice}
    print(self.cat)

    empty = []
    for item in self.cat:
        # Get image file names.
        dir_img = os.path.join(self.rootimg, self.cat[item])
        fns_img = sorted(os.listdir(dir_img))

        # Get pts file names.
        try:
            dir_point = os.path.join(self.rootpc, self.cat[item], 'ply')
            fns_pc = sorted(os.listdir(dir_point))
        except OSError:
            fns_pc = []

        # Keep only the file names present both in imgs and pts dirs.
        fns = [val for val in fns_img if val + '.points.ply' in fns_pc]
        print('category ', self.cat[item], 'files ' + str(len(fns)),
              100. * len(fns) / float(len(fns_img)), '%')

        # Select the train/valid/test subset.
        if train:
            # The first 80% of the data.
            print('construct training set')
            fns = fns[:int(len(fns) * 0.8)]
        elif test:
            # The last 10% of the data.
            print('construct testing set')
            fns = fns[int(len(fns) * 0.9):]
        else:
            # The 80%-90% slice of the data.
            print('construct validation set')
            fns = fns[int(len(fns) * 0.8):int(len(fns) * 0.9)]

        if len(fns) != 0:
            self.meta[item] = []
            for fn in fns:
                objpath = jn(
                    '/cvlabdata2/cvlab/datasets_jan/shapenet/'
                    'ShapeNetCore.v2', self.cat[item], fn,
                    'models/model_normalized.obj')
                self.meta[item].append(
                    (os.path.join(dir_img, fn, 'rendering'),
                     os.path.join(dir_point, fn + '.points.ply'),
                     os.path.join(dir_point, fn + '.points.ply2.txt'),
                     item, objpath, fn))
        else:
            empty.append(item)
    for item in empty:
        del self.cat[item]

    self.idx2cat = {}
    self.size = {}
    i = 0
    for item in self.cat:
        self.idx2cat[i] = item
        self.size[i] = len(self.meta[item])
        i = i + 1
        for fn in self.meta[item]:
            self.datapath.append(fn)

    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    self.transforms = transforms.Compose([
        transforms.Resize(size=224, interpolation=2),
        transforms.ToTensor(),
        # normalize,
    ])

    # RandomResizedCrop or RandomCrop
    self.dataAugmentation = transforms.Compose([
        transforms.RandomCrop(127),
        transforms.RandomHorizontalFlip(),
    ])
    self.validating = transforms.Compose([
        transforms.CenterCrop(127),
    ])
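# Hedged usage sketch (not in the original code): constructing the splits
# with the dataset defined above. The directory paths are placeholders, not
# paths from the original project; only the constructor signature and the
# train/test flag semantics come from the code above.
ds_tr = ShapeNet(rootimg='/path/to/ShapeNetRendering',              # placeholder
                 rootpc='/path/to/customShapeNet',                  # placeholder
                 path_category_file='/path/to/synsetoffset2category.txt',
                 class_choice='chair', train=True, SVR=True)
# Validation: train=False, test=False -> the 80%-90% slice of each category.
# Test:       train=False, test=True  -> the last 10% of each category.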
def __getitem__(self, idx):
    return hlp.load_img(hlp.jn(self._path_root_imgs, self._files[idx]))
gpu = torch.cuda.is_available()

# Parse arguments.
parser = argparse.ArgumentParser()
parser.add_argument('--conf',
                    help='Path to the main config file of the model.',
                    default='config.yaml')
parser.add_argument('--output',
                    help='Path to the output directory for storing '
                         'weights and tensorboard data.',
                    default='./data')
args = parser.parse_args()

# Load the config file, prepare paths.
conf = helpers.load_conf(args.conf)

# Prepare TB writers.
writer_tr = SummaryWriter(helpers.jn(args.output, 'tr'))
writer_va = SummaryWriter(helpers.jn(args.output, 'va'))

# Build a model.
model = AtlasNetReimpl(
    M=conf['M'], code=conf['code'], num_patches=conf['num_patches'],
    normalize_cw=conf['normalize_cw'],
    freeze_encoder=conf['enc_freeze'],
    enc_load_weights=conf['enc_weights'],
    dec_activ_fns=conf['dec_activ_fns'],
    dec_use_tanh=conf['dec_use_tanh'],
    dec_batch_norm=conf['dec_batch_norm'],
    loss_scaled_isometry=conf['loss_scaled_isometry'],
    alpha_scaled_isometry=conf['alpha_scaled_isometry'],
    alphas_sciso=conf['alphas_sciso'], gpu=gpu)
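# Hedged continuation sketch (not in the original code): a typical way to
# build the optimizer once the model exists. The config key 'lr' is an
# assumption; only `model` and `conf` come from the script above.
import torch.optim as optim

opt = optim.Adam(filter(lambda p: p.requires_grad, model.parameters()),
                 lr=conf.get('lr', 1e-3))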
                    help='Path to the main config file of the model.',
                    default='conf_texless_defsurf.yaml')
parser.add_argument('--output',
                    help='Path to the output directory for storing '
                         'weights and tensorboard data.',
                    default='./data')
parser.add_argument('--resume', help='Resume training from the given path',
                    default=False)
args = parser.parse_args()

# Load the config file, prepare paths.
conf = helpers.load_conf(args.conf)

# Prepare TB writers.
writer_tr = SummaryWriter(helpers.jn(args.output, 'tr'))
writer_va = SummaryWriter(helpers.jn(args.output, 'va'))

# Build a model.
model = AtlasNetReimplEncImg(
    M=conf['M'], code=conf['code'], num_patches=conf['num_patches'],
    normalize_cw=conf['normalize_cw'],
    freeze_encoder=conf['enc_freeze'],
    enc_load_weights=conf['enc_weights'],
    dec_activ_fns=conf['dec_activ_fns'],
    dec_use_tanh=conf['dec_use_tanh'],
    dec_batch_norm=conf['dec_batch_norm'],
    loss_scaled_isometry=conf['loss_scaled_isometry'],
    loss_patch_areas=conf['loss_patch_area'],  # zhantao
def get_total_num_samples():
    num_total = 0
    seqs = lsd(pth_root_an)
    for s in seqs:
        num_total += len(ls(jn(pth_root_an, s), exts='txt'))
    return num_total
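# Hedged usage sketch (not in the original code): the total count is assumed
# to serve as the denominator for progress reporting while the per-sequence
# workers defined above run.
num_total = get_total_num_samples()
print('Found {} samples to process in total.'.format(num_total))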