def setup_pose_net(config, prepared, **kwargs):
    """
    Create a pose network

    Parameters
    ----------
    config : CfgNode
        Network configuration
    prepared : bool
        True if the network has been prepared before
    kwargs : dict
        Extra parameters for the network

    Returns
    -------
    pose_net : nn.Module
        Created pose network
    """
    print0(pcolor('PoseNet: %s' % config.name, 'yellow'))
    pose_net = load_class_args_create(config.name,
        paths=['fbnet.networks.pose'],
        args={**config, **kwargs},
    )
    if not prepared and config.checkpoint_path != '':
        pose_net = load_network(pose_net, config.checkpoint_path,
                                ['pose_net', 'pose_network'])
    return pose_net

def setup_depth_net2(config, prepared, **kwargs):
    """
    Create a depth network

    Parameters
    ----------
    config : CfgNode
        Network configuration
    prepared : bool
        True if the network has been prepared before
    kwargs : dict
        Extra parameters for the network

    Returns
    -------
    depth_net : nn.Module
        Created depth network
    """
    print0(pcolor('DepthNet2: %s' % config.name, 'yellow'))
    depth_net = load_class_args_create(config.name,
        paths=['fbnet.networks.depth2'],
        args={**config, **kwargs},
    )
    if not prepared and config.checkpoint_path != '':
        depth_net = load_network(depth_net, config.checkpoint_path,
                                 ['depth_net2', 'disp_network2'])
    return depth_net

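# Usage sketch (illustrative, not part of the codebase): building the two
# networks above from YACS-style config nodes. The class names below
# ('PoseNet', 'DepthNet2') are hypothetical placeholders; any `name` that
# resolves inside fbnet.networks.pose / fbnet.networks.depth2 would work.
#
# from yacs.config import CfgNode
#
# pose_cfg = CfgNode({'name': 'PoseNet', 'checkpoint_path': ''})
# depth2_cfg = CfgNode({'name': 'DepthNet2', 'checkpoint_path': ''})
# pose_net = setup_pose_net(pose_cfg, prepared=False)
# depth_net2 = setup_depth_net2(depth2_cfg, prepared=False)
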
def prepare_model(self, resume=None):
    """Prepare self.model (incl. loading previous state)"""
    print0(pcolor('### Preparing Model', 'green'))
    self.model = setup_model(self.config.model, self.config.prepared)
    # Resume model if available
    if resume:
        print0(pcolor('### Resuming from {}'.format(resume['file']),
                      'magenta', attrs=['bold']))
        self.model = load_network(self.model, resume['state_dict'], 'model')
        if 'epoch' in resume:
            self.current_epoch = resume['epoch']

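# Sketch of the `resume` dictionary that prepare_model expects, assumed from
# the keys accessed above (the file path and epoch value are hypothetical):
#
# ckpt = torch.load('checkpoints/epoch_10.ckpt', map_location='cpu')
# resume = {
#     'file': 'checkpoints/epoch_10.ckpt',   # used only for logging
#     'state_dict': ckpt['state_dict'],      # passed straight to load_network
#     'epoch': 10,                           # optional; restores the epoch counter
# }
# trainer.prepare_model(resume=resume)
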
def load_network(network, path, prefixes=''):
    """
    Loads a pretrained network

    Parameters
    ----------
    network : nn.Module
        Network that will receive the pretrained weights
    path : str or dict
        Path to a file containing a 'state_dict' key with pretrained network
        weights, or an already loaded state dict
    prefixes : str or list of str
        Layer name prefixes to consider when loading the network

    Returns
    -------
    network : nn.Module
        Updated network with pretrained weights
    """
    prefixes = make_list(prefixes)
    # If path is a string, load the state dict from disk
    if is_str(path):
        saved_state_dict = torch.load(path, map_location='cpu')['state_dict']
        if path.endswith('.pth.tar'):
            saved_state_dict = backwards_state_dict(saved_state_dict)
    # If the state dict is already provided, use it directly
    else:
        saved_state_dict = path
    # Get network state dict
    network_state_dict = network.state_dict()

    updated_state_dict = OrderedDict()
    n, n_total = 0, len(network_state_dict.keys())
    for key, val in saved_state_dict.items():
        for prefix in prefixes:
            prefix = prefix + '.'
            if prefix in key:
                idx = key.find(prefix) + len(prefix)
                key = key[idx:]
        if key in network_state_dict.keys() and \
                same_shape(val.shape, network_state_dict[key].shape):
            updated_state_dict[key] = val
            n += 1

    network.load_state_dict(updated_state_dict, strict=False)
    base_color, attrs = 'cyan', ['bold', 'dark']
    color = 'green' if n == n_total else 'yellow' if n > 0 else 'red'
    print0(pcolor('###### Pretrained {} loaded:'.format(prefixes[0]),
                  base_color, attrs=attrs) +
           pcolor(' {}/{} '.format(n, n_total), color, attrs=attrs) +
           pcolor('tensors', base_color, attrs=attrs))
    return network

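# Illustration of the prefix matching above (toy keys and shapes, assumed for
# the example): a checkpoint key 'pose_net.conv.weight' is matched against the
# network key 'conv.weight' once the 'pose_net.' prefix is stripped, while
# tensors whose shapes disagree are skipped via same_shape().
#
# saved = {'pose_net.conv.weight': torch.zeros(8, 3, 3, 3),   # loaded
#          'pose_net.fc.weight': torch.zeros(10, 8)}          # wrong shape -> skipped
# net = load_network(net, saved, prefixes=['pose_net'])
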
def prepare_datasets(self, validation_requirements, test_requirements):
    """Prepare datasets for training, validation and test."""
    # Prepare datasets
    print0(pcolor('### Preparing Datasets', 'green'))
    augmentation = self.config.datasets.augmentation
    # Setup train dataset (requirements are given by the model itself)
    self.train_dataset = setup_dataset(
        self.config.datasets.train, 'train',
        self.model.train_requirements, **augmentation)
    # Setup validation dataset
    self.validation_dataset = setup_dataset(
        self.config.datasets.validation, 'validation',
        validation_requirements, **augmentation)
    # Setup test dataset
    self.test_dataset = setup_dataset(
        self.config.datasets.test, 'test',
        test_requirements, **augmentation)

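# Sketch of the requirements dictionaries passed in above (assumed shape,
# mirroring the 'gt_depth' / 'gt_pose' flags consumed by setup_dataset):
#
# validation_requirements = {'gt_depth': True, 'gt_pose': False}
# test_requirements = {'gt_depth': True, 'gt_pose': False}
# trainer.prepare_datasets(validation_requirements, test_requirements)
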
def setup_model(config, prepared, **kwargs):
    """
    Create a model

    Parameters
    ----------
    config : CfgNode
        Model configuration (cf. configs/default_config.py)
    prepared : bool
        True if the model has been prepared before
    kwargs : dict
        Extra parameters for the model

    Returns
    -------
    model : nn.Module
        Created model
    """
    print0(pcolor('Model: %s' % config.name, 'yellow'))
    model = load_class(config.name, paths=['fbnet.models'])(
        **{**config.loss, **kwargs})
    # Add depth network if required
    if model.network_requirements['depth_net']:
        model.add_depth_net(setup_depth_net(config.depth_net, prepared))
    # Add second depth network if required
    if model.network_requirements['depth_net2']:
        model.add_depth_net2(setup_depth_net2(config.depth_net2, prepared))
    # Add pose network if required
    if model.network_requirements['pose_net']:
        model.add_pose_net(setup_pose_net(config.pose_net, prepared))
    # If a checkpoint is provided, load pretrained model
    if not prepared and config.checkpoint_path != '':
        model = load_network(model, config.checkpoint_path, 'model')
    # Return model
    return model

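# Sketch of the contract setup_model relies on, assumed from the attribute
# accesses above: each model class exposes a `network_requirements` mapping
# stating which sub-networks it needs. The class name is hypothetical.
#
# class SelfSupModel(nn.Module):          # hypothetical model
#     network_requirements = {
#         'depth_net': True,    # needs a depth network
#         'depth_net2': False,  # no second depth network
#         'pose_net': True,     # needs a pose network
#     }
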
def setup_dataset(config, mode, requirements, **kwargs):
    """
    Create a dataset class

    Parameters
    ----------
    config : CfgNode
        Configuration (cf. configs/default_config.py)
    mode : str {'train', 'validation', 'test'}
        Mode from which we want the dataset
    requirements : dict (string -> bool)
        Different requirements for dataset loading (gt_depth, gt_pose, etc)
    kwargs : dict
        Extra parameters for dataset creation

    Returns
    -------
    datasets : list of Dataset
        List of datasets for that mode
    """
    # If no dataset is given, return None
    if len(config.path) == 0:
        return None

    print0(pcolor('###### Setup %s datasets' % mode, 'red'))

    # Global shared dataset arguments
    dataset_args = {
        'back_context': config.back_context,
        'forward_context': config.forward_context,
        'data_transform': get_transforms(mode, **kwargs)
    }

    # Loop over all datasets
    datasets = []
    for i in range(len(config.split)):
        path_split = os.path.join(config.path[i], config.split[i])

        # Individual shared dataset arguments
        dataset_args_i = {
            'depth_type': config.depth_type[i] if requirements['gt_depth'] else None,
            'with_pose': requirements['gt_pose'],
        }

        # KITTI dataset
        if config.dataset[i] == 'KITTI':
            from fbnet.datasets.kitti_dataset import KITTIDataset
            dataset = KITTIDataset(
                config.path[i], path_split,
                **dataset_args, **dataset_args_i,
            )
        # DGP dataset
        elif config.dataset[i] == 'DGP':
            from fbnet.datasets.dgp_dataset import DGPDataset
            dataset = DGPDataset(
                config.path[i], config.split[i],
                **dataset_args, **dataset_args_i,
                cameras=config.cameras[i],
            )
        # Image dataset
        elif config.dataset[i] == 'Image':
            from fbnet.datasets.image_dataset import ImageDataset
            dataset = ImageDataset(
                config.path[i], config.split[i],
                **dataset_args, **dataset_args_i,
            )
        # Custom dataset
        elif config.dataset[i] == 'Custom':
            from fbnet.datasets.kitti_dataset_custom import CustomDataset
            dataset = CustomDataset(
                config.path[i], path_split,
                **dataset_args, **dataset_args_i,
            )
        else:
            raise ValueError('Unknown dataset %s' % config.dataset[i])

        # Repeat if needed
        if 'repeat' in config and config.repeat[i] > 1:
            dataset = ConcatDataset([dataset for _ in range(config.repeat[i])])
        datasets.append(dataset)

        # Display dataset information
        bar = '######### {:>7}'.format(len(dataset))
        if 'repeat' in config:
            bar += ' (x{})'.format(config.repeat[i])
        bar += ': {:<}'.format(path_split)
        print0(pcolor(bar, 'yellow'))

    # If training, concatenate all datasets into a single one
    if mode == 'train':
        datasets = [ConcatDataset(datasets)]

    return datasets

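# Configuration sketch for setup_dataset (illustrative values; the field names
# follow the accesses above, cf. configs/default_config.py). All per-dataset
# fields are parallel lists indexed together.
#
# datasets.train.dataset    = ['KITTI']
# datasets.train.path       = ['/data/datasets/KITTI_raw']          # hypothetical path
# datasets.train.split      = ['data_splits/train_files.txt']       # hypothetical split
# datasets.train.depth_type = ['velodyne']   # used only when requirements['gt_depth']
# datasets.train.repeat     = [2]            # dataset is duplicated twice per epoch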