Example #1
    def test_progress_bar(self, dataloader, config, n=0, ncols=120):
        """Wrap a test dataloader in a tqdm progress bar (shown on rank 0 only)."""
        return tqdm(enumerate(dataloader, 0),
                    unit=' images', unit_scale=self.world_size * config.batch_size,
                    total=len(dataloader), smoothing=0,
                    disable=not self.is_rank_0, ncols=ncols,
                    desc=prepare_dataset_prefix(config, n))
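The same tqdm pattern, lifted out of the class so it runs standalone (world_size, batch_size, the dataloader, and the 'kitti-0' prefix are all stand-in values, not from the original snippet):

from tqdm import tqdm

world_size, batch_size = 1, 4          # stand-ins for the class attributes
dataloader = range(10)                 # stand-in for a real DataLoader
bar = tqdm(enumerate(dataloader, 0),
           unit=' images', unit_scale=world_size * batch_size,
           total=len(dataloader), smoothing=0,
           disable=False, ncols=120, desc='kitti-0')
for i, batch in bar:
    pass                               # per-batch work goes here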
Example #2
def set_checkpoint(config):
    """
    Set checkpoint information

    Parameters
    ----------
    config : CfgNode
        Model configuration

    Returns
    -------
    config.checkpoint : CfgNode
        Updated checkpoint configuration
    """
    # If checkpoint is enabled
    if config.checkpoint.filepath != '':
        # Create proper monitor string
        config.checkpoint.monitor = '{}-{}'.format(
            prepare_dataset_prefix(config.datasets.validation,
                                   config.checkpoint.monitor_index),
            config.checkpoint.monitor)
        # Join checkpoint folder with run name
        config.checkpoint.filepath = os.path.join(
            config.checkpoint.filepath, config.name,
            '{epoch:02d}_{%s:.3f}' % config.checkpoint.monitor)
        # Set s3 url
        if config.checkpoint.s3_path != '':
            config.checkpoint.s3_url = s3_url(config)
    else:
        # If not saving checkpoint, do not sync to s3
        config.checkpoint.s3_path = ''
    return config.checkpoint
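To make the string plumbing concrete, a minimal runnable sketch of the template this function builds (the paths, run name, and 'kitti-0' prefix are assumed values; prepare_dataset_prefix is replaced by a literal):

import os

monitor = '{}-{}'.format('kitti-0', 'abs_rel')   # assumed prefix and metric
filepath = os.path.join('/data/checkpoints', 'my_run',
                        '{epoch:02d}_{%s:.3f}' % monitor)
print(filepath)  # /data/checkpoints/my_run/{epoch:02d}_{kitti-0-abs_rel:.3f}

The braces are left unformatted on purpose: presumably the checkpoint callback fills {epoch} and the monitor value in later.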
Example #3
    def log_images(self, func, mode, batch, output, args, dataset, world_size,
                   config):
        """
        Adds images to metrics for later logging.

        Parameters
        ----------
        func : Function
            Function used to process the image before logging
        mode : str {"train", "val"}
            Training stage where the images come from (serve as prefix for logging)
        batch : dict
            Data batch
        output : dict
            Model output
        args : tuple
            Step arguments
        dataset : CfgNode
            Dataset configuration
        world_size : int
            Number of GPUs, used to get logging samples at consistent intervals
        config : CfgNode
            Model configuration
        """
        dataset_idx = 0 if len(args) == 1 else args[1]
        prefix = prepare_dataset_prefix(config, dataset_idx)
        # Guard against a zero interval (small datasets or large num_logs)
        interval = max(len(dataset[dataset_idx]) // world_size // config.num_logs, 1)
        if args[0] % interval == 0:
            prefix_idx = '{}-{}-{}'.format(mode, prefix,
                                           batch['idx'][0].item())
            func(prefix_idx, batch, output)
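The interval arithmetic is easier to see with numbers; a short sketch with assumed sizes (400 validation samples, 4 GPUs, num_logs=10):

dataset_len, world_size, num_logs = 400, 4, 10   # assumed values
interval = max(dataset_len // world_size // num_logs, 1)
logged_steps = [step for step in range(dataset_len // world_size)
                if step % interval == 0]
print(interval, logged_steps[:3])                # 10 [0, 10, 20]

Each rank then logs every 10th step, giving roughly num_logs images per dataset regardless of GPU count.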
Example #4
def save_depth(batch, output, args, dataset, save):
    """
    Save depth predictions in various ways

    Parameters
    ----------
    batch : dict
        Batch from dataloader
    output : dict
        Output from model
    args : tuple
        Step arguments
    dataset : CfgNode
        Dataset configuration
    save : CfgNode
        Save configuration
    """
    # If there is no save folder, don't save
    if save.folder == '':
        return

    # If we want to save depth maps
    if save.viz or save.npz:
        # Retrieve useful tensors
        rgb = batch['rgb']
        pred_inv_depth = output['inv_depth']

        # Prepare path strings
        filename = batch['filename']
        dataset_idx = 0 if len(args) == 1 else args[1]
        save_path = os.path.join(
            save.folder, 'depth', prepare_dataset_prefix(dataset, dataset_idx),
            os.path.basename(save.pretrained).split('.')[0])
        # Create folder
        os.makedirs(save_path, exist_ok=True)

        # For each image in the batch
        length = rgb.shape[0]
        for i in range(length):
            # Save numpy depth maps
            if save.npz:
                # Get depth from predicted depth map and save to .npz
                np.savez_compressed(
                    '{}/{}.npz'.format(save_path, filename[i]),
                    depth=inv2depth(
                        pred_inv_depth[i]).squeeze().detach().cpu().numpy())
            # Save inverse depth visualizations
            if save.viz:
                # Prepare RGB image
                rgb_i = rgb[i].permute(1, 2, 0).detach().cpu().numpy() * 255
                # Prepare inverse depth
                pred_inv_depth_i = viz_inv_depth(pred_inv_depth[i]) * 255
                # Concatenate both vertically
                image = np.concatenate([rgb_i, pred_inv_depth_i], 0)
                # Write to disk
                cv2.imwrite('{}/{}.png'.format(save_path, filename[i]),
                            image[:, :, ::-1])
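Loading one of the saved .npz files back, for completeness (the path is illustrative, not from the original snippet):

import numpy as np

data = np.load('/outputs/depth/kitti-0/model/000001.npz')  # illustrative path
depth = data['depth']   # (H, W) float array of metric depth values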
Example #5
def save_depth(batch, output, args, dataset, save):
    """
    Save depth predictions in various ways

    Parameters
    ----------
    batch : dict
        Batch from dataloader
    output : dict
        Output from model
    args : tuple
        Step arguments
    dataset : CfgNode
        Dataset configuration
    save : CfgNode
        Save configuration
    """
    # If there is no save folder, don't save
    if save.folder == '':
        return

    # If we want to save
    if save.depth.rgb or save.depth.viz or save.depth.npz or save.depth.png:
        # Retrieve useful tensors
        rgb = batch['rgb']
        pred_inv_depth = output['inv_depth']

        # Prepare path strings
        print("save:",batch['sensor_name'])
        # print("save:",batch['filename'])
        filename = [os.path.split(batch['filename'][i])[-1]  for i in range(len(batch['filename']))]
        dataset_idx = 0 if len(args) == 1 else args[1]
        save_path = [os.path.join(save.folder, 'depth',
                                  prepare_dataset_prefix(dataset, dataset_idx),
                                  os.path.basename(save.pretrained).split('.')[0],
                                  sensor) for sensor in batch['sensor_name']]
        # Create one folder per sensor
        for path in save_path:
            os.makedirs(path, exist_ok=True)
        # For each image in the batch
        length = rgb.shape[0]
        for i in range(length):
            # Save numpy depth maps
            if save.depth.npz:
                write_depth('{}/{}_depth.npz'.format(save_path[i], filename[i]),
                            depth=inv2depth(pred_inv_depth[i]),
                            intrinsics=batch['intrinsics'][i] if 'intrinsics' in batch else None)
            # Save png depth maps
            if save.depth.png:
                write_depth('{}/{}_depth.png'.format(save_path[i], filename[i]),
                            depth=inv2depth(pred_inv_depth[i]))
            # Save rgb images
            if save.depth.rgb:
                rgb_i = rgb[i].permute(1, 2, 0).detach().cpu().numpy() * 255
                write_image('{}/{}_rgb.png'.format(save_path[i], filename[i]), rgb_i)
            # Save inverse depth visualizations
            if save.depth.viz:
                viz_i = viz_inv_depth(pred_inv_depth[i]) * 255
                write_image('{}/{}_viz.png'.format(save_path[i], filename[i]), viz_i)
Example #6
def create_dict(metrics_data,
                metrics_keys,
                metrics_modes,
                dataset,
                name='depth'):
    """
    Creates a dictionary from collated metrics

    Parameters
    ----------
    metrics_data : list
        List containing collated metrics
    metrics_keys : list
        List of keys for the metrics
    metrics_modes : list
        List of modes for the metrics
    dataset : CfgNode
        Dataset configuration file
    name : str
        Name of the task for the metric

    Returns
    -------
    metrics_dict : dict
        Metrics dictionary
    """
    # Create metrics dictionary
    metrics_dict = {}
    # For all datasets
    for n, metrics in enumerate(metrics_data):
        if metrics:  # If there are calculated metrics
            prefix = prepare_dataset_prefix(dataset, n)
            # For all keys
            for i, key in enumerate(metrics_keys):
                for mode in metrics_modes:
                    metrics_dict['{}-{}{}'.format(prefix, key, mode)] =\
                        metrics['{}{}'.format(name, mode)][i].item()
    # Return metrics dictionary
    return metrics_dict