Example #1
def create_coco_loader(*paths):
    transform = utils.get_transform(config.image_size, config.central_fraction)
    datasets = [data.CocoImages(path, transform=transform) for path in paths]
    dataset = data.Composite(*datasets)
    data_loader = torch.utils.data.DataLoader(
        dataset,
        batch_size=config.preprocess_batch_size,
        num_workers=config.data_workers,
        shuffle=False,
        pin_memory=True,
    )
    return data_loader
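
A minimal usage sketch for the loader above. The directory paths below and the assumption that data.CocoImages yields (coco_id, image) batches are illustrative, not guaranteed by the snippet itself:

# Hypothetical paths; point these at wherever the COCO images live.
loader = create_coco_loader('mscoco/train2014', 'mscoco/val2014')

for coco_ids, imgs in loader:
    # coco_ids: LongTensor of image ids; imgs: float tensor of preprocessed images,
    # batched with config.preprocess_batch_size and resized per config.image_size.
    print(coco_ids.shape, imgs.shape)
    break
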
Example #2
def create_coco_loader(*paths):
    transform = utils.get_transform(config.image_size, config.central_fraction)
    datasets = [data.CocoImages(path, transform=transform) for path in paths]
    # ipdb.set_trace()  # datasets[0].__getitem__(116591)[0] prints the largest coco_id; it fits within the torch int64 bound
    dataset = data.Composite(*datasets)
    data_loader = torch.utils.data.DataLoader(
        dataset,
        batch_size=config.preprocess_batch_size,
        num_workers=config.data_workers,
        shuffle=False,
        pin_memory=True,
    )
    return data_loader
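
The commented-out ipdb line is a sanity check that the largest COCO image id still fits in a 64-bit integer, so ids can be batched as torch LongTensors. A standalone sketch of that check (the id value here is a placeholder, not taken from the dataset):

import torch

largest_coco_id = 581929  # placeholder; in practice read it from the dataset
assert largest_coco_id <= torch.iinfo(torch.int64).max, "coco_id would overflow int64"
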
Example #3
    def create_data_loader(self, *paths):
        """ Create a united PyTorch COCO data loader for every given path in the arguments"""
        transform = utils.get_transform(self.image_size,
                                        self.keep_central_fraction)
        datasets = [
            data.CocoImages(path, transform=transform) for path in paths
        ]
        dataset = data.Composite(*datasets)
        data_loader = torch.utils.data.DataLoader(
            dataset,
            batch_size=self.batch_size,
            num_workers=self.num_threads_to_use,
            shuffle=False,
            pin_memory=True,
        )

        features_shape = (len(data_loader.dataset), config.output_features,
                          config.output_size, config.output_size)

        return data_loader, features_shape
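
This variant also returns the shape of the full feature tensor so it can be preallocated. A sketch of how the returned pair might be consumed, writing CNN features to disk batch by batch; h5py, the feature-extractor net, the output path, and the (coco_ids, imgs) batch structure are assumptions, not part of the snippet above:

import h5py
import torch

def extract_features(data_loader, features_shape, net, out_path='features.h5'):
    """Run every batch through `net` and store the resulting feature maps in an HDF5 file."""
    net.eval()
    with h5py.File(out_path, 'w') as fd:
        features = fd.create_dataset('features', shape=features_shape, dtype='float16')
        ids = fd.create_dataset('ids', shape=(features_shape[0],), dtype='int64')
        i = 0
        with torch.no_grad():
            for coco_ids, imgs in data_loader:
                out = net(imgs)  # expected shape: (batch, output_features, output_size, output_size)
                j = i + out.size(0)
                features[i:j] = out.cpu().numpy().astype('float16')
                ids[i:j] = coco_ids.numpy()
                i = j
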