Example #1
 def dataset_setup(self):
     """Sets up the datasets for the application."""
     settings = self.settings
     if settings.crowd_dataset == CrowdDataset.ucf_qnrf:
         self.dataset_class = UcfQnrfFullImageDataset
         self.train_dataset = UcfQnrfTransformedDataset(
             middle_transform=data.RandomHorizontalFlip(),
             seed=settings.labeled_dataset_seed,
             number_of_examples=settings.labeled_dataset_size)
         self.train_dataset_loader = DataLoader(
             self.train_dataset,
             batch_size=settings.batch_size,
             pin_memory=self.settings.pin_memory,
             num_workers=settings.number_of_data_workers)
         self.validation_dataset = UcfQnrfTransformedDataset(dataset='test',
                                                             seed=101)
     elif settings.crowd_dataset == CrowdDataset.shanghai_tech:
         self.dataset_class = ShanghaiTechFullImageDataset
         self.train_dataset = ShanghaiTechTransformedDataset(
             middle_transform=data.RandomHorizontalFlip(),
             seed=settings.labeled_dataset_seed,
             number_of_examples=settings.labeled_dataset_size,
             map_directory_name=settings.map_directory_name,
             image_patch_size=self.settings.image_patch_size,
             label_patch_size=self.settings.label_patch_size)
         self.train_dataset_loader = DataLoader(
             self.train_dataset,
             batch_size=settings.batch_size,
             pin_memory=self.settings.pin_memory,
             num_workers=settings.number_of_data_workers)
         self.validation_dataset = ShanghaiTechTransformedDataset(
             dataset='test',
             seed=101,
             map_directory_name=settings.map_directory_name,
             image_patch_size=self.settings.image_patch_size,
             label_patch_size=self.settings.label_patch_size)
     elif settings.crowd_dataset == CrowdDataset.ucf_cc_50:
         seed = 0
         self.dataset_class = UcfCc50FullImageDataset
         self.train_dataset = UcfCc50TransformedDataset(
             middle_transform=data.RandomHorizontalFlip(),
             seed=seed,
             test_start=settings.labeled_dataset_seed * 10,
             inverse_map=settings.inverse_map,
             map_directory_name=settings.map_directory_name)
         self.train_dataset_loader = DataLoader(
             self.train_dataset,
             batch_size=settings.batch_size,
             pin_memory=self.settings.pin_memory,
             num_workers=settings.number_of_data_workers)
         self.validation_dataset = UcfCc50TransformedDataset(
             dataset='test',
             seed=seed,
             test_start=settings.labeled_dataset_seed * 10,
             inverse_map=settings.inverse_map,
             map_directory_name=settings.map_directory_name)
     else:
         raise ValueError('{} is not an understood crowd dataset.'.format(
             settings.crowd_dataset))
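
All of these examples pull their configuration from a `settings` object on the application. The project's real settings class is not shown on this page, so the sketch below is only a minimal, hypothetical stand-in that collects the fields Example #1 reads; the actual attribute names, types, and defaults in the project may differ.

from dataclasses import dataclass
from enum import Enum


class CrowdDataset(Enum):
    """Hypothetical stand-in for the CrowdDataset enum compared against above."""
    ucf_qnrf = 'UCF-QNRF'
    shanghai_tech = 'ShanghaiTech'
    ucf_cc_50 = 'UCF-CC-50'
    world_expo = 'World Expo'


@dataclass
class Settings:
    """Assumed settings container covering only the fields read in Example #1."""
    crowd_dataset: CrowdDataset = CrowdDataset.ucf_qnrf
    labeled_dataset_seed: int = 0
    labeled_dataset_size: int = 30    # number of labeled training examples
    batch_size: int = 10
    pin_memory: bool = True           # forwarded to DataLoader
    number_of_data_workers: int = 4   # forwarded to DataLoader as num_workers
    map_directory_name: str = 'maps'  # density map subdirectory (assumed name)
    image_patch_size: int = 224       # assumed value; the project default may differ
    label_patch_size: int = 224       # assumed value; the project default may differ
    inverse_map: bool = False         # only used by the UCF-CC-50 branch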
Example #2
 def dataset_setup(self):
     """Sets up the datasets for the application."""
     settings = self.settings
     if settings.crowd_dataset == 'World Expo':
         train_transform = torchvision.transforms.Compose([data.RandomlySelectPathWithNoPerspectiveRescale(),
                                                           data.RandomHorizontalFlip(),
                                                           data.NegativeOneToOneNormalizeImage(),
                                                           data.NumpyArraysToTorchTensors()])
         validation_transform = torchvision.transforms.Compose([data.RandomlySelectPathWithNoPerspectiveRescale(),
                                                                data.NegativeOneToOneNormalizeImage(),
                                                                data.NumpyArraysToTorchTensors()])
         dataset_path = '../World Expo/'
         with open(os.path.join(dataset_path, 'viable_with_validation_and_random_test.json')) as json_file:
             cameras_dict = json.load(json_file)
         self.train_dataset = CrowdDataset(dataset_path, camera_names=cameras_dict['train'],
                                           number_of_cameras=settings.number_of_cameras,
                                           number_of_images_per_camera=settings.number_of_images_per_camera,
                                           transform=train_transform, seed=settings.labeled_dataset_seed)
         self.train_dataset_loader = DataLoader(self.train_dataset, batch_size=settings.batch_size, shuffle=True,
                                                pin_memory=True, num_workers=settings.number_of_data_workers)
         # self.unlabeled_dataset = CrowdDataset(dataset_path, camera_names=cameras_dict['validation'],
         #                                       transform=train_transform, unlabeled=True,
         #                                       seed=100)
         self.unlabeled_dataset = CrowdDataset(dataset_path, camera_names=cameras_dict['train'],
                                               number_of_cameras=settings.number_of_cameras,
                                               transform=train_transform, unlabeled=True,
                                               seed=settings.labeled_dataset_seed)
         self.unlabeled_dataset_loader = DataLoader(self.unlabeled_dataset, batch_size=settings.batch_size, shuffle=True,
                                                    pin_memory=True, num_workers=settings.number_of_data_workers)
         self.validation_dataset = CrowdDataset(dataset_path, camera_names=cameras_dict['validation'],
                                                transform=validation_transform, seed=101)
     elif settings.crowd_dataset == 'ShanghaiTech':
         train_transform = torchvision.transforms.Compose([data.ExtractPatchForRandomPosition(),
                                                           data.RandomHorizontalFlip(),
                                                           data.NegativeOneToOneNormalizeImage(),
                                                           data.NumpyArraysToTorchTensors()])
         validation_transform = torchvision.transforms.Compose([data.ExtractPatchForRandomPosition(),
                                                                data.NegativeOneToOneNormalizeImage(),
                                                                data.NumpyArraysToTorchTensors()])
         self.train_dataset = ShanghaiTechDataset(transform=train_transform, seed=settings.labeled_dataset_seed)
         self.train_dataset_loader = DataLoader(self.train_dataset, batch_size=settings.batch_size, shuffle=True,
                                                pin_memory=True, num_workers=settings.number_of_data_workers)
         self.unlabeled_dataset = ShanghaiTechDataset(transform=train_transform, seed=settings.labeled_dataset_seed,
                                                      unlabeled=True)
         self.unlabeled_dataset_loader = DataLoader(self.unlabeled_dataset, batch_size=settings.batch_size, shuffle=True,
                                                    pin_memory=True, num_workers=settings.number_of_data_workers)
         self.validation_dataset = ShanghaiTechDataset(dataset='test', transform=validation_transform, seed=101)
     else:
         raise ValueError('{} is not an understood crowd dataset.'.format(settings.crowd_dataset))
Example #3
    def dataset_setup(self):
        """Sets up the datasets for the application."""
        settings = self.settings
        # self.dataset_class = UcfQnrfFullImageDataset
        # self.train_dataset = UcfQnrfTransformedDataset(middle_transform=data.RandomHorizontalFlip(),
        #                                                seed=settings.labeled_dataset_seed,
        #                                                number_of_examples=settings.labeled_dataset_size)
        # self.train_dataset_loader = DataLoader(self.train_dataset, batch_size=settings.batch_size,
        #                                        pin_memory=self.settings.pin_memory,
        #                                        num_workers=settings.number_of_data_workers)
        # self.unlabeled_dataset = UcfQnrfTransformedDataset(middle_transform=data.RandomHorizontalFlip(),
        #                                                    seed=100,
        #                                                    number_of_examples=settings.unlabeled_dataset_size)
        # self.unlabeled_dataset_loader = DataLoader(self.unlabeled_dataset, batch_size=settings.batch_size,
        #                                            pin_memory=self.settings.pin_memory,
        #                                            num_workers=settings.number_of_data_workers)
        # self.validation_dataset = UcfQnrfTransformedDataset(dataset='test', seed=101)

        self.dataset_class = ShanghaiTechFullImageDataset
        self.train_dataset = ShanghaiTechTransformedDataset(
            middle_transform=data.RandomHorizontalFlip(),
            seed=settings.labeled_dataset_seed,
            number_of_examples=settings.labeled_dataset_size,
            inverse_map=settings.inverse_map,
            map_directory_name=settings.map_directory_name)
        self.train_dataset_loader = DataLoader(
            self.train_dataset,
            batch_size=settings.batch_size,
            pin_memory=self.settings.pin_memory,
            num_workers=settings.number_of_data_workers)
        self.unlabeled_dataset = ShanghaiTechTransformedDataset(
            middle_transform=data.RandomHorizontalFlip(),
            seed=100,
            number_of_examples=settings.unlabeled_dataset_size,
            inverse_map=settings.inverse_map,
            map_directory_name=settings.map_directory_name)
        self.unlabeled_dataset_loader = DataLoader(
            self.unlabeled_dataset,
            batch_size=settings.batch_size,
            pin_memory=self.settings.pin_memory,
            num_workers=settings.number_of_data_workers)
        self.validation_dataset = ShanghaiTechTransformedDataset(
            dataset='test',
            seed=101,
            inverse_map=settings.inverse_map,
            map_directory_name=settings.map_directory_name)
Example #4
 def dataset_setup(self):
     """Sets up the datasets for the application."""
     settings = self.settings
     self.dataset_class = UcfQnrfFullImageDataset
     self.train_dataset = UcfQnrfTransformedDataset(
         middle_transform=data.RandomHorizontalFlip(),
         seed=settings.labeled_dataset_seed,
         number_of_examples=settings.labeled_dataset_size)
     self.train_dataset_loader = DataLoader(
         self.train_dataset,
         batch_size=settings.batch_size,
         pin_memory=self.settings.pin_memory,
         num_workers=settings.number_of_data_workers)
     self.unlabeled_dataset = UcfQnrfTransformedDataset(
         middle_transform=data.RandomHorizontalFlip(),
         seed=100,
         number_of_examples=settings.unlabeled_dataset_size)
     self.unlabeled_dataset_loader = DataLoader(
         self.unlabeled_dataset,
         batch_size=settings.batch_size,
         pin_memory=self.settings.pin_memory,
         num_workers=settings.number_of_data_workers)
     self.validation_dataset = UcfQnrfTransformedDataset(dataset='test',
                                                         seed=101)
Example #5
    def dataset_setup(self):
        """Sets up the datasets for the application."""
        settings = self.settings
        if settings.crowd_dataset == CrowdDataset.ucf_qnrf:
            self.dataset_class = UcfQnrfFullImageDataset
            self.train_dataset = UcfQnrfTransformedDataset(
                middle_transform=data.RandomHorizontalFlip(),
                seed=settings.labeled_dataset_seed,
                number_of_examples=settings.labeled_dataset_size,
                map_directory_name=settings.map_directory_name)
            self.train_dataset_loader = DataLoader(
                self.train_dataset,
                batch_size=settings.batch_size,
                pin_memory=self.settings.pin_memory,
                num_workers=settings.number_of_data_workers)
            self.unlabeled_dataset = UcfQnrfTransformedDataset(
                middle_transform=data.RandomHorizontalFlip(),
                seed=settings.labeled_dataset_seed,
                number_of_examples=settings.unlabeled_dataset_size,
                map_directory_name=settings.map_directory_name,
                examples_start=settings.labeled_dataset_size)
            self.unlabeled_dataset_loader = DataLoader(
                self.unlabeled_dataset,
                batch_size=settings.batch_size,
                pin_memory=self.settings.pin_memory,
                num_workers=settings.number_of_data_workers)
            self.validation_dataset = UcfQnrfTransformedDataset(
                dataset='test',
                seed=101,
                map_directory_name=settings.map_directory_name)
        elif settings.crowd_dataset == CrowdDataset.shanghai_tech:
            self.dataset_class = ShanghaiTechFullImageDataset
            self.train_dataset = ShanghaiTechTransformedDataset(
                middle_transform=data.RandomHorizontalFlip(),
                seed=settings.labeled_dataset_seed,
                number_of_examples=settings.labeled_dataset_size,
                map_directory_name=settings.map_directory_name)
            self.train_dataset_loader = DataLoader(
                self.train_dataset,
                batch_size=settings.batch_size,
                pin_memory=self.settings.pin_memory,
                num_workers=settings.number_of_data_workers)
            self.unlabeled_dataset = ShanghaiTechTransformedDataset(
                middle_transform=data.RandomHorizontalFlip(),
                seed=100,
                number_of_examples=settings.unlabeled_dataset_size,
                map_directory_name=settings.map_directory_name)
            self.unlabeled_dataset_loader = DataLoader(
                self.unlabeled_dataset,
                batch_size=settings.batch_size,
                pin_memory=self.settings.pin_memory,
                num_workers=settings.number_of_data_workers)
            self.validation_dataset = ShanghaiTechTransformedDataset(
                dataset='test',
                seed=101,
                map_directory_name=settings.map_directory_name)

        elif settings.crowd_dataset == CrowdDataset.world_expo:
            self.dataset_class = WorldExpoFullImageDataset
            self.train_dataset = WorldExpoTransformedDataset(
                middle_transform=data.RandomHorizontalFlip(),
                seed=settings.labeled_dataset_seed,
                number_of_cameras=settings.number_of_cameras,
                number_of_images_per_camera=settings.number_of_images_per_camera)
            self.train_dataset_loader = DataLoader(
                self.train_dataset,
                batch_size=settings.batch_size,
                pin_memory=self.settings.pin_memory,
                num_workers=settings.number_of_data_workers)
            self.unlabeled_dataset = WorldExpoTransformedDataset(
                middle_transform=data.RandomHorizontalFlip(),
                seed=settings.labeled_dataset_seed,
                number_of_cameras=settings.number_of_cameras,
                number_of_images_per_camera=settings.number_of_images_per_camera)
            self.unlabeled_dataset_loader = DataLoader(
                self.unlabeled_dataset,
                batch_size=settings.batch_size,
                pin_memory=self.settings.pin_memory,
                num_workers=settings.number_of_data_workers)
            self.validation_dataset = WorldExpoTransformedDataset(
                dataset='validation', seed=101)
            if self.settings.batch_size > self.train_dataset.length:
                self.settings.batch_size = self.train_dataset.length
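
The last two lines of this example clamp `batch_size` to the training set length so the DataLoader never requests a larger batch than the dataset can supply (the project's datasets appear to expose a `length` attribute; plain PyTorch datasets use `len()`). Below is a self-contained toy illustration of that clamp using a standard `TensorDataset` rather than the crowd datasets above.

import torch
from torch.utils.data import DataLoader, TensorDataset

# Toy stand-in for a training set: 3 "images" paired with 3 "density maps".
toy_dataset = TensorDataset(torch.randn(3, 1, 8, 8), torch.randn(3, 1, 8, 8))

batch_size = 10
if batch_size > len(toy_dataset):  # same clamp as in the World Expo branch above
    batch_size = len(toy_dataset)

loader = DataLoader(toy_dataset, batch_size=batch_size, shuffle=True, num_workers=0)
for images, labels in loader:
    print(images.shape)  # torch.Size([3, 1, 8, 8]): one full batch of the toy data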