# Example #1 (score: 0)
    def __init__(self,
                 model: Module,
                 dataset: Dataset,
                 batch_size: int = 4,
                 num_workers: int = 4):
        """
        AUTHORS:
        --------

        :author: Alix Leroy

        DESCRIPTION:
        ------------

        Initialize a GenericInferer instance: store the model and dataset and
        build the DataLoader used to iterate over the dataset.

        PARAMETERS:
        -----------

        :param model (torch.nn.Module): The model to infer
        :param dataset (Dataset): A dataset
        :param batch_size (int): The number of instances per batch
        :param num_workers (int): The number of processes / threads used for data loading
        """

        self.model = model
        self.batch_size = batch_size
        self.num_workers = num_workers
        self.dataset = dataset
        # Inference should keep the sample order deterministic, hence shuffle=False
        self.dataloader = DataLoader(dataset=dataset,
                                     batch_size=batch_size,
                                     shuffle=False,
                                     num_workers=num_workers)
        # len(dataset) is the idiomatic form of dataset.__len__()
        self.num_minibatches = self.compute_num_minibatches(
            batch_size=batch_size, length_dataset=len(dataset))
    def __load_trainer(self, history, dataloader, data, transforms, name):
        """
        AUTHORS:
        --------

        :author: Alix Leroy
        :author: Samuel Westlake

        DESCRIPTION:
        ------------

        Load a trainer

        PARAMETERS:
        -----------
        :param history: History configuration; only its `verbose` attribute is read
        :param dataloader: Dataloader configuration; `num_workers` and `batch_size` are read
        :param data: Data configuration; `inputs`, `labels`, `additional_data` and `number` are read
        :param transforms: Transforms handed to the TransformManager
        :param name: Name given to the dataset

        RETURN:
        -------

        :return trainer->Trainer: The loaded trainer
        """
        # list(...) copies the entries without a redundant identity comprehension
        inputs = list(data.inputs)
        labels = list(data.labels)
        additional_data = list(data.additional_data)
        transform_manager = TransformManager(transforms)
        dataset = Dataset(list_inputs=inputs,
                          list_labels=labels,
                          list_additional_data=additional_data,
                          transform_manager=transform_manager,
                          cv_library=DEEP_LIB_PIL,
                          name=name)
        dataset.load()
        dataset.set_len_dataset(data.number)
        dataset.summary()

        # NOTE(review): the public load_trainer() passes `shuffle_method=` to
        # Trainer while this passes `shuffle=` — confirm which keyword Trainer
        # actually accepts.
        trainer = Trainer(model=self.model,
                          dataset=dataset,
                          metrics=self.metrics,
                          losses=self.losses,
                          optimizer=self.optimizer,
                          num_epochs=self.config.training.num_epochs,
                          initial_epoch=self.config.training.initial_epoch,
                          shuffle=self.config.training.shuffle,
                          verbose=history.verbose,
                          tester=self.validator,
                          num_workers=dataloader.num_workers,
                          batch_size=dataloader.batch_size)
        return trainer
    def __load_tester(self, dataloader, data, transforms, name):
        """
        AUTHORS:
        --------

        :author: Alix Leroy
        :author: Samuel Westlake

        DESCRIPTION:
        ------------

        Load a tester/validator

        PARAMETERS:
        -----------
        :param dataloader: Dataloader configuration; `batch_size` and `num_workers` are read
        :param data: Data configuration; `inputs`, `labels`, `additional_data` and `number` are read
        :param transforms: Transforms handed to the TransformManager
        :param name: Name given to the dataset

        RETURN:
        -------

        :return tester->Tester: The loaded tester
        """
        # list(...) copies the entries without a redundant identity comprehension
        inputs = list(data.inputs)
        labels = list(data.labels)
        additional_data = list(data.additional_data)
        transform_manager = TransformManager(transforms)
        dataset = Dataset(list_inputs=inputs,
                          list_labels=labels,
                          list_additional_data=additional_data,
                          transform_manager=transform_manager,
                          cv_library=DEEP_LIB_PIL,
                          name=name)
        dataset.load()
        dataset.set_len_dataset(data.number)
        dataset.summary()
        tester = Tester(model=self.model,
                        dataset=dataset,
                        metrics=self.metrics,
                        losses=self.losses,
                        batch_size=dataloader.batch_size,
                        num_workers=dataloader.num_workers)
        return tester
    def load_trainer(self):
        """
        AUTHORS:
        --------

        :author: Alix Leroy
        :author: Samuel Westlake

        DESCRIPTION:
        ------------

        Build the trainer from the configuration and store it in self.trainer.
        If the train step is disabled, only a notification is emitted.

        PARAMETERS:
        -----------
        None

        RETURN:
        -------

        :return None
        """
        # Guard clause: nothing to build when the train step is disabled
        if not self.config.data.enabled.train:
            Notification(DEEP_NOTIF_INFO, DEEP_MSG_DATA_DISABLED % self.config.data.dataset.train.name)
            return

        Notification(DEEP_NOTIF_INFO, DEEP_NOTIF_DATA_LOADING % self.config.data.dataset.train.name)

        # Transform manager for the training data
        train_transforms = TransformManager(**self.config.transform.train.get())

        # Training dataset
        train_dataset = Dataset(
            **self.config.data.dataset.train.get(),
            transform_manager=train_transforms,
            cv_library=self.config.project.cv_library
        )

        # Trainer
        self.trainer = Trainer(
            **self.config.data.dataloader.get(),
            model=self.model,
            dataset=train_dataset,
            metrics=self.metrics,
            losses=self.losses,
            optimizer=self.optimizer,
            num_epochs=self.config.training.num_epochs,
            initial_epoch=self.config.training.initial_epoch,
            shuffle_method=self.config.training.shuffle,
            verbose=self.config.history.verbose,
            tester=self.validator
        )
    def load_validator(self):
        """
        AUTHORS:
        --------

        :author: Alix Leroy
        :author: Samuel Westlake

        DESCRIPTION:
        ------------

        Build the validation inferer from the configuration and store it in
        self.validator. If the validation step is disabled, only a
        notification is emitted.

        PARAMETERS:
        -----------

        None

        RETURN:
        -------

        :return: None
        """
        # Guard clause: nothing to build when the validation step is disabled
        if not self.config.data.enabled.validation:
            Notification(DEEP_NOTIF_INFO, DEEP_MSG_DATA_DISABLED % self.config.data.dataset.validation.name)
            return

        Notification(DEEP_NOTIF_INFO, DEEP_NOTIF_DATA_LOADING % self.config.data.dataset.validation.name)

        # Transform manager for the validation data
        validation_transforms = TransformManager(**self.config.transform.validation.get())

        # Validation dataset
        validation_dataset = Dataset(
            **self.config.data.dataset.validation.get(),
            transform_manager=validation_transforms,
            cv_library=self.config.project.cv_library
        )

        # Validator (a Tester driven by the validation dataset)
        self.validator = Tester(
            **self.config.data.dataloader.get(),
            model=self.model,
            dataset=validation_dataset,
            metrics=self.metrics,
            losses=self.losses
        )
    def load_predictor(self):
        """
        AUTHORS:
        --------

        :author: Alix Leroy
        :author: Samuel Westlake

        DESCRIPTION:
        ------------

        Build the predictor from the configuration and store it in
        self.predictor. If the predict step is disabled, only a
        notification is emitted.

        PARAMETERS:
        -----------

        None

        RETURN:
        -------

        :return: None
        """
        # Guard clause: nothing to build when the predict step is disabled
        if not self.config.data.enabled.predict:
            Notification(DEEP_NOTIF_INFO, DEEP_MSG_DATA_DISABLED % self.config.data.dataset.predict.name)
            return

        Notification(DEEP_NOTIF_INFO, DEEP_NOTIF_DATA_LOADING % self.config.data.dataset.predict.name)

        # Transform manager for prediction; output transforms are skipped
        predict_transforms = TransformManager(**self.config.transform.predict.get(ignore="outputs"))

        # Prediction dataset
        predict_dataset = Dataset(
            **self.config.data.dataset.predict.get(),
            transform_manager=predict_transforms,
            cv_library=self.config.project.cv_library
        )

        # Predictor
        self.predictor = Predictor(
            **self.config.data.dataloader.get(),
            model=self.model,
            dataset=predict_dataset
        )
# Example script: build a Dataset from file lists, then time a single item fetch.

inputs = []
labels = []
additional_data = []

inputs.append([r"input1.txt", r"input2.txt"])
inputs.append([r"./images", r"./images"])
labels.append([r"label1.txt", r"label2.txt"])

# No additional data is used in this example (the original appended an entry
# and then immediately rebound the list to empty, discarding it).
additional_data = []

dataset = Dataset(inputs,
                  labels,
                  additional_data,
                  transform_manager=transform_manager_train,
                  cv_library=DEEP_LIB_OPENCV,
                  name="Test")
dataset.load()
dataset.set_len_dataset(1000)
dataset.summary()

# Time the retrieval of one item; dataset[500] is the idiomatic form of
# dataset.__getitem__(500)
t0 = time.time()
inputs, labels, additional_data = dataset[500]
t1 = time.time()

print(t1 - t0)
print(labels)
print(len(inputs))
cv2.imshow("test", inputs[1])
# imshow alone does not render the window; waitKey pumps the GUI event loop
cv2.waitKey(0)
# Example #8 (score: 0)
# Example script: build a Dataset without transforms and time repeated fetches.
labels = []
additional_data = []
# NOTE(review): `inputs` is never initialized in this snippet — it appends to a
# list assumed to survive from a previous example; confirm this is intended.
inputs.append([r"input1.txt", r"input2.txt"])
inputs.append([r"./images", r"./images"])
labels.append([r"label1.txt", r"label2.txt"])
labels.append(r"label3.txt")
# The two appends above are discarded: labels is rebound to an empty list.
labels = []

additional_data.append(r"label3.txt")
#additional_data.append(r"additional_data.txt")
# The append above is discarded: additional_data is rebound to an empty list.
additional_data = []

# NOTE(review): the earlier example uses DEEP_LIB_OPENCV; DEEP_OPENCV here may
# be a typo or an older constant name — confirm.
dataset = Dataset(inputs,
                  labels,
                  additional_data,
                  transform_manager=None,
                  cv_library=DEEP_OPENCV,
                  write_logs=False,
                  name="Test")
dataset.load()
dataset.set_len_dataset(1000)
dataset.summary()
#inputs, labels, additional_data = dataset.__getitem__(1)

#inputs, labels = dataset.__getitem__(1)
num_images = 100  # number of fetches to time in the loop below

t0 = time.time()
for i in range(num_images // 2):
    #inputs, labels, additional_data = dataset.__getitem__(1
    inputs = dataset.__getitem__(1)