Example #1
    def test_results_normal(self):
        result = Result(self.cardinality)
        result.update(torch.Tensor([[0.4, 0.3], [0.3, 0.4], [0.4, 0.3],
                                    [0.3, 0.4]]),
                      labels=[0, 1, 1, 0])

        result_dump = pickle.dumps({
            "Result": {
                "1": {
                    "Test": result,
                    "Train": result
                },
                "2": {
                    "Test": result,
                    "Train": result
                }
            }
        })
        mocked_open = mock_open(read_data=result_dump)
        with patch('builtins.open', mocked_open):
            results = Results(path_model="model/", name_model="Test")
            results.load_files()
            results.compute_results(condition="Test")

            self.assertEqual(results.global_TP, 4)
            self.assertEqual(results.global_FP, 4)
            self.assertEqual(results.global_FN, 4)

            self.assertEqual(results.macro_precision, 0.50)
            self.assertEqual(results.macro_recall, 0.50)
            self.assertTrue(math.isclose(results.micro_recall, 1 / 3))
            self.assertTrue(math.isclose(results.micro_precision, 1 / 3))
        results.print_results()
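
The expected globals follow from the payload shape: the same Result (which on its own yields TP = FP = FN = 2, see Example #5) is registered under two cardinality keys, so compute_results(condition="Test") aggregates 2 + 2 = 4 for each counter, while the precision/recall ratios stay at the single-Result values. A minimal sketch of that arithmetic (that compute_results sums per-cardinality counts is an assumption drawn from the asserts, not from the library's source):

    # Two cardinality keys ("1" and "2"), each holding the same Result
    # with TP = FP = FN = 2; the global counters are the per-key sums.
    per_result = {"TP": 2, "FP": 2, "FN": 2}
    global_counts = {k: 2 * v for k, v in per_result.items()}
    assert global_counts == {"TP": 4, "FP": 4, "FN": 4}
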
Example #2
    def test_update(self):
        result = Result(self.cardinality)
        result.update(torch.Tensor([[0.4, 0.3], [0.3, 0.4], [0.4, 0.3],
                                    [0.3, 0.4]]),
                      labels=[0, 1, 1, 0])

        self.assertEqual(result.conf_matrix[0, 1], 1)
        self.assertEqual(result.conf_matrix[1, 0], 1)
        self.assertEqual(result.conf_matrix[1, 1], 1)
        self.assertEqual(result.conf_matrix[0, 0], 1)
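
The expected counts follow directly from the argmax of each row: the logits predict classes [0, 1, 0, 1] against labels [0, 1, 1, 0], so each of the four cells of the 2x2 block is hit exactly once. A minimal sketch of that counting, independent of the Result class (this symmetric example cannot distinguish the rows-as-truth from the rows-as-prediction convention, so the orientation below is an assumption):

    import torch

    logits = torch.tensor([[0.4, 0.3], [0.3, 0.4], [0.4, 0.3], [0.3, 0.4]])
    labels = torch.tensor([0, 1, 1, 0])
    preds = logits.argmax(dim=1)        # -> tensor([0, 1, 0, 1])
    conf = torch.zeros(2, 2)
    for t, p in zip(labels, preds):
        conf[t, p] += 1                 # each of the four cells is hit once
    print(conf)                         # tensor([[1., 1.], [1., 1.]])
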
Example #3
    def setUp(self):
        self.lock = torch.multiprocessing.get_context('spawn').Lock()
        self.model = LSTMLayer(num_classes=5)
        cardinality = Cardinality(3, "", "")
        cardinality.list_classes = [1, 1, 1, 2, 2, 3, 4, 5, 6]
        cardinality.counter = {1: 10, 2: 100, 3: 100, 4: 100, 6: 1000, 5: 1000}
        cardinality.compute_position()
        self.result = Result(cardinality)
Example #4
    def test_no_class(self):
        result = Result(self.cardinality)
        result.number_of_classes = 0
        result.update(torch.Tensor([[0.4, 0.3], [0.3, 0.4], [0.4, 0.3],
                                    [0.3, 0.4]]),
                      labels=[0, 1, 1, 0])
        result.computing_result()

        self.assertEqual(result.micro_precision, 0)
        self.assertEqual(result.micro_recall, 0)
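
The point of this test is the degenerate-input guard: with number_of_classes forced to 0 there is no support to aggregate, and computing_result is expected to report 0 rather than divide by zero. A minimal sketch of such a guard (the helper below is hypothetical, not the library's code):

    def safe_ratio(numerator, denominator):
        # Hypothetical guard mirroring what the test expects: report 0
        # rather than raising ZeroDivisionError when there is no support.
        return numerator / denominator if denominator else 0

    assert safe_ratio(0, 0) == 0    # the micro_precision / micro_recall case
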
Example #5
    def test_compute(self):
        result = Result(self.cardinality)
        result.update(torch.Tensor([[0.4, 0.3], [0.3, 0.4], [0.4, 0.3],
                                    [0.3, 0.4]]),
                      labels=[0, 1, 1, 0])

        result.computing_result()
        self.assertEqual(result.global_TP, 2)
        self.assertEqual(result.global_FP, 2)
        self.assertEqual(result.global_FN, 2)

        self.assertEqual(result.macro_precision, 0.50)
        self.assertEqual(result.macro_recall, 0.50)
        self.assertTrue(math.isclose(result.micro_recall, 1 / 3))
        self.assertTrue(math.isclose(result.micro_precision, 1 / 3))
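
The asserted values can be checked by hand. With three classes (Example #8) and the counts from Example #2, classes 0 and 1 each have TP = FP = FN = 1 and class 2 has no support, so globally TP = FP = FN = 2. The global ratio TP / (TP + FP) is 0.50, and the unweighted per-class average is (0.5 + 0.5 + 0) / 3 = 1/3. These match macro_precision and micro_precision respectively, so the naming here appears to be swapped relative to the usual convention (in scikit-learn terms the global ratio is the micro average). A minimal sketch of both computations:

    import torch

    # 3-class confusion matrix implied by Examples #2 and #8: a 2x2 block
    # of ones, padded with a zero row/column for the unused third class.
    conf = torch.zeros(3, 3)
    conf[:2, :2] = 1.0

    tp = conf.diag()                    # per-class true positives
    fp = conf.sum(dim=0) - tp           # column sums minus the diagonal
    fn = conf.sum(dim=1) - tp           # row sums minus the diagonal

    global_ratio = tp.sum() / (tp.sum() + fp.sum())      # 2 / 4 = 0.50
    per_class = tp / (tp + fp).clamp(min=1)              # [0.5, 0.5, 0.0]
    print(global_ratio.item(), per_class.mean().item())  # 0.5  0.333...
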
Example #6
    def test(self,
             validation_split=0.6,
             subsample=False,
             subsample_split=0.01):
        """Test the model

        Args:
            validation_split (float, optional): ratio between the testing and learning sets. Defaults to 0.6.
            subsample (bool, optional): if False, use all available data; if True, use only a fraction of it (subsample_split * data). Defaults to False.
            subsample_split (float, optional): fraction of the data to use. Defaults to 0.01.
        """
        dataloader_test = self.create_dataloader(
            validation_split=validation_split,
            condition="Test",
            subsample=subsample,
            subsample_split=subsample_split)
        result = Result(self.dataset, condition="Test", subsample=subsample)
        if self.model == -1:
            self.load_model()
        self.model.eval()
        self.conf_matrix = torch.zeros(self.dataset.number_of_classes,
                                       self.dataset.number_of_classes)
        for index_batch, batch in enumerate(dataloader_test):
            label = batch['output']
            input_data = batch['input'].to(self.device)
            prediction = self.model(input_data)
            result.update(prediction, label)
            if index_batch % self.batch_result == 0:
                result.computing_result(reinit=False,
                                        progress=index_batch /
                                        len(dataloader_test))
        self.model.train()
        self.saver.save(model=self.model, result=result, condition="Test")
        result.computing_result(reinit=True, progress=1)
        self.stopping_condition.update(result.microf1)
        self.stopping_condition.stop()
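
One detail worth noting in this evaluation loop: autograd is never disabled, so every forward pass still records gradients that are never used. A minimal sketch of the same eval()/train() pattern under torch.no_grad(), with a stand-in linear model (all names here are placeholders, not the project's API):

    import torch
    import torch.nn as nn

    model = nn.Linear(8, 3)                  # stand-in for LSTMLayer
    batches = [torch.randn(4, 8) for _ in range(5)]

    model.eval()                             # disable dropout, etc.
    with torch.no_grad():                    # skip autograd bookkeeping
        for batch in batches:
            prediction = model(batch)
            # ... result.update(prediction, label) would go here ...
    model.train()                            # restore training mode
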
Example #7
    def test_print(self):
        result = Result(self.cardinality, "Test")
        result.number_of_classes = 0
        result.update(torch.Tensor([[0.4, 0.3], [0.3, 0.4], [0.4, 0.3],
                                    [0.3, 0.4]]),
                      labels=[0, 1, 1, 0])
        result.computing_result()

        result = Result(self.cardinality)
        result.number_of_classes = 0
        result.update(torch.Tensor([[0.4, 0.3], [0.3, 0.4], [0.4, 0.3],
                                    [0.3, 0.4]]),
                      labels=[0, 1, 1, 0])
        result.computing_result()
Example #8
    def test_creating(self):
        result = Result(self.cardinality)
        self.assertEqual(result.number_of_classes, 3)
Example #9
    def train(self, validation_split=0.6, resuming=False):
        """Train the model

        Args:
            validation_split (float, optional): ratio between the testing and learning sets. Defaults to 0.6.
            resuming (bool, optional): resume learning from a previous step. Not implemented yet. Defaults to False.
        """
        # Create the dataloader
        dataloader_train = self.create_dataloader(
            validation_split=validation_split, condition="train")
        if resuming:
            self.load_model()
        else:
            self.model = LSTMLayer(num_classes=self.dataset.number_of_classes,
                                   batch_size=self.batch_size).to(self.device)
        # Create the results
        result = Result(self.dataset, condition="Train")
        optimizer = optim.Adam(self.model.parameters())
        loss_fn = nn.CrossEntropyLoss()
        self.model.train()
        logger.info("Cardinality: " + str(self.cardinality) +
                    " Starting the learning step")
        # Start the learning
        while not self.stopping_condition.stop():
            for index_batch, batch in enumerate(dataloader_train):
                optimizer.zero_grad()
                label = batch['output']
                input_data = batch['input'].to(self.device)
                prediction = self.model(input_data)
                loss = loss_fn(prediction, label.to(self.device))
                loss.backward()
                optimizer.step()
                result.update(prediction, label)
                # Compute the results every 2000 batches (self.batch_result).
                if index_batch % self.batch_result == 0 and index_batch != 0:
                    result.computing_result(progress=index_batch /
                                            len(dataloader_train))
                    self.saver.save(model=self.model,
                                    result=result,
                                    condition="temp")
                    if not self.exlude_test:
                        # Test only on a subsample
                        self.test(subsample=True, subsample_split=0.1)
                    print(self.stopping_condition)
                # For the timer method only, check the stopping condition at every batch
                if (self.stopping_condition.method == "timer"
                        and self.stopping_condition.stop()):
                    logger.debug("[Stopping] Cardinality: " +
                                 str(self.cardinality) + " " +
                                 str(self.stopping_condition) +
                                 " stopping learning step.")
                    break
            # At the end of each epoch, evaluate on the full testing set and update the condition
            if not self.exlude_test:
                self.test()
            result.computing_result(reinit=True, progress=1)
            if self.stopping_condition.stop():
                logger.debug("[Stopping] Cardinality: " +
                             str(self.cardinality) + " " +
                             str(self.stopping_condition) +
                             " stopping learning step.")
            self.saver.save(model=self.model, result=result, condition="Train")