Example #1
0
    def test_model(self):
        """Evaluate the model on every batched test split.

        Iterates over ``self.testing_dict`` (key -> iterable of
        ``(x_minibatch, y_minibatch)`` pairs), scores each minibatch with
        ``self.model.predict``, and records per-key accuracy.

        Returns:
            dict: maps each testing_dict key to a tuple
            ``(accuracy, total_example_count)``.
        """
        print("Entered testing phase")
        result_dict = {}
        self.dump_dict = {}

        # Only build the test split if it does not exist yet; calling
        # demark_testing() unconditionally re-partitions the data on every
        # invocation (the sibling test_model variants already guard this).
        if not hasattr(self, "testing_dict"):
            self.demark_testing()

        with torch.no_grad():  # inference only — no gradient bookkeeping
            for keys in self.testing_dict:
                self.dump_dict[keys] = []
                accuracy = 0
                total_example = 0
                for x_test_minibatch, y_test_minibatch in (
                        self.testing_dict[keys]):
                    total_example += x_test_minibatch.shape[0]
                    predicted = self.model.predict(
                        x_test_minibatch.to(torch.long))
                    # Count exact label/prediction matches in this minibatch.
                    accuracy += np.sum(y_test_minibatch.numpy() ==
                                       predicted.squeeze().numpy())
                    self.update_dump_dict(keys, x_test_minibatch,
                                          y_test_minibatch, predicted)

                result_dict[keys] = (accuracy / total_example, total_example)

        dump_dict_to_csv(self.dump_dict)
        self.log(str(result_dict))
        return result_dict
    def test_model(self):
        """Evaluate the model one example at a time over every test split.

        For each key in ``self.testing_dict``, feeds every ``(x, y)`` pair
        through ``self.model`` individually, derives a binary prediction
        from the two output scores, and records per-key accuracy.

        Returns:
            dict: maps each testing_dict key to a tuple
            ``(accuracy, total_example_count)``.
        """
        print("Entered testing phase")
        result_dict = {}
        self.dump_dict = {}
        if not hasattr(self, "testing_dict"):
            self.demark_testing()

        with torch.no_grad():
            for split_name in self.testing_dict:
                self.dump_dict[split_name] = []
                num_correct = 0
                num_seen = 0
                for sample, label in self.testing_dict[split_name]:
                    num_seen += 1
                    label = np.asarray(label)
                    sample = torch.tensor(sample, dtype=torch.long)
                    scores, _, _ = self.model(sample)
                    # Class 0 only on a strictly greater score — ties go to
                    # class 1, so this is intentionally not an argmax.
                    predicted = 0 if scores[0][0] > scores[0][1] else 1
                    if predicted == label:
                        num_correct += 1
                    self.update_dump_dict(split_name, sample, label, predicted)

                result_dict[split_name] = (num_correct / num_seen, num_seen)

        dump_dict_to_csv(self.dump_dict)
        self.log(str(result_dict))
        return result_dict
Example #3
0
    def test_model(self):
        """Evaluate the model over every test split in batched fashion.

        For each key in ``self.testing_dict``, wraps the split in a
        ``BatchedDataset``/``DataLoader`` pair (batch size
        ``self.batch_size``), scores each batch with ``self.model``, and
        records per-key accuracy.

        Returns:
            dict: maps each testing_dict key to a tuple
            ``(accuracy, total_example_count)``.
        """
        print("Entered testing phase")
        result_dict = {}
        self.dump_dict = {}
        if not hasattr(self, "testing_dict"):
            self.demark_testing()

        with torch.no_grad():
            for keys in self.testing_dict:
                x_, y_ = zip(*self.testing_dict[keys])
                x_ = np.asarray(list(x_))
                y_ = np.asarray(list(y_))
                batch_generator = BatchedDataset(x_, y_)
                DataGenerator = DataLoader(
                    batch_generator,
                    batch_size=self.batch_size,
                    drop_last=False
                )  # keep the final partial batch so every example is scored
                self.dump_dict[keys] = []
                accuracy = 0
                total_example = 0
                for x_test, y_test in DataGenerator:
                    # DataLoader already yields tensors; Tensor.to() moves /
                    # casts them without the torch.tensor(tensor) copy warning.
                    # NOTE(review): `device` is assumed to be a module-level
                    # torch.device defined elsewhere in this file.
                    x_test = x_test.to(device=device, dtype=torch.long)
                    y_test = y_test.to(device=device, dtype=torch.long)
                    m = x_test.shape[0]
                    total_example += m
                    assert m == y_test.shape[0]
                    x_test = x_test.view(m, -1)
                    y_test = y_test.view(m, )
                    pred, _, _ = self.model(x_test)
                    y_hat = torch.argmax(pred, dim=1).view(m, )
                    accuracy += torch.sum(y_hat == y_test).item()
                    # BUG FIX: the original passed the undefined name
                    # `predicted` here (a NameError at runtime, left over
                    # from the unbatched variant); the batched predictions
                    # are `y_hat`.
                    self.update_dump_dict(keys, x_test, y_test, y_hat)

                result_dict[keys] = (accuracy / total_example, total_example)

        dump_dict_to_csv(self.dump_dict)
        self.log(str(result_dict))
        return result_dict