Example #1
    def test_multi_class_summary_even_targets(self):
        # df = DF_TEST_MULTI_CASS
        target = np.arange(1, 5)
        price = np.arange(0, 5) + 0.5
        labels = [
            one_hot(np.random.randint(0, len(target)),
                    len(target) + 1).tolist() for _ in range(len(price))
        ]

        predictions = range(len(price))
        expected_losses = [0.0, -0.5, -0.5, 0]
        for prediction, expected_loss in zip(predictions, expected_losses):
            df = pd.DataFrame([target for _ in range(len(price))])
            df.columns = pd.MultiIndex.from_product([[TARGET_COLUMN_NAME],
                                                     df.columns])
            df[(GROSS_LOSS_COLUMN_NAME, "loss")] = price
            df[(PREDICTION_COLUMN_NAME, "prediction")] = [
                one_hot(prediction,
                        len(target) + 1).tolist() for _ in range(len(price))
            ]
            df[(LABEL_COLUMN_NAME, "label")] = labels
            print(df)

            s = WeightedClassificationSummary(df,
                                              None,
                                              classes=len(target) + 1)
            # assumed intent of the otherwise unused expected_loss: the
            # negative part of the gross loss sums to the expected value
            # (mirrors the assertion in the odd-targets variant below)
            self.assertEqual(s.df_gross_loss["loss"].clip(upper=0).sum(),
                             expected_loss)
            print(s.df_gross_loss)
Example #2
    def test__parabolic_crossentropy(self):
        """when"""
        categories = 11
        loss_function = ParabolicPenaltyLoss(categories, 1)
        """then"""
        for truth in range(categories):
            losses = []
            for prediction in range(categories):
                loss = loss_function(t.tensor(one_hot(prediction, categories)),
                                     t.tensor(one_hot(truth, categories)))
                losses.append(loss)

            # losses must strictly decrease as the prediction approaches
            # the truth from the left
            for i in range(1, truth):
                self.assertGreater(losses[i - 1], losses[i])

            # losses must strictly increase as the prediction moves further
            # right of the truth
            for i in range(truth, categories - 1):
                self.assertLess(losses[i], losses[i + 1])

            # the tails are asymmetric: the longer side of the parabola
            # is penalized more heavily
            if 0 < truth < categories - 1:
                if truth > categories / 2:
                    # truth in the right half: the left neighbour costs more
                    self.assertGreater(losses[truth - 1], losses[truth + 1])
                else:
                    # truth in the left half: the right neighbour costs more
                    self.assertGreater(losses[truth + 1], losses[truth - 1])
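
The assertions above pin down the shape of ParabolicPenaltyLoss: minimal at the truth, strictly growing with distance on either side, and with asymmetric tails. As a rough illustration of the core idea only (an assumed sketch, not the library's implementation, and deliberately ignoring the tail asymmetry), a penalty quadratic in the distance between soft class indices could look like this:

import torch as t

class ParabolicPenaltySketch:
    # Illustrative sketch only: the penalty grows quadratically with the
    # distance between the predicted and the true class index. The real
    # ParabolicPenaltyLoss additionally has the asymmetric tails probed
    # by the last assertions above.
    def __init__(self, categories, delta=1.0):
        self.idx = t.arange(categories, dtype=t.float64)
        self.delta = delta

    def __call__(self, y_pred, y_true):
        # a temperature-sharpened softmax acts as a differentiable argmax
        pred_idx = (t.softmax(y_pred * 1e3, dim=-1) * self.idx).sum(-1)
        true_idx = (t.softmax(y_true * 1e3, dim=-1) * self.idx).sum(-1)
        return self.delta * (pred_idx - true_idx) ** 2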
Example #3
 def test__tailed_categorical_crossentropy(self):
     """given"""
     categories = 11
     loss = tailed_categorical_crossentropy(categories, 1)
     """when"""
     truth = K.constant(one_hot(3, 11))
     prediction = K.softmax(K.constant(one_hot(6, 11)))
     l = K.eval(loss(truth, prediction))
     """then"""
     np.testing.assert_almost_equal(l, 11.817837, decimal=5)
     save_object(loss, '/tmp/test__tailed_categorical_crossentropy.dill')
Example #4
 def test__tailed_categorical_crossentropy(self):
     """given"""
     categories = 11
     loss = TailedCategoricalCrossentropyLoss(categories, 1)
     """when"""
     truth = one_hot(3, 11)
     l = loss(t.tensor([one_hot(6, 11)]), t.tensor([truth]))
     l2 = loss(t.tensor([one_hot(6, 11), one_hot(9, 11)]),
               t.tensor([truth, truth]))
     """then"""
     np.testing.assert_almost_equal(l, 11.817837, decimal=5)
     np.testing.assert_array_almost_equal(l2, [11.817837, 42.46247],
                                          decimal=5)
     self.assertGreater(l.mean().numpy(), 0)
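
Note that the same loss object handles a single-row batch and a multi-row batch alike: the two-sample call returns one loss per row, which is why l2 is compared against a two-element array while l reduces to a scalar.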
Example #5
 def test_one_hot(self):
     """given integer numbers"""
     x = np.arange(5)
     """when one hot encoded"""
     ohx = one_hot(x, None)
     """then x is one hot encoded"""
     np.testing.assert_array_equal(ohx, np.eye(5))
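
This test also documents the one_hot contract the other examples rely on: a scalar index yields a single one-hot row, an integer array yields a matrix of one-hot rows, and passing None infers the width from the data. A minimal compatible sketch (an assumption, not the library's actual code):

import numpy as np

def one_hot(x, categories=None):
    # hypothetical re-implementation consistent with the tests shown here;
    # with categories=None the width is inferred as max(x) + 1
    x = np.asarray(x)
    n = int(x.max()) + 1 if categories is None else categories
    return np.eye(n)[x]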
Example #6
    def test_multi_class_summary_odd_targets(self):
        # df = DF_TEST_MULTI_CASS
        target = np.arange(1, 4)
        price = np.arange(0, 4) + 0.5
        labels = [one_hot(np.arange(0, 4), len(target) + 1).tolist() for _ in range(len(price))]

        predictions = range(len(price))
        expected_losses = [0.0, -0.5, -0.5, 0]
        for prediction, expected_loss in zip(predictions, expected_losses):
            df = pd.DataFrame([target for _ in range(len(price))])
            df.columns = pd.MultiIndex.from_product([[TARGET_COLUMN_NAME], df.columns])
            df[(GROSS_LOSS_COLUMN_NAME, "loss")] = price
            df[(PREDICTION_COLUMN_NAME, "prediction")] = [one_hot(prediction, len(target) + 1).tolist() for _ in range(len(price))]
            df[(LABEL_COLUMN_NAME, "label")] = labels

            s = WeightedClassificationSummary(df, classes=len(target) + 1)
            self.assertEqual(s.df_gross_loss["loss"].clip(upper=0).sum(), expected_loss)
            print(s._gross_confusion())
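
The clip(upper=0) call keeps only the negative part of the gross loss, so the sum is the realized loss for that prediction run; this is why every value in expected_losses is non-positive.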
Example #7
    def test__differentiable_argmax(self):
        """given"""
        args = 10
        argmax = DifferentiableArgmax(args)
        """when"""

        res = np.array(
            [argmax(t.tensor(one_hot(i, args))).numpy() for i in range(args)])
        """then"""
        print(res)
        np.testing.assert_array_almost_equal(res, np.arange(0, args))
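
A differentiable argmax is commonly approximated by a temperature-sharpened softmax weighted with the index vector; the sketch below illustrates that idea (an assumed approach, not necessarily how DifferentiableArgmax is implemented):

import torch as t

class SoftArgmax:
    # softmax(beta * x) is nearly one-hot at the maximum, so the weighted
    # sum of the indices approximates argmax while staying differentiable
    def __init__(self, nr_of_categories, beta=1e3):
        self.idx = t.arange(nr_of_categories, dtype=t.float64)
        self.beta = beta

    def __call__(self, x):
        return (t.softmax(self.beta * x, dim=-1) * self.idx).sum(-1)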
Example #8
    def test_normal_penalized_crossentropy(self):
        """when"""
        loss = normal_penalized_crossentropy(11)
        """then"""
        # a perfect prediction yields (near) zero loss
        for i in range(11):
            self.assertLess(
                K.eval(loss(K.variable(one_hot(i, 11)),
                            K.variable(one_hot(i, 11)))), 0.00001)

        # for a truth in the upper half, over-predicting by one is penalized
        # less than under-predicting by one
        self.assertLess(
            K.eval(loss(K.variable(one_hot(7, 11)), K.variable(one_hot(8, 11)))),
            K.eval(loss(K.variable(one_hot(7, 11)), K.variable(one_hot(6, 11)))))

        self.assertLess(
            K.eval(loss(K.variable(one_hot(6, 11)), K.variable(one_hot(7, 11)))),
            K.eval(loss(K.variable(one_hot(6, 11)), K.variable(one_hot(5, 11)))))

        # for a truth in the lower half, under-predicting by one is penalized
        # less than over-predicting by one
        self.assertLess(
            K.eval(loss(K.variable(one_hot(3, 11)), K.variable(one_hot(2, 11)))),
            K.eval(loss(K.variable(one_hot(3, 11)), K.variable(one_hot(4, 11)))))