# Example #1 (score: 0)
def test_entropy_func_three_classes():
    # Three scenarios for a 3-class problem: near-uniform supports give
    # competence ~0, confident-and-correct gives 1, confident-but-wrong
    # gives -1 (sign flips on misclassification).
    supports = np.array([[0.33, 0.33, 0.33],
                         [1.0, 0.0, 0.0],
                         [1.0, 0.0, 0.0]])
    hits = np.array([0, 1, 0])
    expected = np.array([0.0, 1.0, -1.0])
    computed = entropy_func(3, supports, hits)
    assert np.allclose(computed, expected, atol=0.01)
# Example #2 (score: 0)
    def source_competence(self):
        r"""Compute the source of competence using the KL-divergence method.

        For every base classifier, the vector of class supports at each
        validation point :math:`\mathbf{x}_{k}` (taken from
        ``self.dsel_scores``) is compared — inside ``entropy_func`` — against
        the output of a random classifier RC = 1/L, L being the number of
        classes in the problem. The resulting value is negative whenever the
        base classifier misclassified the instance :math:`\mathbf{x}_{k}`
        (according to ``self.processed_dsel``).

        Returns
        ----------
        C_src : array of shape = [n_samples, n_classifiers]
                The competence source for each base classifier at each data point.
        """
        competence = np.zeros((self.n_samples, self.n_classifiers))
        for idx in range(self.n_classifiers):
            # Per-classifier class supports and per-sample correctness flags.
            clf_supports = self.dsel_scores[:, idx, :]
            clf_hits = self.processed_dsel[:, idx]
            competence[:, idx] = entropy_func(self.n_classes, clf_supports,
                                              clf_hits)

        return competence
# Example #3 (score: 0)
def test_entropy_func_parameter_shape():
    # Supports for two samples but only one correctness label: the shape
    # mismatch must be rejected with a ValueError.
    bad_supports = np.array([0, 1])
    bad_labels = np.array([0])
    with pytest.raises(ValueError):
        entropy_func(2, bad_supports, bad_labels)