    def _score_(self, y_predicted, y_actual):
        # Confusion matrix layout (see Example no. 7): [[TP, FP], [TN, FN]]
        confusion_matrix = LogisticRegressionUtil.get_confusion_matrix(
            y_predicted, y_actual)
        accuracy = LogisticRegressionUtil.calculate_accuracy(
            confusion_matrix[0][0], confusion_matrix[0][1],
            confusion_matrix[1][0], confusion_matrix[1][1])
        return accuracy
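The nested indexing above is easier to read against the matrix layout that Example no. 7 below pins down: [[TP, FP], [TN, FN]]. A minimal equivalent sketch, using hypothetical counts that are not from the source:

confusion_matrix = [[100, 10], [50, 5]]  # hypothetical counts: [[TP, FP], [TN, FN]]
tp, fp = confusion_matrix[0]
tn, fn = confusion_matrix[1]
accuracy = (tp + tn) / (tp + fp + tn + fn)  # (100 + 50) / 165 == 0.9090909090909091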
Example no. 2
    def test_calculate_false_positive_rate(self):
        true_negatives = 50
        false_positives = 10
        expected = 0.16666666666666666
        actual = LogisticRegressionUtil.calculate_false_positive_rate(
            false_positives, true_negatives)
        self.assertEqual(expected, actual)
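The expected value 10 / (10 + 50) fixes the formula. A minimal sketch of calculate_false_positive_rate consistent with this test, written here as a plain function rather than the class's actual static method (which may differ):

def calculate_false_positive_rate(false_positives: int, true_negatives: int) -> float:
    # FPR = FP / (FP + TN); here 10 / 60 == 0.16666666666666666
    return false_positives / (false_positives + true_negatives)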
Example no. 3
    def test_calculate_recall(self):
        true_positives = 100
        false_negatives = 5
        expected = 0.9523809523809523
        actual = LogisticRegressionUtil.calculate_recall(
            true_positives, false_negatives)
        self.assertEqual(expected, actual)
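Likewise, 100 / (100 + 5) implies the standard recall formula. A sketch consistent with the test, again in plain-function form as an assumption rather than the source implementation:

def calculate_recall(true_positives: int, false_negatives: int) -> float:
    # Recall (TPR) = TP / (TP + FN); here 100 / 105 == 0.9523809523809523
    return true_positives / (true_positives + false_negatives)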
Example no. 4
    def test_calculate_precision(self):
        true_positives = 100
        false_positives = 10

        expected = 0.9090909090909091
        actual = LogisticRegressionUtil.calculate_precision(
            true_positives, false_positives)
        self.assertEqual(expected, actual)
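The expected 100 / (100 + 10) matches the usual precision definition. A hedged sketch in the same plain-function form:

def calculate_precision(true_positives: int, false_positives: int) -> float:
    # Precision = TP / (TP + FP); here 100 / 110 == 0.9090909090909091
    return true_positives / (true_positives + false_positives)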
Example no. 5
    def test_calculate_f_measure(self):
        true_positives = 100
        true_negatives = 50
        false_positives = 10
        false_negatives = 5

        expected = 0.9302325581395349
        actual = LogisticRegressionUtil.calculate_f_measure(
            true_positives, false_positives, false_negatives)
        self.assertEqual(expected, actual)
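Note that true_negatives is assigned but never passed: the F-measure depends only on TP, FP, and FN. The expected value equals 2*TP / (2*TP + FP + FN) = 200 / 215, i.e. the F1 score. A sketch consistent with that, assumed form rather than the source:

def calculate_f_measure(true_positives: int, false_positives: int,
                        false_negatives: int) -> float:
    # F1 = 2*TP / (2*TP + FP + FN); here 200 / 215 == 0.9302325581395349
    return 2 * true_positives / (
        2 * true_positives + false_positives + false_negatives)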
Example no. 6
    def test_calculate_accuracy(self):
        true_positives = 100
        true_negatives = 50
        false_positives = 10
        false_negatives = 5

        expected = 0.9090909090909091
        actual = LogisticRegressionUtil.calculate_accuracy(
            true_positives, false_positives, true_negatives, false_negatives)
        self.assertEqual(expected, actual)
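The argument order here (TP, FP, TN, FN) matches the call in _score_ above, and the expected value is (100 + 50) / 165, so a consistent sketch is:

def calculate_accuracy(true_positives: int, false_positives: int,
                       true_negatives: int, false_negatives: int) -> float:
    # Accuracy = (TP + TN) / (TP + FP + TN + FN); here 150 / 165 == 0.9090909090909091
    total = true_positives + false_positives + true_negatives + false_negatives
    return (true_positives + true_negatives) / total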
Example no. 7
    def test_confusion_matrix(self):
        y_predicted = np.array([0, 1, 0, 1, 1])
        y_actual = np.array([0, 1, 1, 0, 1])
        actual_confusion_matrix = LogisticRegressionUtil.get_confusion_matrix(
            y_predicted, y_actual)
        expected_true_positives_count = 2
        expected_true_negatives_count = 1
        expected_false_positives_count = 1
        expected_false_negatives_count = 1
        expected_confusion_matrix = [
            [expected_true_positives_count, expected_false_positives_count],
            [expected_true_negatives_count, expected_false_negatives_count],
        ]
        self.assertEqual(expected_confusion_matrix, actual_confusion_matrix)
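The test fixes both the counts and the (non-standard) layout [[TP, FP], [TN, FN]]. One plausible NumPy implementation that reproduces the expected list of lists; this counting logic is an assumption, the source may differ:

import numpy as np

def get_confusion_matrix(y_predicted: np.ndarray, y_actual: np.ndarray) -> list:
    tp = int(np.sum((y_predicted == 1) & (y_actual == 1)))
    fp = int(np.sum((y_predicted == 1) & (y_actual == 0)))
    tn = int(np.sum((y_predicted == 0) & (y_actual == 0)))
    fn = int(np.sum((y_predicted == 0) & (y_actual == 1)))
    return [[tp, fp], [tn, fn]]  # layout implied by the test: [[TP, FP], [TN, FN]]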
Example no. 8
    def test_calculate_ROC(self):
        y_predicted = np.array([0, 1, 0, 1, 1])
        y_actual = np.array([0, 1, 1, 0, 1])
        # Expected shape: ([TPR, FPR], confusion_matrix)
        expected = ([0.6666666666666666, 0.5], [[2, 1], [1, 1]])
        actual = LogisticRegressionUtil.calculate_ROC(y_predicted, y_actual)
        self.assertEqual(expected, actual)

    def calculate_roc_point(self, X_actual: np.ndarray, y_actual: np.ndarray,
                            threshold: float) -> tuple[list, list]:
        y_predicted = self.predict(X_actual, threshold)
        return LogisticRegressionUtil.calculate_ROC(y_predicted, y_actual)
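The expected tuple shows calculate_ROC returning a single ROC point plus the confusion matrix it was derived from. A sketch that reuses the hypothetical get_confusion_matrix above and reproduces ([2/3, 1/2], [[2, 1], [1, 1]]) on the test inputs:

def calculate_ROC(y_predicted: np.ndarray, y_actual: np.ndarray) -> tuple:
    matrix = get_confusion_matrix(y_predicted, y_actual)  # [[TP, FP], [TN, FN]]
    (tp, fp), (tn, fn) = matrix
    true_positive_rate = tp / (tp + fn)   # 2 / 3
    false_positive_rate = fp / (fp + tn)  # 1 / 2
    return [true_positive_rate, false_positive_rate], matrix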