def test_learner_adequacy(self):
    # housing has a continuous target; naive Bayes requires a discrete class
    table = Table("housing")
    learner = NaiveBayesLearner()
    self.assertRaises(ValueError, learner, table)
Example #2
    @staticmethod
    def get_points_from_coeffs(current_value, coefficients, possible_values):
        if np.isnan(possible_values).any():
            return 0
        indices = np.argsort(possible_values)
        sorted_values = possible_values[indices]
        sorted_coefficients = coefficients[indices]
        # find the first anchor value greater than current_value
        for i, val in enumerate(sorted_values):
            if current_value < val:
                break
        # linearly interpolate between the coefficient * value products
        # at the two neighbouring anchors
        diff = sorted_values[i] - sorted_values[i - 1]
        k = 0 if diff < 1e-6 else (sorted_values[i] - current_value) / diff
        return sorted_coefficients[i - 1] * sorted_values[i - 1] * k + \
               sorted_coefficients[i] * sorted_values[i] * (1 - k)


if __name__ == "__main__":
    from Orange.classification import NaiveBayesLearner, \
        LogisticRegressionLearner
    from AnyQt.QtWidgets import QApplication

    app = QApplication([])
    ow = OWNomogram()
    titanic = Table("titanic")
    clf = NaiveBayesLearner()(titanic)
    # clf = LogisticRegressionLearner()(titanic)
    ow.set_classifier(clf)
    ow.set_instance(titanic[0:])
    ow.show()
    app.exec_()
    ow.saveSettings()
Example #3
        # build evenly spaced, rounded tick values between start and stop
        if not round_to_nearest:
            _range = np.arange(start + step, stop + r, step) - r
            start, stop = np.floor(start * 100) / 100, np.ceil(stop * 100) / 100
            return np.round(np.hstack((start, _range, stop)), 2)
        return np.round(np.arange(start, stop + r + step, step) - r, round_by)

    @staticmethod
    def get_points_from_coeffs(current_value, coefficients, possible_values):
        if np.isnan(possible_values).any():
            return 0
        # pylint: disable=undefined-loop-variable
        indices = np.argsort(possible_values)
        sorted_values = possible_values[indices]
        sorted_coefficients = coefficients[indices]
        # find the first anchor value greater than current_value
        for i, val in enumerate(sorted_values):
            if current_value < val:
                break
        # linearly interpolate between the coefficient * value products
        # at the two neighbouring anchors
        diff = sorted_values[i] - sorted_values[i - 1]
        k = 0 if diff < 1e-6 else (sorted_values[i] - current_value) / diff
        return sorted_coefficients[i - 1] * sorted_values[i - 1] * k + \
               sorted_coefficients[i] * sorted_values[i] * (1 - k)
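
A minimal usage sketch of this interpolation, assuming the staticmethod lives on OWNomogram as the surrounding snippet suggests (the anchor arrays below are illustrative, not from Orange):

import numpy as np

# hypothetical anchors: three feature values with their nomogram coefficients
possible_values = np.array([0.0, 1.0, 2.0])
coefficients = np.array([0.0, 0.5, 1.0])

# 0.5 falls between the anchors 0.0 and 1.0, so the result blends the
# coefficient * value products at those anchors: 0.5 * 0.0 + 0.5 * 0.5 = 0.25
print(OWNomogram.get_points_from_coeffs(0.5, coefficients, possible_values))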


if __name__ == "__main__":  # pragma: no cover
    from Orange.classification import NaiveBayesLearner  #, LogisticRegressionLearner
    data = Table("heart_disease")
    clf = NaiveBayesLearner()(data)
    # clf = LogisticRegressionLearner()(data)
    WidgetPreview(OWNomogram).run(set_classifier=clf, set_data=data)
Example #4
def test_nomogram_nb_multiclass(self):
    """Check the probabilities the naive Bayes classifier yields for
    various class values and radio-button settings on multiclass data"""
    cls = NaiveBayesLearner()(self.lenses)
    self._test_helper(cls, [19, 53, 13])
Example #5
    def create_learner(self):
        class IdentityWrapper(Learner):
            # lets preprocessors be attached without mutating the
            # user-supplied base learner in place
            def fit_storage(self, data):
                return self.base_learner.fit_storage(data)

        if self.base_learner is None:
            return None
        learner = self.base_learner
        if self.calibration != self.NoCalibration:
            # wrap the learner to calibrate probabilities (sigmoid or isotonic)
            learner = CalibratedLearner(learner,
                                        self.CalibrationMap[self.calibration])
        if self.threshold != self.NoThresholdOptimization:
            # wrap again to optimize the decision threshold
            learner = ThresholdLearner(learner,
                                       self.ThresholdMap[self.threshold])
        if self.preprocessors:
            if learner is self.base_learner:
                learner = IdentityWrapper()
            learner.preprocessors = (self.preprocessors, )
        return learner

    def get_learner_parameters(self):
        return (("Calibrate probabilities",
                 self.CalibrationOptions[self.calibration]),
                ("Threshold optimization",
                 self.ThresholdOptions[self.threshold]))
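
The same wrapping chain can be built outside the widget; a minimal sketch, assuming Orange's Orange.classification.calibration module with its CalibratedLearner and ThresholdLearner classes:

from Orange.data import Table
from Orange.classification import NaiveBayesLearner
from Orange.classification.calibration import (
    CalibratedLearner, ThresholdLearner)

data = Table("heart_disease")
# calibrate probabilities with the sigmoid method, then optimize the
# decision threshold for classification accuracy
learner = ThresholdLearner(
    CalibratedLearner(NaiveBayesLearner(), CalibratedLearner.Sigmoid),
    ThresholdLearner.OptimizeCA)
model = learner(data)
print(model(data[:5]))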


if __name__ == "__main__":  # pragma: no cover
    WidgetPreview(OWCalibratedLearner).run(
        Table("heart_disease"),
        set_learner=NaiveBayesLearner())
Example #6
def test_no_data(self):
    self.assertRaises(ValueError,
                      CrossValidation,
                      learners=[NaiveBayesLearner()])
Example #7
@classmethod
def setUpClass(cls):
    cls.data = data = Table('titanic')
    cls.learner = NaiveBayesLearner()
    cls.table = data[::20]
Example #8
from Orange.data import Domain, Table
from Orange.classification import LogisticRegressionLearner
from Orange.classification import NaiveBayesLearner
from Orange.classification import TreeLearner
from Orange.classification import RandomForestLearner
from Orange.classification import KNNLearner
from Orange.classification import SVMLearner

### create models ###

models = [
    LogisticRegressionLearner(),
    NaiveBayesLearner(),
    TreeLearner(),
    RandomForestLearner(),
    KNNLearner(),
    SVMLearner(),
]

### read train data ###

train = Table.from_file('train.csv')
# move `sex` from X to Y (from attributes/features to class_var/target)
domain = Domain(train.domain.attributes[1:], train.domain.attributes[0])
train = train.transform(domain)

print('\n=== train.X ===')
print(train.X)
print('\n=== train.Y ===')
print(train.Y)
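
The script defines the models list but never fits it; a minimal continuation, assuming the CrossValidation call style used elsewhere on this page:

from Orange.evaluation import CrossValidation, CA

# score every learner with 5-fold cross-validation and report accuracy
results = CrossValidation(k=5)(train, models)
for learner, ca in zip(models, CA(results)):
    print(learner.name, 'CA = %.3f' % ca)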
Example #9
def test_folds(self):
    res = CrossValidation(k=5)(self.random_table, [NaiveBayesLearner()])
    self.check_folds(res, 5, self.nrows)

def test_njobs(self):
    with patch('Orange.evaluation.testing.CrossValidation._MIN_NJOBS_X_SIZE', 1):
        res = CrossValidation(self.random_table, [NaiveBayesLearner()], k=5, n_jobs=3)
    self.check_folds(res, 5, self.nrows)
Example #11
@classmethod
def setUpClass(cls):
    data = Table('titanic')
    cls.learner = NaiveBayesLearner()
    cls.model = cls.learner(data)
    cls.table = data[::20]
Example #12
def test_predict_numpy(self):
    bayes = NaiveBayesLearner()
    c = bayes(self.titanic)
    X = self.titanic.X[::20]
    c(X)
    # request both predicted values and class probabilities
    vals, probs = c(X, c.ValueProbs)
Example #13
def test_predict_table(self):
    bayes = NaiveBayesLearner()
    c = bayes(self.titanic)
    table = self.titanic[::20]
    c(table)
    vals, probs = c(table, c.ValueProbs)
Example #14
def test_predict_single_instance(self):
    bayes = NaiveBayesLearner()
    c = bayes(self.titanic)
    for ins in self.titanic[::20]:
        c(ins)
        val, prob = c(ins, c.ValueProbs)
Example #15
def test_auc_orange_model(self):
    data = self.titanic
    model = NaiveBayesLearner()(data)
    res = permutation_feature_importance(model, data, AUC(),
                                         self.n_repeats)
    self.assertAlmostEqual(res[0].mean(), 0.044, 3)
Example #16
def test_NaiveBayes(self):
    bayes = NaiveBayesLearner()
    results = Orange.evaluation.CrossValidation(self.titanic[::20], [bayes],
                                                k=10)
    ca = Orange.evaluation.CA(results)
    self.assertGreater(ca, 0.7)
    self.assertLess(ca, 0.9)