def test_learner_scorer_previous_transformation(self):
     """Scoring must still work on data a preprocessor already transformed."""
     from Orange.preprocess import Discretize
     discretized = Discretize()(self.iris)
     scorer = LogisticRegressionLearner()
     # All scores should come out defined and strictly positive.
     self.assertTrue(np.all(scorer.score_data(discretized) > 0))
 def test_learner_scorer(self):
     """score_data ranks 'physician-fee-freeze' highest on the voting data."""
     voting = Table('voting')
     scores = LogisticRegressionLearner().score_data(voting)
     top = voting.domain.attributes[np.argmax(scores)]
     self.assertEqual('physician-fee-freeze', top.name)
     self.assertEqual(len(scores), len(voting.domain.attributes))
 def test_learner_scorer_multiclass(self):
     """Each per-class score row peaks at a feature telling for that class
     (amphibian, bird, fish, insect, invertebrate, mammal, reptile)."""
     attrs = self.zoo.domain.attributes
     scores = LogisticRegressionLearner().score_data(self.zoo)
     expected_top = ['aquatic', 'feathers', 'fins', 'legs',
                     'backbone', 'milk', 'hair']
     for row, name in enumerate(expected_top):
         self.assertEqual(name, attrs[np.argmax(scores[row])].name)
     self.assertEqual(scores.shape,
                      (len(self.zoo.domain.class_var.values), len(attrs)))
 def test_learner_scorer_multiclass(self):
     """Per-class score rows peak at discriminative zoo features.

     NOTE(review): this method name duplicates an earlier test; inside one
     class body the later definition would silently shadow the earlier one.
     """
     attrs = self.zoo.domain.attributes
     scores = LogisticRegressionLearner().score_data(self.zoo)
     expected_top = ['aquatic', 'feathers', 'fins', 'backbone',
                     'backbone', 'milk', 'hair']
     for row, name in enumerate(expected_top):
         self.assertEqual(name, attrs[np.argmax(scores[row])].name)
     self.assertEqual(scores.shape,
                      (len(self.zoo.domain.class_var.values), len(attrs)))
    def test_bad_learner(self):
        """
        A learner that fails to fit shows fitting_failed, and the error
        clears once a working learner is supplied again. GH-38
        """
        widget = self.widget
        self.assertFalse(widget.Error.fitting_failed.is_shown())

        # Discretize on iris makes this learner raise during fitting.
        failing = LogisticRegressionLearner()
        failing.preprocessors = [Discretize()]
        self.send_signal(widget.Inputs.learner, failing)
        self.send_signal(widget.Inputs.data, self.iris)
        self.assertTrue(widget.Error.fitting_failed.is_shown())

        # Remove the offending preprocessor: error must disappear.
        failing.preprocessors = []
        self.send_signal(widget.Inputs.learner, failing)
        self.assertFalse(widget.Error.fitting_failed.is_shown())
 def test_learner_scorer(self):
     """'major vessels colored' is top-scored on heart_disease; shape checks."""
     domain = self.heart_disease.domain
     scores = LogisticRegressionLearner().score_data(self.heart_disease)
     self.assertEqual('major vessels colored',
                      domain.attributes[np.argmax(scores)].name)
     self.assertEqual(scores.shape, (1, len(domain.attributes)))
 def test_LogisticRegressionNormalization_todo(self):
     """Passing ``normalize=True`` is unsupported and must raise TypeError."""
     with self.assertRaises(TypeError):
         # Only the raise matters; the unused local binding was removed.
         LogisticRegressionLearner(normalize=True)
 def test_predict_on_instance(self):
     """Predicting one instance matches predicting the same 1-row slice."""
     model = LogisticRegressionLearner()(self.zoo)
     single = model(self.zoo[50], model.Probs)
     sliced = model(self.zoo[50, :], model.Probs)
     np.testing.assert_almost_equal(single, sliced[0])
 def test_learner_scorer_multiclass_feature(self):
     """Scoring one feature equals its column of the all-feature score."""
     learner = LogisticRegressionLearner()
     all_scores = learner.score_data(self.zoo)
     for col, feature in enumerate(self.zoo.domain.attributes):
         single = learner.score_data(self.zoo, feature)
         np.testing.assert_array_almost_equal(single, all_scores[:, col])
Exemple #10
0
 def test_nomogram_lr_multiclass(self):
     """Probabilities of a multiclass LR classifier across the widget's
     class selections and radio-button settings."""
     classifier = LogisticRegressionLearner()(self.lenses)
     self._test_helper(classifier, [9, 45, 52])
Exemple #11
0
 def test_predict_savgov_same_domain(self):
     """AUC stays high when train and test share a SavGol-filtered domain."""
     filtered = SavitzkyGolayFiltering(window=9, polyorder=2,
                                       deriv=2)(self.collagen)
     train, test = separate_learn_test(filtered)
     results = TestOnTestData()(train, test, [LogisticRegressionLearner()])
     self.assertGreater(AUC(results), 0.85)
Exemple #12
0
 def test_coefficients(self):
     """A fitted model exposes one coefficient per attribute."""
     model = LogisticRegressionLearner()(Table("voting"))
     self.assertEqual(len(model.coefficients[0]),
                      len(model.domain.attributes))
Exemple #13
0
 def test_probability(self):
     """Predicted probability rows of an L1-penalized model sum to one."""
     table = Table('iris')
     model = LogisticRegressionLearner(penalty='l1')(table[:100])
     probs = model(table[100:], ret=Model.Probs)
     self.assertTrue(all(abs(probs.sum(axis=1) - 1) < 1e-6))
""" Documentation script """
from Orange.classification import LogisticRegressionLearner
from Orange.evaluation.testing import CrossValidation
from Orange.evaluation.scoring import AUC

from orangecontrib.bioinformatics.geo.dataset import GDS

gds = GDS("GDS2960")
data = gds.get_data(sample_type="disease state", transpose=True, report_genes=True)
print("Samples: %d, Genes: %d" % (len(data), len(data.domain.attributes)))

learners = [LogisticRegressionLearner()]
results = CrossValidation(data, learners, k=10)

print("AUC = %.3f" % AUC(results)[0])
Exemple #15
0
 def test_logreg(self):
     """The learner must advertise support for weighted tables."""
     learner = LogisticRegressionLearner()
     self.assertTrue(learner.supports_weights,
                     "Either LogisticRegression no longer supports weighted tables"
                     "or SklLearner.supports_weights is out-of-date.")
 def test_multinomial(self):
     """LR is an SklLearner and reaches a sane AUC on titanic via 2-fold CV."""
     lr = LogisticRegressionLearner()
     assert isinstance(lr, Orange.classification.SklLearner)
     res = CrossValidation(Table("titanic"), [lr], k=2)
     auc = Orange.evaluation.AUC(res)[0]
     self.assertTrue(0.7 < auc < 0.9)
 def test_LogisticRegression(self):
     """2-fold CV classification accuracy on iris lies in (0.9, 1.0)."""
     table = Orange.data.Table('iris')
     learner = LogisticRegressionLearner()
     results = Orange.evaluation.CrossValidation(table, [learner], k=2)
     accuracy = Orange.evaluation.CA(results)
     self.assertTrue(0.9 < accuracy < 1.0)
from Orange.classification import LogisticRegressionLearner
from Orange.data import ContinuousVariable
from Orange.evaluation.testing import TestOnTestData
from Orange.evaluation.scoring import AUC

from orangecontrib.spectroscopy.tests.test_preprocess import \
    PREPROCESSORS_INDEPENDENT_SAMPLES, \
    PREPROCESSORS

from orangecontrib.spectroscopy.tests.test_preprocess import SMALL_COLLAGEN, preprocessor_data

from orangecontrib.spectroscopy.preprocess import Interpolate, \
    Cut, SavitzkyGolayFiltering
from orangecontrib.spectroscopy.data import getx

# Module-level learner shared by the tests below; max_iter=1000 presumably
# raised above the library default to avoid convergence warnings — confirm.
logreg = LogisticRegressionLearner(max_iter=1000)


def separate_learn_test(data):
    """Split *data* into (train, test) with a fixed-seed 80/20 shuffle split.

    The trailing-comma unpack asserts that exactly one split is produced.
    """
    splitter = ms.ShuffleSplit(n_splits=1,
                               test_size=0.2,
                               random_state=np.random.RandomState(0))
    (train_idx, test_idx), = splitter.split(y=data.Y, X=data.X)
    return data[train_idx], data[test_idx]


def slightly_change_wavenumbers(data, change):
    natts = [
        ContinuousVariable(float(a.name) + change)
        for a in data.domain.attributes
    ]
 def __init__(self, learners, aggregate=LogisticRegressionLearner(), k=5):
     # Delegates to the parent with an LR aggregate by default.
     # NOTE(review): the default aggregate is a single instance created at
     # definition time and shared by all constructions — confirm the learner
     # holds no per-fit state.
     super().__init__(learners=learners, aggregate=aggregate, k=k)
Exemple #20
0
 def test_LogisticRegression(self):
     """2-fold CV classification accuracy on iris lies in (0.8, 1.0)."""
     learner = LogisticRegressionLearner()
     results = CrossValidation(Table('iris'), [learner], k=2)
     self.assertTrue(0.8 < CA(results) < 1.0)
Exemple #21
0
 def test_predict_same_domain(self):
     """Train and test drawn from the same domain give a high AUC."""
     train, test = separate_learn_test(self.collagen)
     results = TestOnTestData()(train, test, [LogisticRegressionLearner()])
     self.assertGreater(AUC(results), 0.9)  # easy dataset
 def test_LogisticRegression(self):
     """2-fold CV accuracy on voting is above chance but below perfect."""
     results = CrossValidation(self.voting,
                               [LogisticRegressionLearner()], k=2)
     accuracy = CA(results)
     self.assertGreater(accuracy, 0.8)
     self.assertLess(accuracy, 1.0)
Exemple #23
0
 def setUp(self):
     # Widget under test plus shared fixtures: the iris table, an LR
     # learner, and a model already fitted on iris.
     self.widget = self.create_widget(OWPythonScript)
     self.iris = Table("iris")
     self.learner = LogisticRegressionLearner()
     self.model = self.learner(self.iris)
 def test_learner_scorer(self):
     """Top-scored voting attribute is 'physician-fee-freeze'; shape checks."""
     attrs = self.voting.domain.attributes
     scores = LogisticRegressionLearner().score_data(self.voting)
     self.assertEqual('physician-fee-freeze',
                      attrs[np.argmax(scores)].name)
     self.assertEqual(scores.shape, (1, len(attrs)))
 def test_coefficients(self):
     """Coefficient row length equals the model's attribute count."""
     model = LogisticRegressionLearner()(self.heart_disease)
     self.assertEqual(len(model.coefficients[0]),
                      len(model.domain.attributes))
Exemple #26
0
from Orange.classification import LogisticRegressionLearner, NaiveBayesLearner, \
                                  RandomForestLearner
import orangecontrib.evcrules.rules as rules
from orangecontrib.evcrules.logistic import LRRulesLearner

# Benchmark script: compare LRRules variants against plain logistic
# regression, naive Bayes and random forest on a fixed list of datasets,
# printing CA, AUC and log-loss from 5-fold cross-validation.
datasets = [
    'ionosphere', 'adult_sample', 'iris', 'breast-cancer', 'bupa', 'titanic'
]
for d in datasets:
    data = Table(d)
    # Rule learner with extreme-value-corrected significance.
    rule_learner = rules.RulesStar(evc=True,
                                   add_sub_rules=True,
                                   parent_alpha=0.5)
    # Same, but with m-estimate evaluation instead of EVC.
    rule_learner_m = rules.RulesStar(evc=False,
                                     m=22,
                                     add_sub_rules=True,
                                     parent_alpha=0.5)
    # compare lr with rules, lr without rules and sklearn's lr
    learners = [
        LRRulesLearner(opt_penalty=True, rule_learner=rule_learner),
        LRRulesLearner(opt_penalty=True, rule_learner=rule_learner_m),
        LRRulesLearner(opt_penalty=True),
        LogisticRegressionLearner(C=1),
        NaiveBayesLearner(),
        RandomForestLearner()
    ]
    res = CrossValidation(data, learners, k=5)
    print("Dataset: ", d)
    # One line of metrics per learner.
    for l, ca, auc, ll in zip(learners, CA(res), AUC(res), LogLoss(res)):
        print("learner: {}\nCA: {}\nAUC: {}\n LL: {}".format(l, ca, auc, ll))
 def test_single_class(self):
     """Fitting on data with a single class value must raise ValueError."""
     subset = self.iris[60:90]
     # Sanity check: the slice really contains just one class.
     self.assertEqual(len(np.unique(subset.Y)), 1)
     with self.assertRaises(ValueError):
         LogisticRegressionLearner()(subset)
 def test_multiclass_auc_multi_learners(self):
     """LR beats chance on iris while majority stays near AUC 0.5."""
     learners = [LogisticRegressionLearner(), MajorityLearner()]
     res = CrossValidation(self.iris, learners, k=10)
     aucs = AUC(res)
     self.assertGreater(aucs[0], 0.6)
     self.assertLess(aucs[1], 0.6)
     self.assertGreater(aucs[1], 0.4)
 def test_probability(self):
     """Predicted probability rows of an L1-penalized model sum to one.

     Bug fix: the original asserted ``assertLess(abs(...).all(), 1e-6)``,
     which compares a *boolean* (whether every deviation is nonzero) to
     1e-6 and never bounds the deviations themselves. Bound the largest
     row-sum deviation instead.
     """
     learn = LogisticRegressionLearner(penalty='l1')
     clf = learn(self.iris[:100])
     p = clf(self.iris[100:], ret=Model.Probs)
     self.assertLess(np.max(np.abs(p.sum(axis=1) - 1)), 1e-6)
Exemple #30
0
 def test_precision(self):
     """Precision on training data matches the known reference value."""
     learner = LogisticRegressionLearner(preprocessors=[])
     results = TestOnTrainingData(self.data, [learner])
     precision = Precision(results)[0]
     self.assertAlmostEqual(precision, 0.962, 3)
Exemple #31
0
 def test_predict_samename_domain(self):
     """Breaking attribute conversion drops the AUC to chance level."""
     train, test = separate_learn_test(self.collagen)
     broken = destroy_atts_conversion(test)
     results = TestOnTestData(train, broken, [LogisticRegressionLearner()])
     self.assertTrue(0.45 < AUC(results) < 0.55)
 def test_learner_scorer_multiclass_feature(self):
     """Per-feature scores agree with the matching full-score column."""
     learner = LogisticRegressionLearner()
     full = learner.score_data(self.zoo)
     for idx, feature in enumerate(self.zoo.domain.attributes):
         np.testing.assert_array_almost_equal(
             learner.score_data(self.zoo, feature), full[:, idx])
 def test_learner_scorer(self):
     """Highest-scored voting feature is 'physician-fee-freeze'."""
     scores = LogisticRegressionLearner().score_data(self.voting)
     domain = self.voting.domain
     self.assertEqual('physician-fee-freeze',
                      domain.attributes[np.argmax(scores)].name)
     self.assertEqual(scores.shape, (1, len(domain.attributes)))
Exemple #34
0
 def test_logreg(self):
     """supports_weights is overridden to False for the liblinear backend."""
     learner = LogisticRegressionLearner()
     self.assertFalse(
         learner.supports_weights,
         "Logistic regression has its supports_weights overridden because "
         "liblinear doesn't support them (even though the parameter exists)"
     )
Exemple #35
0
 def test_predict_different_domain(self):
     """Shifting the test wavenumbers destroys transfer: AUC ~ chance."""
     train, test = separate_learn_test(self.collagen)
     shifted = Interpolate(points=getx(test) - 1)(test)  # other test domain
     results = TestOnTestData(train, shifted, [LogisticRegressionLearner()])
     self.assertTrue(0.45 < AUC(results) < 0.55)
Exemple #36
0
 def __init__(self,
              learners,
              aggregate=LogisticRegressionLearner(),
              k=5,
              preprocessors=None):
     # Delegates to the parent with an LR aggregate by default.
     # NOTE(review): the default aggregate is a single instance created at
     # definition time and shared by all constructions — confirm the learner
     # holds no per-fit state.
     super().__init__(learners, aggregate, k=k, preprocessors=preprocessors)
 def test_unknown(self):
     """Predicting an instance with a missing value must not crash."""
     model = LogisticRegressionLearner()(Table("iris"))
     model([1, 2, None])
                [(data_attributes[i].name, dat.x[i]) for i in positive_idx],
                [(data_attributes[i].name, dat.x[i]) for i in negative_ids],
            )
        )
        pos_segments = _compute_segments(sv[positive_idx], pred[target_class])
        neg_segments = _compute_segments(sv[negative_ids], pred[target_class])
        segments.append((pos_segments, neg_segments))
        ranges.append(
            (
                pos_segments[-1][1]
                if len(pos_segments)
                else pred[target_class],
                neg_segments[-1][1]
                if len(neg_segments)
                else pred[target_class],
            )
        )
    return selected_shap_values, segments, selected_labels, ranges


if __name__ == "__main__":
    from Orange.classification import LogisticRegressionLearner

    data_ = Table.from_file("heart_disease.tab")
    learner = LogisticRegressionLearner()
    model_ = learner(data_)

    shap_val, transformed_domain, mask, colors_ = get_shap_values_and_colors(
        model_, data_
    )