import orngStat

def print_results(learners, results):
    # output the results
    print "Learner  CA     IS     Brier  AUC"
    for i in range(len(learners)):
        print "%-8s %5.3f  %5.3f  %5.3f  %5.3f" % (learners[i].name,
            orngStat.CA(results)[i], orngStat.IS(results)[i],
            orngStat.BrierScore(results)[i], orngStat.AUC(results)[i])
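# A minimal usage sketch for print_results above (not part of the original
# snippet). It assumes the voting data set and the bayes/tree learners used
# elsewhere in these examples, with results from orngTest.crossValidation.
import orange, orngTest, orngTree

bayes = orange.BayesLearner(name="bayes")
tree = orngTree.TreeLearner(mForPruning=2, name="tree")
learners = [bayes, tree]

data = orange.ExampleTable("voting")
results = orngTest.crossValidation(learners, data, folds=10)
print_results(learners, results)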
def cforange_auc(input_dict):
    import orngStat
    results = input_dict['results']
    method = int(input_dict['method'])
    auc = orngStat.AUC(results, method)
    output_dict = {}
    output_dict['AUC'] = auc
    return output_dict
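# A minimal sketch of how cforange_auc above might be called (an assumption,
# not part of the original widget code). The 'method' value is the integer
# index accepted by orngStat.AUC; 0 corresponds to AUC by weighted pairs.
import orange, orngTest

learners = [orange.BayesLearner(name="bayes")]
data = orange.ExampleTable("voting")
results = orngTest.crossValidation(learners, data, folds=10)

input_dict = {'results': results, 'method': 0}
print cforange_auc(input_dict)['AUC']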
# Description: Demonstration of use of cross-validation as provided in orngTest module
# Category: evaluation
# Uses: voting.tab
# Classes: orngTest.crossValidation
# Referenced: c_performance.htm

import orange, orngTest, orngStat, orngTree

# set up the learners
bayes = orange.BayesLearner()
tree = orngTree.TreeLearner(mForPruning=2)
bayes.name = "bayes"
tree.name = "tree"
learners = [bayes, tree]

# compute accuracies on data
data = orange.ExampleTable("voting")
results = orngTest.crossValidation(learners, data, folds=10)

# output the results
print "Learner  CA     IS     Brier  AUC"
for i in range(len(learners)):
    print "%-8s %5.3f  %5.3f  %5.3f  %5.3f" % (learners[i].name,
        orngStat.CA(results)[i], orngStat.IS(results)[i],
        orngStat.BrierScore(results)[i], orngStat.AUC(results)[i])
import orange
import Orange.classification.svm as svm

data = orange.ExampleTable("vehicle.tab")

svm_easy = svm.SVMLearnerEasy(name="svm easy", folds=3)
svm_normal = svm.SVMLearner(name="svm")
learners = [svm_easy, svm_normal]

import orngTest, orngStat
results = orngTest.crossValidation(learners, data, folds=5)

print "Name     CA   AUC"
for learner, CA, AUC in zip(learners, orngStat.CA(results), orngStat.AUC(results)):
    print "%-8s %.2f %.2f" % (learner.name, CA, AUC)
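# The fragment below continues the orngStat evaluation examples and refers to
# names defined earlier in that script. A reconstructed preamble (an
# assumption based on what the fragment uses: three learners named bayes,
# tree and majority, results res/resVeh for the voting and vehicle data, and
# confusion matrices cm for voting) would look roughly like this:
import orange, orngTest, orngTree, orngStat

learners = [orange.BayesLearner(name="bayes"),
            orngTree.TreeLearner(name="tree"),
            orange.MajorityLearner(name="majority")]

voting = orange.ExampleTable("voting")
res = orngTest.crossValidation(learners, voting)

vehicle = orange.ExampleTable("vehicle")
resVeh = orngTest.crossValidation(learners, vehicle)

cm = orngStat.confusionMatrices(res)
print "Sensitivity and specificity for 'voting'"
print "method\tsens\tspec"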
for l in range(len(learners)):
    print "%s\t%5.3f\t%5.3f" % (learners[l].name,
                                orngStat.sens(cm[l]), orngStat.spec(cm[l]))

cm = orngStat.confusionMatrices(resVeh, vehicle.domain.classVar.values.index("van"))
print
print "Sensitivity and specificity for 'vehicle=van'"
print "method\tsens\tspec"
for l in range(len(learners)):
    print "%s\t%5.3f\t%5.3f" % (learners[l].name,
                                orngStat.sens(cm[l]), orngStat.spec(cm[l]))

print
print "AUC (voting)"
AUCs = orngStat.AUC(res)
for l in range(len(learners)):
    print "%10s: %5.3f" % (learners[l].name, AUCs[l])

print
print "AUC for vehicle using weighted single-out method"
print "bayes\ttree\tmajority"
AUCs = orngStat.AUC(resVeh, orngStat.AUC.WeightedOneAgainstAll)
print "%5.3f\t%5.3f\t%5.3f" % tuple(AUCs)

print
print "AUC for vehicle, using different methods"
methods = ["by pairs, weighted", "by pairs", "one vs. all, weighted", "one vs. all"]
print " " * 25 + " \tbayes\ttree\tmajority"
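# A sketch of the comparison the header above sets up (the loop itself is an
# assumption): orngStat.AUC accepts a method index 0-3 in the same order as
# the strings in 'methods' and returns one AUC per learner.
for i in range(len(methods)):
    AUCs = orngStat.AUC(resVeh, i)
    print "%25s: \t%5.3f\t%5.3f\t%5.3f" % ((methods[i],) + tuple(AUCs))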
import orange, orngTree, orngWrap, orngStat

learner = orngTree.TreeLearner()
data = orange.ExampleTable("voting")
tuner = orngWrap.Tune1Parameter(object=learner,
                                parameter="minSubset",
                                values=[1, 2, 3, 4, 5, 10, 15, 20],
                                evaluate=orngStat.AUC, verbose=2)
classifier = tuner(data)

print "Optimal setting: ", learner.minSubset

import orngTest
untuned = orngTree.TreeLearner()
res = orngTest.crossValidation([untuned, tuner], data)
AUCs = orngStat.AUC(res)
print "Untuned tree: %5.3f" % AUCs[0]
print "Tuned tree: %5.3f" % AUCs[1]

learner = orngTree.TreeLearner(minSubset=10).instance()
data = orange.ExampleTable("voting")
tuner = orngWrap.Tune1Parameter(object=learner,
                                parameter=["split.continuousSplitConstructor.minSubset",
                                           "split.discreteSplitConstructor.minSubset"],
                                values=[1, 2, 3, 4, 5, 10, 15, 20],
                                evaluate=orngStat.AUC, verbose=2)
classifier = tuner(data)

print "Optimal setting: ", learner.split.continuousSplitConstructor.minSubset
import orange, orngSVM

data = orange.ExampleTable("iris.tab")

lin = orngSVM.SVMLearner(kernel_type=orngSVM.SVMLearner.Linear, name="SVM - Linear")
poly = orngSVM.SVMLearner(kernel_type=orngSVM.SVMLearner.Polynomial, name="SVM - Poly")
rbf = orngSVM.SVMLearner(kernel_type=orngSVM.SVMLearner.RBF, name="SVM - RBF")
learners = [lin, poly, rbf]

import orngTest, orngStat
res = orngTest.crossValidation(learners, data)

print "%15s%8s%8s" % ("Name", "CA", "AUC")
for l, ca, auc in zip(learners, orngStat.CA(res), orngStat.AUC(res)):
    print "%-15s %.3f %.3f" % (l.name, ca, auc)
import orange, orngTest, orngTree, orngStat

# set up the learners
bayes = orange.BayesLearner()
tree = orngTree.TreeLearner(mForPruning=2)
bayes.name = "bayes"
tree.name = "tree"
learners = [bayes, tree]

# compute accuracies on data
data = orange.ExampleTable("voting")
res = orngTest.crossValidation(learners, data, folds=10)
cm = orngStat.computeConfusionMatrices(res,
        classIndex=data.domain.classVar.values.index('democrat'))

stat = (('CA', lambda res, cm: orngStat.CA(res)),
        ('Sens', lambda res, cm: orngStat.sens(cm)),
        ('Spec', lambda res, cm: orngStat.spec(cm)),
        ('AUC', lambda res, cm: orngStat.AUC(res)),
        ('IS', lambda res, cm: orngStat.IS(res)),
        ('Brier', lambda res, cm: orngStat.BrierScore(res)),
        ('F1', lambda res, cm: orngStat.F1(cm)),
        ('F2', lambda res, cm: orngStat.Falpha(cm, alpha=2.0)),
        ('MCC', lambda res, cm: orngStat.MCC(cm)),
        ('sPi', lambda res, cm: orngStat.scottsPi(cm)),
        )

scores = [s[1](res, cm) for s in stat]
print
print "Learner  " + "".join(["%-7s" % s[0] for s in stat])
for (i, l) in enumerate(learners):
    print "%-8s " % l.name + "".join(["%5.3f  " % s[i] for s in scores])
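# The Sens, Spec, F1, F2, MCC and sPi columns above are computed from the
# confusion matrices built for the chosen target class. A sketch (not part of
# the original) of the same table with 'republican' as the target class,
# reusing the stat tuple defined above:
cm_rep = orngStat.computeConfusionMatrices(res,
        classIndex=data.domain.classVar.values.index('republican'))
scores_rep = [s[1](res, cm_rep) for s in stat]
print
print "Learner  " + "".join(["%-7s" % s[0] for s in stat])
for (i, l) in enumerate(learners):
    print "%-8s " % l.name + "".join(["%5.3f  " % s[i] for s in scores_rep])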
# Description: Demonstrates the use of random forests from orngEnsemble module
# Category: classification, ensembles
# Classes: RandomForestLearner
# Uses: bupa.tab
# Referenced: orngEnsemble.htm

import orange, orngTree, orngEnsemble

data = orange.ExampleTable('bupa.tab')

tree = orngTree.TreeLearner(minExamples=2, mForPruning=2,
                            sameMajorityPruning=True, name='tree')
forest = orngEnsemble.RandomForestLearner(trees=50, name="forest")
learners = [tree, forest]

import orngTest, orngStat
results = orngTest.crossValidation(learners, data, folds=3)

print "Learner  CA     Brier  AUC"
for i in range(len(learners)):
    print "%-8s %5.3f  %5.3f  %5.3f" % (learners[i].name,
        orngStat.CA(results)[i],
        orngStat.BrierScore(results)[i],
        orngStat.AUC(results)[i])