def displayResults(results):
    import orngStat
    # Report accuracy and binary confusion-matrix statistics for each learner.
    for accuracy, cm in zip(orngStat.CA(results),
                            orngStat.confusionMatrices(results, classIndex=0)):
        print "accuracy", accuracy
        print " TP: %i, FP: %i, FN: %i, TN: %i" % (cm.TP, cm.FP, cm.FN, cm.TN)
        print "precision", orngStat.precision(cm)
        print "recall", orngStat.recall(cm)
        print "f1", orngStat.F1(cm)
        print
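# A minimal driver sketch for displayResults, assuming Orange 2.x with the
# orngTest/orngStat modules; the "voting" dataset and the Bayes learner are
# illustrative assumptions, not part of the original snippet.
import orange, orngTest

data = orange.ExampleTable("voting")
results = orngTest.crossValidation([orange.BayesLearner()], data, folds=10)
displayResults(results)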
def cforange_confusion_matrix_computations(input_dict):
    import orngStat
    cm = input_dict['cm']
    alpha = float(input_dict['alpha'])
    output_dict = {}
    output_dict['sens'] = orngStat.sens(cm)
    output_dict['spec'] = orngStat.spec(cm)
    output_dict['PPV'] = orngStat.PPV(cm)
    output_dict['NPV'] = orngStat.NPV(cm)
    output_dict['precision'] = orngStat.precision(cm)
    output_dict['recall'] = orngStat.recall(cm)
    output_dict['F1'] = orngStat.F1(cm)
    output_dict['Falpha'] = orngStat.Falpha(cm, alpha=alpha)
    output_dict['MCC'] = orngStat.MCC(cm)
    return output_dict
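# A hedged usage sketch for the widget entry point above: it expects a single
# orngStat confusion matrix under 'cm' and an F-measure weight under 'alpha'.
# The evaluation setup (Bayes learner on "voting") is an assumption for
# illustration only.
import orange, orngTest, orngStat

data = orange.ExampleTable("voting")
res = orngTest.crossValidation([orange.BayesLearner()], data, folds=10)
cm = orngStat.confusionMatrices(res, classIndex=0)[0]
scores = cforange_confusion_matrix_computations({'cm': cm, 'alpha': 1.0})
print "F1 %.3f, MCC %.3f" % (scores['F1'], scores['MCC'])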
]   # closes the attribute list, which is truncated in this excerpt
new_domain = Orange.data.Domain(attributes, data.domain.class_var)
new_data = Orange.data.Table(new_domain, data)
classIndex = list(new_data.domain.classVar.values).index("yes")
svm = Orange.classification.svm.SVMLearner()
res = Orange.evaluation.testing.cross_validation([svm], new_data, folds=10)
cm = orngStat.computeConfusionMatrices(res, classIndex=classIndex)
print "======"
print test[1]
print "attributes: ",
print ",".join([attr.name for attr in new_data.domain.features])
print "Accuracy: %.1f%%" % (100 * orngStat.CA(res)[0])
print "Precision: %.3f" % orngStat.precision(cm)[0]
print "Recall: %.3f" % orngStat.recall(cm)[0]
print "F1: %.3f" % orngStat.F1(cm)[0]
res.all_results = res.results
res.number_of_iterations = 1
CAs = []
Ps = []
Rs = []
F1s = []
for fold in range(10):
    # select only the results from this fold
    res.results = [result for result in res.all_results
                   if result.iteration_number == fold]
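    # A plausible continuation of the loop above, labeled as an assumption:
    # with res.results narrowed to a single fold and number_of_iterations set
    # to 1, the orngStat scorers treat the subset as a complete result set,
    # yielding per-fold scores that can be averaged afterwards.
    cm_fold = orngStat.computeConfusionMatrices(res, classIndex=classIndex)
    CAs.append(orngStat.CA(res)[0])
    Ps.append(orngStat.precision(cm_fold)[0])
    Rs.append(orngStat.recall(cm_fold)[0])
    F1s.append(orngStat.F1(cm_fold)[0])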