Example #1
def get_stats(results):
    """Summarize orngTest results as accuracy, sensitivity and specificity."""
    import orngStat   # Orange 2.x scoring module

    result_dict = {}
    # get_confusion_matrix is a helper defined elsewhere in the source module
    cm = get_confusion_matrix(results)

    result_dict['Accuracy'] = orngStat.CA(results)[0]   # classification accuracy of the first learner
    result_dict['Sensitivity'] = orngStat.sens(cm)      # true positive rate
    result_dict['Specificity'] = orngStat.spec(cm)      # true negative rate

    return result_dict
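
A minimal driver sketch for get_stats above, assuming that get_confusion_matrix (whose real definition is not shown in this snippet) simply picks the first learner's confusion matrix out of orngStat.confusionMatrices; the data set and learner are illustrative:

# Hypothetical driver; get_confusion_matrix's actual definition is not part of the snippet above.
import orange, orngTest, orngStat

def get_confusion_matrix(results):
    return orngStat.confusionMatrices(results)[0]   # assumed helper: first learner, default class index

data = orange.ExampleTable("voting")
results = orngTest.crossValidation([orange.BayesLearner()], data, folds=10)
print get_stats(results)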
Example #2
def cforange_confusion_matrix_computations(input_dict):
    """Compute a set of confusion-matrix statistics and return them in a dictionary."""
    import orngStat

    cm = input_dict['cm']
    alpha = float(input_dict['alpha'])
    output_dict = {}
    output_dict['sens'] = orngStat.sens(cm)              # sensitivity (true positive rate)
    output_dict['spec'] = orngStat.spec(cm)              # specificity (true negative rate)
    output_dict['PPV'] = orngStat.PPV(cm)                # positive predictive value
    output_dict['NPV'] = orngStat.NPV(cm)                # negative predictive value
    output_dict['precision'] = orngStat.precision(cm)
    output_dict['recall'] = orngStat.recall(cm)
    output_dict['F1'] = orngStat.F1(cm)                  # harmonic mean of precision and recall
    output_dict['Falpha'] = orngStat.Falpha(cm, alpha=alpha)
    output_dict['MCC'] = orngStat.MCC(cm)                # Matthews correlation coefficient
    return output_dict
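
A hedged usage sketch for the function above; how the surrounding workflow builds input_dict is not shown, so the data set, class index, and alpha value below are assumptions:

# Illustrative call only; the confusion matrix and alpha value are assumptions.
import orange, orngTest, orngStat

data = orange.ExampleTable("voting")
res = orngTest.crossValidation([orange.BayesLearner()], data, folds=10)
input_dict = {
    'cm': orngStat.confusionMatrices(res, classIndex=0)[0],  # first (and only) learner
    'alpha': '2.0',                                          # arrives as a string, hence the float() above
}
print cforange_confusion_matrix_computations(input_dict)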
Example #3
# imports needed to make this snippet self-contained
import orange, orngTree, orngTest, orngStat

# set up the learners
bayes = orange.BayesLearner()
tree = orngTree.TreeLearner(mForPruning=2)
bayes.name = "bayes"
tree.name = "tree"
learners = [bayes, tree]

# compute accuracies on data
data = orange.ExampleTable("voting")
res = orngTest.crossValidation(learners, data, folds=10)
cm = orngStat.computeConfusionMatrices(res, classIndex=data.domain.classVar.values.index("democrat"))

stat = (
    ("CA", lambda res, cm: orngStat.CA(res)),
    ("Sens", lambda res, cm: orngStat.sens(cm)),
    ("Spec", lambda res, cm: orngStat.spec(cm)),
    ("AUC", lambda res, cm: orngStat.AUC(res)),
    ("IS", lambda res, cm: orngStat.IS(res)),
    ("Brier", lambda res, cm: orngStat.BrierScore(res)),
    ("F1", lambda res, cm: orngStat.F1(cm)),
    ("F2", lambda res, cm: orngStat.Falpha(cm, alpha=2.0)),
    ("MCC", lambda res, cm: orngStat.MCC(cm)),
    ("sPi", lambda res, cm: orngStat.scottsPi(cm)),
)

scores = [s[1](res, cm) for s in stat]
print
print "Learner  " + "".join(["%-7s" % s[0] for s in stat])
for (i, l) in enumerate(learners):
    print "%-8s " % l.name + "".join(["%5.3f  " % s[i] for s in scores])
Example #4
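This example is a fragment and refers to names defined earlier in its source script. A minimal setup sketch for those names, consistent with the other examples on this page (the learners, data sets, and chosen class are assumptions):

# Assumed context for the fragment below; not part of the original snippet.
import orange, orngTree, orngTest, orngStat

bayes = orange.BayesLearner()
bayes.name = "bayes"
tree = orngTree.TreeLearner(mForPruning=2)
tree.name = "tree"
learners = [bayes, tree]

voting = orange.ExampleTable("voting")
vehicle = orange.ExampleTable("vehicle")
res = orngTest.crossValidation(learners, voting, folds=10)
resVeh = orngTest.crossValidation(learners, vehicle, folds=10)

# confusion matrix of the first learner on 'vehicle', treating 'opel' as the positive class
cm = orngStat.confusionMatrices(resVeh, vehicle.domain.classVar.values.index("opel"))[0]
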
print "Confusion matrix for naive Bayes for 'opel':"
print "TP: %i, FP: %i, FN: %s, TN: %i" % (cm.TP, cm.FP, cm.FN, cm.TN)

print
cm = orngStat.confusionMatrices(resVeh)[0]
classes = vehicle.domain.classVar.values
print "\t"+"\t".join(classes)
for className, classConfusions in zip(classes, cm):
    print ("%s" + ("\t%i" * len(classes))) % ((className, ) + tuple(classConfusions))

cm = orngStat.confusionMatrices(res)
print
print "Sensitivity and specificity for 'voting'"
print "method\tsens\tspec"
for l in range(len(learners)):
    print "%s\t%5.3f\t%5.3f" % (learners[l].name, orngStat.sens(cm[l]), orngStat.spec(cm[l]))

cm = orngStat.confusionMatrices(resVeh, vehicle.domain.classVar.values.index("van"))
print
print "Sensitivity and specificity for 'vehicle=van'"
print "method\tsens\tspec"
for l in range(len(learners)):
    print "%s\t%5.3f\t%5.3f" % (learners[l].name, orngStat.sens(cm[l]), orngStat.spec(cm[l]))



print
print "AUC (voting)"

AUCs = orngStat.AUC(res)
for l in range(len(learners)):
    print "%10s: %5.3f" % (learners[l].name, AUCs[l])
Example #5
# imports needed to make this snippet self-contained
import orange, orngTree, orngTest, orngStat

# set up the learners
bayes = orange.BayesLearner()
tree = orngTree.TreeLearner(mForPruning=2)
bayes.name = "bayes"
tree.name = "tree"
learners = [bayes, tree]

# compute accuracies on data
data = orange.ExampleTable("voting")
res = orngTest.crossValidation(learners, data, folds=10)
cm = orngStat.computeConfusionMatrices(
    res, classIndex=data.domain.classVar.values.index('democrat'))

stat = (
    ('CA', lambda res, cm: orngStat.CA(res)),
    ('Sens', lambda res, cm: orngStat.sens(cm)),
    ('Spec', lambda res, cm: orngStat.spec(cm)),
    ('AUC', lambda res, cm: orngStat.AUC(res)),
    ('IS', lambda res, cm: orngStat.IS(res)),
    ('Brier', lambda res, cm: orngStat.BrierScore(res)),
    ('F1', lambda res, cm: orngStat.F1(cm)),
    ('F2', lambda res, cm: orngStat.Falpha(cm, alpha=2.0)),
    ('MCC', lambda res, cm: orngStat.MCC(cm)),
    ('sPi', lambda res, cm: orngStat.scottsPi(cm)),
)

scores = [s[1](res, cm) for s in stat]
print
print "Learner  " + "".join(["%-7s" % s[0] for s in stat])
for (i, l) in enumerate(learners):
    print "%-8s " % l.name + "".join(["%5.3f  " % s[i] for s in scores])
Example #6
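Like the previous example, this one is a fragment; result, learners, CAs, APs and the LaTeX-related names come from earlier in its source script. A sketch of plausible definitions (all of them assumptions for illustration, including the file and directory names):

# Assumed context for the fragment below; names and paths are illustrative only.
import os
import orange, orngTest, orngStat

name = "voting"                    # experiment label printed in the LaTeX row
directory = "results"              # directory expected to hold the pre-rendered ROC plot
ROC_PLOT = "roc.png"

bayes = orange.BayesLearner()
bayes.name = "bayes"
learners = [bayes]

data = orange.ExampleTable(name)
result = orngTest.crossValidation(learners, data, folds=10)
CAs = orngStat.CA(result)          # classification accuracy per learner
APs = orngStat.AP(result)          # average probability of the correct class per learner
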
CMs = orngStat.confusionMatrices(result)

learnersCount = len(learners)
first = True
for index in range(learnersCount):
    rowName = ''
    roc = ''
    if first:
        rowName = "\\multirow{%i}{3cm}{%s}" % (learnersCount, name)
        roc = "\\multirow{%i}{*}{\includegraphics[scale=0.3]{%s} }" % (learnersCount, os.path.join('..', directory, ROC_PLOT))
        first = False

    print "%s & %s & %5.3f & %5.3f & %i & %i & %i & %i & %5.3f & %5.3f & %5.3f & %s \\\\" % \
        (rowName, learners[index].name, CAs[index], APs[index], \
             CMs[index].TP, CMs[index].FP, CMs[index].FN, CMs[index].TN, \
             orngStat.sens(CMs[index]), orngStat.PPV(CMs[index]), orngStat.F1(CMs[index]), \
             roc)

minimalRows = 9
if (minimalRows - learnersCount) > 0:
    for _ in range(minimalRows - learnersCount):
        print ' &  &  &  &  &  &  &  &  &  &  &  \\\\'


iterations = orngStat.splitByIterations(result)
curves = []
for iteration in iterations:
    ROCs = orngStat.computeROC(iteration)
    for ROC in ROCs:
        curves.append(ROC)
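
The final loop only collects one ROC curve per learner per cross-validation fold. A hedged sketch of one way to consume the collected curves, assuming each curve is a sequence of (FPR, TPR) points as produced by orngStat.computeROC:

# Illustrative only: trapezoidal area under each collected ROC curve.
def roc_area(curve):
    area = 0.0
    for p1, p2 in zip(curve, curve[1:]):
        area += (p2[0] - p1[0]) * (p1[1] + p2[1]) / 2.0
    return area

for i, curve in enumerate(curves):
    print "curve %2i: %3i points, area %5.3f" % (i, len(curve), roc_area(curve))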