Example 1
def rocCurve(results,
             titleText,
             classIndex=0,
             stepSize=0.01,
             marker='x',
             plotArgs=dict()):
    thresholds, confusionMatrices = thresholdConfusionMatrices(
        results, classIndex, stepSize)
    recalls = [orngStat.recall(cm) for cm in confusionMatrices]
    # 1 - specificity, i.e. the false positive rate, plotted on the x axis
    specificities = [1 - orngStat.spec(cm) for cm in confusionMatrices]

    #fig = figure()

    xlabel("FP", fontsize=25)
    ylabel("TP", fontsize=25)
    #plot([0, 1], [0, 1])
    line = plot(specificities, recalls, marker=marker, **plotArgs)

    #scatter(specificities, recalls, picker=True, marker=marker) # to get picking.
    def onclick(event):
        #point = event.artist.get_data()
        indexes = event.ind
        msg = "\n".join(["%.3f" % thresholds[idx] for idx in indexes])
        print msg
        print


#    fig.canvas.mpl_connect('pick_event', onclick)

    xlim(0, 1.1)
    ylim(0, 1.1)
    title(titleText)
    return line
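
A minimal usage sketch (not part of the original listing): it assumes the surrounding module does "from pylab import *" for the plotting names used above, imports orange and orngTest, and defines the thresholdConfusionMatrices helper; the data file and learner here are illustrative only.

# Hypothetical driver for rocCurve -- pylab, orange, orngTest and the
# thresholdConfusionMatrices helper are assumed to be available.
bayes = orange.BayesLearner()
data = orange.ExampleTable("voting")
results = orngTest.crossValidation([bayes], data, folds=10)
rocCurve(results, "ROC for naive Bayes on 'voting'", classIndex=0, stepSize=0.05)
show()  # pylab call to display the figure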
Example 2
def get_stats(results):
    result_dict = {}
    # get_confusion_matrix() is a helper assumed to be defined elsewhere in
    # the same module.
    cm = get_confusion_matrix(results)

    # CA() returns one score per learner; index 0 picks the first (and only) one.
    result_dict['Accuracy'] = orngStat.CA(results)[0]
    result_dict['Sensitivity'] = orngStat.sens(cm)
    result_dict['Specificity'] = orngStat.spec(cm)

    return result_dict
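
A short usage sketch (not from the original listing), assuming the usual orange/orngTest imports and the get_confusion_matrix helper referenced above:

# Hypothetical call to get_stats on cross-validated results.
bayes = orange.BayesLearner()
data = orange.ExampleTable("voting")
results = orngTest.crossValidation([bayes], data, folds=10)
stats = get_stats(results)
for name in ('Accuracy', 'Sensitivity', 'Specificity'):
    print "%-12s %5.3f" % (name, stats[name])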
Example 3
def cforange_confusion_matrix_computations(input_dict):
    import orngStat
    cm = input_dict['cm']
    alpha = float(input_dict['alpha'])
    output_dict = {}
    output_dict['sens'] = orngStat.sens(cm)
    output_dict['spec'] = orngStat.spec(cm)
    output_dict['PPV'] = orngStat.PPV(cm)
    output_dict['NPV'] = orngStat.NPV(cm)
    output_dict['precision'] = orngStat.precision(cm)
    output_dict['recall'] = orngStat.recall(cm)
    output_dict['F1'] = orngStat.F1(cm)
    output_dict['Falpha'] = orngStat.Falpha(cm, alpha=alpha)
    output_dict['MCC'] = orngStat.MCC(cm)
    return output_dict
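
A hedged stand-alone sketch (not from the original listing): the surrounding workflow normally supplies input_dict, but it can be assembled by hand from cross-validation results, mirroring the confusion-matrix construction used in the examples below.

# Hypothetical input_dict built by hand; 'democrat' is used as the positive
# class, as in the later examples.
import orange, orngTest, orngStat
bayes = orange.BayesLearner()
data = orange.ExampleTable("voting")
res = orngTest.crossValidation([bayes], data, folds=10)
cm = orngStat.computeConfusionMatrices(
    res, classIndex=data.domain.classVar.values.index("democrat"))[0]
out = cforange_confusion_matrix_computations({'cm': cm, 'alpha': '2.0'})
print "F1 = %5.3f, MCC = %5.3f" % (out['F1'], out['MCC'])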
Example 4
import orange, orngTree, orngTest, orngStat

# set up the learners
bayes = orange.BayesLearner()
tree = orngTree.TreeLearner(mForPruning=2)
bayes.name = "bayes"
tree.name = "tree"
learners = [bayes, tree]

# compute accuracies on data
data = orange.ExampleTable("voting")
res = orngTest.crossValidation(learners, data, folds=10)
cm = orngStat.computeConfusionMatrices(res, classIndex=data.domain.classVar.values.index("democrat"))

stat = (
    ("CA", lambda res, cm: orngStat.CA(res)),
    ("Sens", lambda res, cm: orngStat.sens(cm)),
    ("Spec", lambda res, cm: orngStat.spec(cm)),
    ("AUC", lambda res, cm: orngStat.AUC(res)),
    ("IS", lambda res, cm: orngStat.IS(res)),
    ("Brier", lambda res, cm: orngStat.BrierScore(res)),
    ("F1", lambda res, cm: orngStat.F1(cm)),
    ("F2", lambda res, cm: orngStat.Falpha(cm, alpha=2.0)),
    ("MCC", lambda res, cm: orngStat.MCC(cm)),
    ("sPi", lambda res, cm: orngStat.scottsPi(cm)),
)

scores = [s[1](res, cm) for s in stat]
print
print "Learner  " + "".join(["%-7s" % s[0] for s in stat])
for (i, l) in enumerate(learners):
    print "%-8s " % l.name + "".join(["%5.3f  " % s[i] for s in scores])
Example 5
print "Confusion matrix for naive Bayes for 'opel':"
print "TP: %i, FP: %i, FN: %s, TN: %i" % (cm.TP, cm.FP, cm.FN, cm.TN)

print
cm = orngStat.confusionMatrices(resVeh)[0]
classes = vehicle.domain.classVar.values
print "\t"+"\t".join(classes)
for className, classConfusions in zip(classes, cm):
    print ("%s" + ("\t%i" * len(classes))) % ((className, ) + tuple(classConfusions))

cm = orngStat.confusionMatrices(res)
print
print "Sensitivity and specificity for 'voting'"
print "method\tsens\tspec"
for l in range(len(learners)):
    print "%s\t%5.3f\t%5.3f" % (learners[l].name, orngStat.sens(cm[l]), orngStat.spec(cm[l]))

cm = orngStat.confusionMatrices(resVeh, vehicle.domain.classVar.values.index("van"))
print
print "Sensitivity and specificity for 'vehicle=van'"
print "method\tsens\tspec"
for l in range(len(learners)):
    print "%s\t%5.3f\t%5.3f" % (learners[l].name, orngStat.sens(cm[l]), orngStat.spec(cm[l]))



print
print "AUC (voting)"

AUCs = orngStat.AUC(res)
for l in range(len(learners)):
    print "%10s: %5.3f" % (learners[l].name, AUCs[l])
Example 6
import orange, orngTree, orngTest, orngStat

bayes = orange.BayesLearner()
tree = orngTree.TreeLearner(mForPruning=2)
bayes.name = "bayes"
tree.name = "tree"
learners = [bayes, tree]

# compute accuracies on data
data = orange.ExampleTable("voting")
res = orngTest.crossValidation(learners, data, folds=10)
cm = orngStat.computeConfusionMatrices(
    res, classIndex=data.domain.classVar.values.index('democrat'))

stat = (
    ('CA', lambda res, cm: orngStat.CA(res)),
    ('Sens', lambda res, cm: orngStat.sens(cm)),
    ('Spec', lambda res, cm: orngStat.spec(cm)),
    ('AUC', lambda res, cm: orngStat.AUC(res)),
    ('IS', lambda res, cm: orngStat.IS(res)),
    ('Brier', lambda res, cm: orngStat.BrierScore(res)),
    ('F1', lambda res, cm: orngStat.F1(cm)),
    ('F2', lambda res, cm: orngStat.Falpha(cm, alpha=2.0)),
    ('MCC', lambda res, cm: orngStat.MCC(cm)),
    ('sPi', lambda res, cm: orngStat.scottsPi(cm)),
)

scores = [s[1](res, cm) for s in stat]
print
print "Learner  " + "".join(["%-7s" % s[0] for s in stat])
for (i, l) in enumerate(learners):
    print "%-8s " % l.name + "".join(["%5.3f  " % s[i] for s in scores])