Example #1
def circle_results(file):
    t = PrettyTable(['Description', 'Total', 'True Pos.', 'False Pos.', 'True Neg.', 'False Neg.',
                     'Precision', 'Sensitivity', 'Dice Coeff.', 'Jaccard Ind.', 'Jaccard Dist.'])
    f = open(file, 'r')
    for line in f:
        totals = line.split()
        description = totals[0]
        tp = int(totals[1])
        fp = int(totals[2])
        tn = int(totals[3])
        fn = int(totals[4])
        total = tp + fp + tn + fn
        precision = evaluation.precision(tp, fp)
        sensitivity = evaluation.sensitivity(tp, fn)
        # fmeasure = evaluation.fmeasure(tp, fp, fn)
        dicecoeff = evaluation.dicecoeff(tp, fp, fn)
        jaccardindex = evaluation.jaccardindex(tp, fp, fn)
        jaccarddistance = 1 - jaccardindex  # Jaccard distance = 1 - Jaccard index
        t.add_row([description, str(total), str(tp), str(fp), str(tn), str(fn), str(precision),
                   str(sensitivity), str(dicecoeff), str(jaccardindex), str(jaccarddistance)])
    f.close()

    print("Circle Detection\n")
    print(t)

    now = "\n" + str(datetime.now()) + "\n"
    data = t.get_string()
    r = open('circle_test_results.txt', 'a')  # append as text, not binary
    r.write(now)
    r.write(data)
    r.write("\n")
    r.close()
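The `evaluation` module used by this example is not shown on this page. For reference only, here is a minimal sketch of count-based helpers with the same signatures; this is an assumption about what the project likely computes, not its actual source. Note that the 'Jaccard Dist.' column is simply 1 minus the Jaccard index.

# Sketch only (NOT the project's evaluation module): the standard count-based
# metrics, taking raw true/false positive/negative counts as inputs.
def precision(tp, fp):
    # fraction of detected circles that are real
    return tp / (tp + fp) if (tp + fp) else 0.0

def sensitivity(tp, fn):
    # fraction of real circles that were detected (recall)
    return tp / (tp + fn) if (tp + fn) else 0.0

def fmeasure(tp, fp, fn):
    # harmonic mean of precision and recall
    p, r = precision(tp, fp), sensitivity(tp, fn)
    return 2 * p * r / (p + r) if (p + r) else 0.0

def dicecoeff(tp, fp, fn):
    # Dice coefficient; for raw counts this equals the F-measure
    return 2 * tp / (2 * tp + fp + fn) if (2 * tp + fp + fn) else 0.0

def jaccardindex(tp, fp, fn):
    # intersection over union of predicted and true positive sets
    return tp / (tp + fp + fn) if (tp + fp + fn) else 0.0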
Example #2
def test_octavo(
    num_points,
    classes,
    xbound,
    ybound,
    zbound,
    max_depth,
    min_node_size,
    min_loss,
    expected,
):
    xy_parent = data_for_tests.make_octavo(
        num_points, classes, xbound, ybound, zbound
    ).values
    X = xy_parent[:, :-1]
    y = xy_parent[:, -1]

    forest = random_forest.grow_random_forest(
        X, y, num_trees=20, max_features=2, min_node_size=1
    )
    predictions = random_forest.forest_predict(forest, X)
    targets = y
    tfpns = evaluation.tfpn(predictions, targets)
    cm = evaluation.make_confusion_matrix(*tfpns, percentage=True)
    result = np.array(
        [evaluation.precision(cm), evaluation.sensitivity(cm), evaluation.fpr(cm)]
    )
    expected = np.array(expected)
    # every metric must be within tolerance of its expected value
    assert np.all(np.abs(expected - result) < 0.01)
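Examples 2 and 3 use a different `evaluation` API than Example 1: predictions and targets are first reduced to TP/FP/TN/FN counts by `evaluation.tfpn`, folded into a confusion matrix by `evaluation.make_confusion_matrix`, and the metrics are then read off that matrix. The sketch below shows one way such a pipeline could look, assuming binary 0/1 labels and a [[tp, fp], [fn, tn]] matrix layout; both assumptions are mine, and the project's real implementation may differ.

import numpy as np

# Sketch only: assumes binary 0/1 labels and a [[tp, fp], [fn, tn]] layout.
def tfpn(predictions, targets):
    predictions, targets = np.asarray(predictions), np.asarray(targets)
    tp = np.sum((predictions == 1) & (targets == 1))
    fp = np.sum((predictions == 1) & (targets == 0))
    tn = np.sum((predictions == 0) & (targets == 0))
    fn = np.sum((predictions == 0) & (targets == 1))
    return tp, fp, tn, fn

def make_confusion_matrix(tp, fp, tn, fn, percentage=False):
    cm = np.array([[tp, fp], [fn, tn]], dtype=float)
    return cm / cm.sum() if percentage else cm

def precision(cm):
    # TP / (TP + FP)
    return cm[0, 0] / (cm[0, 0] + cm[0, 1])

def sensitivity(cm):
    # TP / (TP + FN)
    return cm[0, 0] / (cm[0, 0] + cm[1, 0])

def fpr(cm):
    # false-positive rate: FP / (FP + TN)
    return cm[0, 1] / (cm[0, 1] + cm[1, 1])

Because every metric is a ratio of matrix entries, the percentage=True normalisation does not change the resulting values.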
Example #3
def test_diagonal_ndim(num_points, dim, max_features, expected, precision_bound):
    xy_parent = data_for_tests.make_diagonal_ndim(num_points, dim).values
    X = xy_parent[:, :-1]
    y = xy_parent[:, -1]

    forest = random_forest.grow_random_forest(
        X, y, num_trees=30, max_depth=20, max_features=max_features, min_node_size=1
    )
    predictions = random_forest.forest_predict(forest, X)
    targets = y
    tfpns = evaluation.tfpn(predictions, targets)
    cm = evaluation.make_confusion_matrix(*tfpns, percentage=True)
    result = np.array(
        [evaluation.precision(cm), evaluation.sensitivity(cm), evaluation.fpr(cm)]
    )
    expected = np.array(expected)
    print(precision_bound)
    assert np.all(np.abs(expected - result) < precision_bound)
Example #4
    lines = 0
    tp = 0
    fp = 0
    tn = 0
    fn = 0

    f = open(file, 'r')
    for line in f:
        lines += 1
        totals = line.split()
        # totals[0] is image name
        tp += int(totals[1])
        fp += int(totals[2])
        tn += int(totals[3])
        fn += int(totals[4])
    f.close()

    precision = evaluation.precision(tp, fp)
    sensitivity = evaluation.sensitivity(tp, fn)
    fmeasure = evaluation.fmeasure(tp, fp, fn)
    dicecoeff = evaluation.dicecoeff(tp, fp, fn)
    jaccardindex = evaluation.jaccardindex(tp, fp, fn)

    r = open('circle_found_results.txt', 'a')
    r.write('File name: ' + file + '\n')
    r.write('Lines: ' + str(lines) + '\n')
    r.write('True Positives: ' + str(tp) + '\n')
    r.write('False Positives: ' + str(fp) + '\n')
    r.write('True Negatives: ' + str(tn) + '\n')
    r.write('False Negatives: ' + str(fn) + '\n')
    r.write('Precision: ' + str(precision) + '\n')
    r.write('Sensitivity: ' + str(sensitivity) + '\n')
    r.write('F-Measure: ' + str(fmeasure) + '\n')
    r.write('Dice Coefficient: ' + str(dicecoeff) + '\n')
Example #5
    lines = 0
    tp = 0
    fp = 0
    tn = 0
    fn = 0

    f = open(file, 'r')
    for line in f:
        lines += 1
        totals = line.split()
        tp += int(totals[0])
        fp += int(totals[1])
        tn += int(totals[2])
        fn += int(totals[3])
    f.close()

    precision = evaluation.precision(tp, fp)
    sensitivity = evaluation.sensitivity(tp, fn)
    fmeasure = evaluation.fmeasure(tp, fp, fn)
    dicecoeff = evaluation.dicecoeff(tp, fp, fn)
    jaccardindex = evaluation.jaccardindex(tp, fp, fn)

    r = open('circle_found_results.txt', 'a')
    r.write('File name: ' + file + '\n')
    r.write('Lines: ' + str(lines) + '\n')
    r.write('True Positives: ' + str(tp) + '\n')
    r.write('False Positives: ' + str(fp) + '\n')
    r.write('True Negatives: ' + str(tn) + '\n')
    r.write('False Negatives: ' + str(fn) + '\n')
    r.write('Precision: ' + str(precision) + '\n')
    r.write('Sensitivity: ' + str(sensitivity) + '\n')
    r.write('F-Measure: ' + str(fmeasure) + '\n')
    r.write('Dice Coefficient: ' + str(dicecoeff) + '\n')
Example #6
forest = random_forest.grow_random_forest(
    X, y, num_trees=30, max_depth=20, max_features=max_features, min_node_size=1
)

# make predictions
predictions = random_forest.forest_predict(forest, X)

# calculate the numbers of true positives, false positives, true negatives, false negatives
tfpns = evaluation.tfpn(predictions, y)

# calculate the confusion matrix
cm = evaluation.make_confusion_matrix(*tfpns, percentage=True)

# calculate metrics: precision, sensitivity, false-positive-rate
metrics = np.array(
    [evaluation.precision(cm), evaluation.sensitivity(cm), evaluation.fpr(cm)]
)

print(
    f"{num_points} points are randomly generated in the unit cube in {dim} dimensions.\n"
    f"Those with a sum of coordinates >= {dim}/2 are labeled 1; those below are labeled 0."
)
print("The model achieves the following in-sample metrics:")
print("precision:", metrics[0])
print("sensitivity:", metrics[1])
print("false-positive-rate:", metrics[2])
print('If the metrics are not 1,1,0, then there is a problem.')
# if (metrics[0] == 1) & (metrics[1] == 1) & (metrics[2] == 0):
# 	print(0)
# else:
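Example #6 is a script fragment: `X`, `y`, `num_points`, `dim` and `max_features` are defined earlier in the original file and are not shown here. A minimal setup consistent with the calls in Example #3 would look like the sketch below; the concrete values are hypothetical.

import numpy as np

import data_for_tests
import evaluation
import random_forest

# hypothetical values; the original script defines its own
num_points = 500
dim = 3
max_features = 2

# same data helper as in Example #3: n-dimensional toy data set
xy_parent = data_for_tests.make_diagonal_ndim(num_points, dim).values
X = xy_parent[:, :-1]
y = xy_parent[:, -1]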