Example #1
 def test_error_measure(self):
     """error_measure('ber', 3) must reproduce a hand-computed balanced error rate."""
     # Column vectors (n, 1) of predicted and target labels for a 3-class problem.
     predicted = np.array([0, 0, 1, 2, 0, 0, 2, 2, 1, 2]).reshape(-1, 1)
     targets = np.array([0, 0, 0, 1, 1, 0, 2, 2, 1, 1]).reshape(-1, 1)
     measure = ConfusionMatrix.error_measure('ber', 3)
     self.assertAlmostEqual(measure(predicted, targets), 1.0 / 3)
Example #2
# Build a 61-class confusion matrix from the two label vectors.
# NOTE(review): argument order (predicted vs. target) is not visible here —
# confirm against ConfusionMatrix.from_data's documentation.
confusion_matrix = ConfusionMatrix.from_data(61, ytestmean , ymean) # 61 classes
print "Error rate: %.4f" % confusion_matrix.error_rate # this comes down to 0-1 loss
print "Balanced error rate: %.4f" % confusion_matrix.ber
print

# compute precision and recall for each class vs. all others (one-vs-rest)
print "Per-class precision and recall"
binary_confusion_matrices = confusion_matrix.binary()  # one binary matrix per class
for c in range(61):
    m = binary_confusion_matrices[c]
    print "label %d - precision: %.2f, recall %.2f" % (c, m.precision, m.recall)
print

# properties of the ConfusionMatrix and BinaryConfusionMatrix classes can also be used
# as error measure functions, as follows:
ber = ConfusionMatrix.error_measure('ber', 61) # 61-class balanced error rate
print "Balanced error rate: %.4f" % ber(ytestmean, ymean)

# Plotting
import pylab
import matplotlib.pyplot as plt
from matplotlib import cm

# plot confusion matrix (balanced, each class is equally weighted)
pylab.figure()
plot_conf(confusion_matrix.balance())

# nx = 2
# ny = 1

# plt.subplot(nx, ny, 2)
Example #3
    # Mean 0-1 loss over the held-out samples: each (prediction, target) pair
    # is scored by loss_01_time, then averaged.
    print "Error : " + str(mdp.numx.mean([loss_01_time(sample, target) for (sample, target) in zip(ytest, outputs[n_train_samples:])]))

    # Collapse each output time-series to a single class label: average over
    # time (axis=0), then take the argmax of the per-class means.
    ymean = sp.array([sp.argmax(mdp.numx.atleast_2d(mdp.numx.mean(sample, axis=0))) for sample in
                      outputs[n_train_samples:]])
    ytestmean = sp.array([sp.argmax(mdp.numx.atleast_2d(mdp.numx.mean(sample, axis=0))) for sample in ytest])

    # use ConfusionMatrix to compute some more information about the
    # classification results (NOTE(review): original comment was truncated)
    confusion_matrix = ConfusionMatrix.from_data(10, ytestmean, ymean) # 10 classes
    print "Error rate: %.4f" % confusion_matrix.error_rate # this comes down to 0-1 loss
    print "Balanced error rate: %.4f" % confusion_matrix.ber
    print

    # compute precision and recall for each class vs. all others (one-vs-rest)
    print "Per-class precision and recall"
    binary_confusion_matrices = confusion_matrix.binary()  # one binary matrix per class
    for c in range(10):
        m = binary_confusion_matrices[c]
        print "label %d - precision: %.2f, recall %.2f" % (c, m.precision, m.recall)
    print

    # properties of the ConfusionMatrix and BinaryConfusionMatrix classes can also be used
    # as error measure functions, as follows:
    ber = ConfusionMatrix.error_measure('ber', 10) # 10-class balanced error rate
    print "Balanced error rate: %.4f" % ber(ytestmean, ymean)

    # plot confusion matrix (balanced, each class is equally weighted)
    pylab.figure()
    plot_conf(confusion_matrix.balance())


Example #4
    # Collapse each test time-series to one predicted label (mean over time,
    # then argmax), shaped as a column vector (n_samples, 1) via atleast_2d().T.
    ytestmean = sp.atleast_2d(
        sp.array([
            sp.argmax(mdp.numx.atleast_2d(mdp.numx.mean(sample, axis=0)))
            for sample in ytest
        ])).T

    # use ConfusionMatrix to compute some more information about the
    # classification results (NOTE(review): original comment was truncated)
    confusion_matrix = ConfusionMatrix.from_data(10, ytestmean,
                                                 ymean)  # 10 classes
    print "Error rate: %.4f" % confusion_matrix.error_rate  # this comes down to 0-1 loss
    print "Balanced error rate: %.4f" % confusion_matrix.ber
    print

    # compute precision and recall for each class vs. all others (one-vs-rest)
    print "Per-class precision and recall"
    binary_confusion_matrices = confusion_matrix.binary()  # one binary matrix per class
    for c in range(10):
        m = binary_confusion_matrices[c]
        print "label %d - precision: %.2f, recall %.2f" % (c, m.precision,
                                                           m.recall)
    print

    # properties of the ConfusionMatrix and BinaryConfusionMatrix classes can also be used
    # as error measure functions, as follows:
    ber = ConfusionMatrix.error_measure('ber',
                                        10)  # 10-class balanced error rate
    print "Balanced error rate: %.4f" % ber(ytestmean, ymean)

    # plot confusion matrix (balanced, each class is equally weighted)
    plot_conf(confusion_matrix.balance())
Example #5
# Report overall metrics from an already-built confusion matrix.
# NOTE(review): `confusion_matrix` is defined earlier, outside this excerpt.
print "Error rate: %.4f" % confusion_matrix.error_rate  # this comes down to 0-1 loss
print "Balanced error rate: %.4f" % confusion_matrix.ber
print

# compute precision and recall for each class vs. all others (one-vs-rest)
print "Per-class precision and recall"
binary_confusion_matrices = confusion_matrix.binary()  # one binary matrix per class
for c in range(61):
    m = binary_confusion_matrices[c]
    print "label %d - precision: %.2f, recall %.2f" % (c, m.precision,
                                                       m.recall)
print

# properties of the ConfusionMatrix and BinaryConfusionMatrix classes can also be used
# as error measure functions, as follows:
ber = ConfusionMatrix.error_measure('ber', 61)  # 61-class balanced error rate
print "Balanced error rate: %.4f" % ber(ytestmean, ymean)

# Plotting
import pylab
import matplotlib.pyplot as plt
from matplotlib import cm

# plot confusion matrix (balanced, each class is equally weighted)
pylab.figure()
plot_conf(confusion_matrix.balance())

# nx = 2
# ny = 1

# plt.subplot(nx, ny, 2)