Example #1
def classifier_qda_modular (fm_train_real=traindat, fm_test_real=testdat, label_train_twoclass=label_traindat, tolerance=1e-4, store_covs=False):
	from shogun.Features import RealFeatures, MulticlassLabels
	from shogun.Classifier import QDA

	# wrap the raw matrices (one example per column) as Shogun feature objects
	feats_train = RealFeatures(fm_train_real)
	feats_test  = RealFeatures(fm_test_real)

	labels = MulticlassLabels(label_train_twoclass)

	# train QDA with the given convergence tolerance; store_covs controls
	# whether the per-class covariance matrices are kept after training
	qda = QDA(feats_train, labels, tolerance, store_covs)
	qda.train()

	# classify the test set, then switch the classifier's features to the
	# test set and classify again via the argument-free apply()
	qda.apply(feats_test).get_labels()
	qda.set_features(feats_test)
	return qda, qda.apply().get_labels()
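The default arguments traindat, testdat and label_traindat are module-level arrays loaded elsewhere in the original example file, so they must already exist when the def statement runs. A minimal driver sketch with synthetic data (the Gaussian blobs, sizes and seed below are assumptions, not part of the original):

import numpy as np

# hypothetical data: two 2-D Gaussian classes, one example per column,
# float labels 0.0/1.0 as MulticlassLabels expects
np.random.seed(0)
n = 50
traindat = np.hstack((np.random.randn(2, n) - 1.0, np.random.randn(2, n) + 1.0))
testdat  = np.hstack((np.random.randn(2, n) - 1.0, np.random.randn(2, n) + 1.0))
label_traindat = np.concatenate((np.zeros(n), np.ones(n)))

qda, predictions = classifier_qda_modular(traindat, testdat, label_traindat)
print(predictions[:10])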
Example #2
def classifier_qda_modular(fm_train_real=traindat,
                           fm_test_real=testdat,
                           label_train_twoclass=label_traindat,
                           tolerance=1e-4,
                           store_covs=False):
    from shogun.Features import RealFeatures, Labels
    from shogun.Classifier import QDA

    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)

    # older Shogun API: the generic Labels class instead of MulticlassLabels
    labels = Labels(label_train_twoclass)

    qda = QDA(feats_train, labels, tolerance, store_covs)
    qda.train()

    qda.apply(feats_test).get_labels()
    qda.set_features(feats_test)
    return qda, qda.apply().get_labels()
Example #3
qda.set_labels(labels)
qda.train(features)

# compute output plot iso-lines
xs = np.array(np.concatenate([x_pos, x_neg]))
ys = np.array(np.concatenate([y_pos, y_neg]))

# scale the axis limits by 1.2 so the point clouds do not touch the plot border
x1_max = 1.2 * xs.max()
x1_min = 1.2 * xs.min()
x2_max = 1.2 * ys.max()
x2_min = 1.2 * ys.min()

x1 = np.linspace(x1_min, x1_max, size)
x2 = np.linspace(x2_min, x2_max, size)

x, y = np.meshgrid(x1, x2)

# classify every grid point with the trained QDA model
dense = RealFeatures(np.array((np.ravel(x), np.ravel(y))))
dense_labels = qda.apply(dense).get_labels()

z = dense_labels.reshape((size, size))

pcolor(x, y, z, shading='auto')  # 'interp' is not a valid pcolor shading value
contour(x, y, z, linewidths=1, colors='black')  # the 'hold' kwarg was removed from newer matplotlib

axis([x1_min, x1_max, x2_min, x2_max])

connect('key_press_event', util.quit)

show()
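This fragment comes from one of the graphical QDA examples; x_pos, x_neg, y_pos, y_neg, size, qda, labels, features and the pylab/util imports are set up earlier in that file. A rough sketch of the missing setup that would have to run before the fragment (blob positions, sample counts and the grid resolution are assumptions):

import numpy as np
import util   # helper module shipped next to the graphical examples (provides util.quit)
from pylab import pcolor, contour, axis, connect, show
from shogun.Features import RealFeatures, MulticlassLabels
from shogun.Classifier import QDA

size = 100                                                          # resolution of the iso-line grid
n = 200
x_pos, y_pos = np.random.randn(n) + 2.0, np.random.randn(n) + 2.0   # positive-class cloud
x_neg, y_neg = np.random.randn(n) - 2.0, np.random.randn(n) - 2.0   # negative-class cloud

features = RealFeatures(np.array((np.concatenate((x_pos, x_neg)),
                                  np.concatenate((y_pos, y_neg)))))
labels = MulticlassLabels(np.concatenate((np.zeros(n), np.ones(n))))
qda = QDA()   # default tolerance, covariances not stored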
Example #4

# Number of classes
M = 3
# Number of samples of each class
N = 300
# Dimension of the data
dim = 2

cols = ['blue', 'green', 'red']

fig = pylab.figure()
ax = fig.add_subplot(111)
pylab.title('Quadratic Discriminant Analysis')

X, y = gen_data()

labels = MulticlassLabels(y)
features = RealFeatures(X.T)
qda = QDA(features, labels, 1e-4, True)   # store_covs=True so get_cov() below works
qda.train()
ypred = qda.apply().get_labels()

plot_data(qda, X, y, ypred, ax)
for i in range(M):
    plot_cov(ax, qda.get_mean(i), qda.get_cov(i), cols[i])
plot_regions(qda)

pylab.connect('key_press_event', util.quit)
pylab.show()
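gen_data, plot_data, plot_cov and plot_regions are helpers defined earlier in the original graphical example, together with the pylab/numpy/Shogun imports. For illustration only, a stand-in gen_data that returns the shapes the snippet expects (X with one sample per row, y with float class labels); the blob centres are invented:

import numpy as np

def gen_data():
    # M Gaussian blobs of N points each in `dim` dimensions (assumed centres)
    np.random.seed(0)
    means = np.array([[0.0, 0.0], [4.0, 0.0], [2.0, 3.5]])
    X = np.vstack([np.random.randn(N, dim) + means[i] for i in range(M)])
    y = np.concatenate([i * np.ones(N) for i in range(M)])
    return X, y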
Example #5
	# closing line of the plot_regions helper (see the sketch after this example)
	pylab.contour(xx, yy, Z, linewidths=3, colors='k')

# Number of classes
M = 3
# Number of samples of each class
N = 300
# Dimension of the data
dim = 2

cols = ['blue', 'green', 'red']

fig = pylab.figure()
ax  = fig.add_subplot(111)
pylab.title('Quadratic Discriminant Analysis')

X, y = gen_data()

labels = MulticlassLabels(y)
features = RealFeatures(X.T)
qda = QDA(features, labels, 1e-4, True)
qda.train()
ypred = qda.apply().get_labels()

plot_data(qda, X, y, ypred, ax)
for i in range(M):
	plot_cov(ax, qda.get_mean(i), qda.get_cov(i), cols[i])
plot_regions(qda)

pylab.connect('key_press_event', util.quit)
pylab.show()
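The indented pylab.contour line at the top of this example is the closing line of the plot_regions helper. A rough sketch of such a helper under that assumption (the 200-point grid and all names other than qda are made up; the real helper lives in the full example file):

import numpy as np

def plot_regions(qda):
    # classify a dense grid over the current axis limits and draw the
    # decision-region boundaries as thick black contour lines
    xmin, xmax, ymin, ymax = pylab.axis()
    xx, yy = np.meshgrid(np.linspace(xmin, xmax, 200),
                         np.linspace(ymin, ymax, 200))
    grid = RealFeatures(np.array((np.ravel(xx), np.ravel(yy))))
    Z = qda.apply(grid).get_labels().reshape(xx.shape)
    pylab.contour(xx, yy, Z, linewidths=3, colors='k')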