def classifier_qda_modular (fm_train_real=traindat, fm_test_real=testdat, label_train_twoclass=label_traindat, tolerance=1e-4, store_covs=False):
	from shogun.Features import RealFeatures, MulticlassLabels
	from shogun.Classifier import QDA

	feats_train = RealFeatures(fm_train_real)
	feats_test  = RealFeatures(fm_test_real)

	# QDA is a multiclass method, so even two-class labels are wrapped as MulticlassLabels
	labels = MulticlassLabels(label_train_twoclass)

	qda = QDA(feats_train, labels, tolerance, store_covs)
	qda.train()

	# classify the test features and return the model together with its predictions
	qda.set_features(feats_test)
	return qda, qda.apply().get_labels()
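The default arguments traindat, testdat and label_traindat refer to data loaded at module level in the original example file. A minimal sketch of a call with synthetic stand-in data (shapes and values assumed, not from the original):

import numpy as np

# hypothetical stand-ins for the module-level arrays the defaults refer to
traindat = np.random.randn(2, 100)                            # 2 features x 100 training vectors
testdat = np.random.randn(2, 60)                              # 2 features x 60 test vectors
label_traindat = np.concatenate([np.zeros(50), np.ones(50)])  # two classes, float64 as MulticlassLabels expects

qda, predictions = classifier_qda_modular(traindat, testdat, label_traindat)
print(predictions)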
Example #3
import numpy as np
from pylab import plot
from shogun.Features import RealFeatures, MulticlassLabels
from shogun.Classifier import QDA

N = 500      # samples per class
size = 100   # grid resolution for the iso-lines (assumed; defined elsewhere in the original)

# positive examples (this block is truncated in the listing; mean and covariance are assumed)
mean_pos = [0, 3]
cov_pos = [[3, 1], [1, 100]]
x_pos, y_pos = np.random.multivariate_normal(mean_pos, cov_pos, N).T
plot(x_pos, y_pos, 'bo')

# negative examples
mean_neg = [0, -3]
cov_neg = [[100, 10], [10, 3]]   # a covariance matrix must be symmetric positive semi-definite

x_neg, y_neg = np.random.multivariate_normal(mean_neg, cov_neg, N).T
plot(x_neg, y_neg, 'ro')

# train qda on the stacked 2 x 2N feature matrix
labels = MulticlassLabels(np.concatenate([np.zeros(N), np.ones(N)]))
pos = np.array([x_pos, y_pos])
neg = np.array([x_neg, y_neg])
features = RealFeatures(np.concatenate([pos, neg], 1))

qda = QDA()
qda.set_labels(labels)
qda.train(features)

# compute output plot iso-lines on a slightly padded data range
xs = np.concatenate([x_pos, x_neg])
ys = np.concatenate([y_pos, y_neg])

x1_max = max(1.2 * xs)
x1_min = min(1.2 * xs)
x2_max = max(1.2 * ys)
x2_min = min(1.2 * ys)

x1 = np.linspace(x1_min, x1_max, size)
x2 = np.linspace(x2_min, x2_max, size)
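The listing cuts off here. A minimal sketch of the usual continuation, assuming the grid is classified point-wise and drawn as decision iso-lines (not part of the original snippet):

from pylab import contour, show

# evaluate the trained QDA on the size x size grid (sketch; continuation assumed)
xx, yy = np.meshgrid(x1, x2)
grid = RealFeatures(np.array([xx.ravel(), yy.ravel()]))
Z = qda.apply(grid).get_labels().reshape(xx.shape)
contour(xx, yy, Z, linewidths=3, colors='k')
show()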
Example #4

import pylab
from shogun.Features import RealFeatures, MulticlassLabels
from shogun.Classifier import QDA

# gen_data, plot_data, plot_cov, plot_regions and util are helpers
# defined elsewhere in the original example file

# Number of classes
M = 3
# Number of samples of each class
N = 300
# Dimension of the data
dim = 2

cols = ['blue', 'green', 'red']

fig = pylab.figure()
ax = fig.add_subplot(111)
pylab.title('Quadratic Discriminant Analysis')

X, y = gen_data()

labels = MulticlassLabels(y)
features = RealFeatures(X.T)
qda = QDA(features, labels, 1e-4, True)   # store_covs=True keeps the per-class covariances
qda.train()
ypred = qda.apply().get_labels()

plot_data(qda, X, y, ypred, ax)
for i in range(M):
    plot_cov(ax, qda.get_mean(i), qda.get_cov(i), cols[i])
plot_regions(qda)

pylab.connect('key_press_event', util.quit)
pylab.show()
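The data generator is not shown in the listing. A minimal sketch of what gen_data might look like, reusing M, N and dim from the snippet and assuming M Gaussian blobs with distinct means (implementation assumed, not from the original):

import numpy as np

def gen_data():
    # sketch: M Gaussian classes of N points each in dim dimensions (assumed)
    means = np.array([[0.0, 4.0], [4.0, -4.0], [-4.0, -4.0]])  # one mean per class (assumed values)
    X = np.vstack([np.random.randn(N, dim) + means[i] for i in range(M)])
    y = np.concatenate([i * np.ones(N) for i in range(M)])     # class labels 0..M-1 as float64
    return X, y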