def runLFDA(X_train, X_test, y_train, y_test):
    # Learn an LFDA embedding on the training split, project both splits,
    # and cache the projections to .npy files.
    transformer = LFDA()
    transformer.fit(X_train, y_train)
    X_train_proj = transformer.transform(X_train)
    X_test_proj = transformer.transform(X_test)
    np.save('X_train_LFDA', X_train_proj)
    np.save('X_test_LFDA', X_test_proj)
    return X_train_proj, X_test_proj
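A minimal, self-contained version of the same flow, assuming metric-learn's LFDA and using an iris split purely for illustration (the original feature arrays are not shown here):

import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from metric_learn import LFDA

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

transformer = LFDA()
transformer.fit(X_train, y_train)               # learn the local Fisher embedding
X_train_proj = transformer.transform(X_train)   # project both splits
X_test_proj = transformer.transform(X_test)
np.save('X_train_LFDA', X_train_proj)           # np.save appends the .npy suffix
np.save('X_test_LFDA', X_test_proj)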
def fisher_discriminant(X, Y):
    # Fit LFDA and return both the embedded data and the learned metric matrix.
    model = LFDA()
    model.fit(X, Y)
    return model.transform(X), model.metric()
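The two returned objects are related: the embedding is a linear map L, and metric() (get_mahalanobis_matrix() in newer metric-learn releases) is M = L.T @ L, so squared distances in the embedded space are Mahalanobis distances under M. A quick check of that relationship, sketched with the newer attribute names and iris data as illustrative assumptions:

import numpy as np
from sklearn.datasets import load_iris
from metric_learn import LFDA

X, y = load_iris(return_X_y=True)
model = LFDA(n_components=2)
model.fit(X, y)

L = model.components_                  # linear map, shape (2, 4)
M = model.get_mahalanobis_matrix()     # M = L.T @ L, shape (4, 4)

diff = X[0] - X[1]
d_embedded = np.sum((model.transform(X[:1]) - model.transform(X[1:2])) ** 2)
d_mahalanobis = diff @ M @ diff
assert np.isclose(d_embedded, d_mahalanobis)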
Example 3
class LFDA:
    def __init__(self):
        """Initializes the LFDA model"""
        self.metric_model = LFDA_ml()
        self.X_tr = None
        self.y_train = None
        self.X_te = None

    def fit(self, X_tr, y_train):
        """Fits the model to the prescribed data."""
        self.X_tr = X_tr
        self.y_train = y_train
        return self.metric_model.fit(X_tr, y_train)

    def transform(self, X):
        """Transforms the test data according to the model"""
        return self.metric_model.transform(X)

    def predict_proba(self, X_te):
        """Predicts the probabilities of each of the test samples"""
        test_samples = X_te.shape[0]
        # Project the stored training data without overwriting it, so repeated
        # calls stay consistent.
        X_tr_proj = self.transform(self.X_tr)
        clf = NearestCentroid()
        clf.fit(X_tr_proj, self.y_train)
        centroids = clf.centroids_
        probabilities = np.zeros((test_samples, centroids.shape[0]))
        for sample in range(test_samples):
            probabilities[sample] = sk_nearest_neighbour_proba(
                centroids, X_te[sample, :])
        return probabilities
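sk_nearest_neighbour_proba is a helper defined elsewhere in that project; a plausible stand-in (an assumption, not the original implementation) maps distances to the class centroids into a probability vector with a softmax over negative distances:

import numpy as np

def sk_nearest_neighbour_proba(centroids, sample):
    # Hypothetical stand-in: distance from the sample to every class centroid,
    # converted so that closer centroids get higher probability.
    dists = np.linalg.norm(centroids - sample, axis=1)
    scores = np.exp(-dists)
    return scores / scores.sum()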
Example 4
    def test_iris(self):
        lfda = LFDA(k=2, num_dims=2)
        lfda.fit(self.iris_points, self.iris_labels)
        csep = class_separation(lfda.transform(), self.iris_labels)
        self.assertLess(csep, 0.15)

        # Sanity checks for learned matrices.
        self.assertEqual(lfda.metric().shape, (4, 4))
        self.assertEqual(lfda.transformer().shape, (2, 4))
Example 5
  def test_iris(self):
    lfda = LFDA(k=2, n_components=2)
    lfda.fit(self.iris_points, self.iris_labels)
    csep = class_separation(lfda.transform(self.iris_points), self.iris_labels)
    self.assertLess(csep, 0.15)

    # Sanity checks for learned matrices.
    self.assertEqual(lfda.get_mahalanobis_matrix().shape, (4, 4))
    self.assertEqual(lfda.components_.shape, (2, 4))
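The shape checks follow from how metric-learn's Mahalanobis-type learners embed data: transform(X) is, up to input validation, the linear map X @ components_.T. A small sketch of that assumption on iris:

import numpy as np
from sklearn.datasets import load_iris
from metric_learn import LFDA

X, y = load_iris(return_X_y=True)
lfda = LFDA(k=2, n_components=2).fit(X, y)
assert lfda.components_.shape == (2, X.shape[1])   # (n_components, n_features)
assert np.allclose(lfda.transform(X), X @ lfda.components_.T)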
Example 6
  def test_iris(self):
    lfda = LFDA(k=2, num_dims=2)
    lfda.fit(self.iris_points, self.iris_labels)
    csep = class_separation(lfda.transform(), self.iris_labels)
    self.assertLess(csep, 0.15)

    # Sanity checks for learned matrices.
    self.assertEqual(lfda.metric().shape, (4, 4))
    self.assertEqual(lfda.transformer().shape, (2, 4))
  def test_lfda(self):
    lfda = LFDA(k=2, n_components=2)
    lfda.fit(self.X, self.y)
    res_1 = lfda.transform(self.X)

    lfda = LFDA(k=2, n_components=2)
    res_2 = lfda.fit_transform(self.X, self.y)

    # signs may be flipped, that's okay
    assert_array_almost_equal(abs(res_1), abs(res_2))
Example 8
    def test_lfda(self):
        lfda = LFDA(k=2, num_dims=2)
        lfda.fit(self.X, self.y)
        res_1 = lfda.transform()

        lfda = LFDA(k=2, num_dims=2)
        res_2 = lfda.fit_transform(self.X, self.y)

        # signs may be flipped, that's okay
        if np.sign(res_1[0, 0]) != np.sign(res_2[0, 0]):
            res_2 *= -1
        assert_array_almost_equal(res_1, res_2)
  def test_lfda(self):
    lfda = LFDA(k=2, num_dims=2)
    lfda.fit(self.X, self.y)
    res_1 = lfda.transform(self.X)

    lfda = LFDA(k=2, num_dims=2)
    res_2 = lfda.fit_transform(self.X, self.y)

    # signs may be flipped, that's okay
    if np.sign(res_1[0,0]) != np.sign(res_2[0,0]):
        res_2 *= -1
    assert_array_almost_equal(res_1, res_2)
  def test_iris(self):
    lfda = LFDA(k=2, dim=2)
    lfda.fit(self.iris_points, self.iris_labels)
    csep = class_separation(lfda.transform(), self.iris_labels)
    self.assertLess(csep, 0.15)
Example 12
print("done in %0.3fs" % (time() - t0))

eigenfaces = pca.components_.reshape((n_components, h, w))

print("Projecting the input data on the eigenfaces orthonormal basis")
t0 = time()
X_train_pca = pca.transform(X_train)
X_test_pca = pca.transform(X_test)
print("done in %0.3fs" % (time() - t0))

# Try LFDA here.
print("Trying LFDA")
lfda = LFDA()
lfda = lfda.fit(X_train_pca, y_train)
X_tr = lfda.transform(X_train_pca)
X_te = lfda.transform(X_test_pca)

acc, y_pred = classifier.sk_nearest_neighbour(X_tr, y_train, X_te, y_test)
print("accuracy = %s", acc)
print(classification_report(y_test, y_pred, target_names=target_names))
print(confusion_matrix(y_test, y_pred, labels=range(n_classes)))
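The PCA → LFDA → nearest-neighbour chain above can also be written as a single scikit-learn Pipeline. A sketch under two assumptions: the faces data is loaded with fetch_lfw_people as in the scikit-learn eigenfaces example, and KNeighborsClassifier stands in for the project's classifier.sk_nearest_neighbour helper:

from sklearn.datasets import fetch_lfw_people
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.decomposition import PCA
from sklearn.neighbors import KNeighborsClassifier
from metric_learn import LFDA

lfw = fetch_lfw_people(min_faces_per_person=70, resize=0.4)
X_train, X_test, y_train, y_test = train_test_split(
    lfw.data, lfw.target, random_state=42)

pipe = Pipeline([
    ('pca', PCA(n_components=150, whiten=True)),   # eigenfaces step
    ('lfda', LFDA()),                              # supervised metric-learning step
    ('knn', KNeighborsClassifier(n_neighbors=1)),
])
pipe.fit(X_train, y_train)
print("accuracy = %s" % pipe.score(X_test, y_test))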

###############################################################################
# Train a SVM classification model

print("Fitting the classifier to the training set")
t0 = time()
param_grid = {
    'C': [1e3, 5e3, 1e4, 5e4, 1e5],
    'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.1],
Example 13
def lfda(data, label, dim):
    lcda = LFDA(k=2, num_dims=dim)
    lcda.fit(data, label)
    result = lcda.transform(data)
    return result

import argparse
import time
import os.path as osp
from os import makedirs  # or a project-specific helper with the same name

import numpy as np
from metric_learn import LFDA


def load_split(args):
    X_train = np.load(osp.join(args.data_root, 'feature_train.npy'))
    y_train = np.load(osp.join(args.data_root, 'label_train.npy'))
    X_test = np.load(osp.join(args.data_root, 'feature_test.npy'))
    y_test = np.load(osp.join(args.data_root, 'label_test.npy'))
    return X_train, X_test, y_train, y_test


if __name__ == '__main__':
    parser = argparse.ArgumentParser("LFDA")
    parser.add_argument('--data-root', default='./data/raw_split')
    parser.add_argument('--n-components', type=int, default=2)
    args = parser.parse_args()

    name = f"{args.n_components}"
    data_save_folder = f"./data/LFDA/{name}"
    makedirs(data_save_folder)

    X_train, X_test, y_train, y_test = load_split(args)
    print(X_train.shape)

    t = time.time()

    lfda = LFDA(n_components=args.n_components)
    lfda.fit(X_train, y_train)

    np.save(osp.join(data_save_folder, "feature_train.npy"),
            lfda.transform(X_train))
    np.save(osp.join(data_save_folder, "label_train.npy"), y_train)
    np.save(osp.join(data_save_folder, "feature_test.npy"),
            lfda.transform(X_test))
    np.save(osp.join(data_save_folder, "label_test.npy"), y_test)
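Downstream code can reload the projections exactly as saved above; a small sketch (NearestCentroid is an illustrative choice, and the folder name matches the default n_components=2):

import os.path as osp
import numpy as np
from sklearn.neighbors import NearestCentroid

data_save_folder = "./data/LFDA/2"
X_train = np.load(osp.join(data_save_folder, "feature_train.npy"))
y_train = np.load(osp.join(data_save_folder, "label_train.npy"))
X_test = np.load(osp.join(data_save_folder, "feature_test.npy"))
y_test = np.load(osp.join(data_save_folder, "label_test.npy"))

clf = NearestCentroid().fit(X_train, y_train)
print("test accuracy = %.3f" % clf.score(X_test, y_test))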