Code example #1
import numpy as np
import metric_learn as ml  # assumed imports for the `np` and `ml` aliases below


def iris_LFDA(x_data, y_data):
    # 5-fold cross-validation of LFDA followed by nearest-neighbour
    # classification on the 150-sample iris data: four folds of 30
    # samples plus one fold with the remainder.
    x_shuffle, y_shuffle = shuffle_data(x_data, y_data)
    x_fold = []
    y_fold = []

    for i in range(4):
        x_fold.append(x_shuffle[30 * i:30 * (i + 1), :])
        y_fold.append(y_shuffle[30 * i:30 * (i + 1)])
    x_fold.append(x_shuffle[120:, :])
    y_fold.append(y_shuffle[120:])

    accuracy = []

    for i in range(5):
        # Train on the other four folds, test on fold i. (A range object
        # has no .remove() in Python 3, so build the index list directly.)
        train_folds = [j for j in range(5) if j != i]
        x_train = np.concatenate([x_fold[j] for j in train_folds], axis=0)
        y_train = np.concatenate([y_fold[j] for j in train_folds], axis=0)
        x_test = x_fold[i]
        y_test = y_fold[i]

        lfda = ml.LFDA(k=2, dim=3)  # old metric-learn API: `dim` = embedding size
        lfda.fit(x_train, y_train)

        result_y = lfda.transform(x_test)
        class_result = []
        for k in range(len(result_y)):
            class_result.append(k_NN_classifier(result_y[k], result_y, y_test))

        accuracy.append(class_accuracy(class_result, y_test))

    return accuracy
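
# The function above relies on three helpers that are not shown
# (shuffle_data, k_NN_classifier, class_accuracy). A minimal NumPy-only
# sketch of plausible implementations follows; the signatures match the
# calls above, but the exact behaviour (e.g. the neighbour voting) is an
# assumption, not the original code.

def shuffle_data(x_data, y_data):
    # Shuffle samples and labels together with one random permutation.
    idx = np.random.permutation(len(y_data))
    return x_data[idx], y_data[idx]


def k_NN_classifier(point, points, labels, k=1):
    # Majority label among the k nearest neighbours of `point`. The query
    # point is skipped when it occurs in `points` (as in the call above,
    # where each test point is classified against the test set itself).
    dists = np.linalg.norm(points - point, axis=1)
    order = [i for i in np.argsort(dists) if dists[i] > 0][:k]
    values, counts = np.unique(labels[order], return_counts=True)
    return values[np.argmax(counts)]


def class_accuracy(predicted, actual):
    # Fraction of correctly predicted labels.
    return np.mean(np.asarray(predicted) == np.asarray(actual))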
Code example #2
def test_lfda(self):
    def_kwargs = {'embedding_type': 'weighted', 'k': None,
                  'n_components': None, 'preprocessor': None}
    nndef_kwargs = {'k': 2}
    merged_kwargs = sk_repr_kwargs(def_kwargs, nndef_kwargs)
    self.assertEqual(remove_spaces(str(metric_learn.LFDA(k=2))),
                     remove_spaces(f"LFDA({merged_kwargs})"))
Code example #3
def __init__(self, embedding_method):
    # `mkl` is assumed to be the metric-learn package (import metric_learn as mkl).
    self.embedding_method = embedding_method
    if embedding_method == "MLKR":
        learn_metric = mkl.MLKR(n_components=2, init="auto")
    elif embedding_method == "LFDA":
        # embedding_type: "weighted" or "orthonormalized"
        learn_metric = mkl.LFDA(n_components=2, k=10,
                                embedding_type="orthonormalized")
    elif embedding_method == "LMNN":
        # k: number of target neighbors
        learn_metric = mkl.LMNN(k=10, learn_rate=0.1, n_components=3)
    else:
        # Guard against an UnboundLocalError on unrecognized names.
        raise ValueError(f"unknown embedding_method: {embedding_method}")
    self.learn_metric = learn_metric
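
# For context, a hypothetical host class and usage for the __init__ above
# (the class name `Embedder` and the surrounding code are illustrative
# assumptions, not from the source):

import metric_learn as mkl
from sklearn.datasets import load_iris


class Embedder:
    # Hypothetical wrapper whose __init__ condenses the snippet above.
    def __init__(self, embedding_method):
        if embedding_method == "LFDA":
            self.learn_metric = mkl.LFDA(n_components=2, k=10,
                                         embedding_type="orthonormalized")
        elif embedding_method == "MLKR":
            self.learn_metric = mkl.MLKR(n_components=2, init="auto")
        else:
            raise ValueError(f"unknown embedding_method: {embedding_method}")


X, y = load_iris(return_X_y=True)
emb = Embedder("LFDA")
X_2d = emb.learn_metric.fit_transform(X, y)  # supervised 2-D embedding of iris
print(X_2d.shape)  # (150, 2)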
Code example #4
  def test_string_repr(self):
    # we don't test LMNN here because it could be python_LMNN

    self.assertEqual(str(metric_learn.Covariance()), "Covariance()")

    self.assertEqual(str(metric_learn.NCA()),
                     "NCA(learning_rate=0.01, max_iter=100, num_dims=None)")

    self.assertEqual(str(metric_learn.LFDA()),
                     "LFDA(dim=None, k=7, metric='weighted')")

    self.assertEqual(str(metric_learn.ITML()), """
ITML(convergence_threshold=0.001, gamma=1.0, max_iters=1000, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.ITML_Supervised()), """
ITML_Supervised(A0=None, bounds=None, convergence_threshold=0.001, gamma=1.0,
                max_iters=1000, num_constraints=None, num_labeled=inf,
                verbose=False)
""".strip('\n'))

    self.assertEqual(str(metric_learn.LSML()),
                     "LSML(max_iter=1000, tol=0.001, verbose=False)")
    self.assertEqual(str(metric_learn.LSML_Supervised()), """
LSML_Supervised(max_iter=1000, num_constraints=None, num_labeled=inf,
                prior=None, tol=0.001, verbose=False, weights=None)
""".strip('\n'))

    self.assertEqual(str(metric_learn.SDML()), """
SDML(balance_param=0.5, sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.SDML_Supervised()), """
SDML_Supervised(balance_param=0.5, num_constraints=None, num_labeled=inf,
                sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))

    self.assertEqual(str(metric_learn.RCA()), "RCA(dim=None)")
    self.assertEqual(str(metric_learn.RCA_Supervised()),
                     "RCA_Supervised(chunk_size=2, dim=None, num_chunks=100)")

    self.assertEqual(str(metric_learn.MLKR()), """
MLKR(A0=None, alpha=0.0001, epsilon=0.01, max_iter=1000, num_dims=None)
""".strip('\n'))
Code example #5
######################################################################
# Local Fisher Discriminant Analysis
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# LFDA is a linear supervised dimensionality reduction method. It is
# particularly useful when dealing with multimodality, where one or more
# classes consist of separate clusters in input space. The core
# optimization problem of LFDA is solved as a generalized eigenvalue
# problem. Like LMNN and NCA, this algorithm does not try to collapse
# points from the same class into a single cluster.
#
# - See more in the :ref:`User Guide <lfda>`
# - See more in the documentation of the class :py:class:`LFDA
#   <metric_learn.LFDA>`

lfda = metric_learn.LFDA(k=2, n_components=2)
X_lfda = lfda.fit_transform(X, y)

plot_tsne(X_lfda, y)

######################################################################
# Relative Components Analysis
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# RCA is another one of the older algorithms. It learns a full rank
# Mahalanobis distance metric based on a weighted sum of in-class
# covariance matrices. It applies a global linear transformation to assign
# large weights to relevant dimensions and low weights to irrelevant
# dimensions. Those relevant dimensions are estimated using "chunklets",
# subsets of points that are known to belong to the same class.
#
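
# A usage sketch to mirror the LFDA block above (assuming the same X, y,
# and plot_tsne defined earlier in this example; the num_chunks and
# chunk_size values are illustrative, not prescribed):

rca = metric_learn.RCA_Supervised(num_chunks=30, chunk_size=2)
X_rca = rca.fit_transform(X, y)

plot_tsne(X_rca, y)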
Code example #6
def test_lfda(self):
    self.assertEqual(str(metric_learn.LFDA()),
                     "LFDA(embedding_type='weighted', k=None, num_dims=None, "
                     "preprocessor=None)")
Code example #7
def test_lfda(self):
    self.assertEqual(
        remove_spaces(str(metric_learn.LFDA())),
        remove_spaces("LFDA(embedding_type='weighted', k=None, "
                      "n_components=None, num_dims='deprecated', "
                      "preprocessor=None)"))
Code example #8
######################################################################
# Local Fisher Discriminant Analysis
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# 
# LFDA is a linear supervised dimensionality reduction method. It is
# particularly useful when dealing with multimodality, where one or more
# classes consist of separate clusters in input space. The core
# optimization problem of LFDA is solved as a generalized eigenvalue
# problem. Like LMNN and NCA, this algorithm does not try to collapse
# points from the same class into a single cluster.
#
# - See more in the :ref:`User Guide <lfda>`
# - See more in the documentation of the class :py:class:`LFDA
#   <metric_learn.LFDA>`

lfda = metric_learn.LFDA(k=2, num_dims=2)
X_lfda = lfda.fit_transform(X, y)

plot_tsne(X_lfda, y)


######################################################################
# Relative Components Analysis
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# 
# RCA is another one of the older algorithms. It learns a full rank
# Mahalanobis distance metric based on a weighted sum of in-class
# covariance matrices. It applies a global linear transformation to assign
# large weights to relevant dimensions and low weights to irrelevant
# dimensions. Those relevant dimensions are estimated using "chunklets",
# subsets of points that are known to belong to the same class.
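
# A usage sketch to mirror the LFDA block above (assuming the same X, y,
# and plot_tsne defined earlier; this snippet predates the n_components
# API, and the num_chunks and chunk_size values are illustrative):

rca = metric_learn.RCA_Supervised(num_chunks=30, chunk_size=2)
X_rca = rca.fit_transform(X, y)

plot_tsne(X_rca, y)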
Code example #9
def test_lfda(self):
    self.assertEqual(str(metric_learn.LFDA()),
                     "LFDA(k=None, metric='weighted', num_dims=None)")
Code example #10
File: iris.py  Project: zxhou/metric-learn
import numpy as np
from sklearn.datasets import load_iris

import metric_learn

CLASSES = {
    'Covariance':
    metric_learn.Covariance(),
    'ITML_Supervised':
    metric_learn.ITML_Supervised(num_constraints=200),
    'LFDA':
    metric_learn.LFDA(k=2, dim=2),
    'LMNN':
    metric_learn.LMNN(k=5, learn_rate=1e-6, verbose=False),
    'LSML_Supervised':
    metric_learn.LSML_Supervised(num_constraints=200),
    'MLKR':
    metric_learn.MLKR(),
    'NCA':
    metric_learn.NCA(max_iter=700, n_components=2),
    'RCA_Supervised':
    metric_learn.RCA_Supervised(dim=2, num_chunks=30, chunk_size=2),
    'SDML_Supervised':
    metric_learn.SDML_Supervised(num_constraints=1500)
}


class IrisDataset(object):
    params = [sorted(CLASSES)]
    param_names = ['alg']
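
    # The class is truncated in the source. In an asv (airspeed velocity)
    # benchmark, `params`/`param_names` parameterize methods like `setup`
    # and `time_fit`; a plausible completion sketch follows (the bodies
    # below are assumptions, not the original file):

    def setup(self, alg):
        # Load the iris data once per parameterized benchmark.
        iris_data = load_iris()
        self.iris_points = iris_data['data']
        self.iris_labels = iris_data['target']

    def time_fit(self, alg):
        # asv times this method for each algorithm name in `params`.
        np.random.seed(5555)
        CLASSES[alg].fit(self.iris_points, self.iris_labels)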
Code example #11
def lfda(self, train_X, train_y, test_X, k, dims):
    # Fit LFDA on the training split, then project both splits into the
    # learned `dims`-dimensional space (num_dims is the pre-0.5 API name).
    learner = ml.LFDA(num_dims=dims, k=k)
    train_X = learner.fit_transform(train_X, train_y)
    test_X = learner.transform(test_X)
    return train_X, test_X
Code example #12
def test_lfda(self):
    self.assertEqual(remove_spaces(str(metric_learn.LFDA(k=2))),
                     remove_spaces("LFDA(k=2)"))