Example #1
  def test_lsml(self):
    def_kwargs = {
        'max_iter': 1000,
        'preprocessor': None,
        'prior': 'identity',
        'random_state': None,
        'tol': 0.001,
        'verbose': False
    }
    nndef_kwargs = {'tol': 0.1}
    merged_kwargs = sk_repr_kwargs(def_kwargs, nndef_kwargs)
    self.assertEqual(remove_spaces(str(metric_learn.LSML(tol=0.1))),
                     remove_spaces(f"LSML({merged_kwargs})"))
    def_kwargs = {
        'max_iter': 1000,
        'num_constraints': None,
        'preprocessor': None,
        'prior': 'identity',
        'random_state': None,
        'tol': 0.001,
        'verbose': False,
        'weights': None
    }
    nndef_kwargs = {'verbose': True}
    merged_kwargs = sk_repr_kwargs(def_kwargs, nndef_kwargs)
    self.assertEqual(
        remove_spaces(str(metric_learn.LSML_Supervised(verbose=True))),
        remove_spaces(f"LSML_Supervised({merged_kwargs})"))
Example #2
    def test_lsml(self):
        self.assertEqual(
            str(metric_learn.LSML()),
            "LSML(max_iter=1000, prior=None, tol=0.001, verbose=False)")
        self.assertEqual(
            str(metric_learn.LSML_Supervised()), """
LSML_Supervised(max_iter=1000, num_constraints=None, num_labeled=inf,
        prior=None, tol=0.001, verbose=False, weights=None)
""".strip('\n'))
Example #3
    def test_lsml(self):
        self.assertEqual(
            remove_spaces(str(metric_learn.LSML())),
            remove_spaces(
                "LSML(max_iter=1000, preprocessor=None, prior=None, tol=0.001, "
                "verbose=False)"))
        self.assertEqual(
            remove_spaces(str(metric_learn.LSML_Supervised())),
            remove_spaces("""
LSML_Supervised(max_iter=1000, num_constraints=None, num_labeled='deprecated',
        preprocessor=None, prior=None, tol=0.001, verbose=False,
        weights=None)
"""))
Example #4
  def test_string_repr(self):
    # we don't test LMNN here because it could be python_LMNN

    self.assertEqual(str(metric_learn.Covariance()), "Covariance()")

    self.assertEqual(str(metric_learn.NCA()),
                     "NCA(learning_rate=0.01, max_iter=100, num_dims=None)")

    self.assertEqual(str(metric_learn.LFDA()),
                     "LFDA(dim=None, k=7, metric='weighted')")

    self.assertEqual(str(metric_learn.ITML()), """
ITML(convergence_threshold=0.001, gamma=1.0, max_iters=1000, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.ITML_Supervised()), """
ITML_Supervised(A0=None, bounds=None, convergence_threshold=0.001, gamma=1.0,
                max_iters=1000, num_constraints=None, num_labeled=inf,
                verbose=False)
""".strip('\n'))

    self.assertEqual(str(metric_learn.LSML()),
                     "LSML(max_iter=1000, tol=0.001, verbose=False)")
    self.assertEqual(str(metric_learn.LSML_Supervised()), """
LSML_Supervised(max_iter=1000, num_constraints=None, num_labeled=inf,
                prior=None, tol=0.001, verbose=False, weights=None)
""".strip('\n'))

    self.assertEqual(str(metric_learn.SDML()), """
SDML(balance_param=0.5, sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.SDML_Supervised()), """
SDML_Supervised(balance_param=0.5, num_constraints=None, num_labeled=inf,
                sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))

    self.assertEqual(str(metric_learn.RCA()), "RCA(dim=None)")
    self.assertEqual(str(metric_learn.RCA_Supervised()),
                     "RCA_Supervised(chunk_size=2, dim=None, num_chunks=100)")

    self.assertEqual(str(metric_learn.MLKR()), """
MLKR(A0=None, alpha=0.0001, epsilon=0.01, max_iter=1000, num_dims=None)
""".strip('\n'))
Example #5
######################################################################
# Least Squares Metric Learning
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# LSML is a simple, yet effective, algorithm that learns a Mahalanobis
# metric from a given set of relative comparisons. This is done by
# formulating and minimizing a convex loss function that corresponds to
# the sum of squared hinge losses over violated constraints.
#
# - See more in the :ref:`User Guide <lsml>`
# - See more in the documentation of the class :py:class:`LSML
#   <metric_learn.LSML>`

lsml = metric_learn.LSML_Supervised(tol=0.0001,
                                    max_iter=10000,
                                    prior='covariance')
X_lsml = lsml.fit_transform(X, y)

plot_tsne(X_lsml, y)
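
# A minimal sketch, not part of the original script: the weakly-supervised
# LSML class consumes relative constraints directly, as quadruplets of
# shape (n_constraints, 4, n_features) in which the first two points of
# each row should end up closer together than the last two. Assumes numpy
# is imported as np and reuses X from earlier in the script.
quadruplets = np.array([[X[0], X[1], X[2], X[3]],
                        [X[4], X[5], X[6], X[7]]])
lsml_weak = metric_learn.LSML()
lsml_weak.fit(quadruplets)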

######################################################################
# Neighborhood Components Analysis
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# NCA is an extremely popular metric learning algorithm.
#
# Neighborhood components analysis aims at "learning" a distance metric
# by finding a linear transformation of input data such that the average
# leave-one-out (LOO) classification performance of a soft-nearest
# neighbors rule is maximized in the transformed space.
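
# A minimal sketch of how the section presumably continues (the snippet is
# cut off here), mirroring the LSML section above; the exact parameter
# values are assumptions.
nca = metric_learn.NCA(max_iter=1000, n_components=2)
X_nca = nca.fit_transform(X, y)

plot_tsne(X_nca, y)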
Example #6
import numpy as np
from sklearn.datasets import load_iris

import metric_learn

CLASSES = {
    'Covariance':
    metric_learn.Covariance(),
    'ITML_Supervised':
    metric_learn.ITML_Supervised(num_constraints=200),
    'LFDA':
    metric_learn.LFDA(k=2, dim=2),
    'LMNN':
    metric_learn.LMNN(k=5, learn_rate=1e-6, verbose=False),
    'LSML_Supervised':
    metric_learn.LSML_Supervised(num_constraints=200),
    'MLKR':
    metric_learn.MLKR(),
    'NCA':
    metric_learn.NCA(max_iter=700, n_components=2),
    'RCA_Supervised':
    metric_learn.RCA_Supervised(dim=2, num_chunks=30, chunk_size=2),
    'SDML_Supervised':
    metric_learn.SDML_Supervised(num_constraints=1500)
}


class IrisDataset(object):
    params = [sorted(CLASSES)]
    param_names = ['alg']
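
The class body is cut off here. Benchmarks laid out like this usually follow asv (airspeed velocity) conventions, so the missing methods plausibly resemble the sketch below; the setup/time_fit names follow those conventions, and the exact bodies are assumptions, not recovered code:

    def setup(self, alg):
        # Prepare the iris data once; asv runs setup() before each timed
        # call, with 'alg' taking each value from params.
        iris_data = load_iris()
        self.iris_points = iris_data['data']
        self.iris_labels = iris_data['target']

    def time_fit(self, alg):
        # Measure how long the selected learner takes to fit iris.
        np.random.seed(5555)
        CLASSES[alg].fit(self.iris_points, self.iris_labels)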

Example #7
  def test_lsml(self):
    self.assertEqual(remove_spaces(str(metric_learn.LSML(tol=0.1))),
                     remove_spaces("LSML(tol=0.1)"))
    self.assertEqual(
        remove_spaces(str(metric_learn.LSML_Supervised(verbose=True))),
        remove_spaces("LSML_Supervised(verbose=True)"))