def test_rca(self): self.assertEqual(str(metric_learn.RCA()), "RCA(num_dims=None, pca_comps=None, preprocessor=None)") self.assertEqual(str(metric_learn.RCA_Supervised()), "RCA_Supervised(chunk_size=2, num_chunks=100, " "num_dims=None, pca_comps=None,\n " "preprocessor=None)")
def test_rca(self):
    self.assertEqual(
        remove_spaces(str(metric_learn.RCA())),
        remove_spaces("RCA(n_components=None, "
                      "num_dims='deprecated', "
                      "pca_comps='deprecated', "
                      "preprocessor=None)"))
    self.assertEqual(
        remove_spaces(str(metric_learn.RCA_Supervised())),
        remove_spaces("RCA_Supervised(chunk_size=2, "
                      "n_components=None, num_chunks=100, "
                      "num_dims='deprecated', pca_comps='deprecated', "
                      "preprocessor=None, random_state=None)"))

def test_rca(self):
    def_kwargs = {'n_components': None, 'preprocessor': None}
    nndef_kwargs = {'n_components': 3}
    merged_kwargs = sk_repr_kwargs(def_kwargs, nndef_kwargs)
    self.assertEqual(remove_spaces(str(metric_learn.RCA(n_components=3))),
                     remove_spaces(f"RCA({merged_kwargs})"))

    def_kwargs = {'chunk_size': 2, 'n_components': None, 'num_chunks': 100,
                  'preprocessor': None, 'random_state': None}
    nndef_kwargs = {'num_chunks': 5}
    merged_kwargs = sk_repr_kwargs(def_kwargs, nndef_kwargs)
    self.assertEqual(
        remove_spaces(str(metric_learn.RCA_Supervised(num_chunks=5))),
        remove_spaces(f"RCA_Supervised({merged_kwargs})"))

def test_string_repr(self):
    # we don't test LMNN here because it could be python_LMNN
    self.assertEqual(str(metric_learn.Covariance()), "Covariance()")

    self.assertEqual(str(metric_learn.NCA()),
                     "NCA(learning_rate=0.01, max_iter=100, num_dims=None)")

    self.assertEqual(str(metric_learn.LFDA()),
                     "LFDA(dim=None, k=7, metric='weighted')")

    self.assertEqual(str(metric_learn.ITML()), """
ITML(convergence_threshold=0.001, gamma=1.0, max_iters=1000, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.ITML_Supervised()), """
ITML_Supervised(A0=None, bounds=None, convergence_threshold=0.001, gamma=1.0,
                max_iters=1000, num_constraints=None, num_labeled=inf,
                verbose=False)
""".strip('\n'))

    self.assertEqual(str(metric_learn.LSML()),
                     "LSML(max_iter=1000, tol=0.001, verbose=False)")
    self.assertEqual(str(metric_learn.LSML_Supervised()), """
LSML_Supervised(max_iter=1000, num_constraints=None, num_labeled=inf,
                prior=None, tol=0.001, verbose=False, weights=None)
""".strip('\n'))

    self.assertEqual(str(metric_learn.SDML()), """
SDML(balance_param=0.5, sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.SDML_Supervised()), """
SDML_Supervised(balance_param=0.5, num_constraints=None, num_labeled=inf,
                sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))

    self.assertEqual(str(metric_learn.RCA()), "RCA(dim=None)")
    self.assertEqual(str(metric_learn.RCA_Supervised()),
                     "RCA_Supervised(chunk_size=2, dim=None, num_chunks=100)")

    self.assertEqual(str(metric_learn.MLKR()), """
MLKR(A0=None, alpha=0.0001, epsilon=0.01, max_iter=1000, num_dims=None)
""".strip('\n'))

######################################################################
# Relevant Components Analysis
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# RCA is another one of the older algorithms. It learns a full rank
# Mahalanobis distance metric based on a weighted sum of in-class
# covariance matrices. It applies a global linear transformation to assign
# large weights to relevant dimensions and low weights to irrelevant
# dimensions. Those relevant dimensions are estimated using "chunklets",
# subsets of points that are known to belong to the same class.
#
# - See more in the :ref:`User Guide <rca>`
# - See more in the documentation of the class :py:class:`RCA
#   <metric_learn.RCA>`

rca = metric_learn.RCA_Supervised(n_chunks=30, chunk_size=2)
X_rca = rca.fit_transform(X, y)

plot_tsne(X_rca, y)

######################################################################
# Regression example: Metric Learning for Kernel Regression
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# The previous algorithms took as input a dataset with class labels. Metric
# learning can also be useful for regression, when the labels are real
# numbers. An algorithm very similar to NCA but for regression is Metric
# Learning for Kernel Regression (MLKR). It will optimize for the average
# leave-one-out *regression* performance from a soft-nearest neighbors
# regression.
#
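######################################################################
# A minimal sketch of how MLKR could be fit here, assuming a real-valued
# target: ``y_reg`` below is a hypothetical stand-in built from the first
# feature of ``X``, purely so the snippet runs; any continuous target
# would do.

y_reg = X[:, 0]  # hypothetical regression target, for illustration only

mlkr = metric_learn.MLKR()
X_mlkr = mlkr.fit_transform(X, y_reg)
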
from sklearn.datasets import load_iris

import metric_learn

CLASSES = {
    'Covariance': metric_learn.Covariance(),
    'ITML_Supervised': metric_learn.ITML_Supervised(num_constraints=200),
    'LFDA': metric_learn.LFDA(k=2, dim=2),
    'LMNN': metric_learn.LMNN(k=5, learn_rate=1e-6, verbose=False),
    'LSML_Supervised': metric_learn.LSML_Supervised(num_constraints=200),
    'MLKR': metric_learn.MLKR(),
    'NCA': metric_learn.NCA(max_iter=700, n_components=2),
    'RCA_Supervised': metric_learn.RCA_Supervised(dim=2, num_chunks=30,
                                                  chunk_size=2),
    'SDML_Supervised': metric_learn.SDML_Supervised(num_constraints=1500)
}


class IrisDataset(object):
    params = [sorted(CLASSES)]
    param_names = ['alg']

    def setup(self, alg):
        iris_data = load_iris()
        self.iris_points = iris_data['data']
        self.iris_labels = iris_data['target']

    def time_fit(self, alg):
        # The original snippet is truncated here; fitting the selected
        # estimator is the natural benchmark body (an assumption).
        CLASSES[alg].fit(self.iris_points, self.iris_labels)

def test_rca(self):
    self.assertEqual(remove_spaces(str(metric_learn.RCA(n_components=3))),
                     remove_spaces("RCA(n_components=3)"))
    self.assertEqual(
        remove_spaces(str(metric_learn.RCA_Supervised(num_chunks=5))),
        remove_spaces("RCA_Supervised(num_chunks=5)"))