def test_itml(self):
    """Repr of ITML / ITML_Supervised shows defaults merged with overrides."""
    # Defaults as declared by the estimator's __init__ (insertion order kept,
    # since sk_repr_kwargs renders them in this order).
    defaults = {
        'convergence_threshold': 0.001,
        'gamma': 1.0,
        'max_iter': 1000,
        'preprocessor': None,
        'prior': 'identity',
        'random_state': None,
        'verbose': False,
    }
    overrides = {'gamma': 0.5}
    expected_kwargs = sk_repr_kwargs(defaults, overrides)
    self.assertEqual(
        remove_spaces(str(metric_learn.ITML(gamma=0.5))),
        remove_spaces(f"ITML({expected_kwargs})"))

    # Same check for the supervised wrapper, which adds num_constraints.
    defaults = {
        'convergence_threshold': 0.001,
        'gamma': 1.0,
        'max_iter': 1000,
        'num_constraints': None,
        'preprocessor': None,
        'prior': 'identity',
        'random_state': None,
        'verbose': False,
    }
    overrides = {'num_constraints': 7}
    expected_kwargs = sk_repr_kwargs(defaults, overrides)
    self.assertEqual(
        remove_spaces(str(metric_learn.ITML_Supervised(num_constraints=7))),
        remove_spaces(f"ITML_Supervised({expected_kwargs})"))
def test_itml(self):
    """Non-default constructor args must appear in the estimator repr."""
    itml_repr = str(metric_learn.ITML(gamma=0.5))
    self.assertEqual(remove_spaces(itml_repr),
                     remove_spaces("ITML(gamma=0.5)"))

    supervised_repr = str(metric_learn.ITML_Supervised(num_constraints=7))
    self.assertEqual(remove_spaces(supervised_repr),
                     remove_spaces("ITML_Supervised(num_constraints=7)"))
def test_itml(self):
    # Exact-string check of the default reprs of ITML and ITML_Supervised.
    # NOTE(review): the line breaks inside the expected strings were
    # reconstructed from a whitespace-mangled source; this assertion compares
    # exact text, so confirm the wrapping against the estimator's real repr.
    self.assertEqual(str(metric_learn.ITML()), """
ITML(A0=None, convergence_threshold=0.001, gamma=1.0, max_iter=1000,
     preprocessor=None, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.ITML_Supervised()), """
ITML_Supervised(A0=None, bounds=None, convergence_threshold=0.001, gamma=1.0,
                max_iter=1000, num_constraints=None, num_labeled='deprecated',
                preprocessor=None, verbose=False)
""".strip('\n'))
def test_itml(self):
    """Default reprs of ITML / ITML_Supervised, ignoring all whitespace.

    Both sides go through ``remove_spaces`` so the wrapping of the expected
    text is irrelevant; only the kwarg names/values are compared.
    """
    expected_itml = """
        ITML(A0='deprecated', convergence_threshold=0.001, gamma=1.0,
             max_iter=1000, preprocessor=None, prior='identity',
             random_state=None, verbose=False)
    """
    self.assertEqual(remove_spaces(str(metric_learn.ITML())),
                     remove_spaces(expected_itml))

    expected_supervised = """
        ITML_Supervised(A0='deprecated', bounds='deprecated',
                        convergence_threshold=0.001, gamma=1.0, max_iter=1000,
                        num_constraints=None, num_labeled='deprecated',
                        preprocessor=None, prior='identity', random_state=None,
                        verbose=False)
    """
    self.assertEqual(remove_spaces(str(metric_learn.ITML_Supervised())),
                     remove_spaces(expected_supervised))
def test_string_repr(self):
    # Exact-string checks of the default reprs of most metric learners.
    # we don't test LMNN here because it could be python_LMNN
    # NOTE(review): the line breaks in the multi-line expected strings were
    # reconstructed from a whitespace-mangled source; these assertions compare
    # exact text, so confirm the wrapping against each estimator's real repr.
    self.assertEqual(str(metric_learn.Covariance()), "Covariance()")
    self.assertEqual(str(metric_learn.NCA()),
                     "NCA(learning_rate=0.01, max_iter=100, num_dims=None)")
    self.assertEqual(str(metric_learn.LFDA()),
                     "LFDA(dim=None, k=7, metric='weighted')")
    self.assertEqual(str(metric_learn.ITML()), """
ITML(convergence_threshold=0.001, gamma=1.0, max_iters=1000, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.ITML_Supervised()), """
ITML_Supervised(A0=None, bounds=None, convergence_threshold=0.001, gamma=1.0,
                max_iters=1000, num_constraints=None, num_labeled=inf,
                verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.LSML()),
                     "LSML(max_iter=1000, tol=0.001, verbose=False)")
    self.assertEqual(str(metric_learn.LSML_Supervised()), """
LSML_Supervised(max_iter=1000, num_constraints=None, num_labeled=inf,
                prior=None, tol=0.001, verbose=False, weights=None)
""".strip('\n'))
    self.assertEqual(str(metric_learn.SDML()), """
SDML(balance_param=0.5, sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.SDML_Supervised()), """
SDML_Supervised(balance_param=0.5, num_constraints=None, num_labeled=inf,
                sparsity_param=0.01, use_cov=True, verbose=False)
""".strip('\n'))
    self.assertEqual(str(metric_learn.RCA()), "RCA(dim=None)")
    self.assertEqual(str(metric_learn.RCA_Supervised()),
                     "RCA_Supervised(chunk_size=2, dim=None, num_chunks=100)")
    self.assertEqual(str(metric_learn.MLKR()), """
MLKR(A0=None, alpha=0.0001, epsilon=0.01, max_iter=1000, num_dims=None)
""".strip('\n'))
# print(pairs) print(pairs_labels) ###################################################################### # Using our constraints, let's now train ITML again. Note that we are no # longer calling the supervised class :py:class:`ITML_Supervised # <metric_learn.ITML_Supervised>` but the more generic # (weakly-supervised) :py:class:`ITML <metric_learn.ITML>`, which # takes the dataset `X` through the `preprocessor` argument (see # :ref:`this section <preprocessor_section>` of the documentation to learn # about more advanced uses of `preprocessor`) and the pair information `pairs` # and `pairs_labels` in the fit method. itml = metric_learn.ITML(preprocessor=X) itml.fit(pairs, pairs_labels) X_itml = itml.transform(X) plot_tsne(X_itml, y) ###################################################################### # And that's the result of ITML after being trained on our manually # constructed constraints! A bit different from our old result, but not too # different. # # RCA and LSML also have their own specific ways of taking in inputs - # it's worth one's while to poke around in the constraints.py file to see # how exactly this is going on. #
def itml(self, train_X, train_y, test_X, gamma):
    """Fit ITML on the training split and project both splits with it.

    Returns the (transformed_train, transformed_test) pair produced by the
    learned metric.
    """
    model = ml.ITML(gamma=gamma)
    transformed_train = model.fit_transform(train_X, train_y)
    transformed_test = model.transform(test_X)
    return transformed_train, transformed_test