Пример #1
0
def run_with_lmnn(label, n_neighbors, n_train_lmnn, train_lmnn_vectors,
                  train_lmnn_labels, train_vectors, train_labels, test_vectors,
                  test_labels):
    """Fit an LMNN metric on the LMNN training subset, persist the learned
    matrix to CSV, then run kNN classification twice: first with only the
    LMNN subset as training data, then with the full training set.

    Returns an 8-tuple: (filename, per-day test labels, per-day estimated
    labels, per-day accuracy) for the part-data run, followed by the same
    four items for the all-data run.
    """
    start = time.time()
    print('Fitting ' + label + ' lmnn (n = %d) + kNN (k = %d)... [' %
          (n_train_lmnn, n_neighbors) + time.strftime('%H:%M:%S') + ']')
    learner = metric_learn.LMNN(k=n_neighbors, max_iter=n_max_iter)
    learner.fit(train_lmnn_vectors, train_lmnn_labels)

    # The learned matrix is shared across days (no per-day subdirectory).
    matrix_dir = path_strategy + 'metrix/'
    if not os.path.exists(matrix_dir):
        os.mkdir(matrix_dir)
    matrix_csv = (matrix_dir +
                  'kNN_with_Lmnn(k=%d,n=%d)_' % (n_neighbors, n_train_lmnn) +
                  label + '_Matric.csv')
    pandas.DataFrame(learner.metric()).to_csv(matrix_csv,
                                              header=False,
                                              index=False)
    print('\tdone in %.fs [' % (time.time() - start) +
          time.strftime('%H:%M:%S') + '].')

    # Run 1: classify using only the embedded LMNN training subset.
    filename_partdata = 'kNN_with_Lmnn_Partdata(k=%d,n=%d)_' % (
        n_neighbors, n_train_lmnn) + label
    embedded_part_train = learner.transform(train_lmnn_vectors)
    embedded_test = learner.transform(test_vectors)
    (part_day_labels, part_day_estimates, part_day_accuracy) = \
        classify_and_test(filename_partdata, n_neighbors, embedded_part_train,
                          train_lmnn_labels, embedded_test, test_labels)

    # Run 2: classify using the full embedded training set.
    filename_alldata = 'kNN_with_Lmnn_Alldata(k=%d,n=%d)_' % (
        n_neighbors, n_train_lmnn) + label
    embedded_all_train = learner.transform(train_vectors)
    (all_day_labels, all_day_estimates, all_day_accuracy) = \
        classify_and_test(filename_alldata, n_neighbors, embedded_all_train,
                          train_labels, embedded_test, test_labels)
    return (filename_partdata, part_day_labels, part_day_estimates,
            part_day_accuracy, filename_alldata, all_day_labels,
            all_day_estimates, all_day_accuracy)
Пример #2
0
 def test_lmnn(self):
     """The default LMNN() repr lists every constructor default verbatim."""
     expected = ("LMNN(convergence_tol=0.001, init=None, k=3, "
                 "learn_rate=1e-07, "
                 "max_iter=1000, min_iter=50, n_components=None, "
                 "num_dims='deprecated', preprocessor=None, random_state=None, "
                 "regularization=0.5, use_pca='deprecated', verbose=False)")
     actual = str(metric_learn.LMNN())
     self.assertEqual(remove_spaces(actual), remove_spaces(expected))
Пример #3
0
 def test_lmnn(self):
   """Non-default kwargs appear merged into the sklearn-style repr."""
   defaults = {'convergence_tol': 0.001, 'init': 'auto', 'k': 3,
               'learn_rate': 1e-07, 'max_iter': 1000, 'min_iter': 50,
               'n_components': None, 'preprocessor': None,
               'random_state': None, 'regularization': 0.5,
               'verbose': False}
   overrides = {'convergence_tol': 0.01, 'k': 6}
   merged_kwargs = sk_repr_kwargs(defaults, overrides)
   model = metric_learn.LMNN(convergence_tol=0.01, k=6)
   self.assertEqual(remove_spaces(str(model)),
                    remove_spaces(f"LMNN({merged_kwargs})"))
Пример #4
0
	def __init__(self, embedding_method):
		"""Select and configure a metric-learning embedder by name.

		Parameters
		----------
		embedding_method : str
			One of "MLKR", "LFDA" (2-D embeddings) or "LMNN" (3-D).

		Raises
		------
		ValueError
			If *embedding_method* is not a supported name.  (Previously an
			unrecognised name crashed later with NameError because
			``learn_metric`` was never bound.)
		"""
		self.embedding_method = embedding_method
		if embedding_method == "MLKR":
			learn_metric = mkl.MLKR(n_components=2, init="auto")
		elif embedding_method == "LFDA":
			# embedding_type alternatives: weighted, orthonormalized
			learn_metric = mkl.LFDA(n_components=2, k=10,
				embedding_type="orthonormalized")
		elif embedding_method == "LMNN":
			# k: number of neighbors
			learn_metric = mkl.LMNN(k=10, learn_rate=0.1,
				n_components=3)
		else:
			raise ValueError(
				"unknown embedding_method: %r" % (embedding_method,))
		self.learn_metric = learn_metric
Пример #5
0
    def __matrix_m(self):
        """Learn an LMNN Mahalanobis matrix from the embedded training set,
        cache it to disk, and forward it to the kNN step.

        The last column of ``self.emb_train`` is taken as the label vector,
        the remaining columns as the feature matrix.
        """
        X = self.emb_train[:, :-1]
        Y = self.emb_train[:, -1]

        # setting up LMNN
        lmnn = metric_learn.LMNN(k=self.metric_k,
                                 learn_rate=1e-2,
                                 verbose=True)

        # fit the data
        lmnn.fit(X, Y)

        matrix_m = lmnn.metric()
        # Bug fix: `metric_k` was referenced as a bare name (NameError) —
        # it is an instance attribute.  Also dropped an unused
        # `lmnn.transform()` call whose result was never read.
        np.save('pickled_files/matrix_m_{}'.format(self.metric_k), matrix_m)

        self.__lmnn_knn(matrix_m)
Пример #6
0
# You'll notice that the points from the same labels are closer together,
# but they are not necessary in a same cluster. This is particular to LMNN
# and we'll see that some other algorithms implicitly enforce points from
# the same class to cluster together.
#
# - See more in the :ref:`User Guide <lmnn>`
# - See more in the documentation of the class :py:class:`LMNN
#   <metric_learn.LMNN>`

######################################################################
# Fit and then transform!
# -----------------------
#

# setting up LMNN
# NOTE(review): `n_neighbors` is the number of target neighbors (newer
# metric-learn API; older releases called it `k`); the small learn_rate
# keeps the gradient steps conservative.
lmnn = metric_learn.LMNN(n_neighbors=5, learn_rate=1e-6)

# fit the data!
lmnn.fit(X, y)

# transform our input space
X_lmnn = lmnn.transform(X)

######################################################################
# So what have we learned? The matrix :math:`M` we talked about before.

######################################################################
# Now let us plot the transformed space - this tells us what the original
# space looks like after being transformed with the new learned metric.
#
Пример #7
0
 def test_lmnn(self):
   """LMNN's repr matches the expected default-parameter signature.

   Uses assertRegex: assertRegexpMatches is a deprecated alias that was
   removed in Python 3.12.
   """
   self.assertRegex(
       str(metric_learn.LMNN()),
       r"(python_)?LMNN\(convergence_tol=0.001, k=3, learn_rate=1e-07, "
       r"max_iter=1000,\n      min_iter=50, preprocessor=None, "
       r"regularization=0.5, use_pca=True,\n      verbose=False\)")
# but they are not necessary in a same cluster. This is particular to LMNN
# and we'll see that some other algorithms implicitly enforce points from
# the same class to cluster together.
#
# - See more in the :ref:`User Guide <lmnn>`
# - See more in the documentation of the class :py:class:`LMNN
#   <metric_learn.LMNN>`


######################################################################
# Fit and then transform!
# -----------------------
# 

# setting up LMNN
# NOTE(review): `k` is the number of target neighbors in this (older)
# metric-learn API; newer releases renamed it `n_neighbors` — confirm the
# installed version before upgrading.
lmnn = metric_learn.LMNN(k=5, learn_rate=1e-6)

# fit the data!
lmnn.fit(X, y)

# transform our input space
X_lmnn = lmnn.transform(X)


######################################################################
# So what have we learned? The matrix :math:`M` we talked about before.


######################################################################
# Now let us plot the transformed space - this tells us what the original
# space looks like after being transformed with the new learned metric.
Пример #9
0
# ------------------------------------------------------------------------------
# Initialise

# start() is defined elsewhere in this file — presumably a timer/run handle.
tr = start()

# Trainers
# Linear projection down to M_PCA components.
pca = PCA(n_components=M_PCA)

# Random Fourier features approximating an RBF kernel.
kernel = RBFSampler(gamma=1.0, n_components=230, random_state=None)

# 2-D t-SNE embedding (few iterations — likely for quick visualisation).
tsne = TSNE(n_components=2, perplexity=30.0, early_exaggeration=12.0,
            learning_rate=200.0, n_iter=250, n_iter_without_progress=100,
            min_grad_norm=1e-07, metric='euclidean', init='random', verbose=2,
            random_state=None, method='barnes_hut', angle=0.5)

# NOTE(review): `k` and `use_pca` belong to the older metric-learn LMNN API
# (renamed/removed in later releases) — pin the library version.
lmnn = metric_learn.LMNN(k=3, min_iter=1, max_iter=10, learn_rate=1e-6,
                         convergence_tol=1e-3, use_pca=False, verbose=True)

mmc = metric_learn.mmc.MMC_Supervised(max_iter=10, convergence_threshold=1e-04,
                                      num_labeled=np.inf, num_constraints=100,
                                      verbose=True)

rca = metric_learn.rca.RCA(num_dims=None, pca_comps=None)
# Chunk labels for RCA: values 1..24, each repeated 307 times.
chuncky = np.repeat(list(range(1, 25)), 307)

mlkr = metric_learn.mlkr.MLKR(num_dims=200, A0=None, tol=1e-6, max_iter=10,
                              verbose=True)

itml = metric_learn.itml.ITML_Supervised(gamma=1.0, max_iter=10,
                                         convergence_threshold=0.001,
                                         num_labeled=np.inf,
                                         num_constraints=100, bounds=None,
Пример #10
0
import numpy as np
from sklearn.datasets import load_iris

import metric_learn

# Benchmark registry: one pre-configured metric-learn estimator per name.
# NOTE(review): kwargs such as `num_constraints`, `dim` and `num_chunks`
# follow an older metric-learn API (later renamed n_constraints /
# n_components / n_chunks) — pin the library version.
CLASSES = {
    'Covariance':
    metric_learn.Covariance(),
    'ITML_Supervised':
    metric_learn.ITML_Supervised(num_constraints=200),
    'LFDA':
    metric_learn.LFDA(k=2, dim=2),
    'LMNN':
    metric_learn.LMNN(k=5, learn_rate=1e-6, verbose=False),
    'LSML_Supervised':
    metric_learn.LSML_Supervised(num_constraints=200),
    'MLKR':
    metric_learn.MLKR(),
    'NCA':
    metric_learn.NCA(max_iter=700, n_components=2),
    'RCA_Supervised':
    metric_learn.RCA_Supervised(dim=2, num_chunks=30, chunk_size=2),
    'SDML_Supervised':
    metric_learn.SDML_Supervised(num_constraints=1500)
}


class IrisDataset(object):
    params = [sorted(CLASSES)]
    param_names = ['alg']
Пример #11
0
            # Tail of the knn_svd pass: result[1]/result[2] presumably hold
            # the still-unclassified documents/labels — carried forward.
            dic_test_data['knn_svd'] = result[1]
            dic_test_labels['knn_svd'] = result[2]
            output_step('\t\tresult: %d/%d, %ds\n' %
                        (result[0], result[3], time.time() - startTime))
            del startTime

            #knn_svd_lmnn
            startTime = time.time()
            output_step('\tknn_svd_lmnn (rest: %d): \n' %
                        len(dic_test_labels['knn_svd_lmnn']))
            test_vectors = vectorizer.fit_transform(
                dic_test_data['knn_svd_lmnn'])
            (train_vectors_svd,
             test_vectors_svd) = svd_transform(train_vectors, test_vectors)
            del test_vectors
            # Learn an LMNN metric on the SVD-reduced vectors, then embed
            # both train and test sets with it before classifying.
            lmnn = metric_learn.LMNN(k=n_neighbors, max_iter=max_iter)
            lmnn.fit(train_vectors_svd, numpy.asarray(train_binary_labels))
            train_vectors_svd_lmnn = lmnn.transform(train_vectors_svd)
            del train_vectors_svd
            test_vectors_svd_lmnn = lmnn.transform(test_vectors_svd)
            del test_vectors_svd
            result = step_classification(n_neighbors, cur_label,
                                         train_vectors_svd_lmnn,
                                         train_binary_labels,
                                         dic_test_data['knn_svd_lmnn'],
                                         test_vectors_svd_lmnn,
                                         dic_test_labels['knn_svd_lmnn'])
            # Release the large embedded matrices before accumulating.
            del train_vectors_svd_lmnn, test_vectors_svd_lmnn
            dic_n_right['knn_svd_lmnn'] += result[0]
            dic_test_data['knn_svd_lmnn'] = result[1]
            dic_test_labels['knn_svd_lmnn'] = result[2]
Пример #12
0
import numpy as np
from sklearn.datasets import load_iris

import metric_learn

# Benchmark registry: one pre-configured metric-learn estimator per name,
# using the current (renamed) API kwargs: n_constraints, n_neighbors,
# n_chunks.
CLASSES = {
    'Covariance': metric_learn.Covariance(),
    'ITML_Supervised': metric_learn.ITML_Supervised(n_constraints=200),
    'LFDA': metric_learn.LFDA(k=2, dim=2),
    'LMNN': metric_learn.LMNN(n_neighbors=5, learn_rate=1e-6, verbose=False),
    'LSML_Supervised': metric_learn.LSML_Supervised(n_constraints=200),
    'MLKR': metric_learn.MLKR(),
    'NCA': metric_learn.NCA(max_iter=700, n_components=2),
    'RCA_Supervised': metric_learn.RCA_Supervised(dim=2, n_chunks=30,
                                                  chunk_size=2),
    'SDML_Supervised': metric_learn.SDML_Supervised(n_constraints=1500)
}


class IrisDataset(object):
  """ASV-style benchmark: time fitting each CLASSES learner on Iris."""

  params = [sorted(CLASSES)]
  param_names = ['alg']

  def setup(self, alg):
    """Load the Iris feature matrix and target labels before timing."""
    bunch = load_iris()
    self.iris_points = bunch['data']
    self.iris_labels = bunch['target']

  def time_fit(self, alg):
    """Fit the learner registered under *alg* with a fixed RNG seed."""
    np.random.seed(5555)
    CLASSES[alg].fit(self.iris_points, self.iris_labels)
Пример #13
0
# rank_accuracies, mAP = evaluate_metric(X_test_pca.T, y_test,
#                                        X_test_pca.T, y_test,
#                                        metric ='mahalanobis',
#                                        parameters = M)
# #
# rank_accuracies_l_2.append(rank_accuracies)
# mAP_l_2.append(mAP)
# metric_l_2.append('Learnt Mahalanobis (Red. Set)')
# #
#

#
#
import metric_learn
#
# Learn an LMNN metric in the PCA-reduced training space.
lmnn = metric_learn.LMNN(k=3, learn_rate=1e-6, max_iter=50)
lmnn.fit(X_train_pca, y_train.T)
# NOTE(review): metric() returns the learned Mahalanobis matrix M in older
# metric-learn releases; newer ones renamed it get_mahalanobis_matrix().
M = lmnn.metric()

print('Metric learnt-LMNN')

# Evaluate retrieval with the learned matrix; query and gallery sets are
# both X_test_pca.T, mirroring the commented-out baseline above.
rank_accuracies, mAP = evaluate_metric(X_test_pca.T,
                                       y_test,
                                       X_test_pca.T,
                                       y_test,
                                       metric='mahalanobis',
                                       parameters=M)

rank_accuracies_l_2.append(rank_accuracies)
mAP_l_2.append(mAP)
metric_l_2.append('Learnt LMNN')
Пример #14
0
 def lmnn(self, train_X, train_y, test_X, k):
     """Learn an LMNN metric on the training split and embed both splits.

     Returns the (embedded_train, embedded_test) pair.
     """
     transformer = ml.LMNN(k=k)
     embedded_train = transformer.fit_transform(train_X, train_y)
     embedded_test = transformer.transform(test_X)
     return embedded_train, embedded_test
Пример #15
0
 def test_lmnn(self):
     """The repr shows only constructor arguments set to non-defaults."""
     model = metric_learn.LMNN(convergence_tol=0.01, k=6)
     expected = "LMNN(convergence_tol=0.01, k=6)"
     self.assertEqual(remove_spaces(str(model)), remove_spaces(expected))