def test_mmc_supervised(self):
    """fit + transform must equal fit_transform for MMC_Supervised.

    Both runs are seeded with an identical RandomState so the sampled
    constraints — and therefore the learned metric — are the same.
    """
    rng = np.random.RandomState(1234)
    model = MMC_Supervised(num_constraints=200)
    model.fit(self.X, self.y, random_state=rng)
    transformed_separately = model.transform(self.X)

    rng = np.random.RandomState(1234)
    model = MMC_Supervised(num_constraints=200)
    transformed_jointly = model.fit_transform(self.X, self.y, random_state=rng)

    assert_array_almost_equal(transformed_separately, transformed_jointly)
Exemple #2
0
# Pairs-based metric learners, each paired with the helper that builds
# its (pairs, labels) input.
pairs_learners = [
    (ITML(), build_pairs),
    (MMC(max_iter=2), build_pairs),  # max_iter=2 for faster
    (SDML(), build_pairs),
]
# Human-readable pytest ids: one class name per learner.
# (Comprehension instead of list(map(lambda ...)) — same result, clearer.)
ids_pairs_learners = [type(learner).__name__ for learner, _ in pairs_learners]

# Supervised metric learners usable as classifiers, each paired with the
# helper that builds its (X, y) classification input.
classifiers = [(Covariance(), build_classification),
               (LFDA(), build_classification), (LMNN(), build_classification),
               (NCA(), build_classification), (RCA(), build_classification),
               (ITML_Supervised(max_iter=5), build_classification),
               (LSML_Supervised(), build_classification),
               (MMC_Supervised(max_iter=5), build_classification),
               (RCA_Supervised(num_chunks=10), build_classification),
               (SDML_Supervised(), build_classification)]
# Human-readable pytest ids: one class name per learner.
# (Comprehension instead of list(map(lambda ...)) — same result, clearer.)
ids_classifiers = [type(learner).__name__ for learner, _ in classifiers]

# Metric learners usable as regressors, each paired with the helper that
# builds its (X, y) regression input.
regressors = [(MLKR(), build_regression)]
# Human-readable pytest ids: one class name per learner.
# (Comprehension instead of list(map(lambda ...)) — same result, clearer.)
ids_regressors = [type(learner).__name__ for learner, _ in regressors]

# Mixin classes that identify weakly-supervised (tuple-input) learners.
WeaklySupervisedClasses = (_PairsClassifierMixin, _QuadrupletsClassifierMixin)

# All tuple-based learners and their ids: pairs plus quadruplets.
# NOTE(review): `quadruplets_learners` / `ids_quadruplets_learners` are
# defined elsewhere in this file — confirm they exist before this point.
tuples_learners = pairs_learners + quadruplets_learners
ids_tuples_learners = ids_pairs_learners + ids_quadruplets_learners
 def test_mmc(self):
     """Run scikit-learn's estimator-API compliance checks on MMC_Supervised."""
     check_estimator(MMC_Supervised())
# In[16]:


from metric_learn import MMC_Supervised
from sklearn.decomposition import PCA

# Mahalanobis metric learnt by MMC on a PCA-reduced feature set.


# Project the features down to 150 dimensions before metric learning.
pca = PCA(n_components=150)
X_train_pca = pca.fit_transform(X_train)
# Query/gallery sets reuse the projection fitted on the training data.
X_query_pca = pca.transform(X_query)
X_gallery_pca = pca.transform(X_gallery)

# Fit the metric on the first 150 training samples only — presumably to
# keep the MMC optimisation tractable; TODO confirm this subsetting.
mmc = MMC_Supervised(max_iter=50)
mmc.fit(X_train_pca[0:150], y_train[0:150])



# NOTE(review): `metric()` is the legacy accessor for the learned
# Mahalanobis matrix; newer metric-learn releases expose
# `get_mahalanobis_matrix()` instead — confirm the pinned version.
M = mmc.metric()

print ('Metric learnt')


# Evaluate retrieval using the learned matrix M as the Mahalanobis
# parameter of the distance.
rank_accuracies, mAP = evaluate_metric(X_query_pca, camId_query, y_query,
                                       X_gallery_pca, camId_gallery, y_gallery,
                                       metric ='mahalanobis',
                                       parameters = M)

rank_accuracies_l_2.append(rank_accuracies)
# bar.finish()
# end_time = time.time()
# print("Accuracy for Simple Nearest Neighbour @rank 1 : ", "{:.4%}".format(rank_one_score / len(query_labels)))
# print("Accuracy for Simple Nearest Neighbour @rank 5 : ", "{:.4%}".format(rank_five_score / len(query_labels)))
# print("Accuracy for Simple Nearest Neighbour @rank 10 : ", "{:.4%}".format(rank_ten_score / len(query_labels)))
#
# print("Computation Time: %s seconds" % (end_time - start_time))

# PCA-MMC: learn an MMC metric on PCA-projected features, then rank.
print("-----PCA_MMC-----")
# NOTE(review): this `PCA` takes (features, labels, M, low_dimension) and
# has a no-argument fit() — a project-local implementation, NOT
# sklearn.decomposition.PCA; confirm its definition.
pca = PCA(original_train_features,
          original_train_labels,
          M=500,
          low_dimension=False)
pca.fit()
mmc = MMC_Supervised(max_iter=20, convergence_threshold=1e-5)
# fit() returns the fitted estimator, so mmc_metric aliases mmc.
mmc_metric = mmc.fit(pca.train_sample_projection, original_train_labels)
transformed_features = mmc_metric.transform(features)
# `query_idxs - 1`: indices appear to be 1-based — converted to 0-based
# here; TODO confirm against how query_idxs is produced.
transformed_query_features = transformed_features[query_idxs - 1]

n = 10
start_time = time.time()
rank_one_score = 0
rank_five_score = 0
rank_ten_score = 0
bar.start()
# Score each query against its gallery (same 1-based index shift).
for k in range(len(query_features)):
    bar.update(k + 1)
    feature_vector = transformed_query_features[k]
    gallery_vectors = transformed_features[gallery_data_idx[k] - 1]
    gallery_labels = labels[gallery_data_idx[k] - 1]
                       convergence_threshold=1e-5,
                       num_constraints=500,
                       verbose=True)
# Fit ITML on the PCA projection and time the optimisation.
start_time = time.time()
itml.fit(pca.train_sample_projection, original_train_labels)
end_time = time.time()
print("Learning time: %s" % (end_time - start_time))
# Project query/gallery features into the learned metric space and
# evaluate via k-means clustering.
transformed_query_features = itml.transform(pca_query_features)
transformed_gallery_features = itml.transform(pca_gallery_features)
compute_k_mean(num_of_clusters, transformed_query_features,
               transformed_gallery_features, gallery_labels)

# Compute PCA_MMC (Mahalanobis Metric Learning for Clustering):
# same pipeline as ITML above, but with an MMC-learned metric.
print("\n-----PCA_MMC-----")
mmc = MMC_Supervised(max_iter=20,
                     convergence_threshold=1e-5,
                     num_constraints=500,
                     verbose=True)
# Fit on the PCA projection and time the optimisation.
start_time = time.time()
mmc.fit(pca.train_sample_projection, original_train_labels)
end_time = time.time()
print("Learning time: %s" % (end_time - start_time))
# Project query/gallery features into the learned metric space and
# evaluate via k-means clustering.
transformed_query_features = mmc.transform(pca_query_features)
transformed_gallery_features = mmc.transform(pca_gallery_features)
compute_k_mean(num_of_clusters, transformed_query_features,
               transformed_gallery_features, gallery_labels)

print("\n-----MMC diagonal-----")
mmc = MMC_Supervised(max_iter=20,
                     convergence_threshold=1e-5,
                     num_constraints=500,
                     diagonal=True,
Exemple #7
0
 def test_mmc_supervised(self):
   """The learned linear map L must satisfy M = L.T @ L.

   NOTE(review): `transformer()` and `metric()` are legacy accessors;
   newer metric-learn releases expose `components_` and
   `get_mahalanobis_matrix()` — confirm the pinned library version.
   """
   mmc = MMC_Supervised(num_constraints=200)
   mmc.fit(self.X, self.y)
   L = mmc.transformer()
   assert_array_almost_equal(L.T.dot(L), mmc.metric())