class LMNNClassifier(BaseEstimator, ClassifierMixin):
    """k-NN classifier using a Mahalanobis metric learnt with LMNN.

    Optionally reduces dimensionality with PCA before metric learning.

    Parameters
    ----------
    k : int
        Number of neighbours for both LMNN and the k-NN classifier.
    pca : int or None
        If not None, number of PCA components to project onto before fitting.
    train : bool
        If True, learn the LMNN metric; if False, fall back to plain
        Euclidean k-NN.
    mu : float
        LMNN regularization weight (push/pull trade-off).
    """

    def __init__(self, k=3, pca=None, train=True, mu=0.5):
        self.k = k
        self.train = train
        self.pca = pca
        # NOTE: attribute name keeps the original (misspelt) spelling
        # for backward compatibility with any external readers.
        self.pca_trasform = None
        self.mu = mu
        self.lmnn = LMNN(k=k, use_pca=False, max_iter=10000, regularization=mu)

    def fit(self, x, y=None):
        """Fit the (optional) PCA, the LMNN metric, and the k-NN classifier.

        Returns ``self`` to allow chaining, per the sklearn convention.
        """
        if self.pca is not None:
            pca = PCA(n_components=self.pca)
            pca.fit(x)
            # Remember the projection so predict/score apply the same transform.
            self.pca_trasform = pca.transform
            x = pca.transform(x)
        if self.train:
            self.lmnn.fit(x, y)
            # k-NN over the learnt Mahalanobis metric (VI = learnt matrix M).
            self.knn = KNeighborsClassifier(
                n_neighbors=self.k,
                metric='mahalanobis',
                metric_params=dict(VI=self.lmnn.metric()),
                n_jobs=-1)
        else:
            # Baseline: plain Euclidean k-NN, no learnt metric.
            self.knn = KNeighborsClassifier(n_neighbors=self.k)
        self.knn.fit(x, y)
        return self

    def predict(self, x, y=None):
        """Predict labels for ``x``; ``y`` is accepted but ignored.

        Applies the stored PCA projection if one was fitted.
        """
        if self.pca_trasform is not None:
            x = self.pca_trasform(x)
        # BUG FIX: KNeighborsClassifier.predict takes only X; the original
        # passed ``y`` as a second positional argument, raising TypeError.
        return self.knn.predict(x)

    def score(self, x, y=None):
        """Return mean accuracy of the underlying k-NN on ``(x, y)``."""
        if self.pca_trasform is not None:
            x = self.pca_trasform(x)
        return self.knn.score(x, y)

    def set_params(self, **parameters):
        """Set estimator parameters; mirror ``mu`` onto the LMNN regularizer."""
        for parameter, value in parameters.items():
            setattr(self, parameter, value)
            if parameter == 'mu':
                # Keep the already-constructed LMNN instance in sync.
                setattr(self.lmnn, 'regularization', value)
        return self
def test_lmnn(self):
    """Learnt metric M must equal L.T @ L for the fitted transformer L."""
    model = LMNN(k=5, learn_rate=1e-6, verbose=False)
    model.fit(self.X, self.y)
    transform = model.transformer_
    assert_array_almost_equal(transform.T.dot(transform), model.metric())
# Record the previous experiment's results.
rank_accuracies_l_2.append(rank_accuracies)
mAP_l_2.append(mAP)
metric_l_2.append('Learnt Mahalanobis (Red. Set)')


# In[24]:


from metric_learn import LMNN

# Learn an LMNN metric on the PCA-reduced training set.
lmnn = LMNN(k=3, learn_rate=1e-6, max_iter=50)
lmnn.fit(X_train_pca, y_train)
M = lmnn.metric()
print('Metric learnt')

# Evaluate retrieval with the learnt Mahalanobis matrix.
rank_accuracies, mAP = evaluate_metric(
    X_query_pca, camId_query, y_query,
    X_gallery_pca, camId_gallery, y_gallery,
    metric='mahalanobis', parameters=M)

rank_accuracies_l_2.append(rank_accuracies)
mAP_l_2.append(mAP)
metric_l_2.append('Learnt LMNN')
def test_lmnn(self):
    """Check that metric() equals L.T @ L where L is the learnt transformer."""
    lmnn = LMNN(k=5, learn_rate=1e-6, verbose=False)
    lmnn.fit(self.X, self.y)
    # CONSISTENCY FIX: use the fitted-attribute API (transformer_), as the
    # sibling test in this file does; transformer() is the deprecated
    # accessor removed in newer metric-learn releases.
    L = lmnn.transformer_
    assert_array_almost_equal(L.T.dot(L), lmnn.metric())