def runITML(X_train, X_test, y_train, y_test):
    """Fit supervised ITML on the training split and project both splits.

    The projected arrays are cached to ``X_train_ITML.npy`` and
    ``X_test_ITML.npy`` and also returned as ``(train_proj, test_proj)``.
    ``y_test`` is accepted for signature symmetry but not used.
    """
    learner = ITML_Supervised(num_constraints=200, verbose=True)
    learner.fit(X_train, y_train)

    projected_train = learner.transform(X_train)
    projected_test = learner.transform(X_test)

    # Cache the embeddings so downstream experiments can skip refitting.
    np.save('X_train_ITML', projected_train)
    np.save('X_test_ITML', projected_test)
    return projected_train, projected_test
def test_iris(self):
    """ITML should yield well-separated classes on the iris data."""
    learner = ITML_Supervised(num_constraints=200)
    learner.fit(self.iris_points, self.iris_labels)
    separation = class_separation(
        learner.transform(self.iris_points), self.iris_labels)
    self.assertLess(separation, 0.2)
def test_iris(self):
    """Class separation after ITML should stay below a loose 0.4 bound."""
    n_constraints = 200
    learner = ITML_Supervised(num_constraints=n_constraints)
    learner.fit(self.iris_points, self.iris_labels)
    csep = class_separation(learner.transform(), self.iris_labels)
    self.assertLess(csep, 0.4)  # it's not great
def test_iris(self):
    """ITML-transformed iris data should separate classes below 0.4."""
    itml = ITML_Supervised(num_constraints=200).fit(self.iris_points,
                                                    self.iris_labels)
    self.assertLess(
        class_separation(itml.transform(), self.iris_labels),
        0.4)  # it's not great
def test_itml_supervised(self):
    """fit+transform and fit_transform must agree under the same seed."""
    def fresh_learner():
        rng = np.random.RandomState(1234)
        return ITML_Supervised(n_constraints=200, random_state=rng)

    first = fresh_learner()
    first.fit(self.X, self.y)
    res_1 = first.transform(self.X)

    res_2 = fresh_learner().fit_transform(self.X, self.y)
    assert_array_almost_equal(res_1, res_2)
def test_itml_supervised(self):
    """Identically-seeded fit/transform and fit_transform give equal output."""
    rng = np.random.RandomState(1234)
    learner = ITML_Supervised(num_constraints=200)
    learner.fit(self.X, self.y, random_state=rng)
    out_a = learner.transform(self.X)

    # Rebuild everything with the same seed and use the one-shot API.
    rng = np.random.RandomState(1234)
    learner = ITML_Supervised(num_constraints=200)
    out_b = learner.fit_transform(self.X, self.y, random_state=rng)

    assert_array_almost_equal(out_a, out_b)
class ITML:
    """PCA-then-ITML pipeline: project features into a PCA subspace, then
    learn an ITML metric on that projection.

    Fix: removed three leftover ``pdb.set_trace()`` debugger breakpoints
    that would halt any non-interactive run.
    """

    def __init__(self, num_constraints=200):
        self.space_model = PCA()
        self.metric_model = ITML_Supervised(num_constraints)

    def fit(self, feats, labels):
        """Fits the model to the prescribed data."""
        # NOTE(review): this PCA appears to be a project-local class whose
        # fit returns (eigenvectors, projection) — unlike sklearn's PCA,
        # which returns self. Confirm against the PCA implementation.
        self.eigenvecs, self.space = self.space_model.fit(feats, labels)
        self.metric_model.fit(self.space.T, labels)

    def transform(self, y):
        """Transforms the test data according to the model"""
        test_proj, _ = self.space_model.transform(y)
        # NOTE(review): the metric was learned on the PCA projection, yet
        # the raw input `y` is passed here and `test_proj` is discarded —
        # `test_proj` looks like the intended argument. Left unchanged
        # pending confirmation, since callers may rely on current output.
        return self.metric_model.transform(y)
class ITML:
    """Thin wrapper around ITML_Supervised that adds a nearest-centroid
    probability estimate over the learned metric space."""

    def __init__(self, num_constraints=200):
        self.metric_model = ITML_Supervised(num_constraints)

    def fit(self, features, labels):
        """Fits the model to the prescribed data.

        Fix: ``predict_proba`` reads ``self.X_tr`` and ``self.y_train``,
        but nothing ever assigned them — store the training data here.
        """
        self.X_tr = features
        self.y_train = labels
        return self.metric_model.fit(features, labels)

    def transform(self, y):
        """Transforms the test data according to the model"""
        return self.metric_model.transform(y)

    def predict_proba(self, X_te):
        """Predicts the probabilities of each of the test samples"""
        test_samples = X_te.shape[0]
        # Use a local projection instead of overwriting self.X_tr (the
        # original clobbered it, so a second call would double-transform).
        X_tr_proj = self.transform(self.X_tr)
        clf = NearestCentroid()
        clf.fit(X_tr_proj, self.y_train)
        centroids = clf.centroids_
        probabilities = np.zeros((test_samples, centroids.shape[0]))
        # `xrange` was Python 2 only; `range` is correct on both.
        for sample in range(test_samples):
            probabilities[sample] = sk_nearest_neighbour_proba(
                centroids, X_te[sample, :])
        return probabilities
print("done in %0.3fs" % (time() - t0))

eigenfaces = pca.components_.reshape((n_components, h, w))

print("Projecting the input data on the eigenfaces orthonormal basis")
t0 = time()
X_train_pca = pca.transform(X_train)
X_test_pca = pca.transform(X_test)
print("done in %0.3fs" % (time() - t0))

# Metric learning (ITML) on the PCA-reduced features.
print("Trying ITML")
itml = ITML(num_constraints=200)
X_tr = itml.fit(X_train_pca, y_train).transform(X_train_pca)
X_te = itml.transform(X_test_pca)
acc, y_pred = classifier.sk_nearest_neighbour(X_tr, y_train, X_te, y_test)
# Fix: the original did print("accuracy = %s", acc), which prints the
# tuple ('accuracy = %s', acc) instead of formatting the value.
print("accuracy = %s" % acc)
print(classification_report(y_test, y_pred, target_names=target_names))
print(confusion_matrix(y_test, y_pred, labels=range(n_classes)))

###############################################################################
# Train a SVM classification model

print("Fitting the classifier to the training set")
t0 = time()
# Fix: removed a stray dead assignment `param_grid = {''}` (a set holding
# one empty string) that was immediately shadowed by the real grid below.
param_grid = {'C': [1e3, 5e3, 1e4, 5e4, 1e5],
              'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.1], }
clf = GridSearchCV(SVC(kernel='rbf'), param_grid)
# Fit NCA on the PCA-projected training sample and report the fit time.
nca.fit(pca.train_sample_projection, original_train_labels)
end_time = time.time()
print("Learning time: %s" % (end_time - start_time))
# Project query and gallery features through the learned NCA transform,
# then evaluate with k-means clustering.
transformed_query_features = nca.transform(pca_query_features)
transformed_gallery_features = nca.transform(pca_gallery_features)
compute_k_mean(num_of_clusters, transformed_query_features,
               transformed_gallery_features, gallery_labels)

# Compute ITML (Information Theoretic Metric Learning)
print("\n-----ITML-----")
# ITML fit directly on the raw (non-PCA) training features.
itml = ITML_Supervised(max_iter=20, convergence_threshold=1e-5,
                       num_constraints=500, verbose=True)
itml.fit(original_train_features, original_train_labels)
transformed_query_features = itml.transform(query_features)
transformed_gallery_features = itml.transform(gallery_features)
compute_k_mean(num_of_clusters, transformed_query_features,
               transformed_gallery_features, gallery_labels)

# Compute PCA_ITML
print("\n-----PCA_ITML-----")
# Same ITML configuration, but fit on the PCA-projected training sample;
# this fit is timed like the NCA run above.
itml = ITML_Supervised(max_iter=20, convergence_threshold=1e-5,
                       num_constraints=500, verbose=True)
start_time = time.time()
itml.fit(pca.train_sample_projection, original_train_labels)
end_time = time.time()
print("Learning time: %s" % (end_time - start_time))
transformed_query_features = itml.transform(pca_query_features)
def main():
    """Run the CUHK03 ITML experiment: PCA-reduce features, learn an ITML
    metric, evaluate rank accuracy and k-means clustering, then sweep the
    ITML ``gamma`` hyperparameter and plot accuracy against it."""
    print("importing data...")
    data = loadmat('assets/cuhk03_new_protocol_config_labeled.mat')
    with open('assets/feature_data.json') as f:
        features = ujson.load(f)
    print("data imported")
    features = np.array(features)
    # MATLAB indices are 1-based; shift to 0-based.
    train_idxs = data['train_idx'].flatten() - 1
    query_idxs = data['query_idx'].flatten() - 1
    camId = data['camId'].flatten()
    gallery_idxs = data['gallery_idx'].flatten() - 1
    labels = data['labels'].flatten()

    # PCA on the training features; keep the top 50 eigenvectors.
    N, m = features[train_idxs].shape
    eigvals, eigvecs = calc_eig_pca_small(features[train_idxs].T, m, N)
    m = 50
    m_eigvecs = eigvecs[:, :m]
    avg_face = compute_avg_face(features[train_idxs].T)
    phi = features - avg_face
    m_features = np.dot(phi, m_eigvecs)

    # Baseline ITML run at gamma=0.1.
    itml = ITML_Supervised(verbose=True, num_constraints=5000, gamma=0.1)
    X = m_features[train_idxs]
    Y = labels[train_idxs]
    X_itml = itml.fit_transform(X, Y)
    M = itml.metric()
    plot_3d(X_itml, Y)
    nn_idx_mat = evaluation(knn, features=m_features,
                            gallery_idxs=gallery_idxs, query_idxs=query_idxs,
                            camId=camId, labels=labels, metric='mahalanobis',
                            metric_params={'VI': M})
    acc = get_all_rank_acc(nn_idx_mat, query_idxs, labels)
    print("Accuracy:")
    print(acc)

    # Cluster the held-out (gallery + query) set in the learned space.
    test_set_idxs = np.append(gallery_idxs, query_idxs)
    features_ITML = itml.transform(m_features)
    X_test = features_ITML[test_set_idxs]
    Y_test = labels[test_set_idxs]
    n_cluster = np.unique(Y_test).size
    nmi_kmean, acc_kmean = evaluation_k_means(X_test, n_cluster, Y_test)
    print("ITML k-means accuracy (test set):")
    print(acc_kmean)

    # Sweep gamma in [0.1, 1.0] and record rank accuracy for each value.
    gamma = [i / 10 for i in range(1, 11)]
    X_itmls = []
    all_rank_acc_g = []
    for g in gamma:
        # Fix: the original hard-coded gamma=0.2 inside this loop, so every
        # sweep iteration trained an identical model; use the loop value.
        itml = ITML_Supervised(verbose=True, num_constraints=5000, gamma=g)
        X = m_features[train_idxs]
        X_itml = itml.fit_transform(X, Y)
        X_itmls.append(X_itml)
        M = itml.metric()
        nn_idx_mat = evaluation(knn, features=m_features,
                                gallery_idxs=gallery_idxs,
                                query_idxs=query_idxs, camId=camId,
                                labels=labels, metric='mahalanobis',
                                metric_params={'VI': M})
        acc_g = get_all_rank_acc(nn_idx_mat, query_idxs, labels)
        all_rank_acc_g.append(acc_g)

    plt.plot(gamma, all_rank_acc_g)
    plt.legend(('Rank 1', 'Rank 5', 'Rank10'))
    plt.ylabel('Accuracy')
    plt.xlabel('gamma')
    print(all_rank_acc_g)
    plt.show()