def test_iris(self):
    cov = Covariance()
    cov.fit(self.iris_points)
    csep = class_separation(cov.transform(), self.iris_labels)
    # deterministic result
    self.assertAlmostEqual(csep, 0.73068122)
def test_iris(self):
    cov = Covariance()
    cov.fit(self.iris_points)
    csep = class_separation(cov.transform(), self.iris_labels)
    # deterministic result
    self.assertAlmostEqual(csep, 0.72981476)
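# `class_separation` is a helper defined in the surrounding test module and
# not shown in these snippets. A plausible stand-in (an assumption, not
# necessarily the project's actual helper): the mean ratio of within-class
# to between-class average distance, so lower values mean better separation.
import numpy as np
from sklearn.metrics import pairwise_distances


def class_separation(X, labels):
    unique_labels, label_inds = np.unique(labels, return_inverse=True)
    ratio = 0.0
    for li in range(len(unique_labels)):
        Xc = X[label_inds == li]   # points in class li
        Xnc = X[label_inds != li]  # points in every other class
        ratio += (pairwise_distances(Xc).mean() /
                  pairwise_distances(Xc, Xnc).mean())
    return ratio / len(unique_labels)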
def test_singular_returns_pseudo_inverse(self):
    """Checks that if the input covariance matrix is singular, we return
    the pseudo inverse"""
    X, y = load_iris(return_X_y=True)
    # We add a virtual column that is a linear combination of the other
    # columns so that the covariance matrix will be singular
    X = np.concatenate([X, X[:, :2].dot([[2], [3]])], axis=1)
    cov_matrix = np.cov(X, rowvar=False)
    covariance = Covariance()
    covariance.fit(X)
    pseudo_inverse = covariance.get_mahalanobis_matrix()
    # here is the definition of a pseudo inverse according to Wikipedia:
    assert_allclose(cov_matrix.dot(pseudo_inverse).dot(cov_matrix),
                    cov_matrix)
    assert_allclose(pseudo_inverse.dot(cov_matrix).dot(pseudo_inverse),
                    pseudo_inverse)
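# The same two identities hold for NumPy's own Moore-Penrose pseudo-inverse,
# so the expectation in the test above can be cross-checked with plain NumPy
# (a minimal sketch; the singular matrix is built exactly as in the test):
import numpy as np
from sklearn.datasets import load_iris

X, _ = load_iris(return_X_y=True)
X = np.concatenate([X, X[:, :2].dot([[2], [3]])], axis=1)  # singular cov
cov_matrix = np.cov(X, rowvar=False)
pinv = np.linalg.pinv(cov_matrix)

np.testing.assert_allclose(cov_matrix @ pinv @ cov_matrix, cov_matrix,
                           atol=1e-8)
np.testing.assert_allclose(pinv @ cov_matrix @ pinv, pinv, atol=1e-8)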
def test_cov(self):
    cov = Covariance()
    cov.fit(self.X)
    res_1 = cov.transform(self.X)

    cov = Covariance()
    res_2 = cov.fit_transform(self.X)
    # deterministic result
    assert_array_almost_equal(res_1, res_2)
def get_dist_func(
        data: Array[np.float64],
        target: Array[np.float64]
) -> Callable[[Callable[[np.float64, np.float64], np.float64], int, int],
              np.float64]:
    """
    Get function that returns distances between examples in learned space.

    Args:
        data : Array[np.float64] - training data
        target : Array[np.float64] - target variable values (classes of
            training examples)

    Returns:
        Callable - function that takes a distance metric and two indices
            into the training set and returns the distance between those
            examples in the learned metric space.
    """

    # Get transformed data.
    data_trans: Array[np.float64] = Covariance().fit_transform(
        StandardScaler().fit_transform(data), target)

    # Computing distance:
    def dist_func_res(metric: Callable[[np.float64, np.float64], np.float64],
                      i1: int, i2: int) -> np.float64:
        """
        Distance function that takes indices of examples in the training set
        and returns the distance in the learned space using the specified
        distance metric.

        Args:
            metric : Callable - distance metric to apply in the learned space
            i1 : int - index of first training example
            i2 : int - index of second training example

        Returns:
            np.float64 - distance in learned metric space between the
                specified training examples, using the specified metric.
        """

        # Compute distance in learned metric space using specified metric.
        return metric(data_trans[i1, :], data_trans[i2, :])

    return dist_func_res  # Return distance function.
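# A usage sketch for `get_dist_func` (the data and the plug-in metric are
# assumptions; any callable taking two vectors works as the metric):
from scipy.spatial.distance import euclidean
from sklearn.datasets import load_iris

X, y = load_iris(return_X_y=True)
dist = get_dist_func(X, y)
# Distance between training examples 0 and 50 in the learned space:
print(dist(euclidean, 0, 50))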
def test_cov(self):
    cov = Covariance()
    cov.fit(self.X)
    L = cov.components_
    assert_array_almost_equal(L.T.dot(L), cov.get_mahalanobis_matrix())
def test_covariance(self):
    check_estimator(Covariance())
def train_covariance(X):
    model = Covariance()
    model.fit(X)
    return model.transform(X), model.metric()
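# The snippets in this collection span several metric-learn releases: older
# ones expose the learned Mahalanobis matrix via `metric()`, newer ones via
# `get_mahalanobis_matrix()`. A version-tolerant variant of the helper above
# might look like this (a sketch, not part of the original code):
def train_covariance(X):
    model = Covariance()
    model.fit(X)
    # Prefer the newer accessor, fall back to the older one.
    get_M = getattr(model, 'get_mahalanobis_matrix', None) or model.metric
    return model.transform(X), get_M()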
if Method == 'LMNN':
    print("Method: LMNN", '\n')
    lmnn = LMNN(k=3, learn_rate=1e-6, verbose=False)
    x = lmnn.fit(FSTrainData, TrainLabels)
    TFSTestData = x.transform(FSTestData)
    print('Transformation Done', '\n')
elif Method == 'COV':
    print("Method: COV", '\n')
    cov = Covariance().fit(FSTrainData)
    TFSTestData = cov.transform(FSTestData)
    print('Transformation Done', '\n')
elif Method == 'ITML':
    print("Method: ITML", '\n')
    itml = ITML_Supervised(num_constraints=200, A0=None)
    x = itml.fit(FSTrainData, TrainLabels)
    TFSTestData = x.transform(FSTestData)
    print('Transformation Done', '\n')
elif Method == 'LFDA':
    print("Method: LFDA", '\n')
    lfda = LFDA(k=4, dim=1)
    x = lfda.fit(FSTrainData, TrainLabels)
    TFSTestData = x.transform(FSTestData)
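# After any of the branches above, a typical next step is to classify in the
# learned space. A minimal sketch (the k-NN evaluation is an assumption; for
# the 'COV' branch substitute `cov` for `x`):
from sklearn.neighbors import KNeighborsClassifier

TFSTrainData = x.transform(FSTrainData)  # map train data into the same space
knn = KNeighborsClassifier(n_neighbors=3)
knn.fit(TFSTrainData, TrainLabels)
Predictions = knn.predict(TFSTestData)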
quadruplets_learners = [(LSML(), build_quadruplets)]
ids_quadruplets_learners = list(
    map(lambda x: x.__class__.__name__,
        [learner for (learner, _) in quadruplets_learners]))

pairs_learners = [
    (ITML(), build_pairs),
    (MMC(max_iter=2), build_pairs),  # max_iter=2 for faster
    (SDML(), build_pairs),
]
ids_pairs_learners = list(
    map(lambda x: x.__class__.__name__,
        [learner for (learner, _) in pairs_learners]))

classifiers = [(Covariance(), build_classification),
               (LFDA(), build_classification),
               (LMNN(), build_classification),
               (NCA(), build_classification),
               (RCA(), build_classification),
               (ITML_Supervised(max_iter=5), build_classification),
               (LSML_Supervised(), build_classification),
               (MMC_Supervised(max_iter=5), build_classification),
               (RCA_Supervised(num_chunks=10), build_classification),
               (SDML_Supervised(), build_classification)]
ids_classifiers = list(
    map(lambda x: x.__class__.__name__,
        [learner for (learner, _) in classifiers]))

regressors = [(MLKR(), build_regression)]
ids_regressors = list(
    map(lambda x: x.__class__.__name__,
        [learner for (learner, _) in regressors]))
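# These (estimator, build_fn) pairs and the matching id lists are typically
# fed to pytest parametrization. A sketch of the hookup (the test body and
# the build functions' return shape are assumptions):
import pytest


@pytest.mark.parametrize('estimator, build_dataset', classifiers,
                         ids=ids_classifiers)
def test_fit_runs(estimator, build_dataset):
    inputs, labels = build_dataset()[:2]  # assumed (inputs, labels, ...)
    estimator.fit(inputs, labels)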
def test_cov(self):
    cov = Covariance()
    cov.fit(self.X)
    L = cov.transformer_
    assert_array_almost_equal(L.T.dot(L), cov.metric())
def test_covariance():
    iris = load_iris()['data']
    cov = Covariance().fit(iris)
    x = cov.transform(iris)
    print(x)
def test_cov(self):
    cov = Covariance()
    cov.fit(self.X)
    L = cov.transformer_
    assert_array_almost_equal(L.T.dot(L), cov.get_mahalanobis_matrix())
def test_cov(self):
    cov = Covariance()
    cov.fit(self.X)
    L = cov.transformer()
    assert_array_almost_equal(L.T.dot(L), cov.metric())
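# The recurring assertion L.T.dot(L) == M reflects what Covariance learns:
# the Mahalanobis matrix M is the (pseudo-)inverse of the data's covariance,
# factored as M = L^T L. A NumPy cross-check (a sketch assuming a recent
# metric-learn release with `components_` / `get_mahalanobis_matrix()`):
import numpy as np
from sklearn.datasets import load_iris
from metric_learn import Covariance

X = load_iris().data
cov = Covariance().fit(X)
L = cov.components_
M = cov.get_mahalanobis_matrix()

np.testing.assert_allclose(L.T @ L, M, atol=1e-10)
np.testing.assert_allclose(M, np.linalg.pinv(np.cov(X, rowvar=False)),
                           rtol=1e-5)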