def testSorting(self):
    """components should be sorted by decreasing variance
    """
    x = generate_covsig(np.diag([1, 9, 2, 6, 3, 8, 4, 5, 7]), 500)
    w, v = pca(x, sort_components=True)
    c = np.cov(np.dot(w.T, x))
    self.assertTrue(np.allclose(c, np.diag([9, 8, 7, 6, 5, 4, 3, 2, 1]),
                                rtol=1e-1, atol=1e-2))
def testSorting(self):
    """components should be sorted by decreasing variance
    """
    x = generate_covsig(np.diag([1, 9, 2, 6, 3, 8, 4, 5, 7]), 500)
    w, v = pca(x, reducedim=5)
    c = np.cov(x.dot(w).T)
    self.assertTrue(np.allclose(c, np.diag([9, 8, 7, 6, 5]),
                                rtol=1e-1, atol=1e-2))
def testDecorrelation(self):
    """components should be decorrelated after PCA
    """
    x = generate_covsig([[3, 2, 1], [2, 3, 2], [1, 2, 3]], 500)
    w, v = pca(x)
    c = np.cov(np.dot(w.T, x))
    c -= np.diag(c.diagonal())
    self.assertTrue(np.allclose(c, np.zeros((3, 3)), rtol=1e-2, atol=1e-3))
def testInverse(self):
    # w and v should be inverse transforms: v.dot(w) should be the identity
    # matrix (up to sign, hence the np.abs)
    i = np.abs(self.v.dot(self.w))
    self.assertTrue(np.abs(np.mean(i.diagonal()) - 1) < epsilon)
    self.assertTrue(np.abs(np.sum(i) - i.trace()) < epsilon)

    w, v = pca(self.x, subtract_mean=True, normalize=True)
    i = np.abs(v.dot(w))
    self.assertTrue(np.abs(np.mean(i.diagonal()) - 1) < epsilon)
    self.assertTrue(np.abs(np.sum(i) - i.trace()) < epsilon)
def testIdentity(self):
    """identity covariance in -> identity covariance out

    test for up to 50 dimensions
    """
    for i in range(1, 50):
        x = generate_covsig(np.eye(i), 500)
        w, v = pca(x)
        c = np.cov(np.dot(w.T, x))
        self.assertTrue(np.allclose(c, np.eye(i)))
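# The tests above rely on a helper generate_covsig(cov, n) that is not shown
# here. Below is a minimal sketch of such a helper, assuming it draws n
# zero-mean samples with the requested covariance and returns them as a
# (channels, samples) array; the actual helper used by these tests may differ.
def generate_covsig(cov, n):
    """Sketch: generate n zero-mean samples with covariance `cov`
    (assumed return shape: channels x samples).
    """
    cov = np.atleast_2d(cov)
    m = cov.shape[0]
    # draw from a multivariate normal with the requested covariance
    x = np.random.multivariate_normal(np.zeros(m), cov, size=n)
    return x.T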
def setUp(self):
    self.x = np.random.rand(10, 100)
    self.y = self.x.copy()
    self.m, self.n = self.x.shape
    self.w1, self.v1 = pca(self.x, reducedim=0.9)
    self.w2, self.v2 = pca(self.x, reducedim=5)
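# Hypothetical illustration (not part of the test suite): the two fixtures in
# the setUp above exercise the reducedim parameter. Presumably, reducedim=5
# keeps exactly five components, while reducedim=0.9 keeps as many components
# as are needed to retain roughly 90% of the variance; inspecting the shapes of
# the returned transforms shows the effect. Variable names here are illustrative.
x = np.random.rand(10, 100)
w_frac, v_frac = pca(x, reducedim=0.9)  # fraction-of-variance criterion (assumed semantics)
w_five, v_five = pca(x, reducedim=5)    # fixed number of components (assumed semantics)
print(w_five.shape)  # expected (10, 5): 10 channels mapped onto 5 components
print(w_frac.shape)  # (10, k), with k determined by the variance criterion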
def testInputSafety(self):
    self.assertTrue((self.x == self.y).all())
    pca(self.x, subtract_mean=True, normalize=True)
    self.assertTrue((self.x == self.y).all())
def setUp(self):
    self.x = np.random.rand(10, 100)
    self.y = self.x.copy()
    self.m, self.n = self.x.shape
    self.w, self.v = pca(self.x)
import numpy as np

from scot.pca import pca
from scot.var import VAR

# Set random seed for repeatable results
np.random.seed(42)

# Generate data from a VAR(1) process
model0 = VAR(1)
model0.coef = np.array([[0.3, -0.6], [0, -0.9]])
x = model0.simulate(10000).squeeze()

# Transform data with PCA
w, v = pca(x)
y = np.dot(w.T, x)

# Verify that transformed data y is decorrelated
print("Covariance of x:\n", np.cov(x.squeeze()))
print("\nCovariance of y:\n", np.cov(y.squeeze()))

model1, model2 = VAR(1), VAR(1)

# Fit model1 to the original data
model1.fit(x)
# Fit model2 to the PCA transformed data
model2.fit(y)

# The coefficients estimated on x are exactly equal to the back-transformed
# coefficients estimated on y.
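# A hypothetical continuation of the example above (not part of the original
# script): since y = w.T @ x and, per the inverse test earlier, v approximately
# inverts w, the coefficients fitted on y can be mapped back into the original
# channel space and compared with those fitted on x. The back-transformation
# below follows from x_t = v.T @ y_t and is an assumption about the pca()
# conventions, not confirmed library behaviour.
print("\nVAR coefficients estimated on x:\n", model1.coef)
print("\nBack-transformed VAR coefficients estimated on y:\n",
      np.dot(v.T, np.dot(model2.coef, w.T)))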