def test_residuals(self):
    l = 100000
    var0 = VAR(2)
    var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
    x = var0.simulate(l)

    var = VAR(2)
    var.fit(x)

    self.assertEqual(x.shape, var.residuals.shape)

    # the simulated innovations are unit-variance white noise, so the residual
    # covariance of a well-fitted model should be close to the identity matrix
    self.assertTrue(np.allclose(var.rescov, np.eye(var.rescov.shape[0]), 1e-2, 1e-2))
def test_fit(self):
    var0 = VAR(2)
    var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
    l = 100000
    x = var0.simulate(l)
    y = x.copy()

    var = VAR(2)
    var.fit(x)

    # make sure the input remains unchanged
    self.assertTrue(np.all(x == y))

    # that limit is rather generous, but we don't want tests to fail due to random variation
    self.assertTrue(np.all(np.abs(var0.coef - var.coef) < 0.02))
def test_fit_regularized(self):
    l = 100000
    var0 = VAR(2)
    var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
    x = var0.simulate(l)
    y = x.copy()

    var = VAR(10, delta=1)
    var.fit(x)

    # make sure the input remains unchanged
    self.assertTrue(np.all(x == y))

    # place the true order-2 coefficients at their corresponding positions in the
    # order-10 coefficient matrix; all other entries should be driven towards zero
    # by the regularization
    b0 = np.zeros((2, 20))
    b0[:, 0:2] = var0.coef[:, 0:2]
    b0[:, 10:12] = var0.coef[:, 2:4]

    # that limit is rather generous, but we don't want tests to fail due to random variation
    self.assertTrue(np.all(np.abs(b0 - var.coef) < 0.02))
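
# Illustrative sketch (not part of the test suite): the simulate/fit round trip that
# the three tests above rely on. It assumes only the VAR interface used in those
# tests (VAR(p), .coef, .simulate(l), .fit(x)); the helper name is made up. Run it
# manually to eyeball how closely the estimated coefficients approach the
# generating ones as the sample size grows.
def _roundtrip_error_sketch(l=100000):
    var0 = VAR(2)
    var0.coef = np.array([[0.2, 0.1, 0.4, -0.1],
                          [0.3, -0.2, 0.1, 0.0]])
    x = var0.simulate(l)
    var = VAR(2)
    var.fit(x)
    # largest absolute deviation between true and estimated coefficients
    return np.max(np.abs(var0.coef - var.coef))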
def testFunctionality(self):
    """Generate VAR signals and apply the Workspace API to them.

    This is done once for every available backend.
    """
    # original model coefficients
    b01 = np.zeros((3, 6))
    b02 = np.zeros((3, 6))
    b01[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0],
                     [-0.7, 0.0, 0.9, 0.0]]
    b02[0:3, 2:6] = [[0.4, 0.0, 0.0, 0.0],
                     [0.4, 0.0, 0.4, 0.0],
                     [0.0, 0.0, 0.4, 0.0]]
    m0 = b01.shape[0]
    cl = np.array([0, 1, 0, 1, 0, 0, 1, 1, 1, 0])
    l = 1000
    t = len(cl)

    # generate VAR sources with a non-Gaussian innovation process, otherwise ICA won't work
    noisefunc = lambda: np.random.normal(size=(1, m0)) ** 3

    var = VAR(2)
    var.coef = b01
    sources1 = var.simulate([l, sum(cl == 0)], noisefunc)
    var.coef = b02
    sources2 = var.simulate([l, sum(cl == 1)], noisefunc)

    var.fit(sources1)
    print(var.coef)
    var.fit(sources2)
    print(var.coef)

    sources = np.zeros((l, m0, t))
    sources[:, :, cl == 0] = sources1
    sources[:, :, cl == 1] = sources2

    # simulate volume conduction... 3 sources measured with 7 channels
    mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
           [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
           [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
    data = datatools.dot_special(sources, mix)

    backend_modules = [import_module('scot.backend.' + b) for b in scot.backend.__all__]

    for bm in backend_modules:
        api = scot.Workspace({'model_order': 2}, reducedim=3, backend=bm.backend)

        api.set_data(data)

        api.do_ica()
        self.assertEqual(api.mixing_.shape, (3, 7))
        self.assertEqual(api.unmixing_.shape, (7, 3))

        api.do_mvarica()
        self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

        api.set_data(data)

        api.fit_var()
        self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))
        self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (3, 3, 512, 18))

        api.set_data(data, cl)
        self.assertFalse(np.any(np.isnan(api.data_)))
        self.assertFalse(np.any(np.isinf(api.data_)))

        api.do_cspvarica()
        self.assertFalse(np.any(np.isnan(api.activations_)))
        self.assertFalse(np.any(np.isinf(api.activations_)))
        self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

        for c in np.unique(cl):
            api.set_used_labels([c])
            api.fit_var()
            fc = api.get_connectivity('S')
            self.assertEqual(fc.shape, (3, 3, 512))
            tfc = api.get_tf_connectivity('S', 100, 50)
            self.assertEqual(tfc.shape, (3, 3, 512, 18))

        api.set_data(data)
        api.remove_sources([0, 2])
        api.fit_var()
        self.assertEqual(api.get_connectivity('S').shape, (1, 1, 512))
        self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (1, 1, 512, 18))
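
# Illustrative sketch (not part of the test suite): the minimal Workspace workflow
# that testFunctionality exercises for each backend. It assumes only the calls used
# above (Workspace(...), set_data, do_mvarica, get_connectivity); the helper name is
# made up, `data` is shaped like the array built in the test (samples, channels,
# trials), and `backend_module` is one of the modules imported from scot.backend.
def _workspace_flow_sketch(data, backend_module):
    ws = scot.Workspace({'model_order': 2}, reducedim=3, backend=backend_module.backend)
    ws.set_data(data)
    ws.do_mvarica()                  # source decomposition plus VAR fitting
    return ws.get_connectivity('S')  # (sources, sources, nfft) connectivity array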