Example #1
    def testFit(self):
        """ Test submodel fitting on instationary data
        """
        np.random.seed(42)

        # original model coefficients
        b01 = np.array([[0.0, 0], [0, 0]])
        b02 = np.array([[0.5, 0.3], [0.3, 0.5]])
        b03 = np.array([[0.1, 0.1], [0.1, 0.1]])
        t, m, l = 10, 2, 100

        noisefunc = lambda: np.random.normal(size=(1, m))**3 / 1e3

        var = VAR(1)
        var.coef = b01
        sources1 = var.simulate([l, t], noisefunc)
        var.coef = b02
        sources2 = var.simulate([l, t], noisefunc)
        var.coef = b03
        sources3 = var.simulate([l, t * 2], noisefunc)

        sources = np.vstack([sources1, sources2, sources3])
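        # class labels per trial: t trials from b01 (class 1), t trials from
        # b02 (class 2), and 2*t trials from b03 alternating between classes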
        cl = [1] * t + [2] * t + [1, 2] * t

        var = VAR(1)
        r_trial = varica.cspvarica(sources,
                                   var,
                                   cl,
                                   reducedim=None,
                                   varfit='trial')
        r_class = varica.cspvarica(sources,
                                   var,
                                   cl,
                                   reducedim=None,
                                   varfit='class')
        r_ensemble = varica.cspvarica(sources,
                                      var,
                                      cl,
                                      reducedim=None,
                                      varfit='ensemble')

        vars = [
            np.var(r.var_residuals) for r in [r_trial, r_class, r_ensemble]
        ]

        # class one consists of trials generated with b01 and b03
        # class two consists of trials generated with b02 and b03
        #
        # ensemble fitting cannot resolve any model -> highest residual variance
        # class fitting can only resolve (b01+b03) vs (b02+b03) -> medium residual variance
        # trial fitting can resolve all three models -> lowest residual variance
        print(vars)

        self.assertLess(vars[0], vars[1])
        self.assertLess(vars[1], vars[2])
Example #2
    def testModelIdentification(self):
        """ generate VAR signals, mix them, and see if MVARICA can reconstruct the signals
            do this for every backend """

        # original model coefficients
        b0 = np.zeros((3, 6))
        b0[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0], [-0.7, 0.0, 0.9, 0.0]]
        m0 = b0.shape[0]
        l, t = 1000, 100

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0))**3

        var = VAR(2)
        var.coef = b0
        sources = var.simulate([l, t], noisefunc)

        # simulate volume conduction... 3 sources measured with 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(sources, mix)

        backend_modules = [import_module('scot.' + b) for b in scot.backends]

        for bm in backend_modules:

            # apply MVARICA
            #  - default setting of 0.99 variance should reduce to 3 channels with this data
            #  - automatically determine delta (enough data, so it should most likely be 0)
            result = varica.mvarica(data,
                                    var,
                                    optimize_var=True,
                                    backend=bm.backend)

            # ICA does not define the ordering and sign of components
            # so we need to test all combinations to find whether one of them fits the original coefficients
            permutations = np.array([[0, 1, 2, 3, 4, 5], [0, 1, 4, 5, 2, 3],
                                     [2, 3, 4, 5, 0, 1], [2, 3, 0, 1, 4, 5],
                                     [4, 5, 0, 1, 2, 3], [4, 5, 2, 3, 0, 1]])
            signperms = np.array([[1, 1, 1, 1, 1, 1], [1, 1, 1, 1, -1, -1],
                                  [1, 1, -1, -1, 1, 1], [1, 1, -1, -1, -1, -1],
                                  [-1, -1, 1, 1, 1, 1], [-1, -1, 1, 1, -1, -1],
                                  [-1, -1, -1, -1, 1, 1],
                                  [-1, -1, -1, -1, -1, -1]])

            best, d = np.inf, None

            for perm in permutations:
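                # apply the candidate source permutation to the rows and the
                # lag-ordered columns of the estimated coefficient matrix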
                b = result.b.coef[perm[::2] // 2, :]
                b = b[:, perm]
                for sgn in signperms:
                    c = b * np.repeat([sgn], 3, 0) * np.repeat([sgn[::2]], 6,
                                                               0).T
                    err = np.sum((c - b0)**2)
                    if err < best:
                        best = err
                        d = c

            self.assertTrue(np.all(abs(d - b0) < 0.05))
Example #3
    def testModelIdentification(self):
        """ generate VAR signals, mix them, and see if MVARICA can reconstruct the signals
            do this for every backend """

        # original model coefficients
        b0 = np.zeros((3, 6))
        b0[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0],
                        [-0.7, 0.0, 0.9, 0.0]]
        m0 = b0.shape[0]
        l, t = 1000, 100

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0)) ** 3

        var = VAR(2)
        var.coef = b0
        sources = var.simulate([l, t], noisefunc)

        # simulate volume conduction... 3 sources measured with 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(sources, mix)

        backend_modules = [import_module('scot.' + b) for b in scot.backends]

        for bm in backend_modules:

            api = scot.Workspace({'model_order': 2}, backend=bm.backend)

            api.set_data(data)

            # apply MVARICA
            #  - default setting of 0.99 variance should reduce to 3 channels with this data
            #  - automatically determine delta (enough data, so it should most likely be 0)
            api.do_mvarica()
            #result = varica.mvarica(data, 2, delta='auto', backend=bm.backend)

            # ICA does not define the ordering and sign of components
            # so we need to test all combinations to find whether one of them fits the original coefficients
            permutations = np.array(
                [[0, 1, 2, 3, 4, 5], [0, 1, 4, 5, 2, 3], [2, 3, 4, 5, 0, 1], [2, 3, 0, 1, 4, 5], [4, 5, 0, 1, 2, 3],
                 [4, 5, 2, 3, 0, 1]])
            signperms = np.array(
                [[1, 1, 1, 1, 1, 1], [1, 1, 1, 1, -1, -1], [1, 1, -1, -1, 1, 1], [1, 1, -1, -1, -1, -1],
                 [-1, -1, 1, 1, 1, 1], [-1, -1, 1, 1, -1, -1], [-1, -1, -1, -1, 1, 1], [-1, -1, -1, -1, -1, -1]])

            best, d = np.inf, None

            for perm in permutations:
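                # apply the candidate source permutation to the rows and the
                # lag-ordered columns of the estimated coefficient matrix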
                b = api.var_.coef[perm[::2] // 2, :]
                b = b[:, perm]
                for sgn in signperms:
                    c = b * np.repeat([sgn], 3, 0) * np.repeat([sgn[::2]], 6, 0).T
                    err = np.sum((c - b0) ** 2)
                    if err < best:
                        best = err
                        d = c

            self.assertTrue(np.all(abs(d - b0) < 0.05))
Example #4
    def test_optimize(self):
        np.random.seed(745)
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        l = (100, 10)
        x = var0.simulate(l)

        var = VAR(-1)
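        # start with an invalid model order; optimize_order should recover the
        # true order (2) for any choice of n_jobs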
        for n_jobs in [None, -1, 1, 2]:
            var.optimize_order(x, verbose=True, n_jobs=n_jobs)
            self.assertEqual(var.p, 2)
Example #5
    def testFit(self):
        """ Test submodel fitting on instationary data
        """
        np.random.seed(42)

        # original model coefficients
        b01 = np.array([[0.0, 0], [0, 0]])
        b02 = np.array([[0.5, 0.3], [0.3, 0.5]])
        b03 = np.array([[0.1, 0.1], [0.1, 0.1]])
        t, m, l = 10, 2, 100

        noisefunc = lambda: np.random.normal(size=(1, m)) ** 3 / 1e3

        var = VAR(1)
        var.coef = b01
        sources1 = var.simulate([l, t], noisefunc)
        var.coef = b02
        sources2 = var.simulate([l, t], noisefunc)
        var.coef = b03
        sources3 = var.simulate([l, t * 2], noisefunc)

        sources = np.vstack([sources1, sources2, sources3])
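        # class labels per trial: t trials from b01 (class 1), t trials from
        # b02 (class 2), and 2*t trials from b03 alternating between classes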
        cl = [1] * t + [2] * t + [1, 2] * t

        var = VAR(1)
        r_trial = varica.cspvarica(sources, var, cl, reducedim=None, varfit='trial')
        r_class = varica.cspvarica(sources, var, cl, reducedim=None, varfit='class')
        r_ensemble = varica.cspvarica(sources, var, cl, reducedim=None, varfit='ensemble')

        vars = [np.var(r.var_residuals) for r in [r_trial, r_class, r_ensemble]]

        # class one consists of trials generated with b01 and b03
        # class two consists of trials generated with b02 and b03
        #
        # ensemble fitting cannot resolve any model -> highest residual variance
        # class fitting can only resolve (b01+b03) vs (b02+b03) -> medium residual variance
        # trial fitting can resolve all three models -> lowest residual variance
        print(vars)

        self.assertLess(vars[0], vars[1])
        self.assertLess(vars[1], vars[2])
Example #6
    def test_residuals(self):
        l = 100000
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        x = var0.simulate(l)

        var = VAR(2)
        var.fit(x)

        self.assertEqual(x.shape, var.residuals.shape)

        # the innovation process is white unit-variance noise, so the residual
        # covariance should be close to the identity matrix
        self.assertTrue(np.allclose(var.rescov, np.eye(var.rescov.shape[0]), 1e-2, 1e-2))
Example #7
    def test_bisection_overdetermined(self):
        np.random.seed(42)
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        l = (100, 10)
        x = var0.simulate(l)

        var = VAR(2)
        var.optimize_delta_bisection(x)

        # nice data, so the regularization should not be too strong.
        self.assertLess(var.delta, 10)
Example #8
    def testModelIdentification(self):
        """ generate VAR signals, mix them, and see if MVARICA can reconstruct the signals
            do this for every backend """

        # original model coefficients
        b0 = np.zeros((3, 6))
        b0[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0],
                        [-0.7, 0.0, 0.9, 0.0]]
        m0 = b0.shape[0]
        l, t = 1000, 100

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0)) ** 3 / 1e3

        var = VAR(2)
        var.coef = b0
        sources = var.simulate([l, t], noisefunc)

        # simulate volume conduction... 3 sources measured with 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(np.transpose(mix), sources)

        for backend_name, backend_gen in scot.backend.items():

            # apply MVARICA
            #  - default setting of 0.99 variance should reduce to 3 channels with this data
            #  - automatically determine delta (enough data, so it should most likely be 0)
            result = varica.mvarica(data, var, optimize_var=True, backend=backend_gen())

            # ICA does not define the ordering and sign of components
            # so we need to test all combinations to find whether one of them fits the original coefficients
            permutations = np.array(
                [[0, 1, 2, 3, 4, 5], [0, 1, 4, 5, 2, 3], [2, 3, 4, 5, 0, 1], [2, 3, 0, 1, 4, 5], [4, 5, 0, 1, 2, 3],
                 [4, 5, 2, 3, 0, 1]])
            signperms = np.array(
                [[1, 1, 1, 1, 1, 1], [1, 1, 1, 1, -1, -1], [1, 1, -1, -1, 1, 1], [1, 1, -1, -1, -1, -1],
                 [-1, -1, 1, 1, 1, 1], [-1, -1, 1, 1, -1, -1], [-1, -1, -1, -1, 1, 1], [-1, -1, -1, -1, -1, -1]])

            best, d = np.inf, None

            for perm in permutations:
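                # apply the candidate source permutation to the rows and the
                # lag-ordered columns of the estimated coefficient matrix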
                b = result.b.coef[perm[::2] // 2, :]
                b = b[:, perm]
                for sgn in signperms:
                    c = b * np.repeat([sgn], 3, 0) * np.repeat([sgn[::2]], 6, 0).T
                    err = np.sum((c - b0) ** 2)
                    if err < best:
                        best = err
                        d = c

            assert_allclose(d, b0, rtol=1e-2, atol=1e-2)
Example #9
    def test_bisection_underdetermined(self):
        n_trials, n_samples = 10, 10
        np.random.seed(42)
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        x = var0.simulate((n_samples, n_trials))
        x = np.concatenate([x, np.random.randn(n_trials, 8, n_samples)], axis=1)

        var = VAR(7)
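        # order 7 with 10 channels gives 70 coefficients per equation but far
        # fewer usable observations, so the problem is underdetermined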
        var.optimize_delta_bisection(x)

        # nice data, so the regularization should not be too weak.
        self.assertGreater(var.delta, 10)
Example #10
    def test_residuals(self):
        l = 100000
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        x = var0.simulate(l)

        var = VAR(2)
        var.fit(x)

        self.assertEqual(x.shape, var.residuals.shape)

        self.assertTrue(
            np.allclose(var.rescov, np.eye(var.rescov.shape[0]), 1e-2, 1e-2))
Example #11
    def testModelIdentification(self):
        """ generate independent signals, mix them, and see if ICA can reconstruct the mixing matrix
            do this for every backend """

        # original model coefficients
        b0 = np.zeros((3, 3))  # no connectivity
        m0 = b0.shape[0]
        l, t = 100, 100

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0))**3

        var = VAR(1)
        var.coef = b0
        sources = var.simulate([l, t], noisefunc)

        # simulate volume conduction... 3 sources measured with 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(sources, mix)

        backend_modules = [import_module('scot.' + b) for b in scot.backends]

        for bm in backend_modules:

            result = plainica.plainica(data, backend=bm.backend)

            i = result.mixing.dot(result.unmixing)
            self.assertTrue(
                np.allclose(i, np.eye(i.shape[0]), rtol=1e-6, atol=1e-7))

            permutations = [[0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0],
                            [2, 0, 1], [2, 1, 0]]

            bestdiff = np.inf
            bestmix = None

            absmix = np.abs(result.mixing)
            absmix /= np.max(absmix)

            for p in permutations:
                estmix = absmix[p, :]
                diff = np.sum((np.abs(estmix) - np.abs(mix))**2)

                if diff < bestdiff:
                    bestdiff = diff
                    bestmix = estmix

            self.assertTrue(np.allclose(bestmix, mix, rtol=1e-1, atol=1e-1))
Example #12
    def test_bisection_underdetermined(self):
        n_trials, n_samples = 10, 10
        np.random.seed(42)
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        x = var0.simulate((n_samples, n_trials))
        x = np.concatenate([x, np.random.randn(n_trials, 8, n_samples)],
                           axis=1)

        var = VAR(7)
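        # order 7 with 10 channels gives 70 coefficients per equation but far
        # fewer usable observations, so the problem is underdetermined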
        var.optimize_delta_bisection(x)

        # nice data, so the regularization should not be too weak.
        self.assertGreater(var.delta, 10)
Example #13
    def test_fit(self):
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        l = 100000
        x = var0.simulate(l)
        y = x.copy()

        var = VAR(2)
        var.fit(x)

        # make sure the input remains unchanged
        self.assertTrue(np.all(x == y))

        # that limit is rather generous, but we don't want tests to fail due to random variation
        self.assertTrue(np.all(np.abs(var0.coef - var.coef) < 0.02))
Example #14
    def testModelIdentification(self):
        """ generate independent signals, mix them, and see if ICA can reconstruct the mixing matrix
            do this for every backend """

        # original model coefficients
        b0 = np.zeros((3, 3))    # no connectivity
        m0 = b0.shape[0]
        l, t = 100, 100

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0)) ** 3

        var = VAR(1)
        var.coef = b0
        sources = var.simulate([l, t], noisefunc)

        # simulate volume conduction... 3 sources measured with 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(sources, mix)

        backend_modules = [import_module('scot.' + b) for b in scot.backends]

        for bm in backend_modules:

            result = plainica.plainica(data, backend=bm.backend)

            i = result.mixing.dot(result.unmixing)
            self.assertTrue(np.allclose(i, np.eye(i.shape[0]), rtol=1e-6, atol=1e-7))

            permutations = [[0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0], [2, 0, 1], [2, 1, 0]]

            bestdiff = np.inf
            bestmix = None

            absmix = np.abs(result.mixing)
            absmix /= np.max(absmix)

            for p in permutations:
                estmix = absmix[p, :]
                diff = np.sum((np.abs(estmix) - np.abs(mix)) ** 2)

                if diff < bestdiff:
                    bestdiff = diff
                    bestmix = estmix

            self.assertTrue(np.allclose(bestmix, mix, rtol=1e-1, atol=1e-1))
Example #15
    def test_fit_regularized(self):
        l = 100000
        var0 = VAR(2)
        var0.coef = np.array([[0.2, 0.1, 0.4, -0.1], [0.3, -0.2, 0.1, 0]])
        x = var0.simulate(l)
        y = x.copy()

        var = VAR(10, delta=1)
        var.fit(x)

        # make sure the input remains unchanged
        self.assertTrue(np.all(x == y))

        # expected coefficients: the true VAR(2) parameters occupy lags 1-2 in
        # each predictor channel's block of 10 lag columns; all other lags are zero
        b0 = np.zeros((2, 20))
        b0[:, 0:2] = var0.coef[:, 0:2]
        b0[:, 10:12] = var0.coef[:, 2:4]

        # that limit is rather generous, but we don't want tests to fail due to random variation
        self.assertTrue(np.all(np.abs(b0 - var.coef) < 0.02))
Example #16
    def test_source_selection(self):
        var = VAR(2)
        var.coef = np.random.randn(16, 4)
        x = var.simulate([500, 50], lambda: np.random.randn(16).dot(np.eye(16, 16)))
        api = scot.Workspace({"model_order": 2})
        api.set_data(x)
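        # no source decomposition has been computed yet, so source selection
        # must fail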
        self.assertRaises(RuntimeError, api.keep_sources, [0, 5, 11, 12])
        self.assertRaises(RuntimeError, api.remove_sources, [1, 2, 8, 14])

        # keep sources
        api.do_mvarica()
        api.keep_sources([0, 5, 11, 12])
        self.assertEqual(api.mixing_.shape, (4, 16))
        self.assertEqual(api.unmixing_.shape, (16, 4))

        # remove sources
        api.do_mvarica()
        api.remove_sources([1, 2, 8, 14])
        self.assertEqual(api.mixing_.shape, (12, 16))
        self.assertEqual(api.unmixing_.shape, (16, 12))
Example #17
    def test_source_selection(self):
        var = VAR(2)
        var.coef = np.random.randn(16, 4)
        x = var.simulate([500, 50],
                         lambda: np.random.randn(16).dot(np.eye(16, 16)))
        api = scot.Workspace({'model_order': 2})
        api.set_data(x)
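        # no source decomposition has been computed yet, so source selection
        # must fail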
        self.assertRaises(RuntimeError, api.keep_sources, [0, 5, 11, 12])
        self.assertRaises(RuntimeError, api.remove_sources, [1, 2, 8, 14])

        # keep sources
        api.do_mvarica()
        api.keep_sources([0, 5, 11, 12])
        self.assertEqual(api.mixing_.shape, (4, 16))
        self.assertEqual(api.unmixing_.shape, (16, 4))

        # remove sources
        api.do_mvarica()
        api.remove_sources([1, 2, 8, 14])
        self.assertEqual(api.mixing_.shape, (12, 16))
        self.assertEqual(api.unmixing_.shape, (16, 12))
Example #18
"""
This example demonstrates that it is possible to reconstruct sources even if we
include a PCA step in the process.
"""

from __future__ import print_function

import numpy as np

from scot.pca import pca

from scot.var import VAR

# Generate data from a VAR(1) process
model0 = VAR(1)
model0.coef = np.array([[0.3, -0.6], [0, -0.9]])
x = model0.simulate(10000).squeeze()

# Transform data with PCA
w, v = pca(x)
y = x.dot(w)

print('Covariance of x:\n', np.cov(x.squeeze().T))
print('\nCovariance of y:\n', np.cov(y.squeeze().T))

model1, model2 = VAR(1), VAR(1)

# Fit model1 to the original data
model1.fit(x)

# Fit model2 to the PCA transformed data
model2.fit(y)
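
# A minimal follow-up check (not part of the original snippet): compare the
# coefficients fitted on the raw and on the PCA-transformed data. Both
# describe the same VAR(1) process in different coordinates, so in general
# they differ by the transform w.
print('\nCoefficients of model1:\n', model1.coef)
print('\nCoefficients of model2:\n', model2.coef)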
Example #19
    def generate_data():
        # two-channel VAR(2) test process: 100 samples per trial, 100 trials
        var = VAR(2)
        var.coef = np.array([[0.2, 0.1, 0, 0], [0.7, -0.4, 0.1, 0]])
        l = (100, 100)
        x = var.simulate(l)
        return x, var
Example #20
    def testFunctionality(self):
        """ generate VAR signals, and apply the api to them
            do this for every backend """
        np.random.seed(3141592)

        # original model coefficients
        b01 = np.zeros((3, 6))
        b02 = np.zeros((3, 6))
        b01[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0],
                         [-0.7, 0.0, 0.9, 0.0]]
        b02[0:3, 2:6] = [[0.4, 0.0, 0.0, 0.0],
                         [0.4, 0.0, 0.4, 0.0],
                         [0.0, 0.0, 0.4, 0.0]]
        m0 = b01.shape[0]
        cl = np.array([0, 1, 0, 1, 0, 0, 1, 1, 1, 0])
        l = 200
        t = len(cl)

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0)) ** 3

        var = VAR(2)
        var.coef = b01
        sources1 = var.simulate([l, sum(cl == 0)], noisefunc)
        var.coef = b02
        sources2 = var.simulate([l, sum(cl == 1)], noisefunc)

        var.fit(sources1)
        var.fit(sources2)

        sources = np.zeros((l, m0, t))

        sources[:, :, cl == 0] = sources1
        sources[:, :, cl == 1] = sources2

        # simulate volume conduction... 3 sources measured with 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(sources, mix)

        backend_modules = [import_module('scot.' + b) for b in scot.backends]

        for bm in backend_modules:
            np.random.seed(3141592)  # reset random seed so we're independent of module order

            api = scot.Workspace({'model_order': 2}, reducedim=3, backend=bm.backend)

            api.set_data(data)

            api.do_ica()

            self.assertEqual(api.mixing_.shape, (3, 7))
            self.assertEqual(api.unmixing_.shape, (7, 3))

            api.do_mvarica()

            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))

            api.set_data(data)

            api.fit_var()

            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))
            self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (3, 3, 512, (l-100)//50))

            api.set_data(data, cl)
            
            self.assertFalse(np.any(np.isnan(api.data_)))
            self.assertFalse(np.any(np.isinf(api.data_)))
            
            api.do_cspvarica()
            
            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))
            
            for c in np.unique(cl):
                api.set_used_labels([c])

                api.fit_var()
                fc = api.get_connectivity('S')
                self.assertEqual(fc.shape, (3, 3, 512))

                tfc = api.get_tf_connectivity('S', 100, 50)
                self.assertEqual(tfc.shape, (3, 3, 512, (l-100)//50))

            api.set_data(data)
            api.remove_sources([0, 2])
            api.fit_var()
            self.assertEqual(api.get_connectivity('S').shape, (1, 1, 512))
            self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (1, 1, 512, (l-100)//50))

            try:
                api.optimize_var()
            except NotImplementedError:
                pass
            api.fit_var()
            self.assertEqual(api.get_connectivity('S').shape, (1, 1, 512))
            self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (1, 1, 512, (l-100)//50))
Example #21
    def testFunctionality(self):
        """ generate VAR signals, and apply the api to them
            do this for every backend """
        np.random.seed(3141592)

        # original model coefficients
        b01 = np.zeros((3, 6))
        b02 = np.zeros((3, 6))
        b01[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0], [-0.7, 0.0, 0.9, 0.0]]
        b02[0:3, 2:6] = [[0.4, 0.0, 0.0, 0.0], [0.4, 0.0, 0.4, 0.0],
                         [0.0, 0.0, 0.4, 0.0]]
        m0 = b01.shape[0]
        cl = np.array([0, 1, 0, 1, 0, 0, 1, 1, 1, 0])
        l = 200
        t = len(cl)

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0))**3

        var = VAR(2)
        var.coef = b01
        sources1 = var.simulate([l, sum(cl == 0)], noisefunc)
        var.coef = b02
        sources2 = var.simulate([l, sum(cl == 1)], noisefunc)

        var.fit(sources1)
        var.fit(sources2)

        sources = np.zeros((l, m0, t))

        sources[:, :, cl == 0] = sources1
        sources[:, :, cl == 1] = sources2

        # simulate volume conduction... 3 sources measured with 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(sources, mix)

        backend_modules = [import_module('scot.' + b) for b in scot.backends]

        for bm in backend_modules:
            np.random.seed(3141592)  # reset random seed so we're independent of module order

            api = scot.Workspace({'model_order': 2},
                                 reducedim=3,
                                 backend=bm.backend)

            api.set_data(data)

            api.do_ica()

            self.assertEqual(api.mixing_.shape, (3, 7))
            self.assertEqual(api.unmixing_.shape, (7, 3))

            api.do_mvarica()

            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))

            api.set_data(data)

            api.fit_var()

            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))
            self.assertEqual(
                api.get_tf_connectivity('S', 100, 50).shape,
                (3, 3, 512, (l - 100) // 50))

            api.set_data(data, cl)

            self.assertFalse(np.any(np.isnan(api.data_)))
            self.assertFalse(np.any(np.isinf(api.data_)))

            api.do_cspvarica()

            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))

            for c in np.unique(cl):
                api.set_used_labels([c])

                api.fit_var()
                fc = api.get_connectivity('S')
                self.assertEqual(fc.shape, (3, 3, 512))

                tfc = api.get_tf_connectivity('S', 100, 50)
                self.assertEqual(tfc.shape, (3, 3, 512, (l - 100) // 50))

            api.set_data(data)
            api.remove_sources([0, 2])
            api.fit_var()
            self.assertEqual(api.get_connectivity('S').shape, (1, 1, 512))
            self.assertEqual(
                api.get_tf_connectivity('S', 100, 50).shape,
                (1, 1, 512, (l - 100) // 50))

            try:
                api.optimize_var()
            except NotImplementedError:
                pass
            api.fit_var()
            self.assertEqual(api.get_connectivity('S').shape, (1, 1, 512))
            self.assertEqual(
                api.get_tf_connectivity('S', 100, 50).shape,
                (1, 1, 512, (l - 100) // 50))
Example #22
    def testFunctionality(self):
        """ generate VAR signals, and apply the api to them
            do this for every backend """
        np.random.seed(3141592)

        # original model coefficients
        b01 = np.zeros((3, 6))
        b02 = np.zeros((3, 6))
        b01[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0],
                         [-0.7, 0.0, 0.9, 0.0]]
        b02[0:3, 2:6] = [[0.4, 0.0, 0.0, 0.0],
                         [0.4, 0.0, 0.4, 0.0],
                         [0.0, 0.0, 0.4, 0.0]]
        m0 = b01.shape[0]
        cl = np.array([0, 1, 0, 1, 0, 0, 1, 1, 1, 0])
        l = 200
        t = len(cl)

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0)) ** 3 / 1e3

        var = VAR(2)
        var.coef = b01
        sources1 = var.simulate([l, sum(cl == 0)], noisefunc)
        var.coef = b02
        sources2 = var.simulate([l, sum(cl == 1)], noisefunc)

        var.fit(sources1)
        var.fit(sources2)

        sources = np.zeros((t, m0, l))

        sources[cl == 0, :, :] = sources1
        sources[cl == 1, :, :] = sources2

        # simulate volume conduction... 3 sources smeared over 7 channels
        mix = [[0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
               [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
               [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5]]
        data = datatools.dot_special(np.transpose(mix), sources)
        data += np.random.randn(*data.shape) * 0.001  # add small noise

        for backend_name, backend_gen in scot.backend.items():
            np.random.seed(3141592)  # reset random seed so we're independent of module order

            api = scot.Workspace({'model_order': 2}, reducedim=3, backend=backend_gen())

            api.set_data(data)

            api.do_ica()

            self.assertEqual(api.mixing_.shape, (3, 7))
            self.assertEqual(api.unmixing_.shape, (7, 3))

            api.do_mvarica()

            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))

            api.set_data(data)

            api.fit_var()

            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))
            self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (3, 3, 512, (l-100)//50))
            
            tfc1 = api.get_tf_connectivity('PDC', 100, 5, baseline=None)        # no baseline
            tfc2 = api.get_tf_connectivity('PDC', 100, 5, baseline=[110, -10])  # invalid baseline
            tfc3 = api.get_tf_connectivity('PDC', 100, 5, baseline=[0, 0])      # one-window baseline
            tfc4 = tfc1 - tfc1[:, :, :, [0]]
            tfc5 = api.get_tf_connectivity('PDC', 100, 5, baseline=[-np.inf, np.inf])  # full trial baseline
            tfc6 = tfc1 - np.mean(tfc1, axis=3, keepdims=True)
            self.assertTrue(np.allclose(tfc1, tfc2))
            self.assertTrue(np.allclose(tfc3, tfc4))
            self.assertTrue(np.allclose(tfc5, tfc6, rtol=1e-05, atol=1e-06))

            api.set_data(data, cl)
            
            self.assertFalse(np.any(np.isnan(api.data_)))
            self.assertFalse(np.any(np.isinf(api.data_)))
            
            api.do_cspvarica()
            
            self.assertEqual(api.get_connectivity('S').shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))
            
            for c in np.unique(cl):
                api.set_used_labels([c])

                api.fit_var()
                fc = api.get_connectivity('S')
                self.assertEqual(fc.shape, (3, 3, 512))

                tfc = api.get_tf_connectivity('S', 100, 50)
                self.assertEqual(tfc.shape, (3, 3, 512, (l-100)//50))

            api.set_data(data)
            api.remove_sources([0, 2])
            api.fit_var()
            self.assertEqual(api.get_connectivity('S').shape, (1, 1, 512))
            self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (1, 1, 512, (l-100)//50))

            try:
                api.optimize_var()
            except NotImplementedError:
                pass
            api.fit_var()
            self.assertEqual(api.get_connectivity('S').shape, (1, 1, 512))
            self.assertEqual(api.get_tf_connectivity('S', 100, 50).shape, (1, 1, 512, (l-100)//50))
Example #23
    def testFunctionality(self):
        """ generate VAR signals, and apply the api to them
            do this for every backend """
        np.random.seed(3141592)

        # original model coefficients
        b01 = np.zeros((3, 6))
        b02 = np.zeros((3, 6))
        b01[1:3, 2:6] = [[0.4, -0.2, 0.3, 0.0], [-0.7, 0.0, 0.9, 0.0]]
        b02[0:3, 2:6] = [[0.4, 0.0, 0.0, 0.0], [0.4, 0.0, 0.4, 0.0], [0.0, 0.0, 0.4, 0.0]]
        m0 = b01.shape[0]
        cl = np.array([0, 1, 0, 1, 0, 0, 1, 1, 1, 0])
        l = 200
        t = len(cl)

        # generate VAR sources with non-gaussian innovation process, otherwise ICA won't work
        noisefunc = lambda: np.random.normal(size=(1, m0)) ** 3 / 1e3

        var = VAR(2)
        var.coef = b01
        sources1 = var.simulate([l, sum(cl == 0)], noisefunc)
        var.coef = b02
        sources2 = var.simulate([l, sum(cl == 1)], noisefunc)

        var.fit(sources1)
        var.fit(sources2)

        sources = np.zeros((t, m0, l))

        sources[cl == 0, :, :] = sources1
        sources[cl == 1, :, :] = sources2

        # simulate volume conduction... 3 sources smeared over 7 channels
        mix = [
            [0.5, 1.0, 0.5, 0.2, 0.0, 0.0, 0.0],
            [0.0, 0.2, 0.5, 1.0, 0.5, 0.2, 0.0],
            [0.0, 0.0, 0.0, 0.2, 0.5, 1.0, 0.5],
        ]
        data = datatools.dot_special(np.transpose(mix), sources)
        data += np.random.randn(*data.shape) * 0.001  # add small noise

        for backend_name, backend_gen in scot.backend.items():
            np.random.seed(3141592)  # reset random seed so we're independent of module order

            api = scot.Workspace({"model_order": 2}, reducedim=3, backend=backend_gen())

            api.set_data(data)

            api.do_ica()

            self.assertEqual(api.mixing_.shape, (3, 7))
            self.assertEqual(api.unmixing_.shape, (7, 3))

            api.do_mvarica()

            self.assertEqual(api.get_connectivity("S").shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))

            api.set_data(data)

            api.fit_var()

            self.assertEqual(api.get_connectivity("S").shape, (3, 3, 512))
            self.assertEqual(api.get_tf_connectivity("S", 100, 50).shape, (3, 3, 512, (l - 100) // 50))

            tfc1 = api.get_tf_connectivity("PDC", 100, 5, baseline=None)  # no baseline
            tfc2 = api.get_tf_connectivity("PDC", 100, 5, baseline=[110, -10])  # invalid baseline
            tfc3 = api.get_tf_connectivity("PDC", 100, 5, baseline=[0, 0])  # one-window baseline
            tfc4 = tfc1 - tfc1[:, :, :, [0]]
            tfc5 = api.get_tf_connectivity("PDC", 100, 5, baseline=[-np.inf, np.inf])  # full trial baseline
            tfc6 = tfc1 - np.mean(tfc1, axis=3, keepdims=True)
            self.assertTrue(np.allclose(tfc1, tfc2))
            self.assertTrue(np.allclose(tfc3, tfc4))
            self.assertTrue(np.allclose(tfc5, tfc6, rtol=1e-05, atol=1e-06))

            api.set_data(data, cl)

            self.assertFalse(np.any(np.isnan(api.data_)))
            self.assertFalse(np.any(np.isinf(api.data_)))

            api.do_cspvarica()

            self.assertEqual(api.get_connectivity("S").shape, (3, 3, 512))

            self.assertFalse(np.any(np.isnan(api.activations_)))
            self.assertFalse(np.any(np.isinf(api.activations_)))

            for c in np.unique(cl):
                api.set_used_labels([c])

                api.fit_var()
                fc = api.get_connectivity("S")
                self.assertEqual(fc.shape, (3, 3, 512))

                tfc = api.get_tf_connectivity("S", 100, 50)
                self.assertEqual(tfc.shape, (3, 3, 512, (l - 100) // 50))

            api.set_data(data)
            api.remove_sources([0, 2])
            api.fit_var()
            self.assertEqual(api.get_connectivity("S").shape, (1, 1, 512))
            self.assertEqual(api.get_tf_connectivity("S", 100, 50).shape, (1, 1, 512, (l - 100) // 50))

            try:
                api.optimize_var()
            except NotImplementedError:
                pass
            api.fit_var()
            self.assertEqual(api.get_connectivity("S").shape, (1, 1, 512))
            self.assertEqual(api.get_tf_connectivity("S", 100, 50).shape, (1, 1, 512, (l - 100) // 50))
Example #24
File: pca.py Project: cbrnr/scot
"""
This example demonstrates that it is possible to reconstruct sources even if we
include a PCA step in the process.
"""

from __future__ import print_function

import numpy as np

from scot.pca import pca
from scot.var import VAR


# Set random seed for repeatable results
np.random.seed(42)

# Generate data from a VAR(1) process
model0 = VAR(1)
model0.coef = np.array([[0.3, -0.6], [0, -0.9]])
x = model0.simulate(10000).squeeze()

# Transform data with PCA
w, v = pca(x)
y = np.dot(w.T, x)

# Verify that transformed data y is decorrelated
print("Covariance of x:\n", np.cov(x.squeeze()))
print("\nCovariance of y:\n", np.cov(y.squeeze()))

model1, model2 = VAR(1), VAR(1)

# Fit model1 to the original data
model1.fit(x)

# Fit model2 to the PCA transformed data
model2.fit(y)