Example 1
    def _initialize_with_pca(self,
                             datas,
                             inputs=None,
                             masks=None,
                             tags=None,
                             num_iters=20):
        Keff = 1 if self.single_subspace else self.K

        # First solve a linear regression for data given input
        if self.M > 0:
            from sklearn.linear_model import LinearRegression
            lr = LinearRegression(fit_intercept=False)
            lr.fit(np.vstack(inputs), np.vstack(datas))
            self.Fs = np.tile(lr.coef_[None, :, :], (Keff, 1, 1))

        # Compute the residual after accounting for the input
        # (if there is no input, M == 0 and the data itself is the residual)
        if self.M > 0:
            resids = [data - np.dot(inp, self.Fs[0].T)
                      for data, inp in zip(datas, inputs)]
        else:
            resids = datas

        # Run PCA to get a linear embedding of the data
        pca, xs, ll = pca_with_imputation(self.D,
                                          resids,
                                          masks,
                                          num_iters=num_iters)

        self.Cs = np.tile(pca.components_.T[None, :, :], (Keff, 1, 1))
        self.ds = np.tile(pca.mean_[None, :], (Keff, 1))

        return pca
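
For reference, `pca_with_imputation` is assumed here to alternate between fitting PCA and imputing masked entries from the reconstruction. The following is a minimal sketch of that idea under that assumption, not the library's actual implementation:

import numpy as np
from sklearn.decomposition import PCA

def pca_with_imputation(D, datas, masks, num_iters=20):
    # Stack all sequences; mask[t, n] == True means the entry is observed
    data = np.concatenate(datas).astype(float)
    mask = np.concatenate(masks).astype(bool)

    if np.any(~mask):
        # Initialize missing entries with the observed column means
        col_means = np.nanmean(np.where(mask, data, np.nan), axis=0)
        data[~mask] = col_means[np.where(~mask)[1]]
        for _ in range(num_iters):
            pca = PCA(n_components=D).fit(data)
            recon = pca.inverse_transform(pca.transform(data))
            data[~mask] = recon[~mask]  # impute from the PCA reconstruction

    pca = PCA(n_components=D).fit(data)
    x = pca.transform(data)

    # Split the continuous states back into one array per sequence
    xs = np.split(x, np.cumsum([len(d) for d in datas])[:-1])
    ll = pca.score(data)  # average log-likelihood under the probabilistic PCA model
    return pca, xs, ll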
Example 2
    def _initialize_with_pca(self, datas, masks, num_iters=20):
        pca, xs = pca_with_imputation(self.D, datas, masks, num_iters=num_iters)
        Keff = 1 if self.single_subspace else self.K
        self.Cs = np.tile(pca.components_.T[None, :, :], (Keff, 1, 1))
        self.ds = np.tile(pca.mean_[None, :], (Keff, 1))

        return pca
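
A quick illustration of the tiling above, with made-up sizes: `pca.components_.T` has shape (N, D), so `Cs` ends up holding one (N, D) emission matrix per discrete state.

import numpy as np

N, D, Keff = 4, 2, 3
comp_T = np.zeros((N, D))                      # stand-in for pca.components_.T
Cs = np.tile(comp_T[None, :, :], (Keff, 1, 1))
print(Cs.shape)                                # (3, 4, 2): one C matrix per state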
Example 3
    def _initialize_with_pca(self,
                             datas,
                             inputs=None,
                             masks=None,
                             tags=None,
                             num_iters=20):
        Keff = 1 if self.single_subspace else self.K

        # First solve a linear regression for data given input
        if self.M > 0:
            from sklearn.linear_model import LinearRegression
            lr = LinearRegression(fit_intercept=False)
            lr.fit(np.vstack(inputs), np.vstack(datas))
            self.Fs = np.tile(lr.coef_[None, :, :], (Keff, 1, 1))

        # Compute the residual after accounting for the input
        # (if there is no input, M == 0 and the data itself is the residual)
        if self.M > 0:
            resids = [data - np.dot(inp, self.Fs[0].T)
                      for data, inp in zip(datas, inputs)]
        else:
            resids = datas

        # Run PCA to get a linear embedding of the data with the maximum effective dimension
        pca, xs, ll = pca_with_imputation(min(self.D * Keff, self.N),
                                          resids,
                                          masks,
                                          num_iters=num_iters)

        # Assign each state a random projection of these dimensions
        # (npr is numpy.random, imported at module level)
        Cs, ds = [], []
        for k in range(Keff):
            weights = npr.randn(self.D, self.D * Keff)
            weights = np.linalg.svd(weights, full_matrices=False)[2]  # orthonormal rows
            Cs.append((weights @ pca.components_).T)
            ds.append(pca.mean_)

        # Stack the per-state emission parameters
        self.Cs = np.array(Cs)
        self.ds = np.array(ds)

        return pca
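
The SVD step above orthonormalizes the rows of the random weights, so each state's projection mixes the D * Keff principal directions through an orthonormal map. A small check of that property, with made-up sizes:

import numpy as np
import numpy.random as npr

D, Keff = 3, 4
weights = npr.randn(D, D * Keff)
weights = np.linalg.svd(weights, full_matrices=False)[2]  # Vt: (D, D*Keff)
print(np.allclose(weights @ weights.T, np.eye(D)))        # True: orthonormal rows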
Example 4
os.chdir("/Users/scott/Projects/zimmer")
ys, masks, z_trues, z_key, neuron_names = load_kato_data(include_unnamed=False,
                                                         signal="dff")
os.chdir(tmp)  # restore the working directory saved earlier in the script

# Preprocess the worm data
ys = [trend_filter(y) for y in ys]

K_true = len(z_key)
N = ys[0].shape[1]
W = len(ys)
Ts = [y.shape[0] for y in ys]

# Run PCA (with imputation for masked entries) to get low-dimensional continuous states
D = 10
pca, xs, lls = pca_with_imputation(D, ys, masks, num_iters=20)

# Fit a MoG to one of the worms
x = xs[-1]

# Plot the first two continuous state dimensions
plt.plot(x[:, 0], x[:, 1])

# Fit a 100-component mixture of Gaussians to the continuous states
mog = MixtureOfGaussians(100, D)
mog.fit(x)
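
`MixtureOfGaussians` here comes from the surrounding project; as a rough stand-in, a standard Gaussian mixture would play the same role (sklearn shown purely for illustration, not the original class):

from sklearn.mixture import GaussianMixture

# Fit a 100-component Gaussian mixture to the continuous states (assumed analogue)
gmm = GaussianMixture(n_components=100, covariance_type="full")
gmm.fit(x)              # x: (T, D) array of continuous states
z_hat = gmm.predict(x)  # most likely component per time bin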