Example #1
0
File: ip.py Project: jahuth/ni
 def x(self, in_spikes):
        """Convert data into a dependent-variable time series.

        Selects the cell chosen in the configuration and extracts only that
        timeseries. Inputs that are not ``Data`` instances pass through
        unchanged.
        """
        if not isinstance(in_spikes, (Data, ni.data.data.Data)):
                return in_spikes
        data = in_spikes
        if data.nr_cells == 1:
                series = data.getFlattend()
        else:
                series = data.cell(self.configuration.cell).getFlattend()
        return series.squeeze()
 def x(self, in_spikes):
        """Convert data into a dependent-variable time series.

        Picks the configured cell and returns its flattened timeseries;
        anything that is not a ``Data`` instance is returned as-is.
        """
        is_data = isinstance(in_spikes, Data) or isinstance(in_spikes, ni.data.data.Data)
        if not is_data:
                return in_spikes
        if in_spikes.nr_cells == 1:
                flat = in_spikes.getFlattend()
        else:
                flat = in_spikes.cell(self.configuration.cell).getFlattend()
        return flat.squeeze()
Example #3
0
File: ip.py Project: jahuth/ni
        def fit(self, data=None, beta=None, x=None, dm=None, nr_trials=None):
                """
                Fits the model.

                        **data** `ni.data.data.Data` instance (optional; if given, the
                        design matrix ``dm`` and dependent variable ``x`` are derived
                        from it unless supplied explicitly)

                        **beta** optional pre-computed coefficients; if given, no
                        fitting is performed and the coefficients are wrapped directly

                        **nr_trials** fallback trial count used to normalize the
                        log-likelihood when ``data`` has no ``nr_trials`` attribute

                Returns a ``FittedModel``.

                example::

                        from scipy.ndimage import gaussian_filter
                        import ni
                        model = ni.model.ip.Model(ni.model.ip.Configuration({'crosshistory':False}))
                        data = ni.data.monkey.Data()
                        data = data.condition(0).trial(range(int(data.nr_trials/2)))
                        dm = model.dm(data)
                        x = model.x(data)
                        from sklearn import linear_model
                        betas = []
                        fm = model.fit(data)
                        betas.append(fm.beta)
                        print "fitted."
                        for clf in [linear_model.LinearRegression(), linear_model.RidgeCV(alphas=[0.1, 1.0, 10.0])]:
                                clf.fit(dm,x)
                                betas.append(clf.coef_)

                                figure()
                                plot(clf.coef_.transpose(),'.')
                                title('coefficients')
                                prediction = np.dot(dm,clf.coef_.transpose())
                                figure()
                                plot(prediction)
                                title('prediction')
                                ll = x * log(prediction) + (len(x)-x)*log(1-prediction)
                                figure()
                                plot(ll)
                                title('ll')
                                print np.sum(ll)

                """
                fittedmodel = FittedModel(self)
                fittedmodel.configuration = copy(self.configuration)

                # Resolve the "log kernel" shorthand into an actual spline basis.
                fittedmodel.history_kernel = self.history_kernel
                if fittedmodel.history_kernel == "log kernel":
                        fittedmodel.history_kernel = cs.create_splines_logspace(self.configuration.history_length, self.configuration.knot_number, 0)

                if data is not None:
                        spike_train_all_trial = data.getFlattend()
                        fittedmodel.firing_rate = np.mean(spike_train_all_trial)
                        fittedmodel.trial_length = data.time_bins
                        fittedmodel.design = self.generateDesignMatrix(data, data.time_bins)
                        if dm is None:
                                dm = self.dm(data, fittedmodel.design)
                        if x is None:
                                x = self.x(data)

                # Count nonzero entries per design-matrix column (np.where on the
                # transposed matrix yields column indices of nonzero cells).
                w = np.where(dm.transpose())[0]
                cnt = [np.sum(w == i) for i in range(dm.shape[1])]

                # NOTE(review): the log texts label fully-nonzero columns as
                # "only 0" — wording looks inverted, but kept verbatim.
                if sum(np.array(cnt) >= dm.shape[0]) > 0:
                        log("!! "+str(sum(np.array(cnt) == dm.shape[0]))+ " Components are only 0. \n"+str(sum(np.array(cnt) <= dm.shape[0]*0.1))+" are mostly 0. "+str(sum(np.array(cnt) <= dm.shape[0]*0.5))+" are half 0.")
                else:
                        log(""+str(sum(np.array(cnt) == dm.shape[0]))+ " Components are only 0. \n"+str(sum(np.array(cnt) <= dm.shape[0]*0.1))+" are mostly 0. "+str(sum(np.array(cnt) <= dm.shape[0]*0.5))+" are half 0.")
                # Columns that are entirely zero cannot be identified by the fit;
                # their coefficients are forced to 0 below.
                zeroed_components = [i for i in range(len(cnt)) if cnt[i] == 0]

                backend_config = self.configuration.backend_config
                fittedmodel.backend_model = self.backend.Model(backend_config)
                if beta is None:
                        fit = fittedmodel.backend_model.fit(x, dm) # NOTE frequently produces LinAlgError: SVD did not converge
                        for z in zeroed_components:
                                fit.params[z] = 0
                        fittedmodel.beta = fit.params
                else:
                        # Coefficients supplied by the caller: wrap without refitting.
                        fit = self.backend.Fit(f=None, m=fittedmodel.backend_model)
                        fit.params = beta
                        fittedmodel.beta = beta
                fittedmodel.fit = fit
                # Preserve the total log-likelihood in "llf_all" and normalize
                # "llf" to a per-trial value when a trial count is available.
                if "llf" in fit.statistics:
                        fit.statistics["llf_all"] = fit.statistics["llf"]
                        if hasattr(data, 'nr_trials'):
                                fit.statistics["llf"] = fit.statistics["llf"]/data.nr_trials
                        elif nr_trials is not None:
                                fit.statistics["llf"] = fit.statistics["llf"]/nr_trials
                fittedmodel.statistics = fit.statistics
                return fittedmodel
        def fit(self, data=None, beta=None, x=None, dm=None, nr_trials=None):
                """
                Fits the model.

                        **data** `ni.data.data.Data` instance (optional; ``dm`` and
                        ``x`` are derived from it unless passed in explicitly)

                        **beta** optional pre-computed coefficients; when given, no
                        actual fitting happens

                        **nr_trials** fallback trial count for log-likelihood
                        normalization when ``data`` lacks ``nr_trials``

                Returns a ``FittedModel``.

                example::

                        from scipy.ndimage import gaussian_filter
                        import ni
                        model = ni.model.ip.Model(ni.model.ip.Configuration({'crosshistory':False}))
                        data = ni.data.monkey.Data()
                        data = data.condition(0).trial(range(int(data.nr_trials/2)))
                        dm = model.dm(data)
                        x = model.x(data)
                        from sklearn import linear_model
                        betas = []
                        fm = model.fit(data)
                        betas.append(fm.beta)
                        print "fitted."
                        for clf in [linear_model.LinearRegression(), linear_model.RidgeCV(alphas=[0.1, 1.0, 10.0])]:
                                clf.fit(dm,x)
                                betas.append(clf.coef_)

                                figure()
                                plot(clf.coef_.transpose(),'.')
                                title('coefficients')
                                prediction = np.dot(dm,clf.coef_.transpose())
                                figure()
                                plot(prediction)
                                title('prediction')
                                ll = x * log(prediction) + (len(x)-x)*log(1-prediction)
                                figure()
                                plot(ll)
                                title('ll')
                                print np.sum(ll)

                """
                fittedmodel = FittedModel(self)
                fittedmodel.configuration = copy(self.configuration)

                # Expand the "log kernel" shorthand into a concrete spline basis.
                fittedmodel.history_kernel = self.history_kernel
                if fittedmodel.history_kernel == "log kernel":
                        fittedmodel.history_kernel = cs.create_splines_logspace(self.configuration.history_length, self.configuration.knot_number, 0)

                if data is not None:
                        spike_train_all_trial = data.getFlattend()
                        fittedmodel.firing_rate = np.mean(spike_train_all_trial)
                        fittedmodel.trial_length = data.time_bins
                        fittedmodel.design = self.generateDesignMatrix(data, data.time_bins)
                        if dm is None:
                                dm = self.dm(data, fittedmodel.design)
                        if x is None:
                                x = self.x(data)

                # Per-column nonzero counts of the design matrix (np.where on the
                # transpose returns column indices of nonzero cells).
                w = np.where(dm.transpose())[0]
                cnt = [np.sum(w == i) for i in range(dm.shape[1])]

                # NOTE(review): message wording ("only 0" for fully-nonzero
                # columns) looks inverted; kept byte-identical on purpose.
                if sum(np.array(cnt) >= dm.shape[0]) > 0:
                        log("!! "+str(sum(np.array(cnt) == dm.shape[0]))+ " Components are only 0. \n"+str(sum(np.array(cnt) <= dm.shape[0]*0.1))+" are mostly 0. "+str(sum(np.array(cnt) <= dm.shape[0]*0.5))+" are half 0.")
                else:
                        log(""+str(sum(np.array(cnt) == dm.shape[0]))+ " Components are only 0. \n"+str(sum(np.array(cnt) <= dm.shape[0]*0.1))+" are mostly 0. "+str(sum(np.array(cnt) <= dm.shape[0]*0.5))+" are half 0.")
                # All-zero columns carry no information; zero their coefficients.
                zeroed_components = [i for i in range(len(cnt)) if cnt[i] == 0]

                backend_config = self.configuration.backend_config
                fittedmodel.backend_model = self.backend.Model(backend_config)
                if beta is None:
                        fit = fittedmodel.backend_model.fit(x, dm) # NOTE frequently produces LinAlgError: SVD did not converge
                        for z in zeroed_components:
                                fit.params[z] = 0
                        fittedmodel.beta = fit.params
                else:
                        # Caller supplied beta: wrap it in a Fit object, skip fitting.
                        fit = self.backend.Fit(f=None, m=fittedmodel.backend_model)
                        fit.params = beta
                        fittedmodel.beta = beta
                fittedmodel.fit = fit
                # Keep the total log-likelihood under "llf_all" and store a
                # per-trial value under "llf" when a trial count is known.
                if "llf" in fit.statistics:
                        fit.statistics["llf_all"] = fit.statistics["llf"]
                        if hasattr(data, 'nr_trials'):
                                fit.statistics["llf"] = fit.statistics["llf"]/data.nr_trials
                        elif nr_trials is not None:
                                fit.statistics["llf"] = fit.statistics["llf"]/nr_trials
                fittedmodel.statistics = fit.statistics
                return fittedmodel