def __init__(self, spikes, history_length=100, knot_number=5, order_flag=1, kernel=False):
    # If no kernel is supplied, fall back to a log-spaced spline kernel.
    if type(kernel) == bool:
        self.history_kernel = cs.create_splines_logspace(history_length, knot_number, 0)
    else:
        self.history_kernel = kernel
    self.matrix = "Not initialized!"
    try:
        if order_flag == 1:
            # First-order model: convolve the spike train with the history kernel.
            matrix = convolve_spikes(spikes, self.history_kernel)
            self.matrix = matrix[:len(spikes)]
        else:
            # Higher-order model: build a covariate design matrix from the kernel first,
            # then convolve the spike train with it.
            self.covariate_matrix, self.covariates, self.morder = cdm.create_design_matrix_vk(
                self.history_kernel, order_flag)
            matrix = scipy.signal.convolve2d(spikes, self.covariate_matrix)
            self.matrix = matrix[:len(spikes)]
    except Exception:
        err(spikes)
        err(spikes.shape)
        err(self.history_kernel)
        report()
        raise
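# A minimal usage sketch for the constructor above. `HistoryDesignMatrix` is a
# hypothetical stand-in for the enclosing class name; `cs.create_splines_logspace`
# and `convolve_spikes` are assumed to be this module's spline and convolution helpers.
#
#     import numpy as np
#     spikes = (np.random.rand(1000, 1) < 0.05).astype(float)   # toy spike train
#     hdm = HistoryDesignMatrix(spikes, history_length=100, knot_number=5, order_flag=1)
#     print(hdm.matrix.shape)   # one row per time bin, one column per history spline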
def fit(self, data=None, beta=None, x=None, dm=None, nr_trials=None):
    """ Fits the model.

    **data** `ni.data.data.Data` instance

    Example::

        from pylab import figure, plot, title
        import numpy as np
        import ni

        model = ni.model.ip.Model(ni.model.ip.Configuration({'crosshistory': False}))
        data = ni.data.monkey.Data()
        data = data.condition(0).trial(range(int(data.nr_trials / 2)))
        dm = model.dm(data)
        x = model.x(data)

        from sklearn import linear_model
        betas = []
        fm = model.fit(data)
        betas.append(fm.beta)
        print("fitted.")
        for clf in [linear_model.LinearRegression(), linear_model.RidgeCV(alphas=[0.1, 1.0, 10.0])]:
            clf.fit(dm, x)
            betas.append(clf.coef_)
            figure()
            plot(clf.coef_.transpose(), '.')
            title('coefficients')
            prediction = np.dot(dm, clf.coef_.transpose())
            figure()
            plot(prediction)
            title('prediction')
            ll = x * np.log(prediction) + (len(x) - x) * np.log(1 - prediction)
            figure()
            plot(ll)
            title('ll')
            print(np.sum(ll))
    """
    fittedmodel = FittedModel(self)
    fittedmodel.configuration = copy(self.configuration)
    fittedmodel.history_kernel = self.history_kernel
    if fittedmodel.history_kernel == "log kernel":
        fittedmodel.history_kernel = cs.create_splines_logspace(
            self.configuration.history_length, self.configuration.knot_number, 0)
    spike_train_all_trial = []
    if data is not None:
        spike_train_all_trial = data.getFlattend()
        firing_rate = np.mean(spike_train_all_trial)
        fittedmodel.firing_rate = firing_rate
        fittedmodel.trial_length = data.time_bins
        fittedmodel.design = self.generateDesignMatrix(data, data.time_bins)
    if dm is None:
        dm = self.dm(data, fittedmodel.design)
    if x is None:
        x = self.x(data)
    # Count the non-zero entries of each design matrix column to find empty components.
    w = np.where(dm.transpose())[0]
    cnt = [np.sum(w == i) for i in range(dm.shape[1])]
    if sum(np.array(cnt) >= dm.shape[0]) > 0:
        log("!! " + str(sum(np.array(cnt) == dm.shape[0])) + " Components are only 0. \n"
            + str(sum(np.array(cnt) <= dm.shape[0] * 0.1)) + " are mostly 0. "
            + str(sum(np.array(cnt) <= dm.shape[0] * 0.5)) + " are half 0.")
    else:
        log(str(sum(np.array(cnt) == dm.shape[0])) + " Components are only 0. \n"
            + str(sum(np.array(cnt) <= dm.shape[0] * 0.1)) + " are mostly 0. "
            + str(sum(np.array(cnt) <= dm.shape[0] * 0.5)) + " are half 0.")
    zeroed_components = [i for i in range(len(cnt)) if cnt[i] == 0]
    backend_config = self.configuration.backend_config
    fittedmodel.backend_model = self.backend.Model(backend_config)
    if beta is None:
        # NOTE: frequently produces LinAlgError: SVD did not converge
        fit = fittedmodel.backend_model.fit(x, dm)
        if False:
            # This might be introduced at a later date:
            # Components that are 0 could be excluded from the fitting process and then
            # virtually reinserted into the beta and params attributes.
            beta = []
            i_z = 0
            for i in range(len(cnt)):
                if i in zeroed_components:
                    beta.append(0)
                else:
                    beta.append(fit.params[i_z])
                    i_z = i_z + 1
            fit.params = beta
        else:
            for z in zeroed_components:
                fit.params[z] = 0
        fittedmodel.beta = fit.params
    else:
        fit = self.backend.Fit(f=None, m=fittedmodel.backend_model)
        fit.params = beta
        fittedmodel.beta = beta
    fittedmodel.fit = fit
    if "llf" in fit.statistics:
        # Keep the total log-likelihood, then normalize by the number of trials so
        # models fitted on different amounts of data stay comparable.
        fit.statistics["llf_all"] = fit.statistics["llf"]
        if hasattr(data, 'nr_trials'):
            fit.statistics["llf"] = fit.statistics["llf"] / data.nr_trials
        elif nr_trials is not None:
            fit.statistics["llf"] = fit.statistics["llf"] / nr_trials
    fittedmodel.statistics = fit.statistics
    return fittedmodel
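# A minimal sketch of the per-trial log-likelihood normalization performed at the end
# of fit() (the numbers below are made up for illustration):
#
#     llf_all = -1250.0          # summed log-likelihood over all trials ("llf_all")
#     nr_trials = 50
#     llf = llf_all / nr_trials  # -25.0; stored back into statistics["llf"]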
def generateDesignMatrix(self, data, trial_length):
    """ Generates a design matrix template.

    Uses meta data from `data` to determine the number of trials and the trial length.
    """
    design_template = designmatrix.DesignMatrixTemplate(data.nr_trials * data.time_bins, trial_length)
    log_kernel = cs.create_splines_logspace(self.configuration.history_length,
                                            self.configuration.knot_number,
                                            self.configuration.delete_last_spline)
    if self.configuration.autohistory:
        kernel = self.history_kernel
        if kernel == "log kernel":
            kernel = log_kernel
        if type(self.configuration.custom_kernels) == list:
            for c in self.configuration.custom_kernels:
                if c['Name'] == 'autohistory':
                    kernel = c['Kernel']
        design_template.add(designmatrix.HistoryComponent('autohistory',
                            channel=self.configuration.cell, kernel=kernel,
                            delete_last_spline=self.configuration.delete_last_spline))
    if self.configuration.autohistory_2d:
        kernel_1 = self.history_kernel
        kernel_2 = self.history_kernel
        if kernel_1 == "log kernel":
            kernel_1 = log_kernel
        if kernel_2 == "log kernel":
            kernel_2 = log_kernel
        if type(self.configuration.custom_kernels) == list:
            for c in self.configuration.custom_kernels:
                if c['Name'] == 'autohistory2d':
                    kernel_1 = c['Kernel']
                    kernel_2 = c['Kernel']
                if c['Name'] == 'autohistory2d_1':
                    kernel_1 = c['Kernel']
                if c['Name'] == 'autohistory2d_2':
                    kernel_2 = c['Kernel']
        design_template.add(designmatrix.SecondOrderHistoryComponent('autohistory2d',
                            channel_1=self.configuration.cell, channel_2=self.configuration.cell,
                            kernel_1=kernel_1, kernel_2=kernel_2,
                            delete_last_spline=self.configuration.delete_last_spline))
    # Generating crosshistory splines for all trials
    crosshistories = []
    if type(self.configuration.crosshistory) == int or type(self.configuration.crosshistory) == float:
        self.configuration.crosshistory = [int(self.configuration.crosshistory)]
    if self.configuration.crosshistory == True or type(self.configuration.crosshistory) == list:
        for i in range(data.nr_cells):
            if i == self.configuration.cell:
                continue
            if self.configuration.crosshistory == True or i in self.configuration.crosshistory:
                kernel = self.history_kernel
                if kernel == "log kernel":
                    kernel = log_kernel
                for c in self.configuration.custom_kernels:
                    if c['Name'] == 'crosshistory' + str(i):
                        kernel = c['Kernel']
                design_template.add(designmatrix.HistoryComponent('crosshistory' + str(i),
                                    channel=i, kernel=kernel,
                                    delete_last_spline=self.configuration.delete_last_spline))
    if self.configuration.rate:
        added_rate = False
        if type(self.configuration.custom_kernels) == list:
            for c in self.configuration.custom_kernels:
                if c['Name'] == 'rate':
                    design_template.add(designmatrix.RateComponent('rate', kernel=c['Kernel']))
                    added_rate = True
        if not added_rate:
            if self.configuration.adaptive_rate:
                rate = data.firing_rate(self.configuration.adaptive_rate_smooth_width)
                design_template.add(designmatrix.AdaptiveRateComponent('rate', rate=rate,
                                    exponent=self.configuration.adaptive_rate_exponent,
                                    knots=self.configuration.knots_rate, length=trial_length))
            else:
                design_template.add(designmatrix.RateComponent('rate', self.configuration.knots_rate, trial_length))
    if self.configuration.constant:
        design_template.add(designmatrix.Component('constant', np.ones((1, 1))))
    for c in self.configuration.custom_components:
        design_template.add(c)
    design = design_template
    design.setMask(self.configuration.mask)
    return design
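# Sketch of how the custom-kernel overrides above are keyed. The loops expect
# `custom_kernels` to be a list of dicts with 'Name' and 'Kernel' entries; the
# kernel variables below are hypothetical placeholders:
#
#     custom = [{'Name': 'autohistory', 'Kernel': my_autohistory_kernel},
#               {'Name': 'crosshistory2', 'Kernel': my_crosshistory_kernel}]
#     conf = ni.model.ip.Configuration({'crosshistory': True, 'custom_kernels': custom})
#     model = ni.model.ip.Model(conf)
#     design = model.generateDesignMatrix(data, data.time_bins)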
def addLogSpline(self, knots, header, length=0):
    # Add a log-spaced spline basis under the given header; if no length is given,
    # span the full length of the design matrix (minus one bin).
    if length == 0:
        self.add(cs.create_splines_logspace(self.length - 1, knots, 0), header)
    else:
        self.add(cs.create_splines_logspace(length - 1, knots, 0), header)
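# Example use of addLogSpline (assuming it is defined on the design matrix template
# class used elsewhere in this module; names follow generateDesignMatrix above):
#
#     template = designmatrix.DesignMatrixTemplate(nr_trials * time_bins, trial_length)
#     template.addLogSpline(4, 'rate', length=trial_length)   # 4-knot log-spaced basis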