Code example #1
 def compute_final_error(self):
     if self.model=='NMF':
         V_string='*V_NMF.npy'
         U_string='*U_NMF.npy'
     if self.model=='SparsePCA':
         V_string='*V_sPCA.npy'
         U_string='*U_sPCA.npy'
     for filename_V in glob.glob(os.path.join(self.save_path, V_string)):
         for filename_U in glob.glob(os.path.join(self.save_path, U_string)):
             # pair U and V files that share the same prefix (strip the model-specific suffix)
             if filename_V[:-(len(V_string)-1)]==filename_U[:-(len(U_string)-1)]:
                 U=np.load(filename_U)
                 V=np.load(filename_V)
                 # hard-coded slice recovers the original .mat filename from the saved factor's path
                 data = io.loadmat(self.data_path+'/'+filename_V[43:78]+'.mat')
                 resp = data['stim'][0]['resp'][0]
                 spont = data['stim'][0]['spont'][0]
                 X=subtract_spont(spont,resp)
                 #print(X.shape)
                 if self.model=='SparsePCA':
                     X=stats.zscore(X,axis=0)
                 if self.model=='NMF':
                     X-=X.min(axis=0)
                 # reconstruction of X, assuming U is (neurons x components) and V is (stimuli x components)
                 U_V=(U@V.T).T
                 residuals_squared=np.mean((X-U_V)*(X-U_V))
                 #plt.hist(U_V, range=(-10,10))
                 #plt.show()
                 #plt.hist(X.flatten(),range=(-10,10))
                 plt.plot(range(0,100),X[:100,0])
                 plt.plot(range(0,100),U_V[:100,0])
                 plt.legend(('X','U_V'))
                 plt.show()
                 print(residuals_squared)
                 print('corrcoef',np.corrcoef(X[:,0],U_V[:,0]))
Code example #2
 def NMF_regul_exps_fit(self):
     self.model = 'NMF_regularization_experiments'
     alphas = [0.01, 0.1, 1, 10, 100]
     powers = [-2, -1, 0, 1, 2]
     ind_dict = {0.01: 0, 0.1: 1, 1: 3, 10: 4, 100: 5}
     for filename in self.mat_file_lst:
         data = io.loadmat(self.data_path + filename)
         resp = data['stim'][0]['resp'][0]
         spont = data['stim'][0]['spont'][0]
         X = subtract_spont(spont, resp)
         X -= X.min(axis=0)
         for alpha in alphas:
             model = NMF(n_components=self.nr_of_components,
                         init='nndsvd',
                         random_state=7,
                         alpha=alpha,
                         l1_ratio=1.0)
             start = time.time()
             V = model.fit_transform(X)
             end = time.time()
             time_ = end - start
             print(end - start)
             U = model.components_
             np.save(
                 self.save_path + filename + '_' + str(ind_dict[alpha]) +
                 '_U_NMF_reg_exps.npy', U)
             np.save(
                 self.save_path + filename + '_' + str(ind_dict[alpha]) +
                 '_V_NMF_reg_exps.npy', V)
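The sweep above fits NMF with a pure L1 penalty (l1_ratio=1.0) for each alpha and saves the factors without inspecting them. A minimal follow-up sketch, assuming the files were written with the naming scheme above; the save_path value here is only a placeholder:

import glob
import os

import numpy as np

save_path = './results/'  # placeholder; use the same save_path as the class above
for path in sorted(glob.glob(os.path.join(save_path, '*_U_NMF_reg_exps.npy'))):
    U = np.load(path)
    # With a pure L1 penalty, the fraction of exactly-zero entries should grow with alpha.
    sparsity = float(np.mean(U == 0))
    print(os.path.basename(path), 'fraction of zero entries in U:', round(sparsity, 3))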
Code example #3
 def variance_explained_across_neurons(self, U, V):
     '''
     From sklearn:
     The coefficient R^2 is defined as (1 - u/v), where u is the residual sum of squares
     ((y_true - y_pred) ** 2).sum() and v is the total sum of squares
     ((y_true - y_true.mean()) ** 2).sum().
     '''
     #Fetch the original data and convert it into the same form as what goes into the
     #matrix factorization model
     data = io.loadmat(self.data_path + self.mouse_filename)
     resp = data['stim'][0]['resp'][0]
     spont = data['stim'][0]['spont'][0]
     X = subtract_spont(spont, resp).T
     # z-score each neuron across stimuli (zscore operates along axis 0, hence the transposes)
     X = zscore(X.T).T
     u = []
     v = []
     approx = U @ V.T
     for j in range(X.shape[0]):
         u_j = ((X[j, :] - approx[j, :])**2).sum()
         v_j = ((X[j, :] - np.mean(X[j, :]))**2).sum()
         u.append(u_j)
         v.append(v_j)
     u = np.array(u)
     v = np.array(v)
     plt.plot(-np.divide(u, v) + 1)
     plt.title('Variance explained across neurons')
     plt.show()
     print('Total variance explained, averaged over neurons is:',
           (1 - np.mean(u) / np.mean(v)))
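Because the docstring quotes sklearn's definition of R^2, the per-neuron loop can be cross-checked against sklearn.metrics.r2_score. A self-contained sketch with random stand-in arrays (the shapes are illustrative, not taken from the recordings):

import numpy as np
from sklearn.metrics import r2_score

rng = np.random.default_rng(0)
X = rng.standard_normal((50, 200))    # neurons x stimuli, stand-in for the z-scored data
U = rng.standard_normal((50, 10))     # neurons x components
V = rng.standard_normal((200, 10))    # stimuli x components
approx = U @ V.T

# Same per-neuron quantities as in the loop above.
u = ((X - approx) ** 2).sum(axis=1)
v = ((X - X.mean(axis=1, keepdims=True)) ** 2).sum(axis=1)
r2_manual = 1 - u / v

# r2_score treats each column as one output, so neurons go on the column axis.
r2_sklearn = r2_score(X.T, approx.T, multioutput='raw_values')
assert np.allclose(r2_manual, r2_sklearn)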
Code example #4
 def knn(self):
    if self.model=='SparsePCA':
          model_string='*V_sPCA.npy'
    if self.model=='EnsemblePursuit':
          model_string='*_V_ep.npy'
    if self.model=='NMF':
          model_string='*_V_NMF.npy'
    if self.model=='PCA':
          model_string='*_V_pca.npy'
    if self.model=='LDA':
          model_string='*_V_lda.npy'
    if self.model=='all':
          #self.save_path=self.data_path
          model_string='*.mat'
    columns=['Experiment','accuracy']
    acc_df=pd.DataFrame(columns=columns)
    print(self.save_path)
    for filename in glob.glob(os.path.join(self.save_path, model_string)):
          if self.model=='all':
              data = io.loadmat(filename)
              resp = data['stim'][0]['resp'][0]
              spont =data['stim'][0]['spont'][0]
              X=subtract_spont(spont,resp)
              V=stats.zscore(X)
          else:
              print(filename)
              V=np.load(filename)
          #if self.model='PCA':
          print(V.shape)
          #print(self.data_path+'/'+filename[43:78]+'.mat')
           # recover the .mat filename from the saved factor's path (assumes all .mat names share the same length)
           istim_path=filename[len(self.save_path):len(self.save_path)+len(self.mat_file_lst[0])]
          print(istim_path)
          istim=sio.loadmat(self.data_path+istim_path)['stim']['istim'][0][0].astype(np.int32)
          istim -= 1 # get out of MATLAB convention
          istim = istim[:,0]
           nimg = istim.max() # stimuli labelled with the maximum index are blanks (exclude them)
          V = V[istim<nimg, :]
          istim = istim[istim<nimg]
          x_train,x_test,y_train,y_test=test_train_split(V,istim)
          acc=evaluate_model_torch(x_train,x_test)
          acc_df=acc_df.append({'Experiment':filename[len(self.save_path):],'accuracy':acc},ignore_index=True)
    pd.options.display.max_colwidth = 300
    print(acc_df)
    print(acc_df.describe())
    return acc_df
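The istim handling above shifts the labels out of MATLAB's 1-based convention and drops the largest label, which marks blank stimuli. A toy sketch of that filtering step, using made-up labels:

import numpy as np

# Toy stimulus labels in MATLAB convention: 1..4 are images, 5 marks blank trials.
istim = np.array([[1], [5], [3], [5], [2], [4]], dtype=np.int32)
istim -= 1                  # 0-based indexing
istim = istim[:, 0]
nimg = istim.max()          # the largest label corresponds to blank stimuli
keep = istim < nimg
print(istim[keep])          # blanks removed; V would be subset with the same mask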
Code example #5
 def fit_model(self):
     data = io.loadmat(self.data_path + self.mouse_filename)
     resp = data['stim'][0]['resp'][0]
     spont = data['stim'][0]['spont'][0]
     if self.model == 'EnsemblePursuit_numpy':
         X = subtract_spont(spont, resp).T
         options_dict = {'seed_neuron_av_nr': 100, 'min_assembly_size': 8}
         ep_np = EnsemblePursuitNumpyFast(n_ensembles=self.nr_of_components,
                                          lambd=self.lambd_,
                                          options_dict=options_dict)
         start = time.time()
         U, V = ep_np.fit_transform(X)
         end = time.time()
         tm = end - start
         print('Time', tm)
         if self.save == True:
             np.save(self.save_path + self.mouse_filename + '_V_ep_numpy.npy', V)
             np.save(self.save_path + self.mouse_filename + '_U_ep_numpy.npy', U)
         return U, V
Code example #6
 def sort_by_variance_explained(self):
     X_path='/home/maria/Documents/EnsemblePursuit/models/natimg2800_M170717_MP034_2017-09-11.mat'
     if self.model=='EnsemblePursuit':
         U_path='/home/maria/Documents/EnsemblePursuit/SAND9/experiments/natimg2800_M170717_MP034_2017-09-11.mat_U_ep.npy'
         V_path='/home/maria/Documents/EnsemblePursuit/SAND9/experiments/natimg2800_M170717_MP034_2017-09-11.mat_V_ep.npy'
     if self.model=='SparsePCA':
         U_path='/home/maria/Documents/EnsemblePursuit/NIPS/natimg2800_M170717_MP034_2017-09-11.mat_0.9_150_U_sPCA.npy'
         V_path='/home/maria/Documents/EnsemblePursuit/NIPS/natimg2800_M170717_MP034_2017-09-11.mat_0.9_150_V_sPCA.npy'
     if self.model=='NMF':
         V_path='/home/maria/Documents/EnsemblePursuit/NIPS/natimg2800_M170717_MP034_2017-09-11.mat_0_150_V_NMF.npy'
         U_path='/home/maria/Documents/EnsemblePursuit/NIPS/natimg2800_M170717_MP034_2017-09-11.mat_0_150_U_NMF.npy'
     if self.model=='LDA':
         V_path='/home/maria/Documents/EnsemblePursuit/NIPS/natimg2800_M170717_MP034_2017-09-11.mat_150_V_lda.npy'
         U_path='/home/maria/Documents/EnsemblePursuit/NIPS/natimg2800_M170717_MP034_2017-09-11.mat_150_U_lda.npy'
     data = io.loadmat(X_path)
     resp = data['stim'][0]['resp'][0]
     spont = data['stim'][0]['spont'][0]
     X=subtract_spont(spont,resp)
     if self.model=='EnsemblePursuit':
         X=stats.zscore(X,axis=0)
     if self.model=='SparsePCA':
         X=stats.zscore(X,axis=0)
     if self.model=='NMF':
         X-=X.min(axis=0)
     if self.model=='LDA':
         X-=X.min(axis=0)
     V=np.load(V_path)
     U=np.load(U_path).T
     print(V.shape)
     print(U.shape)
     var_lst=[]
     for j in range(0,150):
         # mean squared residual when X is approximated by component j's rank-1 term alone
         # (shapes are hard-coded for this recording: 10103 neurons x 5880 stimuli)
         recon_j=(U[j,:].reshape(10103,1)@V[:,j].reshape(1,5880)).T
         var=np.mean((X-recon_j)*(X-recon_j))
         #var=(np.sum(U[:,j])**2)#*np.var(V[:,j]) 
         #print(U[:,j])
         var_lst.append(var)
     sortd=np.argsort(var_lst)
     return var_lst, sortd
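sort_by_variance_explained returns the per-component residuals together with their argsort; that ordering can then be used to rearrange U and V. A small sketch on random arrays (shapes and variable names are illustrative only):

import numpy as np

rng = np.random.default_rng(0)
k = 5
U = rng.standard_normal((k, 12))    # components x neurons, as after the .T above
V = rng.standard_normal((30, k))    # stimuli x components
residuals = rng.random(k)           # stand-in for var_lst

order = np.argsort(residuals)       # components with the smallest residual come first
U_sorted = U[order, :]
V_sorted = V[:, order]
print(order, U_sorted.shape, V_sorted.shape)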
Code example #7
    def fit_model(self):
        for filename in self.mat_file_lst:
            print(filename)
            data = io.loadmat(self.data_path + filename)
            resp = data['stim'][0]['resp'][0]
            spont = data['stim'][0]['spont'][0]
            if self.model == 'EnsemblePursuit_numpy':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_np = EnsemblePursuitNumpy(n_ensembles=self.nr_of_components,
                                             lambd=self.lambd_,
                                             options_dict=options_dict)
                start = time.time()
                U, V = ep_np.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_numpy.npy', V)
                np.save(self.save_path + filename + '_U_ep_numpy.npy', U)
                np.save(self.save_path + filename + '_timing_ep_numpy.npy', tm)
            if self.model == 'EnsemblePursuit_pytorch':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitPyTorch(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_pytorch.npy', V)
                np.save(self.save_path + filename + '_U_ep_pytorch.npy', U)
                np.save(self.save_path + filename + '_timing_ep_pytorch.npy',
                        tm)
            if self.model == 'EnsemblePursuit_adaptive':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitPyTorch(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_adaptive.npy', V)
                np.save(self.save_path + filename + '_U_ep_adaptive.npy', U)

            if self.model == 'SparsePCA':
                X = subtract_spont(spont, resp)
                X = zscore(X)
                sPCA = SparsePCA(n_components=self.nr_of_components,
                                 random_state=7,
                                 max_iter=100,
                                 n_jobs=-1,
                                 verbose=1)
                start = time.time()
                model = sPCA.fit(X)
                end = time.time()
                elapsed_time = end - start
                U = model.components_
                V = sPCA.transform(X)
                np.save(self.save_path + filename + '_U_sPCA.npy', U)
                np.save(self.save_path + filename + '_V_sPCA.npy', V)
                np.save(self.save_path + filename + '_time_sPCA.npy',
                        elapsed_time)
            if self.model == 'ICA':
                X = subtract_spont(spont, resp)
                X = zscore(X)
                ICA = FastICA(n_components=self.nr_of_components,
                              random_state=7)
                start = time.time()
                V = ICA.fit_transform(X)
                end = time.time()
                elapsed_time = end - start
                U = ICA.components_
                np.save(self.save_path + filename + '_U_ICA.npy', U)
                np.save(self.save_path + filename + '_V_ICA.npy', V)
                np.save(self.save_path + filename + '_time_ICA.npy',
                        elapsed_time)
Code example #8
    def fit_model(self):
        for filename in self.mat_file_lst:
            print(filename)
            data = io.loadmat(self.data_path + filename)
            resp = data['stim'][0]['resp'][0]
            spont = data['stim'][0]['spont'][0]
            if self.model == 'EnsemblePursuit_numpy':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_np = EnsemblePursuitNumpy(n_ensembles=self.nr_of_components,
                                             lambd=self.lambd_,
                                             options_dict=options_dict)
                start = time.time()
                U, V = ep_np.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_numpy.npy', V)
                np.save(self.save_path + filename + '_U_ep_numpy.npy', U)
                np.save(self.save_path + filename + '_timing_ep_numpy.npy', tm)
            if self.model == 'EnsemblePursuit_pytorch':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitPyTorch(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_pytorch.npy', V)
                np.save(self.save_path + filename + '_U_ep_pytorch.npy', U)
                np.save(self.save_path + filename + '_timing_ep_pytorch.npy',
                        tm)
            if self.model == 'EnsemblePursuit_numpy_fast':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_np = EnsemblePursuitNumpyFast(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_np.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_numpy_fast.npy', V)
                np.save(self.save_path + filename + '_U_ep_numpy_fast.npy', U)
                np.save(
                    self.save_path + filename + '_timing_ep_numpy_fast.npy',
                    tm)
            if self.model == 'EnsemblePursuit_pytorch_fast':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitPyTorchFast(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_pytorch_fast.npy',
                        V)
                np.save(self.save_path + filename + '_U_ep_pytorch_fast.npy',
                        U)
                np.save(
                    self.save_path + filename + '_timing_ep_pytorch_fast.npy',
                    tm)
            if self.model == 'EnsemblePursuit_reg':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitRegressionInit(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_pytorch_reg.npy', V)
                np.save(self.save_path + filename + '_U_ep_pytorch_reg.npy', U)
                np.save(
                    self.save_path + filename + '_timing_ep_pytorch_reg.npy',
                    tm)

            if self.model == 'EnsemblePursuit_var':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitVarianceInit(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_pytorch_var.npy', V)
                np.save(self.save_path + filename + '_U_ep_pytorch_var.npy', U)
                np.save(
                    self.save_path + filename + '_timing_ep_pytorch_var.npy',
                    tm)

            if self.model == 'EnsemblePursuit_thresh':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitThresholdInit(
                    n_ensembles=self.nr_of_components,
                    lambd=self.lambd_,
                    options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_pytorch_thresh.npy',
                        V)
                np.save(self.save_path + filename + '_U_ep_pytorch_thresh.npy',
                        U)
                np.save(
                    self.save_path + filename +
                    '_timing_ep_pytorch_thresh.npy', tm)
            if self.model == 'EnsemblePursuit_tfidf':
                X = subtract_spont(spont, resp).T
                options_dict = {
                    'seed_neuron_av_nr': 100,
                    'min_assembly_size': 8
                }
                ep_pt = EnsemblePursuitTFIDF(n_ensembles=self.nr_of_components,
                                             lambd=self.lambd_,
                                             options_dict=options_dict)
                start = time.time()
                U, V = ep_pt.fit_transform(X)
                end = time.time()
                tm = end - start
                print('Time', tm)
                np.save(self.save_path + filename + '_V_ep_pytorch_tfidf.npy',
                        V)
                np.save(self.save_path + filename + '_U_ep_pytorch_tfidf.npy',
                        U)
                np.save(
                    self.save_path + filename + '_timing_ep_pytorch_tfidf.npy',
                    tm)
Code example #9
    def fit_model(self):
        #for filename in glob.glob(os.path.join(self.data_path, '*MP034_2017-09-11.mat')):
        #for filename in glob.glob(os.path.join(self.data_path, '*.mat')):
        #self.mat_file_lst=['natimg2800_M170717_MP034_2017-09-11.mat',
        #                   'natimg2800_M160825_MP027_2016-12-14.mat',
        #                   'natimg2800_M161025_MP030_2017-05-29.mat',
        #                   'natimg2800_M170604_MP031_2017-06-28.mat',
        #                   'natimg2800_M170714_MP032_2017-09-14.mat',
        #                   'natimg2800_M170714_MP032_2017-08-07.mat',
        #                   'natimg2800_M170717_MP033_2017-08-20.mat']
        for filename in self.mat_file_lst:
            print(filename)
            data = io.loadmat(self.data_path+filename)
            resp = data['stim'][0]['resp'][0]
            spont =data['stim'][0]['spont'][0]
            if self.model=='EnsemblePursuit':
                X=subtract_spont(spont,resp)
                for lambd_ in self.lambdas:
                    neuron_init_dict={'method':'top_k_corr','parameters':{'n_av_neurons':100,'n_of_neurons':1,'min_assembly_size':8}}
                    print(str(neuron_init_dict['parameters']['n_av_neurons']))
                    ep=EnsemblePursuitPyTorch()
                    start=time.time()
                    U_V,nr_of_neurons,U,V, cost_lst,seed_neurons,ensemble_neuron_lst=ep.fit_transform(X,lambd_,self.nr_of_components,neuron_init_dict)
                    end=time.time()
                    tm=end-start
                    print('Time', tm)
                    np.save(self.save_path+filename+'_V_ep.npy',V)
                    np.save(self.save_path+filename+'_U_ep.npy',U)
                    np.save(self.save_path+filename+'_ensemble_pursuit_lst_ep.npy',ensemble_neuron_lst)
                    np.save(self.save_path+filename+'_seed_neurons_ep.npy', seed_neurons)
                    np.save(self.save_path+filename+'_time_ep.npy', tm)
                    # Alternative file-naming scheme that encodes the hyperparameters:
                    #np.save(self.save_path+filename[45:85]+'_n_av_n_'+str(neuron_init_dict['parameters']['n_av_neurons'])+'_'+str(lambd_)+'_'+str(self.nr_of_components)+'_V_ep.npy',V)
                    #np.save(self.save_path+filename[45:85]+'_n_av_n_'+str(neuron_init_dict['parameters']['n_av_neurons'])+'_'+str(lambd_)+'_'+str(self.nr_of_components)+'_U_ep.npy',U)
                    #np.save(self.save_path+filename[45:85]+'_n_av_n_'+str(neuron_init_dict['parameters']['n_av_neurons'])+'_'+str(lambd_)+'_'+str(self.nr_of_components)+'_cost_ep.npy',cost_lst)
                    #np.save(self.save_path+filename[45:85]+'_n_av_n_'+str(neuron_init_dict['parameters']['n_av_neurons'])+'_'+str(lambd_)+'_'+str(self.nr_of_components)+'_n_neurons_ep.npy',nr_of_neurons)
                    #np.save(self.save_path+filename[45:85]+'_n_av_n_'+str(neuron_init_dict['parameters']['n_av_neurons'])+'_'+str(lambd_)+'_'+str(self.nr_of_components)+'_ensemble_neuron_lst.npy',ensemble_neuron_lst)
                    #np.save(self.save_path+filename[45:85]+'_n_av_n_'+str(neuron_init_dict['parameters']['n_av_neurons'])+'_'+str(lambd_)+'_'+str(self.nr_of_components)+'_time_ep.npy',tm)
                    #np.save(self.save_path+filename[45:85]+'_n_av_n_'+str(neuron_init_dict['parameters']['n_av_neurons'])+'_'+str(lambd_)+'_'+str(self.nr_of_components)+'_seed_neurons.npy',seed_neurons)
            if self.model=='SparsePCA':
                X=subtract_spont(spont,resp)
                X=stats.zscore(X)
                print(X.shape)
                for alpha in self.alphas:
                    sPCA=SparsePCA(n_components=self.nr_of_components,alpha=alpha,random_state=7, max_iter=100, n_jobs=-1,verbose=1)
                    #X=X.T
                    start=time.time()
                    model=sPCA.fit(X)
                    end=time.time()
                    elapsed_time=end-start
                    U=model.components_
                    print('U',U.shape)
                    #errors=model.error_
                    V=sPCA.transform(X)
                    print('V',V.shape)
                    np.save(self.save_path+filename+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_U_sPCA.npy',U)
                    np.save(self.save_path+filename+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_V_sPCA.npy',V)
                    np.save(self.save_path+filename+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_time_sPCA.npy',elapsed_time)
                    #np.save(self.save_path+filename[45:85]+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_errors_sPCA.npy',errors)
            if self.model=='NMF':
                 X=subtract_spont(spont,resp)
                 X-=X.min(axis=0)
                 for alpha in self.alphas:
                    model = NMF(n_components=self.nr_of_components, init='nndsvd', random_state=7,alpha=alpha)
                    start=time.time()
                    V=model.fit_transform(X)
                    end=time.time()
                    time_=end-start
                    print(end-start)
                    U=model.components_
                    np.save(self.save_path+filename+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_U_NMF.npy',U)
                    np.save(self.save_path+filename+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_V_NMF.npy',V)
                    np.save(self.save_path+filename+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_time_NMF.npy',time_)
            if self.model=='PCA':
                  X=subtract_spont(spont,resp)
                  X=stats.zscore(X)
                  pca=PCA(n_components=self.nr_of_components)
                  start=time.time()
                  V=pca.fit_transform(X)
                  U=pca.components_
                  end=time.time()
                  elapsed_time=end-start
                  #V=pca.components_
                  var=pca.explained_variance_
                  np.save(self.save_path+filename+'_'+str(self.nr_of_components)+'_V_pca.npy',V)
                  np.save(self.save_path+filename+'_'+str(self.nr_of_components)+'_time_pca.npy',elapsed_time)
                  np.save(self.save_path+filename+'_'+str(self.nr_of_components)+'_var_pca.npy',var)
                  np.save(self.save_path+filename+'_'+str(self.nr_of_components)+'_U_pca.npy',U)
            if self.model=='LDA':
                  X=resp
                  X-=X.min(axis=0)
                  lda=LatentDirichletAllocation(n_components=self.nr_of_components, random_state=7)
                  start=time.time()
                  V=lda.fit_transform(X)
                  end=time.time()
                  elapsed_time=end-start
                  print('time',elapsed_time)
                  U=lda.components_
                  np.save(self.save_path+filename+'_'+str(self.nr_of_components)+'_V_lda.npy',V)
                  np.save(self.save_path+filename+'_'+str(self.nr_of_components)+'_U_lda.npy',U)
                  np.save(self.save_path+filename+'_'+str(self.nr_of_components)+'_time_lda.npy',elapsed_time)
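Each branch above preprocesses X differently: z-scoring for SparsePCA and PCA, and subtracting the column-wise minimum for NMF and LDA, which require nonnegative input. A tiny sketch of what the column-wise shift does, on made-up data:

import numpy as np

X = np.array([[-2.0, 1.0],
              [ 0.5, -3.0],
              [ 1.5, 2.0]])
X_nn = X - X.min(axis=0)    # shift every column so its minimum becomes 0
print(X_nn)
print((X_nn >= 0).all())    # True: safe to pass to NMF / LDA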
Code example #10
 def fit_model(self):
     #for filename in glob.glob(os.path.join(self.data_path, '*MP034_2017-09-11.mat')):
     for filename in glob.glob(os.path.join(self.data_path, '*.mat')):
         print(filename[45:85])
         data = io.loadmat(filename)
         resp = data['stim'][0]['resp'][0]
         spont = data['stim'][0]['spont'][0]
         if self.model == 'EnsemblePursuit':
             X = subtract_spont(spont, resp)
             for lambd_ in self.lambdas:
                 neuron_init_dict = {
                     'method': 'top_k_corr',
                     'parameters': {
                         'n_av_neurons': 100,
                         'n_of_neurons': 1,
                         'min_assembly_size': 8
                     }
                 }
                 print(str(neuron_init_dict['parameters']['n_av_neurons']))
                 ep = EnsemblePursuitPyTorch()
                 start = time.time()
                 U_V, nr_of_neurons, U, V, cost_lst, seed_neurons, ensemble_neuron_lst = ep.fit_transform(
                     X, lambd_, self.nr_of_components, neuron_init_dict)
                 end = time.time()
                 tm = end - start
                 print('Time', tm)
                 np.save(
                     self.save_path + filename[45:85] + '_n_av_n_' +
                     str(neuron_init_dict['parameters']['n_av_neurons']) +
                     '_' + str(lambd_) + '_' + str(self.nr_of_components) +
                     '_V_ep.npy', V)
                 np.save(
                     self.save_path + filename[45:85] + '_n_av_n_' +
                     str(neuron_init_dict['parameters']['n_av_neurons']) +
                     '_' + str(lambd_) + '_' + str(self.nr_of_components) +
                     '_U_ep.npy', U)
                 np.save(
                     self.save_path + filename[45:85] + '_n_av_n_' +
                     str(neuron_init_dict['parameters']['n_av_neurons']) +
                     '_' + str(lambd_) + '_' + str(self.nr_of_components) +
                     '_cost_ep.npy', cost_lst)
                 np.save(
                     self.save_path + filename[45:85] + '_n_av_n_' +
                     str(neuron_init_dict['parameters']['n_av_neurons']) +
                     '_' + str(lambd_) + '_' + str(self.nr_of_components) +
                     '_n_neurons_ep.npy', nr_of_neurons)
                 np.save(
                     self.save_path + filename[45:85] + '_n_av_n_' +
                     str(neuron_init_dict['parameters']['n_av_neurons']) +
                     '_' + str(lambd_) + '_' + str(self.nr_of_components) +
                     '_ensemble_neuron_lst.npy', ensemble_neuron_lst)
                 np.save(
                     self.save_path + filename[45:85] + '_n_av_n_' +
                     str(neuron_init_dict['parameters']['n_av_neurons']) +
                     '_' + str(lambd_) + '_' + str(self.nr_of_components) +
                     '_time_ep.npy', tm)
                 np.save(
                     self.save_path + filename[45:85] + '_n_av_n_' +
                     str(neuron_init_dict['parameters']['n_av_neurons']) +
                     '_' + str(lambd_) + '_' + str(self.nr_of_components) +
                     '_seed_neurons.npy', seed_neurons)
         if self.model == 'SparsePCA':
             X = subtract_spont(spont, resp)
             X = stats.zscore(X)
             print(X.shape)
             for alpha in self.alphas:
                 sPCA = SparsePCA(n_components=self.nr_of_components,
                                  alpha=alpha,
                                  random_state=7,
                                  max_iter=100,
                                  n_jobs=-1,
                                  verbose=1)
                 #X=X.T
                 start = time.time()
                 model = sPCA.fit(X)
                 end = time.time()
                 elapsed_time = end - start
                 U = model.components_
                 print('U', U.shape)
                 #errors=model.error_
                 V = sPCA.transform(X)
                 print('V', V.shape)
                 np.save(
                     self.save_path + filename[45:85] + '_' + str(alpha) +
                     '_' + str(self.nr_of_components) + '_U_sPCA.npy', U)
                 np.save(
                     self.save_path + filename[45:85] + '_' + str(alpha) +
                     '_' + str(self.nr_of_components) + '_V_sPCA.npy', V)
                 np.save(
                     self.save_path + filename[45:85] + '_' + str(alpha) +
                     '_' + str(self.nr_of_components) + '_time_sPCA.npy',
                     elapsed_time)
                 #np.save(self.save_path+filename[45:85]+'_'+str(alpha)+'_'+str(self.nr_of_components)+'_errors_sPCA.npy',errors)
         if self.model == 'NMF':
             X = subtract_spont(spont, resp)
             X -= X.min(axis=0)
             for alpha in self.alphas:
                 model = NMF(n_components=self.nr_of_components,
                             init='nndsvd',
                             random_state=7,
                             alpha=alpha)
                 V = model.fit_transform(X)
                 U = model.components_
                 np.save(
                     self.save_path + filename[45:85] + '_' + str(alpha) +
                     '_' + str(self.nr_of_components) + '_U_NMF.npy', U)
                 np.save(
                     self.save_path + filename[45:85] + '_' + str(alpha) +
                     '_' + str(self.nr_of_components) + '_V_NMF.npy', V)