def update_temporal(self):
     """Update the temporal components in place using spgl1 deconvolution.

     Reads self.Yr, self.A, self.b, self.Cin, self.fin and stores the
     refreshed traces/background/residual/parameters on the instance.
     """
     result = update_temporal_components(
         self.Yr, self.A, self.b, self.Cin, self.fin,
         ITER=2, deconv_method='spgl1')
     self.C, self.f, self.Y_res, self.Pnew = result
# Example #2
# 0
def mergeROIS(Y_res,A,b,C,f,d1,d2,dz,nr,P_,thr=0.8,mx=50,sn=None,deconv_method='spgl1',min_size=3,max_size=8,dist=3,method_exp = 'ellipse', expandCore = iterate_structure(generate_binary_structure(2,1), 2).astype(int)):
    """
    Merge spatially overlapping components that have highly correlated temporal activity.

    The correlation threshold for merging overlapping components is given by
    `thr` (default 0.8).

    Inputs:
    Y_res:          residual movie after subtracting all found components
    A:              matrix of spatial components
    b:              spatial background
    C:              matrix of temporal components
    f:              temporal background
    d1, d2, dz:     spatial dimensions of the field of view
    nr:             list of component identifiers
    P_:             list of per-component parameter dicts

    Outputs:
    A:              matrix of new spatial components
    C:              matrix of new temporal components
    nr:             updated list of component identifiers after merging
    merged_ROIs:    list of old components that were merged
    P_new:          updated list of per-component parameter dicts

    Written by:
    Andrea Giovannucci from implementation of Eftychios A. Pnevmatikakis, Simons Foundation, 2015
    """

    N = len(nr)
    [d, T] = np.shape(Y_res)
    # Pairwise correlation of the temporal traces.
    C_corr = np.corrcoef(C[:N, :], C[:N, :])[:N, :N]
    FF1 = C_corr >= thr  # graph of strongly correlated temporal components
    A_corr = A.T * A
    A_corr.setdiag(0)  # ignore self-overlap
    FF2 = A_corr > 0  # graph of spatially overlapping components
    # Candidate merges must overlap spatially AND correlate temporally.
    FF3 = np.logical_and(FF1, FF2.todense())
    FF3 = coo_matrix(FF3)
    c, l = csgraph.connected_components(FF3)  # extract connected components

    p = len(P_[0]['gn'])  # order of the autoregressive model
    MC = []
    for i in range(c):
        if np.sum(l == i) > 1:  # only groups with more than one member merge
            MC.append((l == i).T)
    MC = np.asarray(MC).T

    if MC.ndim > 1:
        # Score each merge group by the summed correlation among its members.
        cor = np.zeros((np.shape(MC)[1], 1))
        for i in range(np.size(cor)):
            fm = np.where(MC[:, i])[0]
            for j1 in range(np.size(fm)):
                for j2 in range(j1 + 1, np.size(fm)):
                    cor[i] = cor[i] + C_corr[fm[j1], fm[j2]]

        # Add the background back so merged components are re-estimated
        # against the full residual.
        Y_res = Y_res + np.dot(b, f)
        if np.size(cor) > 1:
            ind = np.argsort(np.squeeze(cor))[::-1]  # best-correlated first
        else:
            ind = [0]

        nm = min((np.size(ind), mx))  # number of merging operations

        A_merged = coo_matrix((d, nm)).tocsr()
        C_merged = np.zeros((nm, T))
        nr_merged = [0] * nm

        P_merged = []
        merged_ROIs = []
        for i in range(nm):
            P_cycle = dict()
            merged_ROI = np.where(MC[:, ind[i]])[0]
            merged_ROIs.append(merged_ROI)
            # Energy of each trace; cast to array otherwise ** is matrix power.
            nC = np.sqrt(np.sum(np.array(C[merged_ROI, :])**2, axis=1))

            A = A.tocsr()  # A may come as coo_matrix, which is not indexable
            A_merged[:, i] = csr_matrix((A[:, merged_ROI] * spdiags(nC, 0, len(nC), len(nC))).sum(axis=1))

            # Put the group's contribution back into the residual before refitting.
            Y_res = Y_res + A[:, merged_ROI] * C[merged_ROI, :]

            nr_merged[i] = nr[merged_ROI[0]]

            # Energy-normalized average trace as initialization.
            aa_1 = scipy.sparse.linalg.spsolve(spdiags(nC, 0, len(nC), len(nC)), C[merged_ROI, :])
            aa_2 = (aa_1).mean(axis=0)

            ff = np.nonzero(A_merged[:, i])[0]  # pixels of the merged footprint

            # Alternate one temporal / spatial / temporal refinement pass.
            cc, _, _, Ptemp = update_temporal_components(np.asarray(Y_res[ff, :]), A_merged[ff, i], b[ff], aa_2, f, p=p, deconv_method=deconv_method)

            aa, bb, cc, _ = update_spatial_components(np.asarray(Y_res), cc, f, A_merged[:, i], d1=d1, d2=d2, dz=dz, nr=[nr_merged[i]], sn=sn, min_size=min_size, max_size=max_size, dist=dist, method=method_exp, expandCore=expandCore)

            A_merged[:, i] = aa.tocsr()

            cc, _, _, Ptemp = update_temporal_components(Y_res[ff, :], A_merged[ff, i], bb[ff], cc, f, p=p, deconv_method=deconv_method)

            # Inherit parameters from the first member, refreshed by the refit.
            P_cycle = P_[merged_ROI[0]].copy()
            P_cycle['gn'] = Ptemp[0]['gn']
            P_cycle['b'] = Ptemp[0]['b']
            P_cycle['c1'] = Ptemp[0]['c1']
            P_cycle['neuron_sn'] = Ptemp[0]['neuron_sn']
            P_merged.append(P_cycle)
            C_merged[i, :] = cc
            if i + 1 < nm:
                # Remove the refit component from the residual for later merges.
                Y_res[ff, :] = Y_res[ff, :] - A_merged[ff, i] * cc

        # Drop the merged originals and append the merged versions.
        neur_id = np.unique(np.hstack(merged_ROIs))
        good_neurons = np.setdiff1d(range(N), neur_id)

        A = scipy.sparse.hstack((A[:, good_neurons], A_merged.tocsc()))
        C = np.vstack((C[good_neurons, :], C_merged))
        nr = [nrv for nri, nrv in enumerate(nr) if nri in good_neurons] + nr_merged

        P_new = [P_[pp] for pp in good_neurons]
        # extend() instead of a loop avoids shadowing the AR order `p`.
        P_new.extend(P_merged)

    else:
        warnings.warn('No neurons merged!')
        merged_ROIs = []
        P_new = P_

    # BUG FIX: previously returned nr_merged, which is undefined on the
    # no-merge path (NameError) and holds only the merged ids otherwise;
    # return the full updated nr instead, matching the callers' use.
    return A, C, nr, merged_ROIs, P_new
#demo_=np.load('demo_post_spatial.npz')
#Y=demo_['Y']
##A=demo_['A_out']
#b=demo_['b_out']
#fin=demo_['f_in']
#Cin=demo_['C_in'];
#g=demo_['g']
#sn=demo_['sn']
#d1=demo_['d1']
#d2=demo_['d2']
#P=demo_['P']
#%% 
#TODO: test with restimate_g=True
#TODO: test reordering of list
# Time the full temporal-component update (constrained foopsi, AR order 2).
start=time.time()
C_out,f_out,Y_res_out,P_=update_temporal_components(Y,A,b,C_in,f_in,ITER=2,method='constrained_foopsi',g=None,bas_nonneg=False,p=2,fudge_factor=1);
print time.time()-start

kkk  # NOTE(review): undefined name -- presumably a deliberate stop for cell-by-cell execution; confirm before running as a script
#%%
#np.savez('after_temporal.npz',P_=P_)
##%%
#P_=np.load('after_temporal.npz')['arr_3']
#thr=0.85
#mx=50
#d1=P.d1
#d2=P.d2
#sn=P.sn
#from scipy.sparse import spdiags,coo_matrix,csgraph
#import scipy
#import numpy as np
#%% nmf

# Estimate the temporal background with a rank-1 NMF of the nonnegative
# part of the residual left after subtracting the initial components.
Y_res = Yr - np.dot(Ain,Cin)
model = ProjectedGradientNMF(n_components=1, init='random', random_state=0)
model.fit(np.maximum(Y_res,0)) 

fin = model.components_.squeeze()

#%% update spatial components

t1 = time()
A,b = update_spatial_components(Yr, Cin, fin, Ain, d1=d1, d2=d2, sn = P['sn'])
t_elSPATIAL = time() - t1

#%% 
# Temporal update with the default deconvolution method.
t1 = time()
C,f,Y_res,Pnew = update_temporal_components(Yr,A,b,Cin,fin,ITER=2)
t_elTEMPORAL1 = time() - t1

#%%  solving using spgl1 for deconvolution
t1 = time()
C2,f2,Y_res2,Pnew2 = update_temporal_components(Yr,A,b,Cin,fin,ITER=2,deconv_method = 'spgl1')
t_elTEMPORAL2 = time() - t1


#%%
# Merge overlapping, correlated components using the spgl1-based results.
t1 = time()
A_m,C_m,nr_m,merged_ROIs,P_m=mergeROIS(Y_res,A.tocsc(),b,np.array(C2),f2,d1,d2,Pnew2,sn=P['sn'],deconv_method = 'spgl1')
t_elMERGE = time() - t1
 def update_temporal(self):
     """Update the temporal components in place using spgl1 deconvolution.

     Reads self.Yr, self.A, self.b, self.Cin, self.fin and stores the
     refreshed traces/background/residual/parameters on the instance.
     """
     (self.C, self.f,
      self.Y_res, self.Pnew) = update_temporal_components(
          self.Yr, self.A, self.b, self.Cin, self.fin,
          ITER=2, deconv_method='spgl1')
# NOTE(review): p is set to 2 here but arpfit is called with p=1 on the
# next line -- confirm which AR order is intended.
p = 2;
P = arpfit(Yr,p=1,pixels = active_pixels)
# Rank-1 NMF on the nonnegative residual to initialize the temporal background.
Y_res = Yr - np.dot(Ain,Cin)
model = ProjectedGradientNMF(n_components=1, init='random', random_state=0)
model.fit(np.maximum(Y_res,0))

fin = model.components_.squeeze()
#%%
t1 = time()
A,b,Cin = update_spatial_components(Yr, Cin, fin, Ain, d1=d1, d2=d2, sn = P['sn'],dist=2,max_size=8,min_size=3)
t_elSPATIAL = time() - t1
#%%
# Visualize the spatial footprints over the correlation image.
crd = plot_contours(A,Cn2,thr=0.9,cmap=pl.cm.gray)
#%%
t1 = time()
C,f,Y_res,Pnew = update_temporal_components(Yr,A,b,Cin,fin,ITER=2,deconv_method = 'spgl1')
t_elTEMPORAL2 = time() - t1
#%%
# Merge overlapping, correlated components.
t1 = time()
A_sp=A.tocsc();
A_m,C_m,nr_m,merged_ROIs,P_m=mergeROIS(Y_res,A_sp,b,np.array(C),f,d1,d2,Pnew,sn=P['sn'],thr=.7,deconv_method='spgl1',min_size=3,max_size=8,dist=2)
t_elMERGE = time() - t1
#%%
crd = plot_contours(A_m,Cn2,thr=0.9)
#%%
# One more spatial/temporal refinement pass on the merged components.
A2,b2,C_m_ = update_spatial_components(Yr, C_m, f, A_m, d1=d1, d2=d2, sn = P['sn'],dist=3,max_size=8,min_size=3)
C2,f2,Y_res2,Pnew2 = update_temporal_components(Yr,A2,b2,C_m_,f,ITER=2,deconv_method = 'spgl1')
#%%
crd = plot_contours(A2,Cn2,thr=0.9,cmap=pl.cm.gray)

#%%
#Cin=demo_['C_in'];
#g=demo_['g']
#sn=demo_['sn']
#d1=demo_['d1']
#d2=demo_['d2']
#P=demo_['P']
#%%
#TODO: test with restimate_g=True
#TODO: test reordering of list
# Time the full temporal-component update (constrained foopsi, AR order 2).
start = time.time()
C_out, f_out, Y_res_out, P_ = update_temporal_components(
    Y,
    A,
    b,
    C_in,
    f_in,
    ITER=2,
    method='constrained_foopsi',
    g=None,
    bas_nonneg=False,
    p=2,
    fudge_factor=1)
print time.time() - start

kkk  # NOTE(review): undefined name -- presumably a deliberate stop for cell-by-cell execution; confirm before running as a script
#%%
#np.savez('after_temporal.npz',P_=P_)
##%%
#P_=np.load('after_temporal.npz')['arr_3']
#thr=0.85
#mx=50
#d1=P.d1
def mergeROIS(Y_res,
              A,
              b,
              C,
              f,
              d1,
              d2,
              P_,
              thr=0.8,
              mx=50,
              sn=None,
              deconv_method='spgl1',
              min_size=3,
              max_size=8,
              dist=3,
              method_exp='ellipse',
              expandCore=iterate_structure(generate_binary_structure(2, 1),
                                           2).astype(int)):
    """
    Merge spatially overlapping components that have highly correlated temporal activity.

    The correlation threshold for merging overlapping components is given by
    `thr` (default 0.8).

    Inputs:
    Y_res:        residual movie after subtracting all found components
    A:            matrix of spatial components
    b:            spatial background
    C:            matrix of temporal components
    f:            temporal background
    P_:           list of per-component parameter dicts

    Outputs:
    A:            matrix of new spatial components
    C:            matrix of new temporal components
    nr:           new number of components
    merged_ROIs:  list of old components that were merged
    P_new:        updated list of per-component parameter dicts

    Written by:
    Andrea Giovannucci from implementation of Eftychios A. Pnevmatikakis, Simons Foundation, 2015
    """

    nr = A.shape[1]
    [d, T] = np.shape(Y_res)
    # Pairwise correlation of the temporal traces.
    C_corr = np.corrcoef(C[:nr, :], C[:nr, :])[:nr, :nr]
    FF1 = C_corr >= thr  # graph of strongly correlated temporal components
    A_corr = A.T * A
    A_corr.setdiag(0)  # ignore self-overlap
    FF2 = A_corr > 0  # graph of overlapping spatial components
    # Candidate merges must overlap spatially AND correlate temporally.
    FF3 = np.logical_and(FF1, FF2.todense())
    FF3 = coo_matrix(FF3)
    c, l = csgraph.connected_components(FF3)  # extract connected components

    p = len(P_[0]['gn'])  # order of the autoregressive model
    MC = []
    for i in range(c):
        if np.sum(l == i) > 1:  # only groups with more than one member merge
            MC.append((l == i).T)
    MC = np.asarray(MC).T

    if MC.ndim > 1:
        # Score each merge group by the summed correlation among its members.
        # (Leftover debug print removed from the inner loop.)
        cor = np.zeros((np.shape(MC)[1], 1))

        for i in range(np.size(cor)):
            fm = np.where(MC[:, i])[0]
            for j1 in range(np.size(fm)):
                for j2 in range(j1 + 1, np.size(fm)):
                    cor[i] = cor[i] + C_corr[fm[j1], fm[j2]]

        # Add the background back so merged components are re-estimated
        # against the full residual.
        Y_res = Y_res + np.dot(b, f)
        if np.size(cor) > 1:
            ind = np.argsort(np.squeeze(cor))[::-1]  # best-correlated first
        else:
            ind = [0]

        nm = min((np.size(ind), mx))  # number of merging operations

        A_merged = coo_matrix((d, nm)).tocsr()
        C_merged = np.zeros((nm, T))

        P_merged = []
        merged_ROIs = []
        for i in range(nm):
            P_cycle = dict()
            merged_ROI = np.where(MC[:, ind[i]])[0]
            merged_ROIs.append(merged_ROI)
            # Energy of each trace, used to weight the spatial average.
            nC = np.sqrt(np.sum(C[merged_ROI, :]**2, axis=1))
            A_merged[:, i] = csr_matrix(
                (A[:, merged_ROI] *
                 spdiags(nC, 0, len(nC), len(nC))).sum(axis=1))

            # Put the group's contribution back into the residual before refitting.
            Y_res = Y_res + A[:, merged_ROI] * C[merged_ROI, :]

            # Energy-normalized average trace as initialization.
            aa_1 = scipy.sparse.linalg.spsolve(
                spdiags(nC, 0, len(nC), len(nC)), C[merged_ROI, :])
            aa_2 = (aa_1).mean(axis=0)

            ff = np.nonzero(A_merged[:, i])[0]  # pixels of the merged footprint

            # Alternate one temporal / spatial / temporal refinement pass.
            cc, _, _, Ptemp = update_temporal_components(
                np.asarray(Y_res[ff, :]),
                A_merged[ff, i],
                b[ff],
                aa_2,
                f,
                p=p,
                deconv_method=deconv_method)

            aa, bb, cc = update_spatial_components(np.asarray(Y_res),
                                                   cc,
                                                   f,
                                                   A_merged[:, i],
                                                   d1=d1,
                                                   d2=d2,
                                                   sn=sn,
                                                   min_size=min_size,
                                                   max_size=max_size,
                                                   dist=dist,
                                                   method=method_exp,
                                                   expandCore=expandCore)

            A_merged[:, i] = aa.tocsr()

            cc, _, _, Ptemp = update_temporal_components(
                Y_res[ff, :],
                A_merged[ff, i],
                bb[ff],
                cc,
                f,
                p=p,
                deconv_method=deconv_method)

            # Inherit parameters from the first member, refreshed by the refit.
            P_cycle = P_[merged_ROI[0]].copy()
            P_cycle['gn'] = Ptemp[0]['gn']
            P_cycle['b'] = Ptemp[0]['b']
            P_cycle['c1'] = Ptemp[0]['c1']
            P_cycle['neuron_sn'] = Ptemp[0]['neuron_sn']
            P_merged.append(P_cycle)
            C_merged[i, :] = cc
            if i + 1 < nm:
                # Remove the refit component from the residual for later merges.
                Y_res[ff, :] = Y_res[ff, :] - A_merged[ff, i] * cc

        # Drop the merged originals and append the merged versions.
        neur_id = np.unique(np.hstack(merged_ROIs))

        good_neurons = np.setdiff1d(range(nr), neur_id)

        A = scipy.sparse.hstack((A[:, good_neurons], A_merged.tocsc()))
        C = np.vstack((C[good_neurons, :], C_merged))

        P_new = [P_[pp] for pp in good_neurons]
        # extend() instead of a loop avoids shadowing the AR order `p`.
        P_new.extend(P_merged)

        nr = nr - len(neur_id) + nm

    else:
        warnings.warn('No neurons merged!')
        merged_ROIs = []
        P_new = P_

    return A, C, nr, merged_ROIs, P_new
# Spatial update (ellipse expansion method) and timing.
t1 = time()
A,b = update_spatial_components(Yr, Cin, fin, Ain, d1=d1, d2=d2, min_size=3, max_size=8, dist=3,sn = P['sn'], method = 'ellipse')
t_elSPATIAL = time() - t1

#%% check with matlab
# Load the reference MATLAB results and compare the spatial components.
demo_results= sio.loadmat('demo_results.mat', struct_as_record=False, squeeze_me=True)
A_m=demo_results['A']*1.0
P_m=demo_results['P']
b_m =np.expand_dims(demo_results['b']*1.0,axis=1)
print np.sum(np.abs(A-A_m).todense())/np.sum(np.abs(A).todense()) # should give 0.0035824510737
print np.sum(np.abs(b-b_m))/np.sum(np.abs(b_m)) # should give 0.0032486662048


#%% 
# Temporal update with cvx-based deconvolution.
# NOTE(review): g='None' passes the string 'None', not the value None --
# confirm this is what update_temporal_components expects.
t1 = time()
C,f,Y_res_temp,P_temp = update_temporal_components(Yr,A,b,Cin,fin,ITER=2,method='constrained_foopsi',deconv_method = 'cvx', g='None')
t_elTEMPORAL1 = time() - t1
print t_elTEMPORAL1
#%%
# Same update with spgl1-based deconvolution, for comparison.
t1 = time()
C2,f2,Y_res_temp2,P_temp2 = update_temporal_components(Yr,A,b,Cin,fin,ITER=2,method='constrained_foopsi',deconv_method = 'spgl1', g='None')
t_elTEMPORAL2 = time() - t1
#%% compare with matlab
demo_results= sio.loadmat('demo_results.mat', struct_as_record=False, squeeze_me=True)
C_m=demo_results['C']*1.0
f_m=demo_results['f']*1.0
P_temp_m=demo_results['P_temp']
Y_res_temp_m=demo_results['Y_res_temp']*1.0

# Baseline-subtract the traces before comparing with the MATLAB output.
C_cor=np.squeeze(np.array([np.array(ca-pt['b']) for pt,ca in zip(P_temp,C)]))
C_cor_m=np.squeeze(np.array([(ca-P_temp_m.b[idx]) if np.size(P_temp_m.b[idx])>0 else ca for idx,ca in enumerate(C)]))