def update_spatial(self):
    """Refresh the spatial components in place.

    Re-estimates ``self.A`` (spatial footprints) and ``self.b`` (spatial
    background) by delegating to ``update_spatial_components``, feeding it
    the reshaped movie ``self.Yr``, the current temporal estimates
    ``self.Cin`` / ``self.fin``, the initial footprints ``self.Ain`` and
    the per-pixel noise estimate ``self.P['sn']``.  The field-of-view
    dimensions are read from ``self.Y``.
    """
    n_rows = self.Y.shape[0]
    n_cols = self.Y.shape[1]
    self.A, self.b = update_spatial_components(
        self.Yr, self.Cin, self.fin, self.Ain,
        d1=n_rows, d2=n_cols, sn=self.P['sn'])
def mergeROIS(Y_res, A, b, C, f, d1, d2, dz, nr, P_, thr=0.8, mx=50, sn=None,
              deconv_method='spgl1', min_size=3, max_size=8, dist=3,
              method_exp='ellipse',
              expandCore=iterate_structure(generate_binary_structure(2, 1), 2).astype(int)):
    """Merge spatially overlapping components with highly correlated temporal activity.

    Components whose temporal traces correlate at or above `thr` AND whose
    spatial footprints overlap are grouped via connected components of the
    combined graph; each group is collapsed into a single component, which is
    then refined with temporal and spatial update passes.

    Parameters
    ----------
    Y_res : residual movie (d x T) after subtracting all found components
    A : sparse matrix of spatial components (d x K)
    b : spatial background
    C : matrix of temporal components (K x T)
    f : temporal background
    d1, d2, dz : dimensions of the field of view
    nr : list of component labels, one per column of A
    P_ : list of per-component parameter dicts (must contain 'gn')
    thr : correlation threshold for merging (default 0.8)
    mx : maximum number of merging operations (default 50)
    sn : noise level per pixel
    deconv_method, min_size, max_size, dist, method_exp, expandCore :
        forwarded to the spatial/temporal update routines

    Returns
    -------
    A : updated matrix of spatial components
    C : updated matrix of temporal components
    nr_merged : labels of the merged components (original `nr` if none merged)
    merged_ROIs : list of index arrays of the original components merged
    P_new : updated list of parameter dicts

    Written by:
    Andrea Giovannucci from implementation of Eftychios A. Pnevmatikakis,
    Simons Foundation, 2015
    """
    N = len(nr)
    [d, T] = np.shape(Y_res)

    # Graph of strongly correlated temporal components.
    C_corr = np.corrcoef(C[:N, :], C[:N, :])[:N, :N]
    FF1 = C_corr >= thr
    # Graph of overlapping spatial components; zero the diagonal so a
    # component does not count as overlapping itself.
    A_corr = A.T * A
    A_corr.setdiag(0)
    FF2 = A_corr > 0
    # A merge requires both high temporal correlation and spatial overlap.
    FF3 = np.logical_and(FF1, FF2.todense())
    FF3 = coo_matrix(FF3)
    c, l = csgraph.connected_components(FF3)  # extract connected components

    p = len(P_[0]['gn'])  # AR order of the temporal dynamics
    MC = []
    for i in range(c):
        if np.sum(l == i) > 1:  # keep only groups with >= 2 members
            MC.append((l == i).T)
    MC = np.asarray(MC).T

    if MC.ndim > 1:
        # Total pairwise correlation within each group: merge priority score.
        cor = np.zeros((np.shape(MC)[1], 1))
        for i in range(np.size(cor)):
            fm = np.where(MC[:, i])[0]
            for j1 in range(np.size(fm)):
                for j2 in range(j1 + 1, np.size(fm)):
                    # FIX: removed leftover debug "print j1,j2" here.
                    cor[i] = cor[i] + C_corr[fm[j1], fm[j2]]

        Y_res = Y_res + np.dot(b, f)  # add background back before refitting

        if np.size(cor) > 1:
            ind = np.argsort(np.squeeze(cor))[::-1]  # most correlated first
        else:
            ind = [0]

        nm = min((np.size(ind), mx))  # number of merging operations
        A_merged = coo_matrix((d, nm)).tocsr()
        C_merged = np.zeros((nm, T))
        nr_merged = [0] * nm
        # FIX: removed leftover "import pdb; pdb.set_trace()" that halted
        # execution here.
        P_merged = []
        merged_ROIs = []

        for i in range(nm):
            P_cycle = dict()
            merged_ROI = np.where(MC[:, ind[i]])[0]
            merged_ROIs.append(merged_ROI)
            # Trace energy; cast to ndarray, otherwise ** is matrix power.
            nC = np.sqrt(np.sum(np.array(C[merged_ROI, :]) ** 2, axis=1))
            A = A.tocsr()  # coo_matrix does not support column indexing
            A_merged[:, i] = csr_matrix(
                (A[:, merged_ROI] * spdiags(nC, 0, len(nC), len(nC))).sum(axis=1))
            # Restore the merged components' contribution to the residual.
            Y_res = Y_res + A[:, merged_ROI] * C[merged_ROI, :]
            nr_merged[i] = nr[merged_ROI[0]]
            # Energy-normalized mean trace as the initial temporal estimate.
            aa_1 = scipy.sparse.linalg.spsolve(
                spdiags(nC, 0, len(nC), len(nC)), C[merged_ROI, :])
            aa_2 = (aa_1).mean(axis=0)
            ff = np.nonzero(A_merged[:, i])[0]
            # Temporal -> spatial -> temporal refinement passes.
            cc, _, _, Ptemp = update_temporal_components(
                np.asarray(Y_res[ff, :]), A_merged[ff, i], b[ff], aa_2, f,
                p=p, deconv_method=deconv_method)
            aa, bb, cc, _ = update_spatial_components(
                np.asarray(Y_res), cc, f, A_merged[:, i], d1=d1, d2=d2, dz=dz,
                nr=[nr_merged[i]], sn=sn, min_size=min_size, max_size=max_size,
                dist=dist, method=method_exp, expandCore=expandCore)
            A_merged[:, i] = aa.tocsr()
            cc, _, _, Ptemp = update_temporal_components(
                Y_res[ff, :], A_merged[ff, i], bb[ff], cc, f,
                p=p, deconv_method=deconv_method)
            P_cycle = P_[merged_ROI[0]].copy()
            P_cycle['gn'] = Ptemp[0]['gn']
            P_cycle['b'] = Ptemp[0]['b']
            P_cycle['c1'] = Ptemp[0]['c1']
            P_cycle['neuron_sn'] = Ptemp[0]['neuron_sn']
            P_merged.append(P_cycle)
            C_merged[i, :] = cc
            if i + 1 < nm:
                # Remove this merged component from the residual before the
                # next merge operation.
                Y_res[ff, :] = Y_res[ff, :] - A_merged[ff, i] * cc

        # Drop the old components and append the merged replacements.
        neur_id = np.unique(np.hstack(merged_ROIs))
        good_neurons = np.setdiff1d(range(N), neur_id)
        A = scipy.sparse.hstack((A[:, good_neurons], A_merged.tocsc()))
        C = np.vstack((C[good_neurons, :], C_merged))
        nr = [nrv for nri, nrv in enumerate(nr) if nri in good_neurons] + nr_merged
        P_new = [P_[pp] for pp in good_neurons]
        for p in P_merged:
            P_new.append(p)
    else:
        warnings.warn('No neurons merged!')
        merged_ROIs = []
        P_new = P_
        # FIX: nr_merged was previously undefined on this path, so the
        # return below raised NameError whenever nothing was merged.
        nr_merged = nr

    return A, C, nr_merged, merged_ROIs, P_new
# Reshape the movie to pixels x time (Fortran order matches MATLAB layout).
Yr = np.reshape(Y,(d1*d2,T),order='F')
# NOTE(review): p is set to 2 here but arpfit is called with the literal
# p=2 as well, so the local is redundant — confirm intent.
p = 2;
P = arpfit(Yr,p=2,pixels = active_pixels)
#%% nmf
# Residual after removing the initial components; fit a rank-1 NMF on its
# positive part to initialize the temporal background fin.
Y_res = Yr - np.dot(Ain,Cin)
model = ProjectedGradientNMF(n_components=1, init='random', random_state=0)
model.fit(np.maximum(Y_res,0))
fin = model.components_.squeeze()
#%% update spatial components
t1 = time()
A,b = update_spatial_components(Yr, Cin, fin, Ain, d1=d1, d2=d2, sn = P['sn'])
t_elSPATIAL = time() - t1  # wall-clock time of the spatial update
#%%
t1 = time()
C,f,Y_res,Pnew = update_temporal_components(Yr,A,b,Cin,fin,ITER=2)
t_elTEMPORAL1 = time() - t1  # wall-clock time of the temporal update
#%% solving using spgl1 for deconvolution
t1 = time()
C2,f2,Y_res2,Pnew2 = update_temporal_components(Yr,A,b,Cin,fin,ITER=2,deconv_method = 'spgl1')
t_elTEMPORAL2 = time() - t1
#%%
# NOTE(review): dangling timer start — the timed statement appears to be in
# a following chunk not visible here.
t1 = time()
# Pull reference values out of the loaded MATLAB struct; *1.0 forces a
# numeric (float) copy of each array.
f = efty_params['fin'] * 1.0
A_in = efty_params['Ain'] * 1.0
P = efty_params['P'][0, 0] # necessary because of the way it is stored
A = efty_params['A'] * 1.0
b = efty_params['b'] * 1.0
#%%
# Overwrite A with the reference spatial matrix from Amat.mat.
A = sio.loadmat('Amat.mat')['A']
#%%
#A_out,b_out=update_spatial_components(Y,C,f,A_in,d1=d1,d2=d2,sn=sn)
#%%
#A_out,b_out=update_spatial_components(Y,C,f,A_in,d1=P.d1,d2=P.d2,g=P.g,sn=P.sn)
A_out, b_out = update_spatial_components(Y, C, f, A_in, d1=P.d1, d2=P.d2, sn=P.sn)
#%%
# Relative L1 difference against the MATLAB reference; the result is
# discarded — this is an interactive-cell leftover meant for inspection.
np.sum(np.abs(A.todense() - A_out.todense())) / np.sum(np.abs(A.todense()))
# Visual side-by-side comparison of reference vs recomputed components.
pl.imshow(A.todense(), aspect='auto', interpolation='none')
pl.figure()
pl.imshow(A_out.todense(), aspect='auto', interpolation='none')
#%%
#np.savez('demo_post_spatial',Y=Y,b_out=b_out,C_in=C,f_in=f,d1=P.d1,d2=P.d2,g=P.g,sn=P.sn,P=P)
##%%
#import cPickle as pickle
#import numpy as np
#import scipy.sparse
def update_spatial(self):
    """Update the spatial footprints and background of this object.

    Wraps ``update_spatial_components``: inputs are the stored reshaped
    movie ``self.Yr``, temporal estimates ``self.Cin`` and ``self.fin``,
    initial footprints ``self.Ain``, and noise levels ``self.P['sn']``;
    results are written back to ``self.A`` and ``self.b``.
    """
    d1, d2 = self.Y.shape[0], self.Y.shape[1]
    result = update_spatial_components(self.Yr, self.Cin, self.fin,
                                       self.Ain, d1=d1, d2=d2,
                                       sn=self.P['sn'])
    self.A, self.b = result
plt.gca().invert_yaxis() #%% crd = plot_contours(coo_matrix(Ain[:,::-1]),Cn,thr=0.9) #%% active_pixels = np.squeeze(np.nonzero(np.sum(Ain,axis=1))) Yr = np.reshape(Y,(d1*d2,T),order='F') p = 2; P = arpfit(Yr,p=1,pixels = active_pixels) Y_res = Yr - np.dot(Ain,Cin) model = ProjectedGradientNMF(n_components=1, init='random', random_state=0) model.fit(np.maximum(Y_res,0)) fin = model.components_.squeeze() #%% t1 = time() A,b,Cin = update_spatial_components(Yr, Cin, fin, Ain, d1=d1, d2=d2, sn = P['sn'],dist=2,max_size=8,min_size=3) t_elSPATIAL = time() - t1 #%% crd = plot_contours(A,Cn2,thr=0.9,cmap=pl.cm.gray) #%% t1 = time() C,f,Y_res,Pnew = update_temporal_components(Yr,A,b,Cin,fin,ITER=2,deconv_method = 'spgl1') t_elTEMPORAL2 = time() - t1 #%% t1 = time() A_sp=A.tocsc(); A_m,C_m,nr_m,merged_ROIs,P_m=mergeROIS(Y_res,A_sp,b,np.array(C),f,d1,d2,Pnew,sn=P['sn'],thr=.7,deconv_method='spgl1',min_size=3,max_size=8,dist=2) t_elMERGE = time() - t1 #%% crd = plot_contours(A_m,Cn2,thr=0.9) #%%
# Reshape the movie to pixels x time (Fortran order matches MATLAB layout).
Yr = np.reshape(Y, (d1 * d2, T), order='F')
# NOTE(review): p is assigned 2 but arpfit below receives the literal p=2,
# so this local is redundant — confirm intent.
p = 2
P = arpfit(Yr, p=2, pixels=active_pixels)
#%% nmf
# Rank-1 NMF on the positive residual to initialize the background fin.
Y_res = Yr - np.dot(Ain, Cin)
model = ProjectedGradientNMF(n_components=1, init='random', random_state=0)
model.fit(np.maximum(Y_res, 0))
fin = model.components_.squeeze()
#%% update spatial components
t1 = time()
A, b = update_spatial_components(Yr, Cin, fin, Ain, d1=d1, d2=d2, sn=P['sn'])
t_elSPATIAL = time() - t1  # wall-clock time of the spatial update
#%%
t1 = time()
C, f, Y_res, Pnew = update_temporal_components(Yr, A, b, Cin, fin, ITER=2)
t_elTEMPORAL1 = time() - t1  # wall-clock time of the temporal update
#%% solving using spgl1 for deconvolution
t1 = time()
# NOTE(review): this call is truncated here — its remaining arguments
# continue beyond this chunk.
C2, f2, Y_res2, Pnew2 = update_temporal_components(Yr, A, b, Cin, fin, ITER=2,
# Pull reference values out of the loaded MATLAB struct; *1.0 forces a
# numeric (float) copy of each array.
Y=efty_params['Yr']*1.0
C=efty_params['Cin']*1.0
f=efty_params['fin']*1.0
A_in=efty_params['Ain']*1.0
P=efty_params['P'][0,0] # necessary because of the way it is stored
A=efty_params['A']*1.0
b=efty_params['b']*1.0
#%%
# Overwrite A with the reference spatial matrix from Amat.mat.
A=sio.loadmat('Amat.mat')['A']
#%%
#A_out,b_out=update_spatial_components(Y,C,f,A_in,d1=d1,d2=d2,sn=sn)
#%%
#A_out,b_out=update_spatial_components(Y,C,f,A_in,d1=P.d1,d2=P.d2,g=P.g,sn=P.sn)
A_out,b_out=update_spatial_components(Y,C,f,A_in,d1=P.d1,d2=P.d2,sn=P.sn)
#%%
# Relative L1 difference against the MATLAB reference; the value is
# discarded — interactive-cell leftover meant for manual inspection.
np.sum(np.abs(A.todense()-A_out.todense()))/np.sum(np.abs(A.todense()))
# Visual side-by-side comparison of reference vs recomputed components.
pl.imshow(A.todense(),aspect='auto',interpolation='none')
pl.figure()
pl.imshow(A_out.todense(),aspect='auto',interpolation='none')
#%%
#np.savez('demo_post_spatial',Y=Y,b_out=b_out,C_in=C,f_in=f,d1=P.d1,d2=P.d2,g=P.g,sn=P.sn,P=P)
##%%
#import cPickle as pickle
#import numpy as np
#import scipy.sparse
#with open('demo_post.dat', 'wb') as outfile:
def mergeROIS(Y_res, A, b, C, f, d1, d2, P_, thr=0.8, mx=50, sn=None,
              deconv_method='spgl1', min_size=3, max_size=8, dist=3,
              method_exp='ellipse',
              expandCore=iterate_structure(generate_binary_structure(2, 1), 2).astype(int)):
    """Merge spatially overlapping components with highly correlated temporal activity.

    Components whose temporal traces correlate at or above `thr` AND whose
    spatial footprints overlap are grouped via connected components of the
    combined graph; each group is collapsed into one component, which is
    then refined with temporal and spatial update passes.

    Parameters
    ----------
    Y_res : residual movie (d x T) after subtracting all found components
    A : sparse matrix of spatial components (d x nr)
    b : spatial background
    C : matrix of temporal components (nr x T)
    f : temporal background
    d1, d2 : dimensions of the field of view
    P_ : list of per-component parameter dicts (must contain 'gn')
    thr : correlation threshold for merging (default 0.8)
    mx : maximum number of merging operations (default 50)
    sn : noise level per pixel
    deconv_method, min_size, max_size, dist, method_exp, expandCore :
        forwarded to the spatial/temporal update routines

    Returns
    -------
    A : updated matrix of spatial components
    C : updated matrix of temporal components
    nr : new number of components
    merged_ROIs : list of index arrays of the original components merged
    P_new : updated list of parameter dicts

    Written by:
    Andrea Giovannucci from implementation of Eftychios A. Pnevmatikakis,
    Simons Foundation, 2015
    """
    nr = A.shape[1]
    [d, T] = np.shape(Y_res)

    # Graph of strongly correlated temporal components.
    C_corr = np.corrcoef(C[:nr, :], C[:nr, :])[:nr, :nr]
    FF1 = C_corr >= thr
    # Graph of overlapping spatial components; zero the diagonal so a
    # component does not count as overlapping itself.
    A_corr = A.T * A
    A_corr.setdiag(0)
    FF2 = A_corr > 0
    # A merge requires both high temporal correlation and spatial overlap.
    FF3 = np.logical_and(FF1, FF2.todense())
    FF3 = coo_matrix(FF3)
    c, l = csgraph.connected_components(FF3)  # extract connected components

    p = len(P_[0]['gn'])  # AR order of the temporal dynamics
    MC = []
    for i in range(c):
        if np.sum(l == i) > 1:  # keep only groups with >= 2 members
            MC.append((l == i).T)
    MC = np.asarray(MC).T

    if MC.ndim > 1:
        # Total pairwise correlation within each group: merge priority score.
        cor = np.zeros((np.shape(MC)[1], 1))
        for i in range(np.size(cor)):
            fm = np.where(MC[:, i])[0]
            for j1 in range(np.size(fm)):
                for j2 in range(j1 + 1, np.size(fm)):
                    # FIX: removed leftover debug "print j1, j2" here.
                    cor[i] = cor[i] + C_corr[fm[j1], fm[j2]]

        Y_res = Y_res + np.dot(b, f)  # add background back before refitting

        if np.size(cor) > 1:
            ind = np.argsort(np.squeeze(cor))[::-1]  # most correlated first
        else:
            ind = [0]

        nm = min((np.size(ind), mx))  # number of merging operations
        A_merged = coo_matrix((d, nm)).tocsr()
        C_merged = np.zeros((nm, T))
        P_merged = []
        merged_ROIs = []

        for i in range(nm):
            P_cycle = dict()
            merged_ROI = np.where(MC[:, ind[i]])[0]
            merged_ROIs.append(merged_ROI)
            # Energy of each trace, used to weight the footprint sum.
            nC = np.sqrt(np.sum(C[merged_ROI, :]**2, axis=1))
            # A_merged[:,i] = np.squeeze((A[:,merged_ROI]*spdiags(nC,0,len(nC),len(nC))).sum(axis=1))
            A_merged[:, i] = csr_matrix(
                (A[:, merged_ROI] * spdiags(nC, 0, len(nC), len(nC))).sum(axis=1))
            # Restore the merged components' contribution to the residual.
            Y_res = Y_res + A[:, merged_ROI] * C[merged_ROI, :]
            # Energy-normalized mean trace as the initial temporal estimate.
            aa_1 = scipy.sparse.linalg.spsolve(
                spdiags(nC, 0, len(nC), len(nC)), C[merged_ROI, :])
            aa_2 = (aa_1).mean(axis=0)
            ff = np.nonzero(A_merged[:, i])[0]
            # Temporal -> spatial -> temporal refinement passes.
            cc, _, _, Ptemp = update_temporal_components(
                np.asarray(Y_res[ff, :]), A_merged[ff, i], b[ff], aa_2, f,
                p=p, deconv_method=deconv_method)
            aa, bb, cc = update_spatial_components(np.asarray(Y_res), cc, f,
                                                  A_merged[:, i], d1=d1,
                                                  d2=d2, sn=sn,
                                                  min_size=min_size,
                                                  max_size=max_size,
                                                  dist=dist,
                                                  method=method_exp,
                                                  expandCore=expandCore)
            A_merged[:, i] = aa.tocsr()
            cc, _, _, Ptemp = update_temporal_components(
                Y_res[ff, :], A_merged[ff, i], bb[ff], cc, f,
                p=p, deconv_method=deconv_method)
            P_cycle = P_[merged_ROI[0]].copy()
            P_cycle['gn'] = Ptemp[0]['gn']
            P_cycle['b'] = Ptemp[0]['b']
            P_cycle['c1'] = Ptemp[0]['c1']
            P_cycle['neuron_sn'] = Ptemp[0]['neuron_sn']
            P_merged.append(P_cycle)
            C_merged[i, :] = cc
            if i + 1 < nm:
                # Remove this merged component from the residual before the
                # next merge operation.
                Y_res[ff, :] = Y_res[ff, :] - A_merged[ff, i] * cc

        # Drop the old components and append the merged replacements.
        neur_id = np.unique(np.hstack(merged_ROIs))
        good_neurons = np.setdiff1d(range(nr), neur_id)
        A = scipy.sparse.hstack((A[:, good_neurons], A_merged.tocsc()))
        C = np.vstack((C[good_neurons, :], C_merged))
        # P_new=list(P_[good_neurons].copy())
        P_new = [P_[pp] for pp in good_neurons]
        for p in P_merged:
            P_new.append(p)
        nr = nr - len(neur_id) + nm
    else:
        warnings.warn('No neurons merged!')
        merged_ROIs = []
        P_new = P_

    return A, C, nr, merged_ROIs, P_new
# NOTE(review): the next two statements appear to be the tail of an
# if/else whose header lies before this chunk (accept the MATLAB fin on
# match, otherwise raise) — confirm against the preceding context.
fin=fin_m
raise Exception( "Arpfit outputs don't match!")
#%% reload all values from matlab result
demo_results= sio.loadmat('demo_results.mat',struct_as_record=False, squeeze_me=True)
# *1.0 forces a numeric (float) copy of each loaded array.
Yr=demo_results['Yr']*1.0;
Cin=demo_results['Cin']*1.0
fin=demo_results['fin']*1.0
Ain=demo_results['Ain']*1.0
P_m=demo_results['P']
# Rebuild the parameter dict from the MATLAB struct fields.
P=dict()
P['g']=P_m.g;
P['sn']=P_m.sn;
#%% update spatial components
t1 = time()
A,b = update_spatial_components(Yr, Cin, fin, Ain, d1=d1, d2=d2, min_size=3, max_size=8, dist=3,sn = P['sn'], method = 'ellipse')
t_elSPATIAL = time() - t1  # wall-clock time of the spatial update
#%% check with matlab
demo_results= sio.loadmat('demo_results.mat', struct_as_record=False, squeeze_me=True)
A_m=demo_results['A']*1.0
P_m=demo_results['P']
b_m =np.expand_dims(demo_results['b']*1.0,axis=1)
# Relative L1 differences against the MATLAB reference values.
print np.sum(np.abs(A-A_m).todense())/np.sum(np.abs(A).todense()) # should give 0.0035824510737
print np.sum(np.abs(b-b_m))/np.sum(np.abs(b_m)) # should give 0.0032486662048
#%%
t1 = time()
C,f,Y_res_temp,P_temp = update_temporal_components(Yr,A,b,Cin,fin,ITER=2,method='constrained_foopsi',deconv_method = 'cvx', g='None')
t_elTEMPORAL1 = time() - t1  # wall-clock time of the temporal update