def _forward_dataset(self, ds):
    """Map each sample of ``ds`` onto a k-means cluster label.

    Parameters
    ----------
    ds : Dataset
        Input dataset; ``ds.samples`` is clustered either globally or
        separately per chunk, depending on the mapper's chunks attribute.

    Returns
    -------
    Dataset
        A new dataset whose ``samples`` holds one integer cluster label
        per input sample; dataset attributes (``.a``) are carried over.
    """
    chunks_attr = self.__chunks_attr
    mds = Dataset([])
    # carry over dataset-level attributes only; sample/feature attributes
    # would no longer align with the label vector
    mds.a = ds.a
    if chunks_attr is None:
        # global kmeans over all samples at once
        mds.samples = self._kmeans(ds.samples).labels_
    else:
        # per-chunk kmeans: allocate the output label vector ONCE.
        # (The original re-bound ``mds.samples`` to a view of
        # ``ds.samples[0, :]`` on every loop iteration, which both wiped
        # the labels of previously processed chunks and mutated the
        # input dataset through the view.)
        mds.samples = np.zeros(len(ds.samples), dtype=int)
        for c in ds.sa[chunks_attr].unique:
            slicer = np.where(ds.sa[chunks_attr].value == c)[0]
            mds.samples[slicer] = self._kmeans(ds.samples[slicer]).labels_
    return mds
def _forward_dataset(self, ds):
    """Map each sample of ``ds`` onto a k-means cluster label.

    Parameters
    ----------
    ds : Dataset
        Input dataset; ``ds.samples`` is clustered either globally or
        separately per chunk, depending on the mapper's chunks attribute.

    Returns
    -------
    Dataset
        A new dataset whose ``samples`` holds one integer cluster label
        per input sample; dataset attributes (``.a``) are carried over.
    """
    chunks_attr = self.__chunks_attr
    mds = Dataset([])
    # carry over dataset-level attributes only; sample/feature attributes
    # would no longer align with the label vector
    mds.a = ds.a
    if chunks_attr is None:
        # global kmeans over all samples at once
        mds.samples = self._kmeans(ds.samples).labels_
    else:
        # per-chunk kmeans: allocate the output label vector ONCE.
        # (The original re-bound ``mds.samples`` to a view of
        # ``ds.samples[0, :]`` on every loop iteration, which both wiped
        # the labels of previously processed chunks and mutated the
        # input dataset through the view.)
        mds.samples = np.zeros(len(ds.samples), dtype=int)
        for c in ds.sa[chunks_attr].unique:
            slicer = np.where(ds.sa[chunks_attr].value == c)[0]
            mds.samples[slicer] = self._kmeans(ds.samples[slicer]).labels_
    return mds
def _forward_dataset(self, ds):
    """Compute neighborhood offsets/distances for the non-zero voxels of ``ds``.

    Parameters
    ----------
    ds : Dataset
        Input dataset; voxels with non-zero sample values are selected and
        their coordinates (first sample-attribute array) are used to build
        a volume-neighborhood structure.

    Returns
    -------
    Dataset
        New dataset whose ``samples`` is the neighbor distance mask,
        sparse when the mapper was configured with ``outsparse=True``,
        dense otherwise; ``.a`` is carried over and ``.fa`` holds the
        selected voxel coordinates.

    Raises
    ------
    RuntimeError
        If the ``outsparse`` setting is neither True nor False.
    """
    out_ds = Dataset([])
    out_ds.a = ds.a
    # indices of voxels with non-zero sample values
    iv = np.nonzero(ds.samples)[0]
    # first sample-attribute array is assumed to hold voxel coordinates
    # — TODO confirm against the mapper's constructor
    coords = ds.sa.values()[0][iv]
    out_ds.fa = coords
    dim = ds.a.voxel_dim
    nbdim = self.__neighbor_shape.nbdim
    nbsize = self.__neighbor_shape.nbsize
    shape_type = self.__neighbor_shape.shape_type
    volnb = volneighbors(coords, dim, nbdim, nbsize, shape_type)
    # Original assigned to ``distmsk`` but used ``distmask`` below,
    # which raised NameError on every call; also removed a leftover
    # ``pdb.set_trace()`` debugger breakpoint.
    distmask = volnb.compute_offsets()
    if self.__outsparse == True:
        # keep the sparse representation as-is
        out_ds.samples = distmask
    elif self.__outsparse == False:
        out_ds.samples = distmask.todense()
    else:
        raise RuntimeError('outsparse should be True or False.')
    return out_ds
def test_pcamapper():
    """Exercise PCAMapper training, forward projection and reversal."""
    # Build a 40x20 dataset (samples x features): every feature is the
    # same 0..39 line, so there is exactly one direction of variance.
    line = np.concatenate([np.arange(40) for i in range(20)])
    data = Dataset(line.reshape(20, -1).T)
    mapper = PCAMapper()
    # integer samples are rejected by the underlying MDP node
    assert_raises(mdp.NodeException, mapper.train, data)
    data.samples = data.samples.astype('float')
    noisy = data.copy()
    noisy.samples += np.random.random(size=data.samples.shape)
    # we have no variance for more than one PCA component, hence just one
    # actual non-zero eigenvalue -- training on the clean data still fails
    assert_raises(mdp.NodeException, mapper.train, data)
    mapper.train(noisy)
    assert_equal(mapper.proj.shape, (20, 20))
    # project the clean data into PCA space
    projected = mapper.forward(data.samples)
    assert_equal(projected.shape, (40, 20))
    # the mapped data must be fully recoverable via reverse()
    assert_array_almost_equal(mapper.reverse(projected), data)
def _forward_dataset(self, ds):
    """Return a dataset holding the pairwise distance matrix of ``ds.samples``.

    The configured distance function produces a condensed distance
    vector, which is expanded to (or validated as) a square matrix via
    ``squareform``; dataset attributes (``.a``) are carried over.
    """
    result = Dataset([])
    result.a = ds.a
    condensed = self._fdistance(ds.samples)
    # force='no' lets squareform infer the direction; checks=True keeps
    # the symmetry/zero-diagonal validation enabled
    result.samples = squareform(condensed, force='no', checks=True)
    return result
def main():
    '''
    Spectral clustering of seed-mask voxels from a connectivity profile.

    Loads a connectivity matrix from HDF5, masks weak features, mixes a
    correlation-based similarity with a spatial-distance term, runs
    sklearn SpectralClustering on the precomputed affinity, and writes
    the cluster labels back out as a NIfTI volume.
    '''
    st = time.time()
    tmpset = Dataset([])
#    hfilename = "/nfs/j3/userhome/dangxiaobin/workingdir/cutROI/%s/fdt_matrix2_targets_sc.T.hdf5"%(id)
    hfilename = 'fdt_matrix2.T.hdf5'
    print hfilename
    #load connectivity profile of seed mask voxels
    conn = open_conn_mat(hfilename)
    tmpset.a = conn.a
    print conn.shape,conn.a
    #remove some features (mask thresholds presumably: lower bound 0.5,
    #mode 1 -- TODO confirm against create_mask's signature)
    mask = create_mask(conn.samples,0.5,1)
    # print mask,mask.shape
    conn_m = mask_feature(conn.samples,mask)
    # print conn_m
    # NOTE(review): ``map`` shadows the builtin; rows are now voxels,
    # columns are retained connectivity features
    map = conn_m.T
    print "map:"
    print map.shape,map.max(),map.min()
    # first feature-attribute array is assumed to hold voxel coordinates
    # -- TODO confirm
    voxel = np.array(conn.fa.values())
    print voxel[0]
    v = voxel[0]
    # ``ds`` here is presumably scipy.spatial.distance -- verify import
    spacedist = ds.cdist(v,v,'euclidean')
    print spacedist
    """
    similar_mat = create_similarity_mat(map,conn.fa,0.1,2)
    X = np.array(similar_mat)
    print "similarity matrix: shape:",X.shape
    print X
    """
    # similarity = 0.1 * |correlation| + 0.9 * inverse spatial distance
    corr = np.corrcoef(map)
    corr = np.abs(corr)
    corr = 0.1*corr + 0.9/(spacedist+1)
    print "Elaspsed time: ", time.time() - st
    print corr.shape,corr
    # visualize the mixed affinity matrix
    plt.imshow(corr,interpolation='nearest',cmap=cm.jet)
    cb = plt.colorbar()
    pl.xticks(())
    pl.yticks(())
    pl.show()
    # spectral clustering on the precomputed affinity matrix
    cnum = 3
    near = 100
    sc = SpectralClustering(cnum,'arpack',None,100,1,'precomputed',near,None,True)
    #sc.fit(map)
    sc.fit_predict(corr)
    '''
    cnum = 3
    near = 100
    sc = SpectralClustering(cnum,'arpack',None,100,1,'nearest_neighbors',near,None,True)
    sc.fit(map)
#    sc.fit_predict(X)
#    param = sc.get_params(deep=True)
    '''
    # shift labels to start at 1 so 0 can serve as background in the volume
    tmpset.samples = sc.labels_+1
#    print sc.affinity_matrix_
    #print list(sc.labels_)
    print "Elaspsed time: ", time.time() - st
    print "Number of voxels: ", sc.labels_.size
    print "Number of clusters: ", np.unique(sc.labels_).size
    # write the parcellation back into subject space
    result = map2nifti(tmpset)
    result.to_filename("fg_parcel_S0006.nii.gz")
    print ".....The end........"
def spectral_seg(hfilename,outf):
    '''
    Spectral segmentation of seed-mask voxels from a connectivity profile.

    Parameters
    ----------
    hfilename : str
        Path to the HDF5 file holding the connectivity Dataset (h5load).
    outf : str
        Output NIfTI filename for the resulting parcellation.

    Returns
    -------
    bool
        Always True on completion.
    '''
    tmpset = Dataset([])
    #pdb.set_trace()
    print "hdf name:",hfilename
    st = time.time()
    ###1.load connectivity profile of seed mask voxels
    conn = h5load(hfilename)
    tmpset.a = conn.a
    print "connection matrix shape:"
    print conn.shape
    ###2.features select (threshold 5 -- units depend on create_mask)
    mask = create_mask(conn.samples,5)
    conn_m = conn.samples[mask]
    # NOTE(review): ``map`` shadows the builtin; rows are voxels after
    # the transpose, columns are retained connectivity features
    map = conn_m.T
    print "masked conn matrix:"
    print map.shape,map.max(),map.min()
    ###3.average the connection profile with the profiles of each voxel's
    ###  spatial neighbors (running mean over neighbors found in the mask)
    temp = np.zeros(map.shape)
    # first feature-attribute array is assumed to hold voxel coordinates
    # -- TODO confirm
    voxel = np.array(conn.fa.values())
    v = voxel[0]
    v = v.tolist()
    shape = [256,256,256]
    i = 0
    for coor in v:
        mean_f = map[i]
        #print mean_f.shape
        #plt.plot(mean_f)
        #plt.show()
        # neighbors within radius 2 inside a 256^3 volume
        neigh =get_neighbors(coor,2,shape)
        #print "neigh:",neigh
        count = 1
        for n in neigh:
            # only average with neighbors that are themselves in the mask
            if n in v:
                # incremental (running) mean of the connectivity profiles
                mean_f = (mean_f*count + map[v.index(n)])/(count+1)
                count+=1
        temp[i] = mean_f
        i+=1
    #sys.exit(0)
    map = temp
    print "average connection matrix"
    ###4.spacial distance (``ds`` is presumably scipy.spatial.distance)
    spacedist = ds.cdist(v,v,'euclidean')
    #print spacedist
    ###5.correlation matrix
    corr = np.corrcoef(map)
    corr = np.abs(corr)
    ###6.mix similariry matrix: 0.7 * |correlation| + 0.3 * inverse distance
    corr = 0.7*corr + 0.3/(spacedist+1)
    #plt.imshow(corr,interpolation='nearest',cmap=cm.jet)
    #cb = plt.colorbar()
    #pl.xticks(())
    #pl.yticks(())
    #pl.show()
    print "mix up the corr and spacial matrix"
    #sys.exit(0)
    ###7.spectral segmentation on the precomputed affinity matrix
    print "do segmentation"
    cnum = 3
    near = 100
    sc = SpectralClustering(cnum,'arpack',None,100,1,'precomputed',near,None,True)
    sc.fit_predict(corr)
    # shift labels to start at 1 so 0 can serve as background in the volume
    tmpset.samples = sc.labels_+1
    print "Number of voxels: ", sc.labels_.size
    print "Number of clusters: ", np.unique(sc.labels_).size
    print "Elapsed time: ", time.time() - st
    ###8.save the segmentation result.
    print "save the result to xxx_parcel.nii.gz"
    result = map2nifti(tmpset)
    result.to_filename(outf)
    print ".....Segment end........"
    return True
def main():
    '''
    Spectral clustering of seed-mask voxels from a connectivity profile.

    Loads a connectivity matrix from HDF5, masks weak features, mixes a
    correlation-based similarity with a spatial-distance term, runs
    sklearn SpectralClustering on the precomputed affinity, and writes
    the cluster labels back out as a NIfTI volume.
    '''
    st = time.time()
    tmpset = Dataset([])
#    hfilename = "/nfs/j3/userhome/dangxiaobin/workingdir/cutROI/%s/fdt_matrix2_targets_sc.T.hdf5"%(id)
    hfilename = 'fdt_matrix2.T.hdf5'
    print hfilename
    #load connectivity profile of seed mask voxels
    conn = open_conn_mat(hfilename)
    tmpset.a = conn.a
    print conn.shape, conn.a
    #remove some features (mask thresholds presumably: lower bound 0.5,
    #mode 1 -- TODO confirm against create_mask's signature)
    mask = create_mask(conn.samples, 0.5, 1)
    # print mask,mask.shape
    conn_m = mask_feature(conn.samples, mask)
    # print conn_m
    # NOTE(review): ``map`` shadows the builtin; rows are now voxels,
    # columns are retained connectivity features
    map = conn_m.T
    print "map:"
    print map.shape, map.max(), map.min()
    # first feature-attribute array is assumed to hold voxel coordinates
    # -- TODO confirm
    voxel = np.array(conn.fa.values())
    print voxel[0]
    v = voxel[0]
    # ``ds`` here is presumably scipy.spatial.distance -- verify import
    spacedist = ds.cdist(v, v, 'euclidean')
    print spacedist
    """
    similar_mat = create_similarity_mat(map,conn.fa,0.1,2)
    X = np.array(similar_mat)
    print "similarity matrix: shape:",X.shape
    print X
    """
    # similarity = 0.1 * |correlation| + 0.9 * inverse spatial distance
    corr = np.corrcoef(map)
    corr = np.abs(corr)
    corr = 0.1 * corr + 0.9 / (spacedist + 1)
    print "Elaspsed time: ", time.time() - st
    print corr.shape, corr
    # visualize the mixed affinity matrix
    plt.imshow(corr, interpolation='nearest', cmap=cm.jet)
    cb = plt.colorbar()
    pl.xticks(())
    pl.yticks(())
    pl.show()
    # spectral clustering on the precomputed affinity matrix
    cnum = 3
    near = 100
    sc = SpectralClustering(cnum, 'arpack', None, 100, 1, 'precomputed', near, None, True)
    #sc.fit(map)
    sc.fit_predict(corr)
    '''
    cnum = 3
    near = 100
    sc = SpectralClustering(cnum,'arpack',None,100,1,'nearest_neighbors',near,None,True)
    sc.fit(map)
#    sc.fit_predict(X)
#    param = sc.get_params(deep=True)
    '''
    # shift labels to start at 1 so 0 can serve as background in the volume
    tmpset.samples = sc.labels_ + 1
#    print sc.affinity_matrix_
    #print list(sc.labels_)
    print "Elaspsed time: ", time.time() - st
    print "Number of voxels: ", sc.labels_.size
    print "Number of clusters: ", np.unique(sc.labels_).size
    # write the parcellation back into subject space
    result = map2nifti(tmpset)
    result.to_filename("fg_parcel_S0006.nii.gz")
    print ".....The end........"