Example #1
# Imports assumed by this snippet (not shown in the source):
import numpy as np
import nibabel as nib
import nibabel.freesurfer.io as fsio
from dfsio import readdfs
from fmri_methods_sipi import interpolate_labels


def reduce3_to_bci_lh(reduce3labs):
    class h32k:
        pass

    class h:
        pass

    class s:
        pass

    class bs:
        pass

    class bci:
        pass
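    # These empty classes act as lightweight containers for surface data
    # (vertices, faces, labels) passed between the interpolation steps.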

    ''' reduce3 to h32k'''
    r3 = readdfs('lh.Yeo2011_17Networks_N1000_reduce3.dfs')
    r3.labels = np.squeeze(reduce3labs.T)
    '''h32k to full res FS'''
    g_surf = nib.load('/big_disk/ajoshi/HCP_data/reference/100307/MNINonLinea\
r/Native/100307.L.very_inflated.native.surf.gii')
    h.vertices = g_surf.darrays[0].data
    h.faces = g_surf.darrays[1].data
    h = interpolate_labels(r3, h)
    ''' native FS ref to native FS BCI'''
    g_surf = nib.load('/big_disk/ajoshi/HCP_data/reference/100307/MNINon\
Linear/Native/100307.L.sphere.reg.native.surf.gii')
    s.vertices = g_surf.darrays[0].data
    s.faces = g_surf.darrays[1].data
    s.labels = h.labels
    ''' map to bc sphere'''
    bs.vertices, bs.faces = fsio.read_geometry('/big_disk/ajoshi/data/BCI\
_DNI_Atlas/surf/lh.sphere.reg')
    bs = interpolate_labels(s, bs)
    bci.vertices, bci.faces = fsio.read_geometry(
        '/big_disk/ajoshi/data/BCI_DNI_A\
tlas/surf/lh.white')
    bci.labels = bs.labels
    #   writedfs('BCI_orig_rh.dfs', bci)

    bci.vertices, bci.faces = fsio.read_geometry(
        '/big_disk/ajoshi/data/BCI_DNI_A\
tlas/surf/lh.inflated')
    #    view_patch(bci, bci.labels)

    #    writedfs('BCI_pial_rh.dfs.', bci)

    bci.vertices, bci.faces = fsio.read_geometry('/big_disk/ajoshi/data/BCI_\
DNI_Atlas/surf/lh.white')
    #    writedfs('BCI_white_rh.dfs.', bci)

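    # Shift FreeSurfer (center-origin) coordinates into the BrainSuite
    # (corner-origin) frame; the offsets appear to be half the volume
    # extent in mm (voxels x voxel size).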
    bci.vertices[:, 0] += 96 * 0.8
    bci.vertices[:, 1] += 192 * 0.546875
    bci.vertices[:, 2] += 192 * 0.546875
    bci_bst = readdfs('/big_disk/ajoshi/data/BCI-DNI_brain_atlas/BCI-DNI_\
brain.left.inner.cortex.dfs')
    bci_bst = interpolate_labels(bci, bci_bst)
    labs = bci_bst.labels
    return labs
Example #2
# Imports assumed by these snippets (not shown in the source):
import os
import numpy as np
import scipy.io
import dfsio
import excepts  # exception classes used below; module from the original project
from dfsio import readdfs


def sub(file1, file2, fileout):
    s1 = dfsio.readdfs(file1)
    s2 = dfsio.readdfs(file2)
    sout = s1
    # raise if either input is missing attributes (the subtraction needs both)
    if not s1.attributes.any() or not s2.attributes.any():
        raise excepts.AttributeMissingError(
            'Missing attributes in {0:s} and/or {1:s}'.format(
                file1,
                file2))  # TODO: Change this to a custom exception in future
    if len(s1.attributes) != len(s2.attributes):
        raise excepts.AttributeLengthError(
            'Attribute lengths of {0:s} and {1:s} do not match.\n'.format(
                file1, file2))
    sout.attributes = s1.attributes - s2.attributes
    dfsio.writedfs(fileout, sout)


def ALL_parcellate_region(roilist, sub, R_all, scan_type):
    p_dir = '/home/ajoshi/data/HCP_data'
    r_factor = 3
    ref_dir = os.path.join(p_dir, 'reference')
    ref = '100307'
    fn1 = ref + '.reduce' + str(r_factor) + '.LR_mask.mat'
    fname1 = os.path.join(ref_dir, fn1)
    msk = scipy.io.loadmat(fname1)  # h5py.File(fname1);
    '''dfs_left = readdfs(os.path.join(p_dir, 'reference', ref + '.aparc.\
a2009s.32k_fs.reduce3.left.dfs'))
    dfs_left_sm = readdfs(os.path.join(p_dir, 'reference', ref + '.aparc.\
a2009s.32k_fs.reduce3.very_smooth.left.dfs'))'''

    dfs_left_sm = readdfs(
        os.path.join('/home/ajoshi/for_gaurav',
                     '100307.BCI2reduce3.very_smooth.' + scan_type + '.dfs'))
    dfs_left = readdfs(
        os.path.join('/home/ajoshi/for_gaurav',
                     '100307.BCI2reduce3.very_smooth.' + scan_type + '.dfs'))

    data = scipy.io.loadmat(
        os.path.join(
            p_dir, 'data', sub, sub + '.rfMRI_REST' + str(1) + '_RL.\
reduce3.ftdata.NLM_11N_hvar_25.mat'))

    LR_flag = msk['LR_flag']

    # LR_flag: 0 = right hemisphere, 1 = left hemisphere
    if scan_type == 'right':
        LR_flag = np.squeeze(LR_flag) == 0
    else:
        LR_flag = np.squeeze(LR_flag) == 1
    data = data['ftdata_NLM']
    temp = data[LR_flag, :]
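    # z-score each vertex's time series (zero mean, unit variance over time)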
    m = np.mean(temp, 1)
    temp = temp - m[:, None]
    s = np.std(temp, 1) + 1e-16
    temp = temp / s[:, None]
    msk_small_region = np.in1d(dfs_left.labels, roilist)
    if R_all.size == 0:
        R_all = temp
    else:
        R_all = np.concatenate((R_all, temp), axis=1)
    print(R_all.shape[1])

    return (R_all, msk_small_region, dfs_left_sm.vertices, dfs_left_sm.faces)
Example #4
import os
import numpy as np
from dfsio import readdfs
# dir_session, session_to_session, box_plot, left_hemisphere and roiregion are
# assumed to be defined elsewhere in the original module.


def RI_mean(lst):
    import scipy as sp
    roilists = []
    scan_type = ['left', 'right']
    for hemi in range(0, 2):  # iterate over both hemispheres
        direct = np.load('very_smooth_data_' + scan_type[hemi] + '.npz')
        if len(roilists) == 0:
            roilists = direct['roilists'].tolist()
        else:
            roilists += direct['roilists'].tolist()
    map_roilists = {}
    for i in range(left_hemisphere.shape[0]):
        map_roilists[left_hemisphere[i]] = roiregion[i]
    refined_left = readdfs(
        os.path.join('/home/ajoshi/for_gaurav',
                     '100307.BCI2reduce3.very_smooth.left.dfs'))
    refined_left = refined_left.labels
    refined_right = readdfs(
        os.path.join('/home/ajoshi/for_gaurav',
                     '100307.BCI2reduce3.very_smooth.right.dfs'))
    refined_right = refined_right.labels
    temp = []
    temp1 = []
    cnt = 0
    for roilist in map_roilists.keys():
        # print(roilist)
        cnt += 1
        # msk_small_region = np.in1d(refined_right,roilist)
        msk_small_region = np.in1d(refined_left, roilist)
        if len(temp) > 0:
            # temp = sp.vstack([calculate_mean(msk_small_region, sub, scan_type[1]), temp])
            temp = sp.vstack(
                [dir_session(msk_small_region, scan_type[0], lst), temp])
            temp1 = sp.vstack([
                session_to_session(msk_small_region, scan_type[0], lst), temp1
            ])
        else:
            # temp=calculate_mean(msk_small_region,sub,scan_type[1])
            temp = dir_session(msk_small_region, scan_type[0], lst)
            temp1 = session_to_session(msk_small_region, scan_type[0], lst)
            # msk_small_region = np.in1d(refined_left, roilist+1)
            # temp=sp.vstack([calculate_mean(msk_small_region,sub,scan_type[0]),temp])
    box_plot(temp.tolist(), temp1.tolist(), map_roilists, '60')
Example #5
import dfsio  # assumed import


def dfs(filename):
    NFV = dfsio.readdfs(filename)
    coords = NFV.vertices
    faces = NFV.faces
    if hasattr(NFV, 'attributes'):
        attributes = NFV.attributes
    else:
        attributes = []
    isMultilevelUCF = False
    return coords, faces, attributes, isMultilevelUCF
Example #6
    # Method of a phenotype/data-frame loader class; numpy (np) and dfsio are
    # assumed to be imported at module level in the original source.
    def __init__(self, demographics_file, model, max_block_size=2000):
        self.demographic_data = ''
        self.dataframe = None
        self.phenotype_files = []
        self.phenotype_array = None
        self.pre_data_frame = {}
        self.surface_average = None
        self.phenotype_dataframe = None
        # self.attribue_matrix = None
        self.phenotype_array = []
        self.demographic_data = self.read_demographics(demographics_file)
        self.data_read_flag = False
        self.max_block_size = max_block_size

        # if not model.phenotype_attribute_matrix_file and not model.phenotype:
        #     sys.stdout.write('Error: Phenotype is not set. Data frame will not be created.')
        #     return

        # # Choose the phenotype_attribute_matrix binary data if phenotype is also set
        # if model.phenotype_attribute_matrix_file and model.phenotype:
        #     self.read_subject_phenotype_attribute_matrix(model)
        #     self.create_data_frame(model)
        #     return

        # if model.phenotype:
        #     self.read_subject_phenotype(model)

        s1_atlas = dfsio.readdfs(model.atlas_surface)
        self.phenotype_array = self.read_aggregated_attributes_from_surfacefilelist(self.demographic_data[model.fileid],
                                                                                    s1_atlas.vertices.shape[0])

        if len(self.phenotype_array) == 0:
            self.data_read_flag = False
        else:
            self.data_read_flag = True
            # self.create_data_frame(model)

            self.blocks_idx = []
            # At this point the data is completely read, so create indices of blocks
            if self.phenotype_array.shape[1] > self.max_block_size:
                quotient, remainder = divmod(self.phenotype_array.shape[1], self.max_block_size)
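                # build (start, end) pairs: `quotient` full blocks of
                # max_block_size columns, plus a final short block for the
                # remainder if needed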
                for i in np.arange(quotient)+1:
                    self.blocks_idx.append(((i-1)*self.max_block_size, (i-1)*self.max_block_size + self.max_block_size))
                if remainder != 0:
                    i = quotient + 1
                    self.blocks_idx.append(((i-1)*self.max_block_size, (i-1)*self.max_block_size + remainder))
            else:
                self.blocks_idx.append((0, self.phenotype_array.shape[1]))
            return
Example #7
import os
import scipy as sp  # note: sp.zeros relies on older scipy re-exporting numpy functions
from dfsio import readdfs
from surfproc import smooth_patch, patch_color_attrib  # assumed module for these helpers
# VTK_INSTALLED is a module-level flag in the original source.


def label_surf(pval, colorbar_lim, smooth_iter, colormap, bfp_path='.'):
    lsurf = readdfs(os.path.join(bfp_path, 'supp_data/bci32kleft.dfs'))
    rsurf = readdfs(os.path.join(bfp_path, 'supp_data/bci32kright.dfs'))
    num_vert = lsurf.vertices.shape[0]
    lsurf.attributes = sp.zeros((lsurf.vertices.shape[0]))
    rsurf.attributes = sp.zeros((rsurf.vertices.shape[0]))

    if VTK_INSTALLED:
        #smooth surfaces
        lsurf = smooth_patch(lsurf, iterations=smooth_iter)
        rsurf = smooth_patch(rsurf, iterations=smooth_iter)
    else:
        print('VTK is not installed, surface will not be smoothed')

    # write on surface attributes
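    # pval holds one value per grayordinate: the first num_vert entries map to
    # the left hemisphere, the next num_vert to the right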
    lsurf.attributes = pval.squeeze()
    lsurf.attributes = lsurf.attributes[:num_vert]
    rsurf.attributes = pval.squeeze()
    rsurf.attributes = rsurf.attributes[num_vert:2 * num_vert]

    lsurf = patch_color_attrib(lsurf, clim=colorbar_lim, cmap=colormap)
    rsurf = patch_color_attrib(rsurf, clim=colorbar_lim, cmap=colormap)

    return lsurf, rsurf
Example #8
import numpy as np
import dfsio  # assumed imports


def read_dfs_to_label(path, shape):
    '''
    read .dfs file and return 3d matrix of label
    :param path: (String) file path
    :param shape: (tuple) img shape
    :return: (np.array) 3d matrix of label
    '''
    reader = dfsio.readdfs(path)
    vertices = reader.vertices
    #print(vertices)
    #print('vertices shape {0}'.format(vertices.shape))
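    # Assumes vertex coordinates are already in voxel index space;
    # astype(np.int32) truncates them to integer voxel indices.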
    vertices = vertices.astype(np.int32)
    #print(vertices.astype(np.int32))
    labels = np.zeros(shape)
    for vertex in vertices:
        labels[vertex[0], vertex[1], vertex[2]] = 1
    return labels
Example #9
    # Method of a statistics-results class; os, sys, numpy (np), dfsio and
    # colormaps are assumed to be imported at module level in the original source.
    def save(self, outdir, outprefix, atlas_filename):
        sys.stdout.write('Saving output files...\n')
        self.statsresult.adjust_for_multi_comparisons()

        s1 = dfsio.readdfs(atlas_filename)

        s1.attributes = self.statsresult.pvalues
        # print s1.attributes

        if len(s1.attributes) == s1.vertices.shape[0]:
            # Also write color to the field
            s1.vColor = colormaps.Colormap.get_rgb_color_array(
                'pvalue', s1.attributes)
            dfsio.writedfs(
                os.path.join(outdir, outprefix + '_atlas_pvalues.dfs'), s1)
            if len(self.statsresult.pvalues_adjusted) > 0:
                s1.attributes = self.statsresult.pvalues_adjusted
                # Also write color to the field
                s1.vColor = colormaps.Colormap.get_rgb_color_array(
                    'pvalue', s1.attributes)
                dfsio.writedfs(
                    os.path.join(outdir,
                                 outprefix + '_atlas_pvalues_adjusted.dfs'),
                    s1)
        else:
            sys.stdout.write(
                'Error: Dimension mismatch between the p-values and the number of vertices. '
                'Quitting without saving.\n')

        if len(self.statsresult.corrvalues) > 0:
            s1.attributes = self.statsresult.corrvalues
            s1.vColor = colormaps.Colormap.get_rgb_color_array(
                'corr', s1.attributes)
            dfsio.writedfs(os.path.join(outdir, outprefix + '_corr.dfs'), s1)
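            # zero out correlations that do not survive the adjusted p < 0.05
            # threshold before writing the adjusted surface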
            self.statsresult.corrvalues[
                np.abs(self.statsresult.pvalues_adjusted) > 0.05] = 0
            s1.attributes = self.statsresult.corrvalues
            # Also write color to the field
            s1.vColor = colormaps.Colormap.get_rgb_color_array(
                'corr', s1.attributes)
            dfsio.writedfs(
                os.path.join(outdir, outprefix + '_corr_adjusted.dfs'), s1)
        sys.stdout.write('Done.\n')
Example #10
# Assumed from earlier in the original script: scipy as sp, scipy.io as spio,
# numpy as np, readdfs (dfsio), smooth_patch (surfproc), brainSync, plus the
# variables atlas, sub_data, pca, NDim and diff.

#%% Diff of ADHD subjects from the atlas & PCA of ADHD
diffAdhdInatt = sp.zeros([sub_data.shape[1], 50])
fADHD = sp.zeros((NDim, sub_data.shape[1], 50))

for ind in range(50):
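    # temporally synchronize this subject's data to the atlas with brainSync,
    # project onto the PCA basis, and record the squared deviation per vertex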
    Y2, _ = brainSync(X=atlas, Y=sub_data[:, :, ind])
    fADHD[:, :, ind] = pca.transform(Y2.T).T
    diffAdhdInatt[:, ind] = sp.sum((Y2 - atlas)**2, axis=0)
    print(ind)

spio.savemat('ADHD_diff_adhd_inattentive.mat',
             {'diffAdhdInatt': diffAdhdInatt})

#%% Read surfaces for visualization

lsurf = readdfs('/home/ajoshi/coding_ground/bfp/supp_data/bci32kleft.dfs')
rsurf = readdfs('/home/ajoshi/coding_ground/bfp/supp_data/bci32kright.dfs')
a = spio.loadmat(
    '/home/ajoshi/coding_ground/bfp/supp_data/USCBrain_grayord_labels.mat')
labs = a['labels']
lsurf.attributes = np.zeros((lsurf.vertices.shape[0]))
rsurf.attributes = np.zeros((rsurf.vertices.shape[0]))
lsurf = smooth_patch(lsurf, iterations=1500)
rsurf = smooth_patch(rsurf, iterations=1500)
labs[sp.isnan(labs)] = 0
diff = diff * (labs.T > 0)
diffAdhdInatt = diffAdhdInatt * (labs.T > 0)

nVert = lsurf.vertices.shape[0]

#%% Visualization of normal diff from the atlas
Example #11
    # Method of a statistics-results class; os, sys, numpy (np), dfsio,
    # colormaps and log10_transform are assumed to be imported at module level
    # in the original source.
    def save_surface(self, atlas_filename):

        s1 = dfsio.readdfs(atlas_filename)
        if self.mask_idx.any():
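            # scatter masked statistics back onto the full vertex set:
            # p-values default to 1 and t-values to 0 outside the mask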
            pvalues = np.ones(s1.vertices.shape[0])
            pvalues[self.mask_idx] = self.statsresult.pvalues
            self.statsresult.pvalues = pvalues
            tvalues = np.zeros(s1.vertices.shape[0])
            tvalues[self.mask_idx] = self.statsresult.tvalues
            self.statsresult.tvalues = tvalues
        s1.attributes = self.statsresult.pvalues
        # print s1.attributes

        if len(s1.attributes) == s1.vertices.shape[0]:
            # Also write color to the field
            self.statsresult.pvalues = log10_transform(
                self.statsresult.pvalues)
            s1.vColor, pex, cmap = colormaps.Colormap.log_pvalues_to_rgb(
                self.statsresult.pvalues)
            s1.attributes = self.statsresult.pvalues
            dfsio.writedfs(
                os.path.join(self.outdir,
                             self.outprefix + '_atlas_log_pvalues.dfs'), s1)
            colormaps.Colormap.save_colorbar(file=os.path.join(
                self.outdir, self.outprefix + '_atlas_log_pvalues_cbar.pdf'),
                                             cmap=cmap,
                                             vmin=-1 * pex,
                                             vmax=pex,
                                             labeltxt='Unadjusted p-values')
            s1.attributes = self.statsresult.tvalues
            s1.vColor, tmin, tmax, cmap_tvalues = colormaps.Colormap.tvalues_to_rgb(
                self.statsresult.tvalues)
            dfsio.writedfs(
                os.path.join(self.outdir,
                             self.outprefix + '_atlas_tvalues_all.dfs'), s1)
            colormaps.Colormap.save_colorbar(file=os.path.join(
                self.outdir, self.outprefix + '_atlas_tvalues_all_cbar.pdf'),
                                             cmap=cmap_tvalues,
                                             vmin=tmin,
                                             vmax=tmax,
                                             labeltxt='t-values (all)')
            self.statsresult.tvalues[np.abs(self.statsresult.pvalues) <= -1 *
                                     np.log10(0.05)] = 0
            s1.vColor, tmin, tmax, cmap_tvalues = colormaps.Colormap.tvalues_to_rgb(
                self.statsresult.tvalues)
            s1.attributes = self.statsresult.tvalues
            dfsio.writedfs(
                os.path.join(self.outdir,
                             self.outprefix + '_atlas_tvalues.dfs'), s1)
            colormaps.Colormap.save_colorbar(file=os.path.join(
                self.outdir, self.outprefix + '_atlas_tvalues_cbar.pdf'),
                                             cmap=cmap_tvalues,
                                             vmin=tmin,
                                             vmax=tmax,
                                             labeltxt='t-values (unadjusted)')
            LUT = cmap_tvalues._lut[0:256, 0:3]
            colormaps.Colormap.exportBrainSuiteLUT(
                os.path.join(self.outdir,
                             self.outprefix + '_atlas_tvalues_cbar.lut'), LUT)

            with open(
                    os.path.join(
                        self.outdir,
                        self.outprefix + '_unadjusted_pvalue_range.txt'),
                    "wt") as text_file:
                text_file.write("Log P-value range: -{0:s} to +{1:s}\n".format(
                    str(pex), str(pex)))
                text_file.write("P-value range: {0:s} to +{1:s}\n".format(
                    str(-1 * 10**(-1 * pex)), str(10**(-1 * pex))))

            if len(self.statsresult.pvalues_adjusted) > 0:
                if self.mask_idx.any():
                    pvalues = np.ones(s1.vertices.shape[0])
                    pvalues[self.mask_idx] = self.statsresult.pvalues_adjusted
                    self.statsresult.pvalues_adjusted = pvalues

                s1.attributes = self.statsresult.pvalues_adjusted
                self.statsresult.pvalues_adjusted = log10_transform(
                    self.statsresult.pvalues_adjusted)
                s1.vColor, pex, cmap = colormaps.Colormap.log_pvalues_to_rgb(
                    self.statsresult.pvalues_adjusted)
                s1.attributes = self.statsresult.pvalues_adjusted
                dfsio.writedfs(
                    os.path.join(
                        self.outdir,
                        self.outprefix + '_atlas_log_pvalues_adjusted.dfs'),
                    s1)
                colormaps.Colormap.save_colorbar(file=os.path.join(
                    self.outdir,
                    self.outprefix + '_atlas_log_pvalues_adjusted_cbar.pdf'),
                                                 cmap=cmap,
                                                 vmin=-1 * pex,
                                                 vmax=pex,
                                                 labeltxt='Adjusted p-values')
                self.statsresult.tvalues[
                    np.abs(self.statsresult.pvalues_adjusted) < -1 *
                    np.log10(0.05)] = 0
                s1.vColor, tmin, tmax, cmap_tvalues = colormaps.Colormap.tvalues_to_rgb(
                    self.statsresult.tvalues)
                s1.attributes = self.statsresult.tvalues
                dfsio.writedfs(
                    os.path.join(
                        self.outdir,
                        self.outprefix + '_atlas_tvalues_adjusted.dfs'), s1)
                colormaps.Colormap.save_colorbar(
                    file=os.path.join(
                        self.outdir,
                        self.outprefix + '_atlas_tvalues_adjusted_cbar.pdf'),
                    cmap=cmap_tvalues,
                    vmin=tmin,
                    vmax=tmax,
                    labeltxt='t-values (adjusted)')
                LUT = cmap_tvalues._lut[0:256, 0:3]
                colormaps.Colormap.exportBrainSuiteLUT(
                    os.path.join(
                        self.outdir,
                        self.outprefix + '_atlas_tvalues_adjusted_cbar.lut'),
                    LUT)

                with open(
                        os.path.join(
                            self.outdir,
                            self.outprefix + '_adjusted_pvalue_range.txt'),
                        "wt") as text_file:
                    text_file.write(
                        "Log P-value range: -{0:s} to +{1:s}\n".format(
                            str(pex), str(abs(pex))))
                    text_file.write("P-value range: {0:s} to +{1:s}\n".format(
                        str(-1 * 10**(-1 * pex)), str(10**(-1 * pex))))
        else:
            sys.stdout.write(
                'Error: Dimension mismatch between the p-values and the number of vertices. '
                'Quitting without saving.\n')

        if len(self.statsresult.corrvalues) > 0:
            if self.mask_idx.any():
                corrvalues = np.zeros(s1.vertices.shape[0])
                corrvalues[self.mask_idx] = self.statsresult.corrvalues
                self.statsresult.corrvalues = corrvalues

            s1.attributes = self.statsresult.corrvalues
            s1.vColor, cex, cmap = colormaps.Colormap.correlation_to_rgb(
                self.statsresult.corrvalues)
            dfsio.writedfs(
                os.path.join(self.outdir, self.outprefix + '_corr.dfs'), s1)
            colormaps.Colormap.save_colorbar(
                file=os.path.join(self.outdir,
                                  self.outprefix + '_corr_cbar.pdf'),
                cmap=cmap,
                vmin=-1 * cex,
                vmax=cex,
                labeltxt='Correlations (unadjusted)')
            with open(
                    os.path.join(self.outdir,
                                 self.outprefix + '_corr_range.txt'),
                    "wt") as text_file:
                text_file.write(
                    "Correlation values range: -{0:s} to +{1:s}\n".format(
                        str(cex), str(cex)))

            # Also write color to the field
            self.statsresult.corrvalues[np.abs(
                self.statsresult.pvalues_adjusted) < -1 * np.log10(0.05)] = 0
            s1.attributes = self.statsresult.corrvalues
            s1.vColor, cex, cmap = colormaps.Colormap.correlation_to_rgb(
                self.statsresult.corrvalues)
            dfsio.writedfs(
                os.path.join(self.outdir,
                             self.outprefix + '_corr_adjusted.dfs'), s1)
            colormaps.Colormap.save_colorbar(
                file=os.path.join(self.outdir,
                                  self.outprefix + '_corr_adjusted_cbar.pdf'),
                cmap=cmap,
                vmin=-1 * cex,
                vmax=cex,
                labeltxt='Correlations (adjusted)')
            with open(
                    os.path.join(self.outdir,
                                 self.outprefix + '_adjusted_corr_range.txt'),
                    "wt") as text_file:
                text_file.write(
                    "Adjusted Correlation values range: -{0:s} to +{1:s}\n".
                    format(str(cex), str(cex)))

        sys.stdout.write('Done.\n')
Example #12

import os
import numpy as np
import scipy.io
from dfsio import readdfs
from fmri_methods_sipi import interpolate_labels
# gread is assumed to be a GIFTI reader (e.g. nibabel.load).


class h:  # added: used below but missing from the scraped snippet
    pass


class s:  # added: used below but missing from the scraped snippet
    pass


class bci:
    pass


outputfile = 'gaurav_bci.dfs'

p_dir = '/home/sgaurav/Documents/git_sandbox/cortical_parcellation/src/intensity_mode_map'
data1 = scipy.io.loadmat(os.path.join(p_dir, 'intensity_file_cingulate_184_nCluster=3_BCI.mat'))

labs = data1['labs_all']


''' reduce3 to h32k'''
r3 = readdfs('rh.Yeo2011_17Networks_N1000_reduce3.dfs')
r3.labels = np.squeeze(labs.T)

'''h32k to full res FS'''
g_surf = gread('/home/ajoshi/data/HCP_data/reference/100307/MNINonLinear/N\
ative/100307.R.very_inflated.native.surf.gii')
h.vertices = g_surf.darrays[0].data
h.faces = g_surf.darrays[1].data
h = interpolate_labels(r3, h)

''' native FS ref to native FS BCI'''
g_surf = gread('/home/ajoshi/data/HCP_data/reference/100307/MNINonLinear/Nativ\
e/100307.R.sphere.reg.native.surf.gii')
s.vertices = g_surf.darrays[0].data
s.faces = g_surf.darrays[1].data
s.labels = h.labels
Example #13
# Imports assumed by these snippets (not shown in the source):
import os
import scipy.io
from dfsio import readdfs
from surfproc import view_patch_vtk
# Later fragments also use nibabel.freesurfer.io as fsio and gread (a GIFTI
# reader); see the inline imports below.
p_dir = '/big_disk/ajoshi/HCP_data/data'
p_dir_ref = '/big_disk/ajoshi/HCP_data'
lst = os.listdir(p_dir)

r_factor = 3
ref_dir = os.path.join(p_dir_ref, 'reference')
nClusters = 30

ref = '100307'
print(ref + '.reduce' + str(r_factor) + '.LR_mask.mat')
fn1 = ref + '.reduce' + str(r_factor) + '.LR_mask.mat'
fname1 = os.path.join(ref_dir, fn1)
msk = scipy.io.loadmat(fname1)  # h5py.File(fname1);
dfs_left = readdfs(
    os.path.join(p_dir_ref, 'reference', ref + '.aparc.\
a2009s.32k_fs.reduce3.left.dfs'))
dfs_left_sm = readdfs(
    os.path.join(p_dir_ref, 'reference', ref + '.aparc.\
a2009s.32k_fs.reduce3.very_smooth.left.dfs'))
view_patch_vtk(dfs_left_sm,
               azimuth=90,
               elevation=180,
               roll=90,
               outfile='sub.png',
               show=1)

count1 = 0
rho_rho = []
rho_all = []
cc_msk = (dfs_left.labels > 0)
# %%
"""

@author: ajoshi
"""
from fmri_methods_sipi import interpolate_labels
from dfsio import readdfs, writedfs
import time
import numpy as np
import scipy as sp
import nibabel as nib
from nilearn import image

subbasename = 'BCI-DNI_DesikanKilliany'
BCI_base = '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain'
left_mid = readdfs(BCI_base + '.left.mid.cortex.dfs')
right_mid = readdfs(BCI_base + '.right.mid.cortex.dfs')
left_inner = readdfs(BCI_base + '.left.inner.cortex.dfs')
right_inner = readdfs(BCI_base + '.right.inner.cortex.dfs')
left_pial = readdfs(BCI_base + '.left.pial.cortex.dfs')
right_pial = readdfs(BCI_base + '.right.pial.cortex.dfs')
lsurf = readdfs(subbasename + '.left.mid.cortex.dfs')
rsurf = readdfs(subbasename + '.right.mid.cortex.dfs')

r1_vert = (right_pial.vertices + right_mid.vertices) / 2.0
r2_vert = (right_inner.vertices + right_mid.vertices) / 2.0
l1_vert = (left_pial.vertices + left_mid.vertices) / 2.0
l2_vert = (left_inner.vertices + left_mid.vertices) / 2.0

vol_lab = image.load_img(
    '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.dws.label.nii.gz')
# (The snippet is truncated here in the source; an unrelated fragment follows.)


class s:
    pass


class bci:
    pass


inputfile = 'rrh.Yeo2011_17Networks_N1000_reduce3.dfs'
outputfile = '100307.reduce3.very_smooth.right.dfs'
''' BCI to FS processed BCI '''
bci_bst = readdfs(
    '/home/ajoshi/data/BCI-DNI_brain_atlas/BCI-DNI_brain.right.inner.cortex.refined.dfs'
)
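# shift BrainSuite (corner-origin) coordinates back into the FreeSurfer
# (center-origin) frame before matching against FreeSurfer geometry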
bci_bst.vertices[:, 0] -= 96 * 0.8
bci_bst.vertices[:, 1] -= 192 * 0.546875
bci_bst.vertices[:, 2] -= 192 * 0.546875
bci.vertices, bci.faces = fsio.read_geometry(
    '/home/ajoshi/data/BCI_DNI_Atlas/surf/rh.white')
bci = interpolate_labels(bci_bst, bci)
''' FS_BCI to FS BCI Sphere'''
bci.vertices, bci.faces = fsio.read_geometry(
    '/home/ajoshi/data/BCI_DNI_Atlas/surf/rh.sphere.reg')
''' FS BCI Sphere to ref FS Sphere'''
g_surf = gread(
    '/big_disk/ajoshi/HCP_data/reference/100307/MNINonLinear/Native/100307.R.sphere.reg.native.surf.gii'
)
s.vertices = g_surf.darrays[0].data
Example #16
#a.to_filename('outfile_task.nii')
#
#a = nib.cifti2.Cifti2Image(Xtsk-Xnew, sub1.header, file_map=sub1.file_map)
#a.to_filename('outfile_diff.nii')

#loading cifti files has indices garbled
#%%
# Imports assumed by this snippet (not shown in the source):
import os
import numpy as np
from dfsio import readdfs
from surfproc import view_patch_vtk, patch_color_attrib
# Xtsk and Xnew come from earlier in the original script (see the commented
# cifti lines above).
fname1 = 'right_motor1.png'
fname2 = 'right_motor2.png'

p_dir_ref = '/big_disk/ajoshi/HCP_data/'
ref = '196750'  # '100307'

#lsurf = surfObj
ls = readdfs(
    os.path.join(p_dir_ref, 'reference', ref + '.aparc.\
a2009s.32k_fs.reduce3.very_smooth.right.dfs'))

lsurf = ls
#lind = np.where(ls.labels > -10)[0]
lsurf.attributes = np.zeros((lsurf.vertices.shape[0]))
#lsurf.attributes = X[150,:lsurf.vertices.shape[0]] #

nVert = lsurf.vertices.shape[0]
diffafter = Xtsk - Xnew

lsurf.attributes = np.sum((diffafter)**2, axis=0)
lsurf.attributes = lsurf.attributes[nVert:]
#lsurf.attributes = smooth_surf_function(lsurf, lsurf.attributes)#, a1=1.1, a2=1.1)
lsurf = patch_color_attrib(lsurf, clim=[1, 2])
# The call below is truncated in the source; the keyword arguments are an
# assumption based on fname1 defined above.
view_patch_vtk(lsurf, outfile=fname1, show=1)
Example #17
import matlab.engine as meng
# Also assumed by this snippet: numpy as np, from dfsio import readdfs, writedfs;
# the second half uses nilearn.image as ni plus the check_uscbrain_bci /
# check_bci_uscbrain helpers and the uscbrain_dict, bci_dict and BCIbase
# variables from the original script.

USCBrainbaseLatest = '/ImagePTE1/ajoshi/code_farm/hybridatlas/USCBrain_9_8'

eng = meng.start_matlab()
eng.addpath(eng.genpath('/ImagePTE1/ajoshi/code_farm/svreg/MEX_Files'))
eng.addpath(eng.genpath('/ImagePTE1/ajoshi/code_farm/svreg/3rdParty'))
eng.addpath(eng.genpath('/ImagePTE1/ajoshi/code_farm/svreg/src'))
xmlf = USCBrainbaseLatest + '/brainsuite_labeldescription.xml'

for hemi in {'left', 'right'}:

    mid = USCBrainbaseLatest + '/BCI-DNI_brain.' + hemi + '.mid.cortex.dfs'
    eng.recolor_by_label(mid, '', xmlf, nargout=0)

    s = readdfs(mid)
    sin = readdfs(USCBrainbaseLatest + '/BCI-DNI_brain.' + hemi +
                  '.inner.cortex.dfs')
    spial = readdfs(USCBrainbaseLatest + '/BCI-DNI_brain.' + hemi +
                    '.pial.cortex.dfs')

    sin.vColor = s.vColor
    sin.labels = s.labels
    writedfs(
        USCBrainbaseLatest + '/BCI-DNI_brain.' + hemi + '.inner.cortex.dfs',
        sin)

    spial.vColor = s.vColor
    spial.labels = s.labels
    writedfs(
        USCBrainbaseLatest + '/BCI-DNI_brain.' + hemi + '.pial.cortex.dfs',
        spial)

    # The source is garbled here: the lines below appear to belong to a
    # volume-label check (cf. check_uscbrain_bci further down), roughly:
    # error_indicator1 = check_uscbrain_bci(uscbrain.get_fdata().flatten(),
    #                                       uscbrain_dict,
    #                                       bci.get_fdata().flatten(),
    #                                       bci_dict,
    #                                       tiny_threhsold=80)

    v = ni.new_img_like(bci, error_indicator1.reshape(bci.shape))
    v.to_filename('errorvol.nii.gz')
    print('Tiny region overlaps: %d or %d ' %
          (np.sum(error_indicator1), np.sum(error_indicator2)))

    class error_surf:
        pass

    # Left hemisphere surface
    print('=====Checking Left Hemisphere Surface=====')

    uscbrain = readdfs(USCBrainbaseLatest +
                       '/BCI-DNI_brain.left.mid.cortex.dfs')

    bci = readdfs(BCIbase + '/BCI-DNI_brain.left.mid.cortex.dfs')

    error_indicator1 = check_uscbrain_bci(uscbrain.labels.flatten(),
                                          uscbrain_dict, bci.labels.flatten(),
                                          bci_dict)

    error_indicator2 = check_bci_uscbrain(uscbrain.labels.flatten(),
                                          uscbrain_dict, bci.labels.flatten(),
                                          bci_dict)

    error_surf.vertices = bci.vertices
    error_surf.faces = bci.faces
    error_surf.attributes = 255.0 * error_indicator1
    error_surf.labels = error_indicator1
Example #19
# Imports assumed by these snippets (not shown in the source):
import os
import numpy as np
import scipy as sp
import scipy.io
import dfsio
from dfsio import readdfs
from sklearn.cluster import SpectralClustering
# NimgDataio comes from the original package.


def read_surface(filename, mask_idx=None):
    filetype = NimgDataio.validatetype(filename)
    if filetype == 'surface':
        return dfsio.readdfs(filename)


def parcellate_region(roilist,
                      sub,
                      nClusters,
                      scan,
                      scan_type,
                      savepng=0,
                      session=1,
                      algo=0,
                      type_cor=0):
    p_dir = '/big_disk/ajoshi/HCP100-fMRI-NLM/HCP100-fMRI-NLM'
    out_dir = '/big_disk/ajoshi/out_dir'
    r_factor = 3
    ref_dir = os.path.join(p_dir, 'reference')
    ref = '100307'
    fn1 = ref + '.reduce' + str(r_factor) + '.LR_mask.mat'
    fname1 = os.path.join(ref_dir, fn1)
    msk = scipy.io.loadmat(fname1)

    dfs_left_sm = readdfs(
        os.path.join('/home/ajoshi/for_gaurav',
                     '100307.BCI2reduce3.very_smooth.' + scan_type + '.dfs'))
    dfs_left = readdfs(
        os.path.join('/home/ajoshi/for_gaurav',
                     '100307.BCI2reduce3.very_smooth.' + scan_type + '.dfs'))

    data = scipy.io.loadmat(
        os.path.join(
            p_dir, sub, sub + '.rfMRI_REST' + str(session) + scan +
            '.reduce3.ftdata.NLM_11N_hvar_25.mat'))

    LR_flag = msk['LR_flag']
    # LR_flag: 0 = right hemisphere, 1 = left hemisphere
    if scan_type == 'right':
        LR_flag = np.squeeze(LR_flag) == 0
    else:
        LR_flag = np.squeeze(LR_flag) == 1
    data = data['ftdata_NLM']
    temp = data[LR_flag, :]
    m = np.mean(temp, 1)
    temp = temp - m[:, None]
    s = np.std(temp, 1) + 1e-16
    temp = temp / s[:, None]
    msk_small_region = np.in1d(dfs_left.labels, roilist)
    d = temp[msk_small_region, :]
    rho = np.corrcoef(d)
    rho[~np.isfinite(rho)] = 0
    d_corr = temp[~msk_small_region, :]
    rho_1 = np.corrcoef(d, d_corr)
    rho_1 = rho_1[range(d.shape[0]), d.shape[0]:]
    rho_1[~np.isfinite(rho_1)] = 0
    f_rho = np.arctanh(rho_1)
    f_rho[~np.isfinite(f_rho)] = 0
    B = np.corrcoef(f_rho)
    B[~np.isfinite(B)] = 0
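    # several affinity matrices are derived from the correlation structure and
    # each is clustered with spectral clustering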
    SC = SpectralClustering(n_clusters=nClusters, affinity='precomputed')
    affinity_matrix = np.arcsin(rho)
    labels_corr_sininv = SC.fit_predict(np.abs(affinity_matrix))

    affinity_matrix = sp.exp((-2.0 * (1 - rho)) / (.72**2))
    labels_corr_exp = SC.fit_predict(np.abs(affinity_matrix))

    affinity_matrix = sp.sqrt(2.0 + 2.0 * rho)
    labels_corr_dist = SC.fit_predict(np.abs(affinity_matrix))

    B1 = sp.exp((-2.0 * (1.0 - B)) / (0.72**2.0))
    labels_corr_corr_exp = SC.fit_predict(B1)

    sp.savez(os.path.join(
        out_dir, sub + '.rfMRI_REST' + str(session) + scan + str(roilist) +
        '.labs.npz'),
             labels_corr_sininv=labels_corr_sininv,
             labels_corr_corr_exp=labels_corr_corr_exp,
             labels_corr_dist=labels_corr_dist,
             labels_corr_exp=labels_corr_exp,
             msk_small_region=msk_small_region)
    return labels_corr_sininv, msk_small_region, dfs_left_sm
Example #21
"""

@author: ajoshi
"""
from fmri_methods_sipi import interpolate_labels
from dfsio import readdfs, writedfs
import time
import numpy as np
import scipy as sp
import nibabel as nib
from nilearn import image

subbasename = 'Yeo2011_17Networks'
bcibase = '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain'

left_mid = readdfs(bcibase + '.left.mid.cortex.dfs')
right_mid = readdfs(bcibase + '.right.mid.cortex.dfs')
left_inner = readdfs(bcibase + '.left.inner.cortex.dfs')
right_inner = readdfs(bcibase + '.right.inner.cortex.dfs')
left_pial = readdfs(bcibase + '.left.pial.cortex.dfs')
right_pial = readdfs(bcibase + '.right.pial.cortex.dfs')
left_lab = readdfs(subbasename + '.left.mid.cortex.dfs')
right_lab = readdfs(subbasename + '.right.mid.cortex.dfs')

r1_vert = (right_pial.vertices + right_mid.vertices) / 2.0
r2_vert = (right_inner.vertices + right_mid.vertices) / 2.0
l1_vert = (left_pial.vertices + left_mid.vertices) / 2.0
l2_vert = (left_inner.vertices + left_mid.vertices) / 2.0

vol_lab = image.load_img(
    '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.dws.label.nii.gz')
# (The snippet is truncated here in the source; a second, separately scraped
# snippet with its own imports follows.)
from fmri_methods_sipi import interpolate_labels
from dfsio import readdfs
import numpy as np

from nilearn import image as ni
import copy

BCIbase = '/ImagePTE1/ajoshi/code_farm/svreg/BCI-DNI_brain_atlas'
USCBrainbase = '/ImagePTE1/ajoshi/code_farm/hybridatlas/USCBrain_anand_8_29'
USCBrainbaseLatest = '/ImagePTE1/ajoshi/code_farm/hybridatlas/USCBrain_9_3_2020'

#
# %% Change Precentral to match BCI-DNI brain boundaries

left_mid = readdfs(USCBrainbase + '/BCI-DNI_brain.left.mid.cortex.dfs')
right_mid = readdfs(USCBrainbase + '/BCI-DNI_brain.right.mid.cortex.dfs')
left_inner = readdfs(USCBrainbase + '/BCI-DNI_brain.left.inner.cortex.dfs')
right_inner = readdfs(USCBrainbase + '/BCI-DNI_brain.right.inner.cortex.dfs')
left_pial = readdfs(USCBrainbase + '/BCI-DNI_brain.left.pial.cortex.dfs')
right_pial = readdfs(USCBrainbase + '/BCI-DNI_brain.right.pial.cortex.dfs')

r1_vert = (right_pial.vertices + right_mid.vertices) / 2.0
r2_vert = (right_inner.vertices + right_mid.vertices) / 2.0
l1_vert = (left_pial.vertices + left_mid.vertices) / 2.0
l2_vert = (left_inner.vertices + left_mid.vertices) / 2.0

vol_lab_new = ni.load_img(USCBrainbase + '/BCI-DNI_brain.label.nii.gz')
vol_img_new = vol_lab_new.get_fdata()

vol_lab = ni.load_img(BCIbase + '/BCI-DNI_brain.label.nii.gz')
Example #23
# Imports assumed by this snippet (not shown in the source):
import os
import numpy as np
from nilearn import image
from scipy.interpolate import interpn
from dfsio import readdfs
from surfproc import smooth_patch, patch_color_labels, view_patch_vtk
# cmd1, outvol, lmid and rmid are defined earlier in the original script.
print(cmd1)

os.system(cmd1)

vol_lab = image.load_img(outvol)
vol_lab = image.new_img_like(vol_lab, np.int16(vol_lab.get_fdata()))
vol_lab.to_filename(outvol)

vol_img = vol_lab.get_fdata()

xres = vol_lab.header['pixdim'][1]
yres = vol_lab.header['pixdim'][2]
zres = vol_lab.header['pixdim'][3]

sl = readdfs(lmid)
sr = readdfs(rmid)

xx = np.arange(vol_lab.shape[0]) * xres
yy = np.arange(vol_lab.shape[1]) * yres
zz = np.arange(vol_lab.shape[2]) * zres

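# look up the volume label nearest to each surface vertex (vertex coordinates
# and the xx/yy/zz grids are both in mm)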
sl.labels = interpn((xx, yy, zz), vol_img, sl.vertices, method='nearest')
sr.labels = interpn((xx, yy, zz), vol_img, sr.vertices, method='nearest')

sl = smooth_patch(sl, iterations=3000, relaxation=.5)
sr = smooth_patch(sr, iterations=3000, relaxation=.5)

patch_color_labels(sl)
view_patch_vtk(sl)
patch_color_labels(sr)
Example #24
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 16 15:51:16 2016

@author: ajoshi
"""
from dfsio import readdfs
from surfproc import view_patch_vtk

subbasename = '/big_disk/ajoshi/fs_dir/co20050723_090747MPRAGET1Coronals002a001'
hemi = 'left'

s = readdfs(subbasename + '/' + hemi + '.mid.dfs')

view_patch_vtk(s,
               outfile=subbasename + '/mri/BST/fs_' + hemi + '1.png',
               show=0)
view_patch_vtk(s,
               outfile=subbasename + '/mri/BST/fs_' + hemi + '2.png',
               azimuth=-90,
               roll=90,
               show=0)

s = readdfs(subbasename + '/mri/BST/orig.' + hemi + '.mid.cortex.svreg.dfs')

view_patch_vtk(s,
               outfile=subbasename + '/mri/BST/bst_' + hemi + '1.png',
               show=0)
view_patch_vtk(s,
               outfile=subbasename + '/mri/BST/bst_' + hemi + '2.png',
               azimuth=-90,
               roll=90,
               show=0)  # truncated in the source; closed by analogy with the call above
Example #25
# Assumed from earlier in the original script: scipy as sp,
# from nilearn import image, from dfsio import readdfs, and the volumes
# lab, vol1 and vol2.

# save the label volume on the 1 mm T1 grid
lab1mm = image.resample_img(lab, target_affine=sp.eye(4),
                            interpolation='nearest', target_shape=vol1.shape)
lab1mm.to_filename('/big_disk/ajoshi/coding_ground/hybridatlas/hBCI_DNI_fsl/\
BCI-DNI_brain-1mm.label.nii.gz')

# save the label volume on the 2 mm T1 grid
lab2mm = image.resample_img(lab, target_affine=sp.eye(4)*2,
                            interpolation='nearest', target_shape=vol2.shape)
lab2mm.to_filename('/big_disk/ajoshi/coding_ground/hybridatlas/hBCI_DNI_fsl/\
BCI-DNI_brain-2mm.label.nii.gz')

# %%


right_mod = readdfs('/big_disk/ajoshi/coding_ground/hybridatlas/BCI-DNI\
_brain_atlas_refined_4_18_2017/BCI-DNI_brain.right.mid.cortex.mod.dfs')
left_mod = readdfs('/big_disk/ajoshi/coding_ground/hybridatlas/BCI-DNI\
_brain_atlas_refined_4_18_2017/BCI-DNI_brain.left.mid.cortex.mod.dfs')

right_mid = readdfs('/big_disk/ajoshi/coding_ground/hybridatlas/BCI-DNI\
_brain_atlas_refined_4_18_2017/BCI-DNI_brain.right.mid.cortex.dfs')
left_mid = readdfs('/big_disk/ajoshi/coding_ground/hybridatlas/BCI-DNI\
_brain_atlas_refined_4_18_2017/BCI-DNI_brain.left.mid.cortex.dfs')

right_inner = readdfs('/big_disk/ajoshi/coding_ground/hybridatlas/BCI-DNI\
_brain_atlas_refined_4_18_2017/BCI-DNI_brain.right.inner.cortex.dfs')
left_inner = readdfs('/big_disk/ajoshi/coding_ground/hybridatlas/BCI-DNI\
_brain_atlas_refined_4_18_2017/BCI-DNI_brain.left.inner.cortex.dfs')

right_pial = readdfs('/big_disk/ajoshi/coding_ground/hybridatlas/BCI-DNI\
_brain_atlas_refined_4_18_2017/BCI-DNI_brain.right.pial.cortex.dfs')
Example #26

import numpy as np
import nibabel.freesurfer.io as fsio
from dfsio import readdfs  # assumed imports


class bci:  # added: empty container used below but missing from the snippet
    pass


brodmann = ["perirhinal"]  # FreeSurfer label names to transfer
hemi = 'left'
fshemi = 'lh'

for hemi in {'left', 'right'}:
    if hemi == 'left':
        fshemi = 'lh'
    else:
        fshemi = 'rh'

    outfile = 'BCI-DNI_Perirhinal' + '.' + hemi + '.mid.cortex.dfs'
    ''' BCI to FS processed BCI '''
    bci_bsti = readdfs(
        '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.' +
        hemi + '.inner.cortex.dfs')
    bci_bst_mid = readdfs(
        '/home/ajoshi/BrainSuite19b/svreg/BCI-DNI_brain_atlas/BCI-DNI_brain.' +
        hemi + '.mid.cortex.dfs')

    bci_bsti.vertices[:, 0] -= 96 * 0.8
    bci_bsti.vertices[:, 1] -= 192 * 0.546875
    bci_bsti.vertices[:, 2] -= 192 * 0.546875
    bci.vertices, bci.faces = fsio.read_geometry(
        '/big_disk/ajoshi/data/BCI_DNI_Atlas/surf/' + fshemi + '.white')
    bci.labels = np.zeros(bci.vertices.shape[0])
    for i in range(len(brodmann)):
        labind = fsio.read_label('/big_disk/ajoshi/data/BCI_DNI_Atlas/label/' +
                                 fshemi + '.' + brodmann[i] + '.label')
        bci.labels[labind] = i + 1
Example #27
# readdfs (dfsio) and createPolyData (a VTK polydata helper) are assumed from
# the original module.
def readdfsVTK(fname):
    s = readdfs(fname)
    poly = createPolyData(s.vertices, s.faces)
    return poly
Example #28
# Imports assumed by this snippet (not shown in the source):
import os
import random
import numpy as np
import scipy as sp
import scipy.io
from dfsio import readdfs
from sklearn.cluster import SpectralClustering
# region_growing_fmri is assumed from the original module.


def parcellate_region(roilist, sub, nClusters, scan, scan_type, savepng=0, session=1, algo=0, type_cor=0, n_samples=0):
    '''algo = 0:Spectral Clustering, 1: region growing '''
    p_dir = '/big_disk/ajoshi/HCP100-fMRI-NLM/HCP100-fMRI-NLM'
    out_dir = '/big_disk/ajoshi/out_dir'
    r_factor = 3
    seeds = sp.zeros(nClusters)
    ref_dir = os.path.join(p_dir, 'reference')
    ref = '100307'
    fn1 = ref + '.reduce' + str(r_factor) + '.LR_mask.mat'
    fname1 = os.path.join(ref_dir, fn1)
    msk = scipy.io.loadmat(fname1)

    dfs_left_sm = readdfs(
        os.path.join('/home/ajoshi/for_gaurav', '100307.BCI2reduce3.very_smooth.' + scan_type + '.dfs'))
    dfs_left = readdfs(os.path.join('/home/ajoshi/for_gaurav', '100307.BCI2reduce3.very_smooth.' + scan_type + '.dfs'))

    data = scipy.io.loadmat(os.path.join(p_dir,  sub, sub + '.rfMRI_REST' + str(
        session) + scan + '.reduce3.ftdata.NLM_11N_hvar_25.mat'))

    LR_flag = msk['LR_flag']
    # LR_flag: 0 = right hemisphere, 1 = left hemisphere
    if scan_type == 'right':
        LR_flag = np.squeeze(LR_flag) == 0
    else:
        LR_flag = np.squeeze(LR_flag) == 1
    data = data['ftdata_NLM']
    temp = data[LR_flag, :]
    m = np.mean(temp, 1)
    temp = temp - m[:, None]
    s = np.std(temp, 1) + 1e-16
    temp = temp / s[:, None]
    msk_small_region = np.in1d(dfs_left.labels, roilist)
    if n_samples == 0:
        n_samples = temp.shape[1]
    ind = random.sample(range(temp.shape[1]), n_samples)
    temp = temp[:, ind]
    d = temp[msk_small_region, :]
    rho = np.corrcoef(d)
    rho[~np.isfinite(rho)] = 0
    d_corr = temp[~msk_small_region, :]
    rho_1 = np.corrcoef(d, d_corr)
    rho_1 = rho_1[range(d.shape[0]), d.shape[0]:]
    rho_1[~np.isfinite(rho_1)] = 0
    f_rho = np.arctanh(rho_1)
    f_rho[~np.isfinite(f_rho)] = 0
    B = np.corrcoef(f_rho)
    B[~np.isfinite(B)] = 0
    SC = SpectralClustering(n_clusters=nClusters, affinity='precomputed')
    affinity_matrix = np.arcsin(rho)

    if algo == 1:
        s_a = readdfs('100307.reduce3.very_smooth.' + scan_type +
                      '.refined.dfs')
        conn = sp.eye(dfs_left.vertices.shape[0])

        conn[dfs_left.faces[:, 0], dfs_left.faces[:, 1]] = 1
        conn[dfs_left.faces[:, 1], dfs_left.faces[:, 2]] = 1
        conn[dfs_left.faces[:, 0], dfs_left.faces[:, 2]] = 1
        conn = conn + conn.T
        conn = conn > 0
        conn = conn[msk_small_region, ]
        conn = conn[:, msk_small_region]
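        # conn is the vertex adjacency matrix restricted to the ROI; region
        # growing uses it to keep clusters spatially contiguous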

        for ind in range(nClusters):
            lind = s_a.labels[msk_small_region] == roilist * 10 + ind + 1
            lind = sp.where(lind)[0]
            vert = s_a.vertices[msk_small_region, ]
            m = sp.mean(vert[lind, ], axis=0)
            dist = vert[lind, ] - m
            diff = sp.sum(dist**2, axis=1)
            indc = sp.argmin(diff)
            seeds[ind] = lind[indc]
    
    if algo == 0:
        labels_corr_sininv = SC.fit_predict(np.abs(affinity_matrix))
    else:
        labels_corr_sininv = region_growing_fmri(seeds,
                                                 np.abs(affinity_matrix), conn)
    affinity_matrix = sp.exp((-2.0*(1-rho))/(.72 ** 2))
    if algo == 0:
        labels_corr_exp = SC.fit_predict(np.abs(affinity_matrix))
    else:
        labels_corr_exp = region_growing_fmri(seeds,
                                              np.abs(affinity_matrix), conn)
    affinity_matrix = 2.0 - sp.sqrt(2.0 - 2.0*rho)
    if algo == 0:
        labels_corr_dist = SC.fit_predict(np.abs(affinity_matrix))
    else:
        labels_corr_dist = region_growing_fmri(seeds,
                                               np.abs(affinity_matrix), conn)
    B1 = sp.exp((-2.0*(1.0-B))/(0.72 ** 2.0))
    if algo == 0:
        labels_corr_corr_exp = SC.fit_predict(B1)
    else:
        labels_corr_corr_exp = region_growing_fmri(seeds, B1, conn)

    return labels_corr_sininv, labels_corr_exp, labels_corr_dist,\
        labels_corr_corr_exp, msk_small_region, dfs_left_sm


def main():
    # From a different script: assumes numpy as np, matplotlib.pyplot as plt,
    # from scipy.interpolate import griddata and from dfsio import readdfs.
    # Import the brain map
    ss = readdfs("./harmonic_mapping_code/surf.cortex.dfs")

    # Generate the u,v map
    uv_map = np.loadtxt("./data/uv_brain_map.txt")

    # Interpolate the 3d coordinates on the 2d grid
    n_pts = 100
    xmin = np.min(uv_map[:, 0])
    xmax = np.max(uv_map[:, 0])
    ymin = np.min(uv_map[:, 1])
    ymax = np.max(uv_map[:, 1])
    xg, yg = np.meshgrid(np.linspace(xmin, xmax, n_pts),
                         np.linspace(ymin, ymax, n_pts))

    # Interpolating the x, y,z coordiantes
    grid_x = griddata(uv_map, ss.vertices[:, 0], (xg, yg), method='linear')
    grid_y = griddata(uv_map, ss.vertices[:, 1], (xg, yg), method='linear')
    grid_z = griddata(uv_map, ss.vertices[:, 2], (xg, yg), method='linear')

    # Compute the differences
    diffx_u = np.diff(grid_x, axis=0)
    diffx_v = np.diff(grid_x, axis=1)
    diffy_u = np.diff(grid_y, axis=0)
    diffy_v = np.diff(grid_y, axis=1)
    diffz_u = np.diff(grid_z, axis=0)
    diffz_v = np.diff(grid_z, axis=1)

    # Compute difference in the u and v direction for the 3d coordinates fn
    alpha_u = np.stack((diffx_u[:, :-1], diffy_u[:, :-1], diffz_u[:, :-1]),
                       axis=2)
    alpha_v = np.stack((diffx_v[:-1, :], diffy_v[:-1, :], diffz_v[:-1, :]),
                       axis=2)

    # Compute E,F,G for the nodes in the grid
    E = np.squeeze(np.sum(np.multiply(alpha_u, alpha_u), axis=2))
    F = np.squeeze(np.sum(np.multiply(alpha_u, alpha_v), axis=2))
    G = np.squeeze(np.sum(np.multiply(alpha_v, alpha_v), axis=2))
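    # E = |x_u|^2, F = x_u . x_v, G = |x_v|^2 are the coefficients of the
    # first fundamental form; together they measure how the u,v parameter
    # grid stretches on the surface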

    # Visualize E,F and G for the square
    plt.figure()
    plt.imshow(E)
    plt.colorbar()
    plt.title("E over the square")
    plt.savefig("./images/E_brain.png")

    plt.figure()
    plt.imshow(F)
    plt.colorbar()
    plt.title("F over the square")
    plt.savefig("./images/F_brain.png")

    plt.figure()
    plt.imshow(G)
    plt.colorbar()
    plt.title("G over the square")
    plt.savefig("./images/G_brain.png")

    plt.show()

    return

# --- A separate visualization script follows ---
from dfsio import readdfs
import os.path
from surfproc import view_patch_vtk, patch_color_attrib
import pandas as pd
from scipy.ndimage.filters import gaussian_filter
import matplotlib.pyplot as plt

p_dir_ref = '/big_disk/ajoshi/HCP_data/'
hemi = 'left'
ref = '100307'
TR = 2
# loadmat (scipy.io), scipy as sp and the normdata helper are assumed from the
# original script.
fmri_run3 = loadmat('/deneb_disk/studyforrest/sub-02-run3\
/fmri_tnlm_0p5_reduce3_v2.mat')  # h5py.File(fname1);

dfs_ref = readdfs(
    os.path.join(
        p_dir_ref, 'reference', ref + '.aparc\
.a2009s.32k_fs.reduce3.smooth.' + hemi + '.dfs'))

segl = 219
fseg1 = normdata(fmri_run3['func_' + hemi][:, 4:segl])
fseg2 = normdata(fmri_run3['func_' + hemi][:, segl:2 * segl - 4])

hemi = 'right'
fseg1r = normdata(fmri_run3['func_' + hemi][:, 4:segl])
fseg2r = normdata(fmri_run3['func_' + hemi][:, segl:2 * segl - 4])

fseg1 = sp.concatenate([fseg1, fseg1r], axis=0)
fseg2 = sp.concatenate([fseg2, fseg2r], axis=0)

annot = pd.read_csv('/deneb_disk/studyforrest/ioats_2s_av_allchar.csv')