#########################################################################
# Extract the largest clusters
# ----------------------------
from nilearn.reporting import get_clusters_table
from nilearn import input_data

table = get_clusters_table(z_map, stat_threshold=3.1,
                           cluster_threshold=20).set_index('Cluster ID',
                                                           drop=True)
table.head()

# get the peak X, Y, Z coordinates of the 6 largest clusters
coords = table.loc[range(1, 7), ['X', 'Y', 'Z']].values

# extract time series from each coordinate
masker = input_data.NiftiSpheresMasker(coords)
real_timeseries = masker.fit_transform(fmri_img)
predicted_timeseries = masker.fit_transform(fmri_glm.predicted[0])

#########################################################################
# Plot predicted and actual time series for the 6 most significant clusters
# -------------------------------------------------------------------------
import matplotlib.pyplot as plt

# colors for each of the clusters
colors = ['blue', 'navy', 'purple', 'magenta', 'olive', 'teal']
# plot the time series and corresponding locations
fig1, axs1 = plt.subplots(2, 6)
for i in range(6):
    # plot real (colored) and GLM-predicted (red, dotted) time series
    axs1[0, i].set_title('Cluster peak {}\n'.format(coords[i]))
    axs1[0, i].plot(real_timeseries[:, i], c=colors[i], lw=2)
    axs1[0, i].plot(predicted_timeseries[:, i], c='r', ls=':', lw=3)
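
#########################################################################
# A minimal follow-up sketch (not in the original snippet): mark each
# cluster peak on the thresholded z-map in the unused second subplot row.
# Assumes ``z_map`` from the GLM above and ``nilearn.plotting``.
from nilearn import plotting

for i in range(6):
    display = plotting.plot_stat_map(z_map, threshold=3.1,
                                     cut_coords=[coords[i][2]],
                                     display_mode='z', axes=axs1[1, i],
                                     figure=fig1, colorbar=False)
    display.add_markers([coords[i]], colors[i], 100)
fig1.set_size_inches(24, 10)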
Example #2
import numpy as np
from nilearn import datasets, input_data
from nilearn.connectome import ConnectivityMeasure

# extract the coordinates of the Power atlas
power = datasets.fetch_coords_power_2011()
print('Power atlas comes with {0}.'.format(power.keys()))
power_coords = np.vstack((power.rois['x'], power.rois['y'], power.rois['z'])).T
print('Stacked power coordinates in array of shape {0}.'.format(
    power_coords.shape))
###############################################################################

###############################################################################
# Extract signals from spheres around the ROIs, then detrend, clean
# confounds, band-pass filter, and standardize the time series to unit
# variance.
subject_tms = []
spheres_masker = input_data.NiftiSpheresMasker(seeds=power_coords,
                                               smoothing_fwhm=4,
                                               radius=5.,
                                               detrend=True,
                                               standardize=True,
                                               low_pass=0.1,
                                               high_pass=0.01,
                                               t_r=2.5)

for filename, confound in zip(func_filenames, confounds):
    time_series = spheres_masker.fit_transform(filename, confounds=confound)
    subject_tms.append(time_series)
    # plt.plot(time_series)

###############################################################################

###############################################################################
# calculate ROI to ROI correlations
correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrices = correlation_measure.fit_transform(subject_tms)
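
###############################################################################
# Follow-up sketch (not in the original snippet): average the per-subject
# matrices into a group matrix and display it with nilearn's plot_matrix.
from nilearn import plotting

mean_correlation_matrix = correlation_matrices.mean(axis=0)
np.fill_diagonal(mean_correlation_matrix, 0)  # suppress the trivial diagonal
plotting.plot_matrix(mean_correlation_matrix, vmin=-1., vmax=1.,
                     title='Mean ROI-to-ROI correlation (Power atlas)')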
Example #3
def extract_ts_coords(node_size, conf, func_file, coords, dir_path, ID, roi, network, smooth, atlas,
                      uatlas, labels, c_boot, block_size, hpass, detrending=True):
    """
    API for employing Nilearn's NiftiSpheresMasker to extract fMRI time-series data from spherical ROI's based on a
    given list of seed coordinates. The resulting time-series can then optionally be resampled using circular-block
    bootstrapping. The final 2D m x n array is ultimately saved to file in .npy format.

    Parameters
    ----------
    node_size : int
        Spherical centroid node size in the case that coordinate-based centroids
        are used as ROI's for tracking.
    conf : str
        File path to a confound regressor file to reduce noise in the time-series when extracting from ROI's.
    func_file : str
        File path to a preprocessed functional Nifti1Image in standard space.
    coords : list
        List of (x, y, z) tuples corresponding to an a-priori defined set (e.g. a coordinate atlas).
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    roi : str
        File path to binarized/boolean region-of-interest Nifti1Image file.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g. 'Default')
        used to filter nodes in the study of brain subgraphs.
    smooth : int
        Smoothing width (mm fwhm) to apply to time-series when extracting signal from ROI's.
    atlas : str
        Name of atlas parcellation used.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    labels : list
        List of string labels corresponding to graph nodes.
    c_boot : int
        Number of bootstraps if user specified circular-block bootstrapped resampling of the node-extracted time-series.
    block_size : int
        Size of bootstrap blocks if bootstrapping (c_boot) is performed.
    hpass : float
        High-pass filter value (Hz) to apply to node-extracted time-series.
    detrending : bool
        Indicates whether to remove linear trends from time-series when extracting across nodes. Default is True.

    Returns
    -------
    ts_within_nodes : array
        2D m x n array consisting of the time-series signal for each ROI node where m = number of scans and
        n = number of ROI's, where ROI's are spheres.
    node_size : int
        Spherical centroid node size in the case that coordinate-based centroids
        are used as ROI's for tracking.
    smooth : int
        Smoothing width (mm fwhm) to apply to time-series when extracting signal from ROI's.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    atlas : str
        Name of atlas parcellation used.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    labels : list
        List of string labels corresponding to ROI nodes.
    coords : list
        List of (x, y, z) tuples corresponding to a coordinate atlas used or
        which represent the center-of-mass of each parcellation node.
    c_boot : int
        Number of bootstraps if user specified circular-block bootstrapped resampling of the node-extracted time-series.
    hpass : float
        High-pass filter value (Hz) to apply to node-extracted time-series.
    """
    import os.path as op
    from nilearn import input_data
    from pynets import utils

    if not op.isfile(func_file):
        raise ValueError('\nERROR: Functional data input not found! Check that the file(s) specified with the -i flag '
                         'exist(s)')

    if conf:
        if not op.isfile(conf):
            raise ValueError('\nERROR: Confound regressor file not found! Check that the file(s) specified with the '
                             '-conf flag exist(s)')

    if len(coords) > 0:
        spheres_masker = input_data.NiftiSpheresMasker(seeds=coords, radius=float(node_size), allow_overlap=True,
                                                       standardize=True, smoothing_fwhm=float(smooth), high_pass=hpass,
                                                       detrend=detrending, verbose=2)
        ts_within_nodes = spheres_masker.fit_transform(func_file, confounds=conf)
        if float(c_boot) > 0:
            print("%s%s%s" % ('Performing circular block bootstrapping iteration: ', c_boot, '...'))
            ts_within_nodes = utils.timeseries_bootstrap(ts_within_nodes, block_size)[0]
        if ts_within_nodes is None:
            raise RuntimeError('\nERROR: Time-series extraction failed!')
    else:
        raise RuntimeError(
            '\nERROR: Cannot extract time-series from an empty list of coordinates. \nThis usually means '
            'that no nodes were generated based on the specified conditions at runtime (e.g. atlas was '
            'overly restricted by an RSN or some user-defined mask).')

    print("%s%s%d%s" % ('\nTime series has {0} samples'.format(ts_within_nodes.shape[0]), ' mean extracted from ',
                        len(coords), ' coordinate ROI\'s'))
    print("%s%s%s" % ('Using node radius: ', node_size, ' mm'))
    print("%s%s%s" % ('Smoothing FWHM: ', smooth, ' mm\n'))
    print("%s%s%s" % ('Applying high-pass filter: ', hpass, ' Hz\n'))

    # Save time series as txt file
    utils.save_ts_to_file(roi, network, ID, dir_path, ts_within_nodes, c_boot)
    return ts_within_nodes, node_size, smooth, dir_path, atlas, uatlas, labels, coords, c_boot, hpass
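
# Hypothetical usage sketch for extract_ts_coords; every path and value below
# is illustrative only, not from the original source:
#
# ts, node_size, smooth, dir_path, atlas, uatlas, labels, coords, c_boot, hpass = \
#     extract_ts_coords(node_size=4, conf='sub-01_confounds.tsv',
#                       func_file='sub-01_task-rest_bold.nii.gz',
#                       coords=[(0, -52, 18), (46, -68, 32)],
#                       dir_path='derivatives/sub-01', ID='sub-01', roi=None,
#                       network='Default', smooth=4, atlas='coords_power_2011',
#                       uatlas=None, labels=['PCC', 'rTPJ'], c_boot=0,
#                       block_size=None, hpass=0.01)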
Example #4
from nilearn import datasets, input_data
from nilearn.connectome import ConnectivityMeasure

adhd_dataset = datasets.fetch_adhd(n_subjects=1)

# print basic information on the dataset
# dmn_coords = [(0, -52, 18), (-46, -68, 32), (46, -68, 32), (1, 50, -5)]
# labels = [
#           'Posterior Cingulate Cortex',
#           'Left Temporoparietal junction',
#           'Right Temporoparietal junction',
#           'Medial prefrontal cortex',
#          ]
dmn_coords = [(-20, -52, 18), (46, -68, 32), (1, 50, -5)]
K = len(dmn_coords)

masker = input_data.NiftiSpheresMasker(dmn_coords,
                                       radius=8,
                                       detrend=True,
                                       standardize=True,
                                       low_pass=0.1,
                                       high_pass=0.01,
                                       t_r=2.5,
                                       verbose=2)

func_filename = adhd_dataset.func[0]
confound_filename = adhd_dataset.confounds[0]

time_series = masker.fit_transform(func_filename,
                                   confounds=[confound_filename])
print(time_series.shape)

connectivity_measure = ConnectivityMeasure(kind='partial correlation')
partial_correlation_matrix = connectivity_measure.fit_transform([time_series
                                                                 ])[0]
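
# Follow-up sketch (not in the original snippet): display the DMN
# partial-correlation matrix as a connectome with hemispheric projections.
from nilearn import plotting

plotting.plot_connectome(partial_correlation_matrix, dmn_coords,
                         title='DMN partial correlations',
                         display_mode='lyrz')
plotting.show()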
Example #5
import os.path as op

import numpy as np
import nibabel as nib
from nilearn import datasets, input_data
from nilearn.image import resample_to_img

from brainconn import utils

mask_img = datasets.load_mni152_brain_mask()
subjects = datasets.fetch_adhd(n_subjects=1)
power = datasets.fetch_coords_power_2011()

conf = subjects.confounds[0]
func_img = nib.load(subjects.func[0])
func_img = resample_to_img(func_img, mask_img)

coords = np.vstack((power.rois['x'], power.rois['y'], power.rois['z'])).T
spheres_masker = input_data.NiftiSpheresMasker(
    seeds=coords, smoothing_fwhm=4, radius=5.,
    detrend=True, standardize=True, low_pass=0.1, high_pass=0.01,
    t_r=func_img.header.get_zooms()[-1])

timeseries = spheres_masker.fit_transform(func_img, confounds=conf)
corr = np.corrcoef(timeseries.T)
np.savetxt(op.join(utils.get_resource_path(), 'example_corr.txt'), corr)
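
# Optional sketch (plain numpy; the 10% cutoff is an illustrative choice):
# keep only the strongest edges before any graph-theoretic analysis.
corr_thr = corr.copy()
np.fill_diagonal(corr_thr, 0)            # drop self-connections
cutoff = np.percentile(np.abs(corr_thr), 90)
corr_thr[np.abs(corr_thr) < cutoff] = 0  # proportional threshold, top 10%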
Example #6
# In[3]:

print(dset_amygdala)
dset_amygdala = dset_amygdala.slice(dset_amygdala.ids[:500])
print(dset_amygdala)

# In[4]:

import matplotlib.pyplot as plt
import numpy as np
from nilearn import input_data, plotting

# In order to plot a sphere with a precise radius around a coordinate with
# nilearn, we need to use a NiftiSpheresMasker
mask_img = neurosynth_dset.masker.mask_img
sphere_masker = input_data.NiftiSpheresMasker([[24, -2, -20]],
                                              radius=6,
                                              mask_img=mask_img)
sphere_masker.fit(mask_img)
sphere_img = sphere_masker.inverse_transform(np.array([[1]]))
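
# Quick sanity check (assumed addition, not in the original): count the
# voxels covered by the 6 mm sphere.
n_voxels = int((sphere_img.get_fdata() > 0).sum())
print("Sphere covers {0} voxels".format(n_voxels))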

fig, axes = plt.subplots(figsize=(6, 4), nrows=2)
display = plotting.plot_roi(
    amygdala_mask,
    annotate=False,
    draw_cross=False,
    axes=axes[0],
    figure=fig,
)
axes[0].set_title("Amygdala ROI")
display = plotting.plot_roi(
    sphere_img,
    annotate=False,
    draw_cross=False,
    axes=axes[1],
    figure=fig,
)
axes[1].set_title("Sphere ROI")
Example #7
        for part in splitedLine:
            if part != '':  # compare values; 'is not' on a str literal is a bug
                newCoord.append(float(part))
        coords.append(newCoord)
    mniCoordsFile.close()

    #create mask according to the extracted rois
    #seeds: List of coordinates of the seeds in the same space as the images (typically MNI or TAL).
    #radius: Indicates, in millimeters, the radius for the sphere around the seed. Default is None (signal is extracted on a single voxel).
    #smoothing_fwhm: If smoothing_fwhm is not None, it gives the full-width half maximum in millimeters of the spatial smoothing to apply to the signal.
    #standardize: If standardize is True, the time-series are centered and normed: their mean is set to 0 and their variance to 1 in the time dimension
    #detrend, low_pass, high_pass and t_r are passed to the signal.clean function, which improves the SNR of masked fMRI signals.
    spheres_masker = input_data.NiftiSpheresMasker(seeds=coords,
                                                   radius=10.,
                                                   allow_overlap=True,
                                                   detrend=True,
                                                   standardize=True,
                                                   low_pass=0.08,
                                                   high_pass=0.009,
                                                   t_r=0.8)

    # Init multiprocessing.Pool()
    pool = mp.Pool(15)
    i = 0
    # For each subject run the postprocessing steps
    for subject_folder in os.listdir(preproc_folder):
        rs_files = []
        motion_files = []
        if (subject_folder not in allParticipantsDic):
            subject_folder_full = os.path.join(preproc_folder, subject_folder)
            if (os.path.isdir(subject_folder_full)):
                for root, dirs, files in os.walk(subject_folder_full):
Example #8
def network_connectome(input_file, ID, atlas_select, NETWORK, node_size, mask,
                       thr, parlistfile, all_nets, conn_model, dens_thresh,
                       conf, adapt_thresh, plot_switch, bedpostx_dir):
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]

    ##Input is nifti file
    func_file = input_file

    ##Test if atlas_select is a nilearn atlas
    if atlas_select in nilearn_atlases:
        atlas = getattr(datasets, 'fetch_%s' % atlas_select)()
        try:
            parlistfile = atlas.maps
            try:
                label_names = atlas.labels
            except AttributeError:
                label_names = None
            try:
                networks_list = atlas.networks
            except AttributeError:
                networks_list = None
        except RuntimeError:
            print('Error, atlas fetching failed.')
            sys.exit()

    if parlistfile == None and atlas_select not in nilearn_atlases:
        ##Fetch nilearn atlas coords
        [coords, atlas_name, networks_list,
         label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

        if atlas_name == 'Power 2011 atlas':
            ##Reference RSN list
            import pkgutil
            import io
            network_coords_ref = NETWORK + '_coords.csv'
            atlas_coords = pkgutil.get_data("pynets",
                                            "rsnrefs/" + network_coords_ref)
            df = pd.read_csv(io.BytesIO(atlas_coords)).iloc[:, 0:4]
            net_coords = []
            ix_labels = []
            for i in range(len(df)):
                x = int(df.iloc[i, 1])
                y = int(df.iloc[i, 2])
                z = int(df.iloc[i, 3])
                net_coords.append((x, y, z))
                ix_labels.append(i)
            label_names = ix_labels
        elif atlas_name == 'Dosenbach 2010 atlas':
            coords = list(tuple(x) for x in coords)

            ##Get coord membership dictionary
            [membership, membership_plotting
             ] = nodemaker.get_mem_dict(func_file, coords, networks_list)

            ##Convert to membership dataframe
            mem_df = membership.to_frame().reset_index()

            nets_avail = list(set(list(mem_df['index'])))
            ##Get network name equivalents
            if NETWORK == 'DMN':
                NETWORK = 'default'
            elif NETWORK == 'FPTC':
                NETWORK = 'fronto-parietal'
            elif NETWORK == 'CON':
                NETWORK = 'cingulo-opercular'
            elif NETWORK not in nets_avail:
                print('Error: ' + NETWORK + ' not available with this atlas!')
                sys.exit()

            ##Get coords for network-of-interest
            net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:, 0]
            net_coords = list(tuple(x) for x in net_coords)
            ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values
            ####Add code for any special RSN reference lists for the nilearn atlases here#####
            ##If labels_names are not indices and NETWORK is specified, sub-list label names

        if label_names != ix_labels:
            try:
                label_names = label_names.tolist()
            except AttributeError:
                pass
            label_names = [label_names[i] for i in ix_labels]

        ##Get subject directory path
        dir_path = os.path.dirname(
            os.path.realpath(func_file)) + '/' + atlas_select
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        ##If masking, remove those coords that fall outside of the mask
        if mask != None:
            [net_coords,
             label_names] = nodemaker.coord_masker(mask, net_coords,
                                                   label_names)

        ##Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)

        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(
            thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            FSLDIR = os.environ.get('FSLDIR')
            if FSLDIR is None:
                print('FSLDIR environment variable not set!')
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                           NETWORK, net_coords, node_size)

    else:
        ##Fetch user-specified atlas coords
        [coords_all, atlas_name,
         par_max] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
        coords = list(tuple(x) for x in coords_all)

        ##Get subject directory path
        dir_path = os.path.dirname(
            os.path.realpath(func_file)) + '/' + atlas_name
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        ##Get coord membership dictionary
        try:
            networks_list
        except NameError:
            networks_list = None
        [membership,
         membership_plotting] = nodemaker.get_mem_dict(func_file, coords,
                                                       networks_list)

        ##Convert to membership dataframe
        mem_df = membership.to_frame().reset_index()

        ##Get coords for network-of-interest
        net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:, 0]
        net_coords = list(tuple(x) for x in net_coords)
        ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values
        try:
            label_names = [label_names[i] for i in ix_labels]
        except Exception:
            label_names = ix_labels

        if mask != None:
            [net_coords,
             label_names] = nodemaker.coord_masker(mask, net_coords,
                                                   label_names)

        ##Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)

        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(
            thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                           NETWORK, net_coords, node_size)

        ##Generate network parcels image (through refinement, this could be used
        ##in place of the 3 lines above)
        #net_parcels_img_path = gen_network_parcels(parlistfile, NETWORK, labels)
        #parcellation = nib.load(net_parcels_img_path)
        #parcel_masker = input_data.NiftiLabelsMasker(labels_img=parcellation, background_label=0, memory='nilearn_cache', memory_level=5, standardize=True)
        #ts_within_parcels = parcel_masker.fit_transform(func_file)
        #net_ts = ts_within_parcels

    ##Grow ROIs
    masker = input_data.NiftiSpheresMasker(seeds=net_coords,
                                           radius=float(node_size),
                                           allow_overlap=True,
                                           memory_level=5,
                                           memory='nilearn_cache',
                                           verbose=2,
                                           standardize=True)
    ts_within_spheres = masker.fit_transform(func_file, confounds=conf)
    net_ts = ts_within_spheres

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_' + NETWORK + '_net_ts.txt'
    np.savetxt(out_path_ts, net_ts)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2) == True:
            [conn_matrix, est_path, edge_threshold,
             thr] = thresholding.adaptive_thresholding(ts_within_spheres,
                                                       conn_model, NETWORK, ID,
                                                       est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix,
         est_path] = graphestimation.get_conn_matrix(ts_within_spheres,
                                                     conn_model, NETWORK, ID,
                                                     dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(
            conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold,
         thr] = thresholding.density_thresholding(ts_within_spheres,
                                                  conn_model, NETWORK, ID,
                                                  dens_thresh, dir_path)

    if plot_switch == True:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name,
                                   dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path,
                               ID, NETWORK, label_names, mask)

        ##Plot network time-series
        plotting.plot_timeseries(net_ts, NETWORK, ID, dir_path, atlas_name,
                                 label_names)

        ##Plot connectome viz for specific Yeo networks
        title = "Connectivity Projected on the " + NETWORK
        out_path_fig = dir_path + '/' + ID + '_' + NETWORK + '_connectome_plot.png'
        niplot.plot_connectome(conn_matrix,
                               net_coords,
                               edge_threshold=edge_threshold,
                               title=title,
                               display_mode='lyrz',
                               output_file=out_path_fig)
    return est_path, thr
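
# Hypothetical call sketch; all argument values below are illustrative:
#
# est_path, thr = network_connectome(
#     input_file='func.nii.gz', ID='sub-01', atlas_select='coords_dosenbach_2010',
#     NETWORK='DMN', node_size=4, mask=None, thr=0.3, parlistfile=None,
#     all_nets=False, conn_model='corr', dens_thresh=None, conf=None,
#     adapt_thresh=False, plot_switch=True, bedpostx_dir=None)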
Example #9
def import_mat_func(input_file, ID, atlas_select, NETWORK, pynets_dir,
                    node_size, mask, thr, graph, parlistfile, sps_model,
                    all_nets):
    if '.nii' in input_file and parlistfile == None and NETWORK == None:
        if graph == False:
            func_file = input_file

            if all_nets != None:
                func_img = nib.load(func_file)
                par_path = pynets_dir + '/RSN_refs/yeo.nii.gz'
                par_img = nib.load(par_path)
                par_data = np.asanyarray(par_img.dataobj)

                ref_dict = {
                    0: 'unknown',
                    1: 'VIS',
                    2: 'SM',
                    3: 'DA',
                    4: 'VA',
                    5: 'LIM',
                    6: 'FP',
                    7: 'DEF'
                }

                def get_ref_net(x, y, z):
                    aff_inv = npl.inv(func_img.affine)
                    # apply_affine(aff, (x,y,z)) # vox to mni
                    vox_coord = apply_affine(aff_inv, (x, y, z))  # mni to vox
                    return ref_dict[int(par_data[int(vox_coord[0]),
                                                 int(vox_coord[1]),
                                                 int(vox_coord[2])])]

            dir_path = os.path.dirname(os.path.realpath(func_file))
            atlas = getattr(datasets, 'fetch_%s' % atlas_select)()
            atlas_name = atlas['description'].splitlines()[0]
            print(atlas_name + ' comes with {0}.'.format(atlas.keys()))
            print("\n")
            coords = np.vstack(
                (atlas.rois['x'], atlas.rois['y'], atlas.rois['z'])).T
            if all_nets != None:
                membership = pd.Series([
                    get_ref_net(coord[0], coord[1], coord[2])
                    for coord in coords
                ])
            print('Stacked atlas coordinates in array of shape {0}.'.format(
                coords.shape))
            print("\n")
            if mask is not None:
                from nilearn import masking
                mask_data, _ = masking._load_mask_img(mask)
                mask_coords = list(zip(*np.where(mask_data != 0)))
                for coord in coords:
                    if tuple(coord) not in mask_coords:
                        print('Removing coordinate: ' + str(tuple(coord)) +
                              ' since it falls outside of network mask...')
                        ix = np.where(coords == coord)[0][0]
                        coords = np.delete(coords, ix, axis=0)
                        print(str(len(coords)))
                        print("\n")
            spheres_masker = input_data.NiftiSpheresMasker(
                seeds=coords,
                radius=float(node_size),
                memory='nilearn_cache',
                memory_level=5,
                verbose=2)
            time_series = spheres_masker.fit_transform(func_file)
            correlation_measure = ConnectivityMeasure(kind='correlation')
            correlation_matrix = correlation_measure.fit_transform(
                [time_series])[0]
            print("\n")
            print('Time series has {0} samples'.format(time_series.shape[0]))
            print("\n")
        else:
            correlation_matrix = genfromtxt(graph, delimiter='\t')
        plt.imshow(correlation_matrix,
                   vmin=-1.,
                   vmax=1.,
                   cmap='RdBu_r',
                   interpolation='nearest')
        plt.colorbar()
        plt.title(atlas_name + ' correlation matrix')
        out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_adj_mat_corr.png'
        plt.savefig(out_path_fig)
        plt.close()
        ##Tweak edge_threshold to keep only the strongest connections.
        if mask is None:
            atlast_graph_title = atlas_name + ' correlation graph'
        else:
            atlast_graph_title = atlas_name + ' Masked Nodes'
        edge_threshold = str(float(thr) * 100) + '%'

        # plot graph:
        if all_nets != None:
            # coloring code:
            n = len(membership.unique())
            clust_pal = sns.color_palette("Set1", n)
            clust_lut = dict(
                zip(map(str, np.unique(membership.astype('category'))),
                    clust_pal))
            clust_colors = colors.to_rgba_array(membership.map(clust_lut))

            plotting.plot_connectome(correlation_matrix,
                                     coords,
                                     node_color=clust_colors,
                                     title=atlast_graph_title,
                                     edge_threshold=edge_threshold,
                                     node_size=20,
                                     colorbar=True)
        else:
            plotting.plot_connectome(correlation_matrix,
                                     coords,
                                     title=atlast_graph_title,
                                     edge_threshold=edge_threshold,
                                     node_size=20,
                                     colorbar=True)
        out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_connectome_viz.png'
        plt.savefig(out_path_fig)
        plt.close()
        time_series_path = dir_path + '/' + ID + '_ts.txt'
        np.savetxt(time_series_path, time_series, delimiter='\t')
        mx = genfromtxt(time_series_path, delimiter='')

    elif '.nii' in input_file and parlistfile != None and NETWORK == None:  # block of code for whole brain parcellations
        if all_nets != None:
            par_path = pynets_dir + '/RSN_refs/yeo.nii.gz'
            par_img = nib.load(par_path)
            par_data = np.asanyarray(par_img.dataobj)

            ref_dict = {
                0: 'unknown',
                1: 'VIS',
                2: 'SM',
                3: 'DA',
                4: 'VA',
                5: 'LIM',
                6: 'FP',
                7: 'DEF'
            }

            def get_ref_net(x, y, z):
                aff_inv = npl.inv(bna_img.affine)
                # apply_affine(aff, (x,y,z)) # vox to mni
                vox_coord = apply_affine(aff_inv, (x, y, z))  # mni to vox
                return ref_dict[int(par_data[int(vox_coord[0]),
                                             int(vox_coord[1]),
                                             int(vox_coord[2])])]

        func_file = input_file
        dir_path = os.path.dirname(os.path.realpath(func_file))

        atlas_name = parlistfile.split('/')[-1].split('.')[0]
        # Code for getting name and coordinates of parcels.
        # Adapted from Dan L. (https://github.com/danlurie/despolab_lesion/blob/master/code/sandbox/Sandbox%20-%20Calculate%20and%20plot%20HCP%20mean%20matrix.ipynb)
        bna_img = nib.load(parlistfile)
        bna_data = np.asanyarray(bna_img.dataobj)
        if not np.issubdtype(bna_img.get_data_dtype(), np.integer):
            # Number of parcels:
            par_max = np.ceil(np.max(bna_data)).astype('int')
            bna_data = bna_data.astype('int16')
        else:
            par_max = np.max(bna_data)

        img_stack = []
        for idx in range(1, par_max + 1):
            roi_img = bna_data == idx
            img_stack.append(roi_img)
        img_stack = np.array(img_stack)
        img_list = []
        for idx in range(par_max):
            roi_img = nilearn.image.new_img_like(bna_img, img_stack[idx])
            img_list.append(roi_img)

        bna_4D = nilearn.image.concat_imgs(img_list)
        coords = []
        for roi_img in img_list:
            coords.append(nilearn.plotting.find_xyz_cut_coords(roi_img))
        coords = np.array(coords)
        if all_nets != None:
            membership = pd.Series([
                get_ref_net(coord[0], coord[1], coord[2]) for coord in coords
            ])
        # atlas = getattr(datasets, 'fetch_%s' % atlas_select)()
        # atlas_name = atlas['description'].splitlines()[0]
        print("\n")
        print(atlas_name + ' comes with {0} parcels'.format(par_max))
        print("\n")
        print('Stacked atlas coordinates in array of shape {0}.'.format(
            coords.shape))
        print("\n")
        if mask is not None:
            from nilearn import masking
            mask_data, _ = masking._load_mask_img(mask)
            mask_coords = list(zip(*np.where(mask_data != 0)))
            for coord in coords:
                if tuple(coord) not in mask_coords:
                    print('Removing coordinate: ' + str(tuple(coord)) +
                          ' since it falls outside of network mask...')
                    ix = np.where(coords == coord)[0][0]
                    coords = np.delete(coords, ix, axis=0)
                    print(str(len(coords)))

        ##extract time series from whole brain parcellations:
        parcellation = nib.load(parlistfile)
        parcel_masker = input_data.NiftiLabelsMasker(labels_img=parcellation,
                                                     background_label=0,
                                                     memory='nilearn_cache',
                                                     memory_level=5)
        time_series = parcel_masker.fit_transform(func_file)
        ##old ref code for coordinate parcellations:
        #spheres_masker = input_data.NiftiSpheresMasker(seeds=coords, radius=float(node_size), memory='nilearn_cache', memory_level=2, verbose=2)
        #time_series = spheres_masker.fit_transform(func_file)
        correlation_measure = ConnectivityMeasure(kind='correlation')
        correlation_matrix = correlation_measure.fit_transform([time_series
                                                                ])[0]
        print("\n")
        print('Time series has {0} samples'.format(time_series.shape[0]))
        print("\n")
        plt.imshow(correlation_matrix,
                   vmin=-1.,
                   vmax=1.,
                   cmap='RdBu_r',
                   interpolation='nearest')
        plt.colorbar()
        plt.title(atlas_name + ' correlation matrix')
        out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_adj_mat_corr.png'
        plt.savefig(out_path_fig)
        plt.close()
        ##Tweak edge_threshold to keep only the strongest connections.
        if mask is None:
            atlast_graph_title = atlas_name + ' correlation graph'
        else:
            atlast_graph_title = atlas_name + ' Masked Nodes'
        edge_threshold = str(float(thr) * 100) + '%'

        if all_nets != None:
            # coloring code:
            n = len(membership.unique())
            clust_pal = sns.color_palette("Set1", n)
            clust_lut = dict(
                zip(map(str, np.unique(membership.astype('category'))),
                    clust_pal))
            clust_colors = colors.to_rgba_array(membership.map(clust_lut))
            plotting.plot_connectome(correlation_matrix,
                                     coords,
                                     node_color=clust_colors,
                                     title=atlast_graph_title,
                                     edge_threshold=edge_threshold,
                                     node_size=20,
                                     colorbar=True)
        else:
            plotting.plot_connectome(correlation_matrix,
                                     coords,
                                     title=atlast_graph_title,
                                     edge_threshold=edge_threshold,
                                     node_size=20,
                                     colorbar=True)
        out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_connectome_viz.png'
        plt.savefig(out_path_fig)
        plt.close()
        time_series_path = dir_path + '/' + ID + '_ts.txt'
        np.savetxt(time_series_path, time_series, delimiter='\t')
        mx = genfromtxt(time_series_path, delimiter='')

    elif '.nii' in input_file and NETWORK != None:
        func_file = input_file

        ##Reference RSN list
        load_path = pynets_dir + '/RSN_refs/' + NETWORK + '_coords.csv'
        df = pd.read_csv(load_path).iloc[:, 0:4]
        coords = []
        labels = []
        for i in range(len(df)):
            print("ROI Reference #: " + str(i))
            x = int(df.iloc[i, 1])
            y = int(df.iloc[i, 2])
            z = int(df.iloc[i, 3])
            print("X:" + str(x) + " Y:" + str(y) + " Z:" + str(z))
            coords.append((x, y, z))
            labels.append(i)
        print("\n")
        print(coords)
        print(labels)
        print("\n")
        print("-------------------")
        dir_path = os.path.dirname(os.path.realpath(func_file))

        ##Grow ROIs
        ##If masking, remove those coords that fall outside of the mask
        if mask != None:
            from nilearn import masking
            mask_data, _ = masking._load_mask_img(mask)
            mask_coords = list(zip(*np.where(mask_data != 0)))
            for coord in list(coords):
                if coord not in mask_coords:
                    print('Removing coordinate: ' + str(coord) +
                          ' since it falls outside of network mask...')
                    coords.remove(coord)
        masker = input_data.NiftiSpheresMasker(seeds=coords,
                                               radius=float(node_size),
                                               allow_overlap=True,
                                               memory_level=5,
                                               memory='nilearn_cache',
                                               verbose=2)
        time_series = masker.fit_transform(func_file)
        for time_serie, label in zip(time_series.T, labels):
            plt.plot(time_serie, label=label)
        plt.title(NETWORK + ' Network Time Series')
        plt.xlabel('Scan Number')
        plt.ylabel('Normalized Signal')
        plt.legend()
        plt.tight_layout()
        out_path_fig = dir_path + '/' + ID + '_' + NETWORK + '_TS_plot.png'
        plt.savefig(out_path_fig)
        plt.close()
        connectivity_measure = ConnectivityMeasure(kind='correlation')
        correlation_matrix = connectivity_measure.fit_transform([time_series
                                                                 ])[0]
        plot_title = NETWORK + ' Network Time Series'
        plotting.plot_connectome(correlation_matrix, coords, title=plot_title)
        ##Display connectome with hemispheric projections.
        title = "Connectivity Projected on the " + NETWORK
        out_path_fig = dir_path + '/' + ID + '_' + NETWORK + '_connectome_plot.png'
        plotting.plot_connectome(correlation_matrix,
                                 coords,
                                 title=title,
                                 display_mode='lyrz',
                                 output_file=out_path_fig)
        time_series_path = dir_path + '/' + ID + '_' + NETWORK + '_ts.txt'
        np.savetxt(time_series_path, time_series, delimiter='\t')
        mx = genfromtxt(time_series_path, delimiter='')
    else:
        DR_st_1 = input_file
        dir_path = os.path.dirname(os.path.realpath(DR_st_1))
        mx = genfromtxt(DR_st_1, delimiter='')
    # GraphLassoCV was renamed GraphicalLassoCV in scikit-learn >= 0.22
    from sklearn.covariance import GraphicalLassoCV
    estimator = GraphicalLassoCV()
    try:
        est = estimator.fit(mx)
    except Exception:
        print(
            "Error: Lasso sparse matrix modeling failed. Check your input time-series data..."
        )

#        emp_cov = covariance.empirical_covariance(mx)
#        shrunk_cov = covariance.shrunk_covariance(emp_cov, shrinkage=0.8) # Set shrinkage closer to 1 for poorly-conditioned data
#
#        alphaRange = 10.0 ** np.arange(-8,0) # 1e-7 to 1e-1 by order of magnitude
#        for alpha in alphaRange:
#            try:
#                estimator = covariance.graph_lasso(shrunk_cov, alpha)
#                print("Calculated graph-lasso covariance matrix for alpha=%s"%alpha)
#            except FloatingPointError:
#                print("Failed at alpha=%s"%alpha)
#        estimator = ShrunkCovariance()
#        est = estimator.fit(mx)
    if NETWORK != None:
        est_path = dir_path + '/' + ID + '_' + NETWORK + '_est%s.txt' % (
            '_sps_inv' if sps_model else '')
    else:
        est_path = dir_path + '/' + ID + '_est%s.txt' % ('_sps_inv'
                                                         if sps_model else '')
    if sps_model:
        np.savetxt(est_path, estimator.precision_, delimiter='\t')
    else:
        np.savetxt(est_path, correlation_matrix, delimiter='\t')
    return (mx, est_path)
Example #10
    def plot_t(self,
               fig,
               show_time_individual=False,
               show_time_average=False,
               ica_lookup=None,
               show_spectrum=False,
               show_time_group=False,
               coords=(0, 0, 0),
               significance_threshold=0.5,
               *args,
               **kwargs):
        # Determine Axes Layout
        if not (show_time_individual or show_time_average
                ) and not show_time_group and not show_spectrum:
            return  # no plot to render
        fig.clear()
        gs = gridspec.GridSpec(2, 5)
        if not (show_time_individual or
                show_time_average) and show_time_group and not show_spectrum:
            axgr = plt.subplot(gs[:, :])
        if not (show_time_individual
                or show_time_average) and show_time_group and show_spectrum:
            axgr, axps = plt.subplot(gs[:, 3:]), plt.subplot(gs[:, :3])
        if (show_time_individual or show_time_average
            ) and not show_time_group and not show_spectrum:
            axts = plt.subplot(gs[:, :])
        if (show_time_individual or
                show_time_average) and show_time_group and not show_spectrum:
            axts, axgr = plt.subplot(gs[:, 3:]), plt.subplot(gs[:, :3])
        if (show_time_individual
                or show_time_average) and show_time_group and show_spectrum:
            axts, axgr, axps = plt.subplot(gs[:, 3:]), plt.subplot(
                gs[0, :3]), plt.subplot(gs[1, :3])

        # Process Data & Plot
        dat = np.abs(self.gd['ica'][ica_lookup]['img'].get_fdata()) > significance_threshold
        masked = image.new_img_like(self.gd['smri']['img'], dat.astype(int))
        if show_time_individual:
            try:
                seed_masker = input_data.NiftiSpheresMasker(
                    mask_img=masked,
                    seeds=[coords],
                    radius=0,
                    detrend=False,
                    standardize=False,
                    t_r=4.,
                    memory='nilearn_cache',
                    memory_level=1,
                    verbose=0)
                ind_ts = seed_masker.fit_transform(self.gd['fmri']['img'])
            except Exception:
                ind_ts = []
            # plot on the time-series axes; hold() was removed in matplotlib 3
            axts.plot(ind_ts,
                      label='Voxel (%d, %d, %d) Time-Series' %
                      (coords[0], coords[1], coords[2]))
            axts.set_xlabel('Time (s)')
            axts.set_ylabel('fMRI signal')
        if show_time_average:
            brain_masker = input_data.NiftiMasker(mask_img=masked,
                                                  t_r=4.,
                                                  memory='nilearn_cache',
                                                  memory_level=1,
                                                  verbose=0)

            ts = brain_masker.fit_transform(self.gd['fmri']['img'])
            ave_ts = np.mean(ts, axis=1)
            axts.plot(ave_ts, label="Average Signal")
            axts.set_xlabel('Time (s)')
            axts.set_ylabel('fMRI signal')
        if show_time_group:
            # TODO: Plot Group Logic
            pass
        if show_spectrum:
            # TODO: Plot Power Spectrum Logic
            pass
        plt.tight_layout(pad=0.1)
Example #11
def wb_connectome_with_nl_atlas_coords(input_file, ID, atlas_select, NETWORK,
                                       node_size, mask, thr, all_nets,
                                       conn_model, dens_thresh, conf,
                                       adapt_thresh, plot_switch,
                                       bedpostx_dir):
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]

    ##Input is nifti file
    func_file = input_file

    ##Fetch nilearn atlas coords
    [coords, atlas_name, networks_list,
     label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    ##Get subject directory path
    dir_path = os.path.dirname(
        os.path.realpath(func_file)) + '/' + atlas_select
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    ##Get coord membership dictionary if all_nets option triggered
    if all_nets != False:
        try:
            networks_list
        except NameError:
            networks_list = None
        [membership,
         membership_plotting] = nodemaker.get_mem_dict(func_file, coords,
                                                       networks_list)

    ##Mask coordinates
    if mask is not None:
        [coords, label_names] = nodemaker.coord_masker(mask, coords,
                                                       label_names)

    ##Save coords and label_names to pickles
    coord_path = dir_path + '/coords_wb_' + str(thr) + '.pkl'
    with open(coord_path, 'wb') as f:
        pickle.dump(coords, f)

    labels_path = dir_path + '/labelnames_wb_' + str(thr) + '.pkl'
    with open(labels_path, 'wb') as f:
        pickle.dump(label_names, f)

    if bedpostx_dir is not None:
        from pynets.diffconnectometry import run_struct_mapping
        FSLDIR = os.environ.get('FSLDIR')
        if FSLDIR is None:
            print('FSLDIR environment variable not set!')
        est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                       NETWORK, coords, node_size)

    ##Extract within-spheres time-series from funct file
    spheres_masker = input_data.NiftiSpheresMasker(seeds=coords,
                                                   radius=float(node_size),
                                                   memory='nilearn_cache',
                                                   memory_level=5,
                                                   verbose=2,
                                                   standardize=True)
    ts_within_spheres = spheres_masker.fit_transform(func_file, confounds=conf)
    print('\n' +
          'Time series has {0} samples'.format(ts_within_spheres.shape[0]) +
          '\n')

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_whole_brain_ts_within_spheres.txt'
    np.savetxt(out_path_ts, ts_within_spheres)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2) == True:
            [conn_matrix, est_path, edge_threshold,
             thr] = thresholding.adaptive_thresholding(ts_within_spheres,
                                                       conn_model, NETWORK, ID,
                                                       est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix,
         est_path] = graphestimation.get_conn_matrix(ts_within_spheres,
                                                     conn_model, NETWORK, ID,
                                                     dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(
            conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold,
         thr] = thresholding.density_thresholding(ts_within_spheres,
                                                  conn_model, NETWORK, ID,
                                                  dens_thresh, dir_path)

    if plot_switch == True:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name,
                                   dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path,
                               ID, NETWORK, label_names, mask)

        ##Plot connectome viz for all Yeo networks
        if all_nets != False:
            plotting.plot_membership(membership_plotting, conn_matrix,
                                     conn_model, coords, edge_threshold,
                                     atlas_name, dir_path)
        else:
            out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_connectome_viz.png'
            niplot.plot_connectome(conn_matrix,
                                   coords,
                                   title=atlas_name,
                                   edge_threshold=edge_threshold,
                                   node_size=20,
                                   colorbar=True,
                                   output_file=out_path_fig)
    return est_path, thr
Example #12
def sbc_one_session(subject, session, fmriprep_dir, output_dir, tr):

    confounds_file, brainmask_file, rs_file, anat_file = get_files(
        fmriprep_dir, subject, session)
    out_stub = "{}_{}".format(subject, session)

    # look for output files
    out_file_nii = os.path.join(output_dir,
                                "sbc_pcc_1_" + out_stub + ".nii.gz")
    out_file_thresh = os.path.join(
        output_dir, "sbc_pcc_2_fisherz_thrsh0.5_" + out_stub + ".png")
    out_file_report = os.path.join(output_dir,
                                   "sbc_pcc_9_info_" + out_stub + ".txt")

    if not (os.path.exists(out_file_thresh) and os.path.exists(out_file_nii)
            and os.path.exists(out_file_report)):
        print("*** Running SBC for {} {} ***".format(subject, session))
        # see http://nilearn.github.io/auto_examples/03_connectivity/plot_seed_to_voxel_correlation.html
        pcc_coords = [(0, -52, 18)]
        lp_freq = 0.1
        hp_freq = 0.01

        # load confounds
        confounds, outlier_stats = get_confounds(confounds_file)

        # extract data from seed ROI
        seed_masker = input_data.NiftiSpheresMasker(pcc_coords,
                                                    radius=8,
                                                    mask_img=brainmask_file,
                                                    detrend=True,
                                                    standardize=True,
                                                    low_pass=lp_freq,
                                                    high_pass=hp_freq,
                                                    t_r=tr)
        seed_time_series = seed_masker.fit_transform(
            rs_file, confounds=confounds.values)

        #  extract data for the entire brain
        brain_masker = input_data.NiftiMasker(smoothing_fwhm=6,
                                              mask_img=brainmask_file,
                                              detrend=True,
                                              standardize=True,
                                              low_pass=lp_freq,
                                              high_pass=hp_freq,
                                              t_r=tr)
        brain_time_series = brain_masker.fit_transform(
            rs_file, confounds=confounds.values)

        #  calculate correlation
        seed_based_correlations = np.dot(brain_time_series.T, seed_time_series) / \
                                  seed_time_series.shape[0]
        seed_based_correlations_fisher_z = np.arctanh(seed_based_correlations)

        # plot
        seed_based_correlation_img = brain_masker.inverse_transform(
            seed_based_correlations_fisher_z.T)
        seed_based_correlation_img.to_filename(out_file_nii)

        display = plotting.plot_stat_map(seed_based_correlation_img,
                                         threshold=0.5,
                                         bg_img=anat_file,
                                         cut_coords=pcc_coords[0],
                                         title=out_stub + " (fisher z)")
        display.add_markers(marker_coords=pcc_coords,
                            marker_color='g',
                            marker_size=300)
        display.savefig(out_file_thresh)
        display.close()

        # report
        report = "Confounds:\n"
        report += "\n".join(confounds.columns) + "\n\n"
        report += "lp_freq: {}\n".format(lp_freq)
        report += "hp_freq: {}\n".format(hp_freq)

        report += "confounds_file: {}\n".format(confounds_file)
        report += "brainmask_file: {}\n".format(brainmask_file)
        report += "rs_file: {}\n".format(rs_file)
        report += "anat_file: {}\n\n".format(anat_file)

        report += confounds.to_string()

        with open(out_file_report, "w") as fi:
            fi.write(report)
    else:
        print("*** SBC for {} {} already computed. Do nothing. ***".format(
            subject, session))
Example #13
confounds_list = sorted(confounds_list)

print(f"Number of subjects: {len(fmri_list)}")

# Load Power ROIs coordinates.

power = datasets.fetch_coords_power_2011()
power_coords = np.vstack((power.rois["x"], power.rois["y"], power.rois["z"])).T

# Create masker file.

power_spheres = input_data.NiftiSpheresMasker(
    seeds=power_coords, 
    smoothing_fwhm=6, 
    radius=5,
    detrend=True, 
    standardize=True,
    low_pass=0.08, 
    high_pass=0.009,
    t_r=0.72
)

parcellation = power_spheres

# Extract timeseries. 
# Create empty timeseries to store array.
timeseries_all = np.zeros((len(fmri_list), 405, 264)) 

for sub in range(len(fmri_list)):

    # Load confound file.
    confounds = np.loadtxt(confounds_list[sub])

    # Extract sphere time series for this subject (the array above assumes
    # 405 volumes and 264 ROIs per subject).
    timeseries_all[sub] = parcellation.fit_transform(fmri_list[sub],
                                                     confounds=confounds)
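
# Follow-up sketch (not in the original snippet): per-subject correlation
# matrices from the stacked time series.
from nilearn.connectome import ConnectivityMeasure

correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrices = correlation_measure.fit_transform(list(timeseries_all))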
Example #14
def import_mat_func(input_file, ID, atlas_select, NETWORK, pynets_dir, node_size, mask, thr):
    if '.nii' in input_file and NETWORK == None:
        func_file=input_file
        dir_path = os.path.dirname(os.path.realpath(func_file))
        atlas = getattr(datasets, 'fetch_%s' % atlas_select)()
        atlas_name = atlas['description'].splitlines()[0]
        print("\n")
        print(atlas_name + ' comes with {0}.'.format(atlas.keys()))
        print("\n")
        coords = np.vstack((atlas.rois['x'], atlas.rois['y'], atlas.rois['z'])).T
        print("\n")
        print('Stacked atlas coordinates in array of shape {0}.'.format(coords.shape))
        print("\n")
        if mask is not None:
            from nilearn import masking
            mask_data, _ = masking._load_mask_img(mask)
            mask_coords = list(zip(*np.where(mask_data != 0)))
            for coord in coords:
                if tuple(coord) not in mask_coords:
                    print('Removing coordinate: ' + str(tuple(coord)) + ' since it falls outside of network mask...')
                    ix = np.where(coords == coord)[0][0]
                    coords = np.delete(coords, ix, axis=0)
                    print(str(len(coords)))
        spheres_masker = input_data.NiftiSpheresMasker(seeds=coords, radius=float(node_size), memory='nilearn_cache', memory_level=5, verbose=2, standardize=True)
        time_series = spheres_masker.fit_transform(func_file)
        correlation_measure = ConnectivityMeasure(kind='correlation')
        correlation_matrix = correlation_measure.fit_transform([time_series])[0]
        print("\n")
        print('Time series has {0} samples'.format(time_series.shape[0]))
        print("\n")
        plt.imshow(correlation_matrix, vmin=-1., vmax=1., cmap='RdBu_r', interpolation='nearest')
        plt.colorbar()
        plt.title(atlas_name + ' correlation matrix')
        out_path_fig=dir_path + '/' + ID + '_' + atlas_name + '_adj_mat_cov.png'
        plt.savefig(out_path_fig)
        plt.close()
        # Tweak edge_threshold to keep only the strongest connections.
        if mask is None:
            atlas_graph_title = atlas_name + ' correlation graph'
        else:
            atlas_graph_title = atlas_name + ' Masked Nodes'
        edge_threshold = str(float(thr) * 100) + '%'
        plotting.plot_connectome(correlation_matrix, coords, title=atlas_graph_title,
                                 edge_threshold=edge_threshold, node_size=20, colorbar=True)
        out_path_fig=dir_path + '/' + ID + '_' + atlas_name + '_connectome_viz.png'
        plt.savefig(out_path_fig)
        plt.close()
        time_series_path = dir_path + '/' + ID + '_ts.txt'
        np.savetxt(time_series_path, time_series, delimiter='\t')
        mx = genfromtxt(time_series_path, delimiter='')

    elif '.nii' in input_file and NETWORK is not None:
        func_file = input_file

        ##Reference RSN list
        load_path = pynets_dir + '/RSN_refs/' + NETWORK + '_coords.csv'
        df = pd.read_csv(load_path).iloc[:, 0:4]
        coords = []
        labels = []
        for i in range(len(df)):
            print("ROI Reference #: " + str(i))
            x = int(df.iloc[i, 1])
            y = int(df.iloc[i, 2])
            z = int(df.iloc[i, 3])
            print("X:" + str(x) + " Y:" + str(y) + " Z:" + str(z))
            coords.append((x, y, z))
            labels.append(i)
        print("\n")
        print(coords)
        print(labels)
        print("\n")
        print("-------------------")
        dir_path = os.path.dirname(os.path.realpath(func_file))

        ##Grow ROIs
        ##If masking, remove those coords that fall outside of the mask
        if mask is not None:
            from nilearn import masking
            mask_data, _ = masking._load_mask_img(mask)
            mask_coords = set(zip(*np.where(mask_data != 0)))
            # Build a filtered list; removing items from a list while
            # iterating over it skips elements.
            kept = []
            for coord in coords:
                if coord in mask_coords:
                    kept.append(coord)
                else:
                    print('Removing coordinate: ' + str(coord) + ' since it falls outside of network mask...')
            coords = kept
        masker = input_data.NiftiSpheresMasker(
            seeds=coords, radius=float(node_size), allow_overlap=True, memory_level=5,
            memory='nilearn_cache', verbose=2, standardize=True)
        time_series = masker.fit_transform(func_file)
        for time_serie, label in zip(time_series.T, labels):
            plt.plot(time_serie, label=label)
        plt.title(NETWORK + ' Network Time Series')
        plt.xlabel('Scan Number')
        plt.ylabel('Normalized Signal')
        plt.legend()
        plt.tight_layout()
        out_path_fig=dir_path + '/' + ID + '_' + NETWORK + '_TS_plot.png'
        plt.savefig(out_path_fig)
        plt.close()
        connectivity_measure = ConnectivityMeasure(kind='partial correlation')
        partial_correlation_matrix = connectivity_measure.fit_transform([time_series])[0]
        plot_title = NETWORK + ' Network Partial Correlations'
        plotting.plot_connectome(partial_correlation_matrix, coords,
                                 title=plot_title)
        # Display connectome with hemispheric projections.
        title = "Connectivity Projected on the " + NETWORK
        out_path_fig=dir_path + '/' + ID + '_' + NETWORK + '_connectome_plot.png'
        plotting.plot_connectome(partial_correlation_matrix, coords, title=title,
                                 display_mode='lyrz', output_file=out_path_fig)
        time_series_path = dir_path + '/' + ID + '_' + NETWORK + '_ts.txt'
        np.savetxt(time_series_path, time_series, delimiter='\t')
        mx = genfromtxt(time_series_path, delimiter='')
    else:
        DR_st_1=input_file
        dir_path = os.path.dirname(os.path.realpath(DR_st_1))
        mx = genfromtxt(DR_st_1, delimiter='')
    from sklearn.covariance import GraphLassoCV, ShrunkCovariance
    # Note: GraphLassoCV was renamed GraphicalLassoCV in scikit-learn >= 0.22.
    estimator = GraphLassoCV()
    try:
        est = estimator.fit(mx)
    except Exception:
#        print("WARNING: Lasso Cross-Validation Failed. Using Shrunk Covariance instead...")
#        emp_cov = covariance.empirical_covariance(mx)
#        shrunk_cov = covariance.shrunk_covariance(emp_cov, shrinkage=0.8) # Set shrinkage closer to 1 for poorly-conditioned data
#
#        alphaRange = 10.0 ** np.arange(-8,0) # 1e-7 to 1e-1 by order of magnitude
#        for alpha in alphaRange:
#            try:
#                estimator = covariance.graph_lasso(shrunk_cov, alpha)
#                print("Calculated graph-lasso covariance matrix for alpha=%s"%alpha)
#            except FloatingPointError:
#                print("Failed at alpha=%s"%alpha)
        estimator = ShrunkCovariance()
        est = estimator.fit(mx)
    if NETWORK is not None:
        est_path_cov = dir_path + '/' + ID + '_' + NETWORK + '_est_cov.txt'
        est_path_sps_inv_cov = dir_path + '/' + ID + '_' + NETWORK + '_est_sps_inv_cov.txt'
    else:
        est_path_cov = dir_path + '/' + ID + '_est_cov.txt'
        est_path_sps_inv_cov = dir_path + '/' + ID + '_est_sps_inv_cov.txt'
    np.savetxt(est_path_cov, estimator.covariance_, delimiter='\t')
    np.savetxt(est_path_sps_inv_cov, estimator.precision_, delimiter='\t')
    return mx, est_path_cov, est_path_sps_inv_cov
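# A hedged usage sketch of import_mat_func; the file path, subject ID, and
# threshold below are hypothetical placeholders.
mx, cov_path, sps_inv_cov_path = import_mat_func(
    '/data/sub-01/func.nii.gz', 'sub-01', 'coords_power_2011', None,
    '/opt/pynets', node_size=4, mask=None, thr=0.3)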
Example #15
def seed_to_voxel_corr(func_filename,
                       confound_filename,
                       resultdir='.',
                       seed_coord=[(0, -52, 18)],
                       output_head='DMN',
                       maps_img='',
                       mask_img=None):
    #print(func_filename)
    #print(confound_filename)
    mask_or_seed = 0
    if maps_img == '':
        mask_or_seed = 1
        seed_masker = input_data.NiftiSpheresMasker(seed_coord,
                                                    radius=8,
                                                    detrend=True,
                                                    standardize=True,
                                                    mask_img=mask_img,
                                                    verbose=0)
        #memory='nilearn_cache', memory_level=1, verbose=0)
    else:
        mask_or_seed = 0
        seed_masker = input_data.NiftiMapsMasker(maps_img=[maps_img],
                                                 standardize=True,
                                                 mask_img=mask_img,
                                                 verbose=0)
        #memory='nilearn_cache',

    seed_time_series = seed_masker.fit_transform(func_filename,
                                                 confounds=[confound_filename])

    brain_masker = input_data.NiftiMasker(smoothing_fwhm=6,
                                          detrend=True,
                                          standardize=True,
                                          memory_level=1,
                                          verbose=0)

    brain_time_series = brain_masker.fit_transform(
        func_filename, confounds=[confound_filename])
    '''
    fig = plt.figure(figsize=(6,3), dpi=300)
    plt.plot(seed_time_series)
    plt.title('Seed time series')
    plt.xlabel('Scan number')
    plt.ylabel('Normalized signal')
    plt.tight_layout()
    fig.savefig(join(resultdir,'%s_curve.png' % output_head), bbox_inches='tight')
    '''

    seed_based_correlations = np.dot(
        brain_time_series.T, seed_time_series) / seed_time_series.shape[0]

    print("seed-based correlation shape: (%s, %s)" %
          seed_based_correlations.shape)
    print("seed-based correlation: min = %.3f; max = %.3f" %
          (seed_based_correlations.min(), seed_based_correlations.max()))

    seed_based_correlations_fisher_z = np.arctanh(seed_based_correlations)
    print(
        "seed-based correlation Fisher-z transformed: min = %.3f; max = %.3f" %
        (seed_based_correlations_fisher_z.min(),
         seed_based_correlations_fisher_z.max()))

    # Finally, we can transform the correlation array back to a Nifti image
    # object that we can save.
    seed_based_corr_img = brain_masker.inverse_transform(
        seed_based_correlations.T)
    seed_based_corr_img.to_filename(
        join(resultdir, '%s_z.nii.gz' % output_head))
    '''
    display = plotting.plot_stat_map(seed_based_corr_img, threshold=0.3,
                                     cut_coords=(0,0,0), draw_cross=False)
    if mask_or_seed == 1:
        display.add_markers(marker_coords=seed_coord, marker_color='g',
                            marker_size=20)
    # At last, we save the plot as pdf.
    #display.savefig(join(resultdir,'%s_z.pdf') % output_head)
    display.savefig(join(resultdir,'%s_z.jpg') % output_head)
    plt.close()
    '''
    return join(resultdir, '%s_z.nii.gz' % output_head)
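# Why the dot product in seed_to_voxel_corr yields Pearson correlations:
# both maskers standardize each time series to zero mean and unit variance,
# so for standardized x and y of length n, dot(x, y) / n is exactly
# Pearson's r. A small self-contained check (illustrative, not from the
# original source):
import numpy as np

rng = np.random.default_rng(0)
a = rng.standard_normal(200)
b = 0.5 * a + rng.standard_normal(200)
a_z = (a - a.mean()) / a.std()  # population (ddof=0) standardization,
b_z = (b - b.mean()) / b.std()  # as nilearn's standardize=True uses
assert np.isclose(np.dot(a_z, b_z) / len(a_z), np.corrcoef(a, b)[0, 1])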
Example #16
# Extract signals from spheres around DMN seeds
# ---------------------------------------------
#
# We can compute the mean signal within **spheres** of a fixed radius
# around a sequence of (x, y, z) coordinates with the object
# :class:`nilearn.input_data.NiftiSpheresMasker`.
# The resulting signals are then prepared by the masker object: detrended,
# band-pass filtered, and **standardized to 1 variance**.

from nilearn import input_data

masker = input_data.NiftiSpheresMasker(dmn_coords,
                                       radius=8,
                                       detrend=True,
                                       standardize=True,
                                       low_pass=0.1,
                                       high_pass=0.01,
                                       t_r=2,
                                       memory='nilearn_cache',
                                       memory_level=1,
                                       verbose=2)

# Additionally, we pass confound information to ensure our extracted
# signal is cleaned from confounds.

func_filename = dataset.func[0]
confounds_filename = dataset.confounds[0]

time_series = masker.fit_transform(func_filename,
                                   confounds=[confounds_filename])

##########################################################################
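# From here one would typically inspect the extracted signals, for example
# by correlating the spheres' time series with one another (a hedged
# continuation; the original example stops after the extraction step):
import numpy as np

dmn_correlations = np.corrcoef(time_series.T)
print('Correlation matrix shape: {0}'.format(dmn_correlations.shape))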
Example #17
##########################################################################
# We use :class:`nilearn.input_data.NiftiSpheresMasker` to extract the
# **time series from the functional imaging within the sphere**. The
# sphere is centered at pcc_coords and will have the radius we pass to
# NiftiSpheresMasker (here 8 mm).
#
# The extraction will also detrend, standardize, and bandpass filter the data.
# This will create a NiftiSpheresMasker object.
from nilearn import input_data

seed_masker = input_data.NiftiSpheresMasker(pcc_coords,
                                            radius=8,
                                            detrend=True,
                                            standardize=True,
                                            low_pass=0.1,
                                            high_pass=0.01,
                                            t_r=2.,
                                            memory='nilearn_cache',
                                            memory_level=1,
                                            verbose=0)

##########################################################################
# Then we extract the mean time series within the seed region while
# regressing out the confounds that
# can be found in the dataset's csv file
seed_time_series = seed_masker.fit_transform(func_filename,
                                             confounds=[confound_filename])

##########################################################################
# Next, we can proceed similarly for the **brain-wide voxel-wise time
# series**, using :class:`nilearn.input_data.NiftiMasker` with the same
# input arguments.
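##########################################################################
# A hedged sketch of that brain-wide step (the original example is cut off
# here); the parameters below mirror the seed masker above:
from nilearn import input_data

brain_masker = input_data.NiftiMasker(smoothing_fwhm=6,
                                      detrend=True,
                                      standardize=True,
                                      low_pass=0.1,
                                      high_pass=0.01,
                                      t_r=2.,
                                      memory='nilearn_cache',
                                      memory_level=1,
                                      verbose=0)

brain_time_series = brain_masker.fit_transform(func_filename,
                                               confounds=[confound_filename])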
Example #18
def run(files,
        motpars,
        out_dir='.',
        n_iters=10000,
        qc_thresh=0.2,
        window=1000,
        earl=False,
        regress=False):
    """
    Run scrubbing, high-low motion, and QCRSFC analyses.

    Parameters
    ----------
    files : (N,) list of nifti files
        List of 4D (X x Y x Z x T) images in MNI space.
    motpars : (N,) list of array-like
        List of motion parameter arrays (one per img), from which the QC
        metric (framewise displacement) is computed.
    out_dir : str
        Output directory.
    n_iters : int
        Number of iterations to run to generate null distributions.
    qc_thresh : float
        Threshold for QC metric used in scrubbing analysis. Default is 0.2
        (for FD).
    window : int
        Number of units (pairs of ROIs) to include when averaging to generate
        smoothing curve.
    earl : bool, optional
        If True, filter the motion parameters with ef.filter_earl before
        computing framewise displacement. Default is False.
    regress : bool, optional
        If True, regress the motion parameters out of the ROI time series
        during extraction. Default is False.

    Notes
    -----
    This function writes out several files to out_dir:
    - all_sorted_distances.txt: Sorted distances between every pair of ROIs.
    - smc_sorted_distances.txt: Sorted distances for smoothing curves. Does not
        include duplicate distances or pairs of ROIs outside of smoothing curve
        (first and last window/2 pairs).
    - qcrsfc_analysis_values.txt: Results from QC:RSFC analysis. The QC:RSFC
        value for each pair of ROIs, of the same size and in the same order as
        all_sorted_distances.txt.
    - qcrsfc_analysis_smoothing_curve.txt: Smoothing curve for QC:RSFC
        analysis. Same size and order as smc_sorted_distances.txt.
    - qcrsfc_analysis_null_smoothing_curves.txt: Null smoothing curves from
        QC:RSFC analysis. Contains 2D array, where number of columns is same
        size and order as smc_sorted_distances.txt and number of rows is number
        of iterations for permutation analysis.
    - highlow_analysis_values.txt: Results from high-low analysis. The delta r
        value for each pair of ROIs, of the same size and in the same order as
        all_sorted_distances.txt.
    - highlow_analysis_smoothing_curve.txt: Smoothing curve for high-low
        analysis. Same size and order as smc_sorted_distances.txt.
    - highlow_analysis_null_smoothing_curves.txt: Null smoothing curves from
        high-low analysis. Contains 2D array, where number of columns is same
        size and order as smc_sorted_distances.txt and number of rows is number
        of iterations for permutation analysis.
    - scrubbing_analysis_values.txt: Results from scrubbing analysis. The
        mean delta r value for each pair of ROIs, of the same size and in the
        same order as all_sorted_distances.txt.
    - scrubbing_analysis_smoothing_curve.txt: Smoothing curve for scrubbing
        analysis. Same size and order as smc_sorted_distances.txt.
    - scrubbing_analysis_null_smoothing_curves.txt: Null smoothing curves from
        scrubbing analysis. Contains 2D array, where number of columns is same
        size and order as smc_sorted_distances.txt and number of rows is number
        of iterations for permutation analysis.
    """
    # create a logger for the workflow
    logger = logging.getLogger('ddmra')
    logger.setLevel(logging.DEBUG)
    # create file handler which logs even debug messages
    fh = logging.FileHandler(op.join(out_dir, 'log.tsv'))
    fh.setLevel(logging.DEBUG)
    # create formatter and add it to the handlers
    formatter = logging.Formatter(
        '%(asctime)s\t%(name)-12s\t%(levelname)-8s\t%(message)s')
    fh.setFormatter(formatter)
    # add the handlers to the logger
    logger.addHandler(fh)

    logger.info('Preallocating matrices')
    assert len(files) == len(motpars)
    n_subjects = len(files)
    atlas = datasets.fetch_coords_power_2011()
    coords = np.vstack((atlas.rois['x'], atlas.rois['y'], atlas.rois['z'])).T
    n_rois = coords.shape[0]
    mat_idx = np.triu_indices(n_rois, k=1)
    dists = squareform(pdist(coords))
    dists = dists[mat_idx]
    sort_idx = dists.argsort()
    all_sorted_dists = dists[sort_idx]
    np.savetxt(op.join(out_dir, 'all_sorted_distances.txt'), all_sorted_dists)
    un_idx = np.array([
        np.where(all_sorted_dists == i)[0][0]
        for i in np.unique(all_sorted_dists)
    ])

    logger.info('Creating masker')
    t_r = nib.load(files[0]).header.get_zooms()[-1]
    spheres_masker = input_data.NiftiSpheresMasker(seeds=coords,
                                                   radius=5.,
                                                   t_r=t_r,
                                                   smoothing_fwhm=4.,
                                                   detrend=False,
                                                   standardize=False,
                                                   low_pass=None,
                                                   high_pass=None)

    # Optionally filter the motion parameters before computing framewise
    # displacement
    motpars_analysis, fds_analysis = [], []
    for motpars_ in motpars:
        if earl:
            logger.info('Filtering motion parameters')
            motpars_filt, fd_filt = ef.filter_earl(motpars_, t_r)
            motpars_analysis.append(motpars_filt)
            fds_analysis.append(fd_filt)
        else:
            motpars_analysis.append(motpars_)
            fds_analysis.append(ef.get_fd_power(motpars_, unit='rad'))

    # prep for qcrsfc and high-low motion analyses
    mean_qcs = np.array([np.mean(qc) for qc in fds_analysis])
    raw_corr_mats = np.zeros((n_subjects, len(mat_idx[0])))

    logger.info('Building correlation matrices')
    # Get correlation matrices
    ts_all = []
    for i_sub in range(n_subjects):
        img = nib.load(files[i_sub])
        if regress:
            raw_ts = spheres_masker.fit_transform(
                img, confounds=motpars_analysis[i_sub]).T
        else:
            raw_ts = spheres_masker.fit_transform(img).T
        ts_all.append(raw_ts)
        raw_corrs = np.corrcoef(raw_ts)
        raw_corrs = raw_corrs[mat_idx]
        raw_corr_mats[i_sub, :] = raw_corrs
    # Fisher z-transform the correlations for the QC:RSFC analysis
    z_corr_mats = np.arctanh(raw_corr_mats)
    del raw_corrs, raw_ts, spheres_masker, atlas, coords

    # QC:RSFC r analysis
    # For each pair of ROIs, correlate the z-transformed correlation
    # coefficients across subjects with the subjects' mean QC (generally FD)
    # values.
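    # fast_pearson is a project helper; it presumably computes, for each row
    # of its 2D first argument, the Pearson correlation with the 1D second
    # argument. A hypothetical vectorized equivalent:
    #   zx = (x - x.mean(1, keepdims=True)) / x.std(1, keepdims=True)
    #   zy = (y - y.mean()) / y.std()
    #   r = (zx * zy).mean(axis=1)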
    logger.info('Performing QC:RSFC analysis')
    qcrsfc_rs = fast_pearson(z_corr_mats.T, mean_qcs)
    qcrsfc_rs = qcrsfc_rs[sort_idx]
    qcrsfc_smc = moving_average(qcrsfc_rs, window)

    # Quick interlude to help reduce arrays
    keep_idx = np.intersect1d(np.where(~np.isnan(qcrsfc_smc))[0], un_idx)
    keep_sorted_dists = all_sorted_dists[keep_idx]
    np.savetxt(op.join(out_dir, 'smc_sorted_distances.txt'), keep_sorted_dists)

    # Now back to the QC:RSFC analysis
    qcrsfc_smc = qcrsfc_smc[keep_idx]
    np.savetxt(op.join(out_dir, 'qcrsfc_analysis_values.txt'), qcrsfc_rs)
    np.savetxt(op.join(out_dir, 'qcrsfc_analysis_smoothing_curve.txt'),
               qcrsfc_smc)
    del qcrsfc_rs, qcrsfc_smc

    # High-low motion analysis
    # Split the sample using a median split of the QC metric (generally mean
    # FD). Then, for each pair of ROIs, calculate the difference between the
    # mean across correlation coefficients for the high motion minus the low
    # motion groups.
    logger.info('Performing high-low motion analysis')
    hm_idx = mean_qcs >= np.median(mean_qcs)
    lm_idx = mean_qcs < np.median(mean_qcs)
    hm_mean_corr = np.mean(raw_corr_mats[hm_idx, :], axis=0)
    lm_mean_corr = np.mean(raw_corr_mats[lm_idx, :], axis=0)
    hl_corr_diff = hm_mean_corr - lm_mean_corr
    hl_corr_diff = hl_corr_diff[sort_idx]
    hl_smc = moving_average(hl_corr_diff, window)
    hl_smc = hl_smc[keep_idx]
    np.savetxt(op.join(out_dir, 'highlow_analysis_values.txt'), hl_corr_diff)
    np.savetxt(op.join(out_dir, 'highlow_analysis_smoothing_curve.txt'),
               hl_smc)
    del hm_idx, lm_idx, hm_mean_corr, lm_mean_corr, hl_corr_diff, hl_smc

    # Scrubbing analysis
    logger.info('Performing scrubbing analysis')
    mean_delta_r = scrubbing_analysis(ts_all,
                                      fds_analysis,
                                      n_rois,
                                      qc_thresh,
                                      perm=False)
    mean_delta_r = mean_delta_r[sort_idx]
    scrub_smc = moving_average(mean_delta_r, window)
    scrub_smc = scrub_smc[keep_idx]
    np.savetxt(op.join(out_dir, 'scrubbing_analysis_values.txt'), mean_delta_r)
    np.savetxt(op.join(out_dir, 'scrubbing_analysis_smoothing_curve.txt'),
               scrub_smc)
    del mean_delta_r, scrub_smc

    # Null distributions
    logger.info('Building null distributions with permutations')
    qcs_copy = [qc.copy() for qc in fds_analysis]
    perm_scrub_smc = np.zeros((n_iters, len(keep_sorted_dists)))
    perm_qcrsfc_smc = np.zeros((n_iters, len(keep_sorted_dists)))
    perm_hl_smc = np.zeros((n_iters, len(keep_sorted_dists)))
    for i in range(n_iters):
        # Prep for QC:RSFC and high-low motion analyses
        perm_mean_qcs = np.random.permutation(mean_qcs)

        # QC:RSFC analysis
        perm_qcrsfc_rs = fast_pearson(z_corr_mats.T, perm_mean_qcs)
        perm_qcrsfc_rs = perm_qcrsfc_rs[sort_idx]
        perm_qcrsfc_smc[i, :] = moving_average(perm_qcrsfc_rs,
                                               window)[keep_idx]

        # High-low analysis
        perm_hm_idx = perm_mean_qcs >= np.median(perm_mean_qcs)
        perm_lm_idx = perm_mean_qcs < np.median(perm_mean_qcs)
        perm_hm_corr = np.mean(raw_corr_mats[perm_hm_idx, :], axis=0)
        perm_lm_corr = np.mean(raw_corr_mats[perm_lm_idx, :], axis=0)
        perm_hl_diff = perm_hm_corr - perm_lm_corr
        perm_hl_diff = perm_hl_diff[sort_idx]
        perm_hl_smc[i, :] = moving_average(perm_hl_diff, window)[keep_idx]

        # Scrubbing analysis
        perm_qcs = [np.random.permutation(perm_qc) for perm_qc in qcs_copy]
        perm_mean_delta_r = scrubbing_analysis(ts_all,
                                               perm_qcs,
                                               n_rois,
                                               qc_thresh,
                                               perm=True)
        perm_mean_delta_r = perm_mean_delta_r[sort_idx]
        perm_scrub_smc[i, :] = moving_average(perm_mean_delta_r,
                                              window)[keep_idx]

    np.savetxt(op.join(out_dir, 'qcrsfc_analysis_null_smoothing_curves.txt'),
               perm_qcrsfc_smc)
    np.savetxt(op.join(out_dir, 'highlow_analysis_null_smoothing_curves.txt'),
               perm_hl_smc)
    np.savetxt(
        op.join(out_dir, 'scrubbing_analysis_null_smoothing_curves.txt'),
        perm_scrub_smc)

    logger.info('Workflow completed')
    logging.shutdown()
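# A hedged usage sketch of run() (file names and motion-parameter files are
# hypothetical placeholders):
#
#   import numpy as np
#   run(files=['sub-01_rest_bold.nii.gz', 'sub-02_rest_bold.nii.gz'],
#       motpars=[np.loadtxt('sub-01_motion.par'),
#                np.loadtxt('sub-02_motion.par')],
#       out_dir='ddmra_results', n_iters=1000, qc_thresh=0.2, window=1000)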