Example #1
def test_fetch_atlas_craddock_2012():
    bunch = datasets.fetch_atlas_craddock_2012(data_dir=tmpdir, verbose=0)

    keys = ("scorr_mean", "tcorr_mean", "scorr_2level", "tcorr_2level",
            "random")
    filenames = [
        "scorr05_mean_all.nii.gz",
        "tcorr05_mean_all.nii.gz",
        "scorr05_2level_all.nii.gz",
        "tcorr05_2level_all.nii.gz",
        "random_all.nii.gz",
    ]
    assert_equal(len(url_request.urls), 1)
    for key, fn in zip(keys, filenames):
        assert_equal(bunch[key], os.path.join(tmpdir, 'craddock_2012', fn))
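
For context, a minimal sketch of what the fetched bunch contains (assuming nilearn and nibabel are installed; the exact shape depends on the atlas release):

import nibabel as nib
from nilearn import datasets

atlas = datasets.fetch_atlas_craddock_2012()
# each key maps to the path of a 4-D NIfTI file whose last axis
# indexes the clustering resolutions
img = nib.load(atlas["scorr_mean"])
print(img.shape)  # (x, y, z, n_resolutions)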
Example #3
File: extract.py  Project: banilo/prni2016
    # imports this snippet relies on (GraphLassoCV was renamed
    # GraphicalLassoCV in later scikit-learn releases)
    import numpy as np
    from sklearn.covariance import GraphLassoCV
    from nilearn import datasets as ds
    from nilearn.image import index_img

    try:
        gsc_nets = GraphLassoCV(verbose=2, alphas=20)
        gsc_nets.fit(FS_netproj)

        np.save('%i_nets_cov' % sub_id, gsc_nets.covariance_)
        np.save('%i_nets_prec' % sub_id, gsc_nets.precision_)
    except Exception:
        # the sparse inverse covariance fit can fail to converge; skip it
        pass

    ###############################################################################
    # dump region poolings
    ###############################################################################
    from nilearn.image import resample_img

    crad = ds.fetch_atlas_craddock_2012()
    # atlas_nii = index_img(crad['scorr_mean'], 19)  # Craddock 200 region atlas
    atlas_nii = index_img(crad['scorr_mean'], 9)  # Craddock 100 region atlas

    r_atlas_nii = resample_img(
        img=atlas_nii,
        target_affine=mask_file.affine,  # .get_affine() was removed from nibabel
        target_shape=mask_file.shape,
        interpolation='nearest'
    )
    r_atlas_nii.to_filename('debug_ratlas.nii.gz')

    from nilearn.input_data import NiftiLabelsMasker
    nlm = NiftiLabelsMasker(
        labels_img=r_atlas_nii, mask_img=mask_file,
        standardize=True, detrend=True)
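
The masker above is only constructed, never applied; a minimal sketch of the typical next step (func_img is a hypothetical 4-D functional image, not part of the original file):

# one regional time series per Craddock parcel;
# result shape: (n_timepoints, n_regions)
region_ts = nlm.fit_transform(func_img)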
Example #5
def main(
    workdir,
    outdir,
    atlas,
    kernel,
    sparsity,
    affinity,
    approach,
    gradients,
    subcort,
    neurosynth,
    neurosynth_file,
    sleuth_file,
    nimare_dataset,
    roi_mask,
    term,
    topic,
):
    workdir = op.join(workdir, "tmp")
    if op.isdir(workdir):
        shutil.rmtree(workdir)
    os.makedirs(workdir)

    atlas_name = "atlas-{0}".format(atlas)
    kernel_name = "kernel-{0}".format(kernel)
    sparsity_name = "sparsity-{0}".format(sparsity)
    affinity_name = "affinity-{0}".format(affinity)
    approach_name = "approach-{0}".format(approach)
    gradients_name = "gradients-{0}".format(gradients)
    dset = None

    # handle neurosynth dataset, if called
    if neurosynth:
        if neurosynth_file is None:

            ns_data_dir = op.join(workdir, "neurosynth")
            dataset_file = op.join(ns_data_dir, "neurosynth_dataset.pkl.gz")
            # download neurosynth dataset if necessary
            if not op.isfile(dataset_file):
                neurosynth_download(ns_data_dir)

        else:
            dataset_file = neurosynth_file

        dset = Dataset.load(dataset_file)
        dataset_name = "dataset-neurosynth"

    # handle sleuth text file, if called
    if sleuth_file is not None:
        dset = convert_sleuth_to_dataset(sleuth_file, target="mni152_2mm")
        dataset_name = "dataset-{0}".format(op.basename(sleuth_file).split(".")[0])

    if nimare_dataset is not None:
        dset = Dataset.load(nimare_dataset)
        dataset_name = "dataset-{0}".format(op.basename(nimare_dataset).split(".")[0])

    if dset:
        # slice studies, if needed
        if roi_mask is not None:
            roi_ids = dset.get_studies_by_mask(roi_mask)
            print(
                "{}/{} studies report at least one coordinate in the "
                "ROI".format(len(roi_ids), len(dset.ids))
            )
            dset_sel = dset.slice(roi_ids)
            dset = dset_sel
            dataset_name = "dataset-neurosynth_mask-{0}".format(
                op.basename(roi_mask).split(".")[0]
            )

        if term is not None:
            labels = ["Neurosynth_TFIDF__{label}".format(label=label) for label in [term]]
            term_ids = dset.get_studies_by_label(labels=labels, label_threshold=0.1)
            print(
                "{}/{} studies report association "
                "with the term {}".format(len(term_ids), len(dset.ids), term)
            )
            dset_sel = dset.slice(term_ids)
            dset = dset_sel
            # img_inds = np.nonzero(dset.masker.mask_img.get_fdata())  # unused
            # vox_locs = np.unravel_index(img_inds, dset.masker.mask_img.shape)  # unused
            dataset_name = "dataset-neurosynth_term-{0}".format(term)

        if topic is not None:
            topics = [
                "Neurosynth_{version}__{topic}".format(version=topic[0], topic=t)
                for t in topic[1:]  # topic[0] is the version, the rest are topic names
            ]
            topics_ids = []
            for topic_name in topics:
                topic_ids = dset.annotations.id[np.where(dset.annotations[topic_name])[0]].tolist()
                topics_ids.extend(topic_ids)
                print(
                    "{}/{} studies report association "
                    "with the term {}".format(len(topic_ids), len(dset.ids), topic_name)
                )
            topics_ids_unique = np.unique(topics_ids)
            print("{} unique ids".format(len(topics_ids_unique)))
            dset_sel = dset.slice(topics_ids_unique)
            dset = dset_sel
            # img_inds = np.nonzero(dset.masker.mask_img.get_fdata())  # unused
            # vox_locs = np.unravel_index(img_inds, dset.masker.mask_img.shape)  # unused
            dataset_name = "dataset-neurosynth_topic-{0}".format("_".join(topic[1:]))

        if (
            neurosynth
            or (sleuth_file is not None)
            or (nimare_dataset is not None)
        ):
            # set kernel for MA smoothing
            if kernel == "peaks2maps":
                print("Running peak2maps")
                k = Peaks2MapsKernel(resample_to_mask=True)
            elif kernel == "alekernel":
                print("Running alekernel")
                k = ALEKernel(fwhm=15)

            if atlas is not None:
                if atlas == "harvard-oxford":
                    print("Parcellating using the Harvard Oxford Atlas")
                    # the original never fetched the atlas in this branch;
                    # the atlas_name below is an assumption
                    atlas = datasets.fetch_atlas_harvard_oxford("cort-maxprob-thr25-2mm")
                    # atlas_labels = atlas.labels[1:]  # unused
                    atlas_shape = atlas.maps.shape
                    atlas_affine = atlas.maps.affine
                    atlas_data = atlas.maps.get_fdata()
                elif atlas == "aal":
                    print("Parcellating using the AAL Atlas")
                    atlas = datasets.fetch_atlas_aal()
                    # atlas_labels = atlas.labels  # unused
                    atlas_shape = nib.load(atlas.maps).shape
                    atlas_affine = nib.load(atlas.maps).affine
                    atlas_data = nib.load(atlas.maps).get_fdata()
                elif atlas == "craddock-2012":
                    print("Parcellating using the Craddock-2012 Atlas")
                    atlas = datasets.fetch_atlas_craddock_2012()
                elif atlas == "destrieux-2009":
                    print("Parcellating using the Destrieux-2009 Atlas")
                    atlas = datasets.fetch_atlas_destrieux_2009(lateralized=True)
                    # atlas_labels = atlas.labels[3:]  # unused
                    atlas_shape = nib.load(atlas.maps).shape
                    atlas_affine = nib.load(atlas.maps).affine
                    atlas_data = nib.load(atlas.maps).get_fdata()
                elif atlas == "msdl":
                    print("Parcellating using the MSDL Atlas")
                    atlas = datasets.fetch_atlas_msdl()
                elif atlas == "surface":
                    print("Generating surface vertices")

                if atlas != "fsaverage5" and atlas != "hcp":
                    imgs = k.transform(dset, return_type="image")

                    masker = NiftiLabelsMasker(
                        labels_img=atlas.maps, standardize=True, memory="nilearn_cache"
                    )
                    time_series = masker.fit_transform(imgs)

                else:
                    # change to array for other approach
                    imgs = k.transform(dset, return_type="image")
                    print(np.shape(imgs))

                    if atlas == "fsaverage5":
                        fsaverage = fetch_surf_fsaverage(mesh="fsaverage5")
                        pial_left = fsaverage.pial_left
                        pial_right = fsaverage.pial_right
                        medial_wall_inds_left = surface.load_surf_data(
                            "./templates/lh.Medial_wall.label"
                        )
                        print(np.shape(medial_wall_inds_left))
                        medial_wall_inds_right = surface.load_surf_data(
                            "./templates/rh.Medial_wall.label"
                        )
                        print(np.shape(medial_wall_inds_right))
                        sulc_left = fsaverage.sulc_left
                        sulc_right = fsaverage.sulc_right

                    elif atlas == "hcp":
                        pial_left = "./templates/S1200.L.pial_MSMAll.32k_fs_LR.surf.gii"
                        pial_right = "./templates/S1200.R.pial_MSMAll.32k_fs_LR.surf.gii"
                        medial_wall_inds_left = np.where(
                            nib.load("./templates/hcp.tmp.lh.dscalar.nii").get_fdata()[0] == 0
                        )[0]
                        medial_wall_inds_right = np.where(
                            nib.load("./templates/hcp.tmp.rh.dscalar.nii").get_fdata()[0] == 0
                        )[0]
                        left_verts = 32492 - len(medial_wall_inds_left)
                        sulc_left = nib.load(
                            "./templates/S1200.sulc_MSMAll.32k_fs_LR.dscalar.nii"
                        ).get_fdata()[0][0:left_verts]
                        sulc_left = np.insert(
                            sulc_left,
                            np.subtract(
                                medial_wall_inds_left, np.arange(len(medial_wall_inds_left))
                            ),
                            0,
                        )
                        sulc_right = nib.load(
                            "./templates/S1200.sulc_MSMAll.32k_fs_LR.dscalar.nii"
                        ).get_fdata()[0][left_verts:]
                        sulc_right = np.insert(
                            sulc_right,
                            np.subtract(
                                medial_wall_inds_right, np.arange(len(medial_wall_inds_right))
                            ),
                            0,
                        )

                    surf_lh = surface.vol_to_surf(
                        imgs,
                        pial_left,
                        radius=6.0,
                        interpolation="nearest",
                        kind="ball",
                        n_samples=None,
                        mask_img=dset.masker.mask_img,
                    )
                    surf_rh = surface.vol_to_surf(
                        imgs,
                        pial_right,
                        radius=6.0,
                        interpolation="nearest",
                        kind="ball",
                        n_samples=None,
                        mask_img=dset.masker.mask_img,
                    )
                    surfs = np.transpose(np.vstack((surf_lh, surf_rh)))
                    del surf_lh, surf_rh

                    # handle cortex first
                    coords_left = surface.load_surf_data(pial_left)[0]
                    coords_left = np.delete(coords_left, medial_wall_inds_left, axis=0)
                    coords_right = surface.load_surf_data(pial_right)[0]
                    coords_right = np.delete(coords_right, medial_wall_inds_right, axis=0)

                    print("Left Hemipshere Vertices")
                    surface_macms_lh, inds_discard_lh = build_macms(dset, surfs, coords_left)
                    print(np.shape(surface_macms_lh))
                    print(inds_discard_lh)

                    print("Right Hemipshere Vertices")
                    surface_macms_rh, inds_discard_rh = build_macms(dset, surfs, coords_right)
                    print(np.shape(surface_macms_rh))
                    print(len(inds_discard_rh))

                    lh_vertices_total = np.shape(surface_macms_lh)[0]
                    rh_vertices_total = np.shape(surface_macms_rh)[0]
                    time_series = np.transpose(np.vstack((surface_macms_lh, surface_macms_rh)))
                    print(np.shape(time_series))
                    del surface_macms_lh, surface_macms_rh

                    if subcort:
                        subcort_img = nib.load("templates/rois-subcortical_mni152_mask.nii.gz")
                        subcort_vox = np.asarray(np.where(subcort_img.get_fdata()))
                        subcort_mm = vox2mm(subcort_vox.T, subcort_img.affine)

                        print("Subcortical Voxels")
                        subcort_macm, inds_discard_subcort = build_macms(dset, surfs, subcort_mm)

                        num_subcort_vox = np.shape(subcort_macm)[0]
                        print(inds_discard_subcort)

                        time_series = np.hstack((time_series, np.asarray(subcort_macm).T))
                        print(np.shape(time_series))

                time_series = time_series.astype("float32")

                print("calculating correlation matrix")
                correlation = ConnectivityMeasure(kind="correlation")
                time_series = correlation.fit_transform([time_series])[0]
                print(np.shape(time_series))

                if affinity == "cosine":
                    time_series = calculate_affinity(time_series, 10 * sparsity)

            else:
                time_series = np.transpose(k.transform(dset, return_type="array"))

    print("Performing gradient analysis")

    gradients, statistics = embed.compute_diffusion_map(
        time_series, alpha=0.5, return_result=True, overwrite=True
    )
    pickle.dump(statistics, open(op.join(workdir, "statistics.p"), "wb"))

    # if subcortical included in gradient decomposition, remove gradient scores
    if subcort:
        subcort_grads = gradients[np.shape(gradients)[0] - num_subcort_vox :, :]
        subcort_grads = insert(subcort_grads, inds_discard_subcort)
        gradients = gradients[0 : np.shape(gradients)[0] - num_subcort_vox, :]

    # get left hemisphere gradient scores, and insert 0's where medial wall is
    gradients_lh = gradients[0:lh_vertices_total, :]
    if len(inds_discard_lh) > 0:
        gradients_lh = insert(gradients_lh, inds_discard_lh)
    gradients_lh = insert(gradients_lh, medial_wall_inds_left)

    # get right hemisphere gradient scores and insert 0's where medial wall is
    gradients_rh = gradients[-rh_vertices_total:, :]
    if len(inds_discard_rh) > 0:
        gradients_rh = insert(gradients_rh, inds_discard_rh)
    gradients_rh = insert(gradients_rh, medial_wall_inds_right)

    grad_dict = {
        "grads_lh": gradients_lh,
        "grads_rh": gradients_rh,
        "pial_left": pial_left,
        "sulc_left": sulc_left,
        "pial_right": pial_right,
        "sulc_right": sulc_right,
    }
    if subcort:
        grad_dict["subcort_grads"] = subcort_grads
    pickle.dump(grad_dict, open(op.join(workdir, "gradients.p"), "wb"))

    # map the gradient to the parcels
    for i in range(np.shape(gradients)[1]):
        if atlas is not None:
            if atlas == "fsaverage5" or atlas == "hcp":

                plot_surfaces(grad_dict, i, workdir)

                if subcort:
                    tmpimg = masking.unmask(subcort_grads[:, i], subcort_img)
                    nib.save(tmpimg, op.join(workdir, "gradient-{0}.nii.gz".format(i)))
            else:
                tmpimg = np.zeros(atlas_shape)
                for j, n in enumerate(np.unique(atlas_data)[1:]):
                    inds = atlas_data == n
                    tmpimg[inds] = gradients[j, i]
                # write once after all parcels are filled, not per parcel
                nib.save(
                    nib.Nifti1Image(tmpimg, atlas_affine),
                    op.join(workdir, "gradient-{0}.nii.gz".format(i)),
                )
        else:
            tmpimg = np.zeros(np.prod(dset.masker.mask_img.shape))
            inds = np.ravel_multi_index(
                np.nonzero(dset.masker.mask_img.get_fdata()), dset.masker.mask_img.shape
            )
            tmpimg[inds] = gradients[:, i]
            nib.save(
                nib.Nifti1Image(
                    np.reshape(tmpimg, dset.masker.mask_img.shape), dset.masker.mask_img.affine
                ),
                op.join(workdir, "gradient-{0}.nii.gz".format(i)),
            )

            os.system(
                "python3 /Users/miriedel/Desktop/GitHub/surflay/make_figures.py "
                "-f {grad_image} --colormap jet".format(
                    grad_image=op.join(workdir, "gradient-{0}.nii.gz".format(i))
                )
            )

    output_dir = op.join(
        outdir,
        (
            f"{dataset_name}_{atlas_name}_{kernel_name}_{sparsity_name}_{gradients_name}_"
            f"{affinity_name}_{approach_name}"
        )
    )

    shutil.copytree(workdir, output_dir)

    shutil.rmtree(workdir)
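
For orientation, a hypothetical invocation of main (all argument values below are illustrative; the CLI wrapper that normally supplies them is not shown in this excerpt):

main(
    workdir="/tmp/work",
    outdir="/tmp/out",
    atlas="fsaverage5",
    kernel="alekernel",
    sparsity=0.9,
    affinity="cosine",
    approach="dm",
    gradients=3,
    subcort=False,
    neurosynth=True,
    neurosynth_file=None,
    sleuth_file=None,
    nimare_dataset=None,
    roi_mask=None,
    term="pain",
    topic=None,
)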
Example #6
import os
from nilearn import datasets
import nibabel as nb
import numpy as np
import shutil

atlas = datasets.fetch_atlas_craddock_2012(data_dir='.')

atlas_str = 'scorr_mean'

cc_file = atlas[atlas_str]
cc_folder = os.path.split(cc_file)[0]

save_path = os.path.join(cc_folder, atlas_str + '_single_resolution')
if os.path.exists(save_path):
    shutil.rmtree(save_path)
os.mkdir(save_path)

cc_4d_img = nb.load(cc_file)
cc_4d_data = cc_4d_img.get_fdata()  # get_data() is deprecated in nibabel

n_resolution = cc_4d_data.shape[-1]
n_parcels = [np.unique(cc_4d_data[:, :, :, i]).shape[0] - 1 for i in
             range(n_resolution)]  # -1 to account for 0 background

for parc_to_use in range(n_resolution):
    print('using %s parcels' % n_parcels[parc_to_use])

    # save 3d nii of desired parc.resolution
    cc_3d_filename = os.path.join(save_path,
                                  atlas_str + '_parc_n_%s_k_%s_rois.nii.gz' % (parc_to_use+1, n_parcels[parc_to_use]))
    # write the selected resolution out as a standalone 3-D volume
    cc_3d_img = nb.Nifti1Image(cc_4d_data[..., parc_to_use], cc_4d_img.affine)
    nb.save(cc_3d_img, cc_3d_filename)
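
Equivalently, nilearn's image utilities can slice out a single resolution without touching the raw array; a short sketch (the index 9 is only an example):

from nilearn.image import index_img

cc_3d_img = index_img(cc_file, 9)  # 10th resolution of the 4-D atlas file
cc_3d_img.to_filename(os.path.join(save_path, 'example_resolution_09.nii.gz'))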
Example #7
def make_correlation_matrix(path_to_fmriprep_data,
                            path_to_save_connectivity_matrices,
                            subject_name=False,
                            path_to_save_ts=False,
                            atlas='aal'):
    """
        Process the fmriprep preprocessed functional MRI time-series into 2D correlation matrix as DataFrame using Nilearn lib. 
        Takes `fmriprep/preproc` file as input, frequently with suffix "MNI152NLin2009cAsym_preproc.nii.gz".
        Saves in dedicated folder `path_to_save_connectivity_matrices`.
        Atlas: 'aal' or 'cc200'
    """
    import os

    import pandas as pd
    import numpy as np

    import nilearn
    from nilearn import datasets
    from nilearn.image import concat_imgs
    from nilearn.input_data import NiftiLabelsMasker
    from nilearn.image import high_variance_confounds
    from nilearn.connectome import ConnectivityMeasure

    tr = tr_extractor(path_to_fmriprep_data)
    if not subject_name:
        subject_name = path_to_fmriprep_data.split('/')[-1][4:11]

    if atlas == 'aal':
        dataset = datasets.fetch_atlas_aal(version='SPM12',
                                           data_dir='./datadir/',
                                           url=None,
                                           resume=True,
                                           verbose=0)
        atlas_filename = dataset.maps
        labels = dataset.labels
    elif atlas == 'cc200':
        dataset = datasets.fetch_atlas_craddock_2012(data_dir='./datadir/',
                                                     url=None,
                                                     resume=True,
                                                     verbose=0)
        atlas_filename = './datadir/craddock_2012/cc200_roi_atlas.nii.gz'
        labels = list(
            pd.read_csv(
                '../data_preprocessing/datadir/craddock_2012/CC200_ROI_labels.csv'
            )['ROI number'])
    else:
        raise ValueError('Atlas name is not recognized: %s' % atlas)

    correlation_measure = ConnectivityMeasure(kind='correlation')

    img = concat_imgs(path_to_fmriprep_data, auto_resample=True, verbose=0)
    atlas = nilearn.image.resample_to_img(atlas_filename,
                                          img,
                                          interpolation='nearest',
                                          copy=True,
                                          order='F',
                                          clip=False)
    # filtering
    masker = NiftiLabelsMasker(labels_img=atlas,
                               standardize=True,
                               detrend=True,
                               low_pass=0.08,
                               high_pass=0.009,
                               t_r=tr,
                               memory='nilearn_cache',
                               memory_level=1,
                               verbose=0)

    confounds = high_variance_confounds(img, 1)
    time_series = masker.fit_transform(img, confounds)

    # Saves time series, for each ROI confound
    if path_to_save_ts:
        os.makedirs(path_to_save_ts, exist_ok=True)
        np.save(path_to_save_ts + '/' + subject_name, time_series)

    correlation_matrix = correlation_measure.fit_transform([time_series])[0]
    np.fill_diagonal(correlation_matrix, 1)
    df = pd.DataFrame(correlation_matrix)

    # Saves connectivity matrix
    os.makedirs(path_to_save_connectivity_matrices, exist_ok=True)
    output_path = os.path.join(path_to_save_connectivity_matrices,
                               subject_name)
    df.to_csv(output_path + '.csv', sep=',')


#   print ('TR: ', tr, ' subject:', subject_name)
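# Hypothetical usage of make_correlation_matrix (the input path is
# illustrative, not from the original project):
# make_correlation_matrix(
#     '/data/fmriprep/sub-01_task-rest_space-MNI152NLin2009cAsym_preproc.nii.gz',
#     './connectivity_matrices', atlas='cc200')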
subjects_path = []
for pdata in PATH_TO_DATA:
    subjects_path += glob.glob(pdata)
subjects_path = sorted(subjects_path)

PATH_TO_RESTING_STATE = 'session_1/rest_1/rest_res2standard.nii.gz'
PATH_TO_MOTION_CORRECTION = 'session_1/rest_1/rest_mc.1D'

# path to the atlases
basc = fetch_atlas_basc_multiscale_2015()  # fetch once, reuse each scale
ATLASES = [
    fetch_atlas_msdl().maps,
    basc.scale064,
    basc.scale122,
    basc.scale197,
    fetch_atlas_harvard_oxford(atlas_name='cort-prob-2mm').maps,
    fetch_atlas_craddock_2012().scorr_mean,
    fetch_coords_power_2011()
]
ATLASES_DESCR = [
    'msdl', 'basc064', 'basc122', 'basc197', 'harvard_oxford_cort_prob_2mm',
    'craddock_scorr_mean', 'power_2011'
]

# load the list of patient to exclude
excluded_subjects = pd.read_csv(
    SUBJECTS_EXCLUDED, dtype={'subject_id': object})['subject_id'].tolist()

###############################################################################
# Build the list with all path
dataset = {'func': [], 'motion': [], 'subject_id': [], 'run': []}
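
A sketch of how the loop filling this dict might proceed (the per-field values and the exclusion filter are assumptions based on the variables defined above):

for subject_path in subjects_path:
    subject_id = os.path.basename(subject_path)
    if subject_id in excluded_subjects:
        continue  # drop subjects listed in SUBJECTS_EXCLUDED
    dataset['func'].append(os.path.join(subject_path, PATH_TO_RESTING_STATE))
    dataset['motion'].append(os.path.join(subject_path, PATH_TO_MOTION_CORRECTION))
    dataset['subject_id'].append(subject_id)
    dataset['run'].append(1)  # single run per subject assumed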