Example #1
    def _process_inputs(self):
        """ validate and  process inputs into useful form.
        Returns a list of nilearn maskers and the list of corresponding label
        names."""
        import numpy as np
        import nilearn.input_data as nl
        import nilearn.image as nli

        label_data = nli.concat_imgs(self.inputs.label_files)
        maskers = []

        # determine form of label files, choose appropriate nilearn masker
        if np.amax(label_data.dataobj) > 1:  # 3d label file
            n_labels = np.amax(label_data.dataobj)
            maskers.append(nl.NiftiLabelsMasker(label_data))
        else:  # 4d labels
            n_labels = label_data.shape[3]
            if self.inputs.incl_shared_variance:  # independent computation
                for img in nli.iter_img(label_data):
                    maskers.append(
                        nl.NiftiMapsMasker(self._4d(img.dataobj, img.affine))
                    )
            else:  # one computation fitting all
                maskers.append(nl.NiftiMapsMasker(label_data))

        # check label list size
        if not np.isclose(int(n_labels), n_labels):
            raise ValueError(
                "The label files {} contain invalid value {}. Check input.".format(
                    self.inputs.label_files, n_labels
                )
            )

        if len(self.inputs.class_labels) != n_labels:
            raise ValueError(
                "The length of class_labels {} does not "
                "match the number of regions {} found in "
                "label_files {}".format(
                    self.inputs.class_labels, n_labels, self.inputs.label_files
                )
            )

        if self.inputs.include_global:
            global_label_data = label_data.dataobj.sum(axis=3)  # sum across all regions
            global_label_data = (
                np.rint(global_label_data).astype(int).clip(0, 1)
            )  # binarize
            global_label_data = self._4d(global_label_data, label_data.affine)
            global_masker = nl.NiftiLabelsMasker(
                global_label_data, detrend=self.inputs.detrend
            )
            maskers.insert(0, global_masker)
            self.inputs.class_labels.insert(0, "GlobalSignal")

        for masker in maskers:
            masker.set_params(detrend=self.inputs.detrend)

        return maskers
Example #2
import numpy as np
import nilearn.input_data as nid


def partial_seed_correlation(functional_image, atlas_image, mask_image):
    """
    Return seed maps for each of the seed regions in 'atlas_image' while correcting for the average signal in all
    other seed regions.

    :param functional_image: subject level 4D nibale image file
    :param atlas_image:
    :param mask_image:
    :return:
    """
    roi_masker = nid.NiftiMasker(mask_img=mask_image,
                                 verbose=0,
                                 standardize=False)
    atlas_masker = nid.NiftiLabelsMasker(labels_img=atlas_image,
                                         mask_img=mask_image,
                                         standardize=False)
    voxel_masker = nid.NiftiMasker(mask_img=mask_image,
                                   verbose=0,
                                   standardize=True)

    atlas = roi_masker.fit_transform(atlas_image)
    unique_rois = np.unique(atlas[atlas != 0])
    atlas_timeseries = atlas_masker.fit_transform(functional_image)

    seed_maps = list()
    for roi_id in range(atlas_timeseries.shape[1]):
        roi = unique_rois[roi_id]
        conf_id = [
            i for i in range(atlas_timeseries.shape[1]) if not i == roi_id
        ]
        confound_timeseries = atlas_timeseries[:, conf_id]

        seed_atlas_i = roi_masker.inverse_transform(atlas == roi)
        seed_masker_stand = nid.NiftiLabelsMasker(labels_img=seed_atlas_i,
                                                  mask_img=mask_image,
                                                  standardize=True)

        voxel_timeseries = voxel_masker.fit_transform(
            functional_image, confounds=confound_timeseries)
        seed_timeseries = seed_masker_stand.fit_transform(
            functional_image, confounds=confound_timeseries)
        seed_correlation = np.dot(voxel_timeseries.T,
                                  seed_timeseries) / voxel_timeseries.shape[0]
        seed_maps.append(seed_correlation)

    seed_map_array = np.concatenate(seed_maps, -1)
    seed_correlations_fisher_z = np.arctanh(seed_map_array)
    return seed_correlations_fisher_z
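A minimal usage sketch for the function above, assuming hypothetical file paths and nibabel for loading (every name below is a placeholder, not part of the original example):

import nibabel as nib

# Hypothetical inputs: a 4D BOLD image, an integer-labeled seed atlas,
# and a binary brain mask, all in the same space.
func_img = nib.load('sub-01_task-rest_bold.nii.gz')
atlas_img = nib.load('seed_atlas.nii.gz')
mask_img = nib.load('brain_mask.nii.gz')

# One Fisher-z seed map per ROI, stacked along the last axis.
z_maps = partial_seed_correlation(func_img, atlas_img, mask_img)
print(z_maps.shape)  # (n_voxels, n_rois)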
Example #3
def extract_ts_parc(net_parcels_map_nifti, conf, func_file, coords, mask,
                    dir_path, ID, network):
    from nilearn import input_data
    from pynets.graphestimation import extract_ts_parc_fast
    from pynets import utils
    # Extract time series from whole-brain parcellations:
    fast = False
    #import time
    #start_time = time.time()
    if fast:
        ts_within_nodes = extract_ts_parc_fast(net_parcels_map_nifti, conf,
                                               func_file, dir_path)
    else:
        parcel_masker = input_data.NiftiLabelsMasker(
            labels_img=net_parcels_map_nifti,
            background_label=0,
            standardize=True)
        ts_within_nodes = parcel_masker.fit_transform(func_file,
                                                      confounds=conf)
    print("%s%s%d%s" %
          ('\nTime series has {0} samples'.format(ts_within_nodes.shape[0]),
           ' and ', len(coords), ' volumetric ROI\'s\n'))
    # Save time series as txt file
    utils.save_ts_to_file(mask, network, ID, dir_path, ts_within_nodes)
    return ts_within_nodes
Example #4
import numpy as np
from nilearn import connectome, input_data


def extract_mat(rsimg,
                maskimg,
                labelimg,
                regnames=None,
                conntype='correlation',
                space='labels'):

    masker = input_data.NiftiLabelsMasker(labelimg,
                                          background_label=0,
                                          smoothing_fwhm=None,
                                          standardize=False,
                                          detrend=False,
                                          mask_img=maskimg,
                                          resampling_target=space,
                                          verbose=0)

    # Extract time series
    time_series = masker.fit_transform(rsimg)

    connobj = connectome.ConnectivityMeasure(kind=conntype)
    connmat = connobj.fit_transform([time_series])[0]

    if regnames is not None:
        with open(regnames) as f:
            reglabs = f.read().splitlines()
    else:
        # get the unique labels list, other than 0, which will be first
        reglabs = list(
            np.unique(np.asarray(labelimg.dataobj))[1:].astype(int).astype(str))

    conndf = get_con_df(connmat, reglabs)

    return conndf, connmat
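The helper get_con_df is project-specific and not shown in this example; a minimal stand-in, assuming it simply wraps the square matrix in a labeled pandas DataFrame, could be:

import pandas as pd

def get_con_df(connmat, reglabs):
    # Guess at the helper's behavior, for illustration only: put region
    # labels on both axes of the square connectivity matrix.
    return pd.DataFrame(connmat, index=reglabs, columns=reglabs)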
Example #5
import os
import sys
from glob import glob

import numpy as np
from nilearn import input_data
from sklearn import covariance


def participant_level(args, subjects_to_analyze):
    # The subject level analysis: extract time-series per subject
    # Retrieve the atlas
    atlas_filename = os.path.join(os.path.dirname(__file__),
                                  ATLAS_DIR, ATLAS_FILENAME)

    # find all RS scans and extract time-series on them
    for subject_label in subjects_to_analyze:
        for fmri_file in glob(os.path.join(args.bids_dir,
                                           "derivatives",
                                           "sub-%s" % subject_label,
                                           "func", "*_hmc_mni.nii.gz")
                          ):
            masker = input_data.NiftiLabelsMasker(
                            labels_img=atlas_filename,
                            standardize=True,
                            detrend=True,
                            verbose=3)
            time_series = masker.fit_transform(fmri_file)
            out_file = os.path.split(fmri_file)[-1].replace("_hmc_mni.nii.gz",
                            "_time_series.tsv")
            out_file = os.path.join(args.output_dir, out_file)
            sys.stderr.write("Saving time-series to %s\n" % out_file)
            np.savetxt(out_file, time_series, delimiter='\t')

            estimator = covariance.LedoitWolf(store_precision=True)
            estimator.fit(time_series)
            out_file = os.path.split(fmri_file)[-1].replace("_hmc_mni.nii.gz",
                            "_connectome.tsv")
            out_file = os.path.join(args.output_dir, out_file)
            print("Saving connectome matrix to %s" % out_file)
            np.savetxt(out_file, estimator.precision_, delimiter='\t')
Example #6
import numpy as np
from nilearn import connectome, input_data


def extract_mat(rsimg, maskimg, labelimg, conntype='correlation', space='labels', savets=False, nomat=False):

    masker = input_data.NiftiLabelsMasker(labelimg,
                                          background_label=0,
                                          smoothing_fwhm=None,
                                          standardize=False, detrend=False,
                                          mask_img=maskimg,
                                          resampling_target=space,
                                          verbose=0)

    # get the unique labels list, other than 0, which will be first
    reginparc = np.unique(np.asarray(labelimg.dataobj))[1:].astype(int)
    reglabs = list(reginparc.astype(str))

    # Extract time series
    time_series = masker.fit_transform(rsimg)

    if nomat:
        connmat = None
        conndf = None
    else:
        connobj = connectome.ConnectivityMeasure(kind=conntype)
        connmat = connobj.fit_transform([time_series])[0]
        conndf = get_con_df(connmat, reglabs)


    # if not saving the time series, return a placeholder to save memory
    if not savets:
        time_series = 42

    return conndf, connmat, time_series, reginparc
Example #7
def extract_ts_parc(net_parcels_map_nifti, conf, func_file, coords, mask,
                    dir_path, ID, network):
    import os
    import numpy as np
    from nilearn import input_data
    # Extract time series from whole-brain parcellations:
    parcel_masker = input_data.NiftiLabelsMasker(
        labels_img=net_parcels_map_nifti, background_label=0, standardize=True)
    ts_within_nodes = parcel_masker.fit_transform(func_file, confounds=conf)
    print('\nTime series has {0} samples and {1} volumetric ROIs\n'.format(
        ts_within_nodes.shape[0], len(coords)))
    # Save time series as txt file
    if mask is None:
        if network is not None:
            out_path_ts = dir_path + '/' + ID + '_' + network + '_rsn_net_ts.txt'
        else:
            out_path_ts = dir_path + '/' + ID + '_wb_net_ts.txt'
    else:
        if network is not None:
            out_path_ts = dir_path + '/' + ID + '_' + str(
                os.path.basename(mask).split('.')
                [0]) + '_' + network + '_rsn_net_ts.txt'
        else:
            out_path_ts = dir_path + '/' + ID + '_' + str(
                os.path.basename(mask).split('.')[0]) + '_wb_net_ts.txt'
    np.savetxt(out_path_ts, ts_within_nodes)
    return ts_within_nodes
Example #8
import os
from glob import glob

import numpy as np
from nilearn import datasets, input_data
from sklearn import covariance


def participant_level(args, subjects_to_analyze):
    # The subject level analysis: extract time-series per subject
    # Retrieve the atlas
    atlas_data = datasets.fetch_atlas_basc_multiscale_2015()
    atlas_filename = atlas_data.scale122

    # find all RS scans and extract time-series on them
    for subject_label in subjects_to_analyze:
        for fmri_file in glob(
                os.path.join(args.bids_dir, "derivatives",
                             "sub-%s" % subject_label, "func",
                             "*_hmc_mni.nii.gz")):
            masker = input_data.NiftiLabelsMasker(labels_img=atlas_filename,
                                                  standardize=True,
                                                  detrend=True,
                                                  verbose=3)
            time_series = masker.fit_transform(fmri_file)
            out_file = os.path.split(fmri_file)[-1].replace(
                "_hmc_mni.nii.gz", "_time_series.tsv")
            out_file = os.path.join(args.output_dir, out_file)
            np.savetxt(out_file, time_series, delimiter='\t')

            estimator = covariance.LedoitWolf(store_precision=True)
            estimator.fit(time_series)
            out_file = os.path.split(fmri_file)[-1].replace(
                "_hmc_mni.nii.gz", "_connectome.tsv")
            out_file = os.path.join(args.output_dir, out_file)
            np.savetxt(out_file, estimator.precision_, delimiter='\t')
Example #9
    def extract_ts_parc(self):
        """
        API for employing Nilearn's NiftiLabelsMasker to extract fMRI
        time-series data from spherical ROI's based on a given 3D atlas image
        of integer-based voxel intensities. The resulting time-series can then
        optionally be resampled using circular-block bootrapping. The final 2D
        m x n array is ultimately saved to file in .npy format.
        """
        import numpy as np
        import nibabel as nib
        from nilearn import input_data
        from pynets.fmri.estimation import fill_confound_nans

        self._net_parcels_map_nifti = nib.load(self.net_parcels_nii_path,
                                               mmap=True)
        self._net_parcels_map_nifti.set_data_dtype(np.int16)
        self._parcel_masker = input_data.NiftiLabelsMasker(
            labels_img=self._net_parcels_map_nifti,
            background_label=0,
            standardize=True,
            smoothing_fwhm=float(self.smooth),
            low_pass=self.low_pass,
            high_pass=self.hpass,
            detrend=self._detrending,
            t_r=self._t_r,
            verbose=2,
            resampling_target="labels",
            dtype="auto",
            mask_img=self._mask_img,
            strategy=self.extract_strategy)

        if self.conf is not None:
            import pandas as pd
            import os

            confounds = pd.read_csv(self.conf, sep="\t")
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, self.dir_path)
                self.ts_within_nodes = self._parcel_masker.fit_transform(
                    self._func_img, confounds=conf_corr)
                os.remove(conf_corr)
            else:
                self.ts_within_nodes = self._parcel_masker.fit_transform(
                    self._func_img, confounds=self.conf)
        else:
            self.ts_within_nodes = self._parcel_masker.fit_transform(
                self._func_img)

        self._func_img.uncache()

        if self.ts_within_nodes is None:
            raise RuntimeError("\nTime-series extraction failed!")
        else:
            self.node_size = "parc"

        return
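The circular-block bootstrapping mentioned in the docstring is implemented elsewhere in pynets; as a rough illustration of the technique itself (not the pynets implementation), resampling an (n_samples, n_rois) array in contiguous, wrap-around blocks can be sketched as:

import numpy as np

def circular_block_bootstrap(ts, block_size, seed=None):
    # Draw random block start points, take contiguous blocks that wrap
    # around the end of the series, and truncate to the original length.
    rng = np.random.default_rng(seed)
    n = ts.shape[0]
    n_blocks = int(np.ceil(n / block_size))
    starts = rng.integers(0, n, size=n_blocks)
    idx = np.concatenate(
        [np.arange(s, s + block_size) % n for s in starts])[:n]
    return ts[idx]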
Example #10
def extract_ts_parc(net_parcels_map_nifti, conf, func_file, coords, roi,
                    dir_path, ID, network, smooth, atlas_select, uatlas_select,
                    label_names, c_boot, block_size, mask):
    import os.path
    from nilearn import input_data
    # from pynets.fmri.estimation import extract_ts_parc_fast
    from pynets import utils
    #from sklearn.externals.joblib import Memory

    if not os.path.isfile(func_file):
        raise ValueError(
            '\nERROR: Functional data input not found! Check that the file(s) specified with the -i flag '
            'exist(s)')

    if conf:
        if not os.path.isfile(conf):
            raise ValueError(
                '\nERROR: Confound regressor file not found! Check that the file(s) specified with the '
                '-conf flag exist(s)')

    # if fast is True:
    #     ts_within_nodes = extract_ts_parc_fast(net_parcels_map_nifti, conf, func_file, dir_path)
    # else:
    detrending = True
    # parcel_masker = input_data.NiftiLabelsMasker(labels_img=net_parcels_map_nifti, background_label=0,
    #                                              standardize=True, smoothing_fwhm=float(smooth),
    #                                              detrend=detrending,
    #                                              memory=Memory(cachedir="%s%s%s" % (dir_path,
    #                                                                                 '/SpheresMasker_cache_',
    #                                                                                 str(ID)), verbose=2),
    #                                              memory_level=1)
    parcel_masker = input_data.NiftiLabelsMasker(
        labels_img=net_parcels_map_nifti,
        background_label=0,
        standardize=True,
        smoothing_fwhm=float(smooth),
        detrend=detrending,
        verbose=2,
        mask_img=mask)
    # parcel_masker = input_data.NiftiLabelsMasker(labels_img=net_parcels_map_nifti, background_label=0,
    #                                              standardize=True)
    ts_within_nodes = parcel_masker.fit_transform(func_file, confounds=conf)
    if ts_within_nodes is None:
        raise RuntimeError('\nERROR: Time-series extraction failed!')
    if float(c_boot) > 0:
        print("%s%s%s" %
              ('Performing circular block bootstrapping iteration: ', c_boot,
               '...'))
        ts_within_nodes = utils.timeseries_bootstrap(ts_within_nodes,
                                                     block_size)[0]
    print("%s%s%d%s" %
          ('\nTime series has {0} samples'.format(ts_within_nodes.shape[0]),
           ' mean extracted from ', len(coords), ' volumetric ROI\'s'))
    print("%s%s%s" % ('Smoothing FWHM: ', smooth, ' mm\n'))
    # Save time series as txt file
    utils.save_ts_to_file(roi, network, ID, dir_path, ts_within_nodes, c_boot)
    node_size = None
    return ts_within_nodes, node_size, smooth, dir_path, atlas_select, uatlas_select, label_names, coords, c_boot
Example #11
    def extract_label_rois(self, nifti_image):
        """Extract ROI time-series (a 2D array) from a 4D BOLD image using an atlas fetched from nilearn"""
        from nilearn import datasets, input_data

        atlas = datasets.fetch_atlas_harvard_oxford('cort-maxprob-thr25-2mm')
        # get filename containing atlas info
        atlas_filename = atlas.maps
        # Create a masker object to extract signal on the parcellation
        masker = input_data.NiftiLabelsMasker(labels_img=atlas_filename)
        # Turn the NIfTI data into time-series where each column is an ROI
        rois_ts = masker.fit_transform(nifti_image)

        return atlas.labels, rois_ts
Example #12
from nilearn import datasets, input_data


def get_atlas_data(map_name):
    """
    This function takes a map name from the HarvardOxford atlas and turns it into a masker we can apply to the NIfTI volumes
    :param map_name: the name of the map in the HarvardOxford atlas to use
    :return: (NiftiLabelsMasker) a masker object
    """

    atlas = datasets.fetch_atlas_harvard_oxford(map_name)  # Retrieve the atlas from nilearn's datasets module
    labels_map = atlas.maps  # Retrieve the maps from the atlas object

    masker = input_data.NiftiLabelsMasker(labels_img=labels_map, standardize=True,
                                          memory='nilearn_cache')  # Turn it into a masker that we can apply

    return masker
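A short usage sketch for the helper above (the functional filename is hypothetical):

masker = get_atlas_data('cort-maxprob-thr25-2mm')
# fit_transform returns an (n_timepoints, n_regions) array of signals.
time_series = masker.fit_transform('sub-01_bold.nii.gz')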
Example #13
from nilearn import image, input_data


def project_contrast(img_files, parcellation, mask_file, resample=True):
    if isinstance(parcellation, str):
        parcellation = image.load_img(parcellation)
    resampled_images = image.resample_img(img_files, target_affine=parcellation.affine)
    if len(parcellation.shape) == 3:
        masker = input_data.NiftiLabelsMasker(labels_img=parcellation,
                                              resampling_target="labels",
                                              standardize=False,
                                              memory='nilearn_cache',
                                              memory_level=1)
    elif len(parcellation.shape) == 4:
        masker = input_data.NiftiMapsMasker(maps_img=parcellation,
                                            mask_img=mask_file,
                                            resampling_target="maps",
                                            standardize=False,
                                            memory='nilearn_cache',
                                            memory_level=1)
    time_series = masker.fit_transform(resampled_images)
    return time_series, masker
Example #14
import numpy as np
import nilearn.input_data as nid


def seed_correlation(functional_image, atlas_image, mask_image):
    """

    :param functional_image: 4D nibabel image with time in the 4th dimension
    :param atlas_image: 3D nibabel image with integers for ROIs and 0 as background
    :param mask_image: 3D nibabel image with 0 as background and 1 as brain.
    :return: Fisher z-transformed correlations. shape = (n_voxels, n_regions)
    """
    atlas_masker = nid.NiftiLabelsMasker(labels_img=atlas_image,
                                         mask_img=mask_image,
                                         standardize=True)
    brain_masker = nid.NiftiMasker(mask_image, verbose=0, standardize=True)
    seed_time_series = atlas_masker.fit_transform(functional_image)
    brain_time_series = brain_masker.fit_transform(functional_image)

    seed_correlations = np.dot(brain_time_series.T,
                               seed_time_series) / seed_time_series.shape[0]
    seed_correlations_fisher_z = np.arctanh(seed_correlations)

    # seed_based_correlation_img = brain_masker.inverse_transform(seed_correlations_fisher_z.T)
    return seed_correlations_fisher_z
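Following the commented hint above, each seed's map can be put back into brain space with the voxel masker's inverse_transform. A sketch, assuming hypothetical inputs and rebuilding the masker (it is local to the function):

import nibabel as nib
import nilearn.input_data as nid

func_img = nib.load('sub-01_bold.nii.gz')  # hypothetical paths
atlas_img = nib.load('atlas.nii.gz')
mask_img = nib.load('mask.nii.gz')

z = seed_correlation(func_img, atlas_img, mask_img)

# Rebuild the voxel-level masker and map the (n_voxels, n_regions)
# array back to one 3D volume per seed region.
brain_masker = nid.NiftiMasker(mask_img, standardize=True).fit()
z_img = brain_masker.inverse_transform(z.T)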
Example #15
from nilearn import connectome, input_data


def extract_mat(rs_file, brainmask_file, roi_file, confounds_file, conf, roi_type, tr, spikereg_threshold=None):
    """
    Extract a connectivity matrix using the "36P" (36-parameter) confound-regression strategy.
    """

    # Masker
    masker_pars = {"mask_img": brainmask_file, "detrend": True, "standardize": True,
                   "low_pass": 0.1, "high_pass": 0.01, "t_r": tr}

    if roi_type == "maps":
        # for msdl-type probabilistic rois
        masker = input_data.NiftiMapsMasker(roi_file, **masker_pars)
    elif roi_type == "labels":
        # for binary rois
        masker = input_data.NiftiLabelsMasker(roi_file, **masker_pars)
    else:
        raise ValueError("Unknown roi type: {}".format(roi_type))

    # Extract time series
    confounds, outlier_stats = get_confounds(confounds_file, kind=conf, spikereg_threshold=spikereg_threshold)
    time_series = masker.fit_transform(rs_file, confounds=confounds.values)

    con_measure = connectome.ConnectivityMeasure(kind='correlation')
    conmat = con_measure.fit_transform([time_series])[0]

    report_str = "rs_file\t{}\n".format(rs_file)
    report_str += "roi_file\t{}\n".format(roi_file)

    for k in sorted(masker_pars):
        report_str += "{}\t{}\n".format(k, masker_pars[k])

    report_str += "spike regression \t {}".format(spikereg_threshold)
    report_str += "\n\n"
    report_str += "confounds\t{}".format(", ".join(confounds.columns))
    report_str += "\n\n"
    report_str += confounds.to_string()

    return conmat, report_str, outlier_stats
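get_confounds is a project-specific helper that is not shown here. A minimal stand-in, assuming an fMRIPrep-style tab-separated confounds file and skipping spike regression entirely (the empty stats dict is an assumption):

import pandas as pd

def get_confounds(confounds_file, kind='36P', spikereg_threshold=None):
    # Placeholder: load the whole confounds table, fill missing values,
    # and report no outlier statistics.
    confounds = pd.read_csv(confounds_file, sep='\t').fillna(0)
    outlier_stats = {}
    return confounds, outlier_stats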
Example #16
import nilearn.input_data as nid


def confound_corrected_seed_correlation(functional_image, atlas_image,
                                        mask_image, confound_image):
    """

    :param functional_image: 4D nibabel image with time in the 4th dimension
    :param atlas_image: 3D nibabel image with 0 as background and integer values denoting ROIs
    :param mask_image: 3D mask image
    :param confound_image: A nibabel image with a single ROI for which we will compute the ROI timeseries and regress
                           it from the voxel timeseries in the functional image before generating the seed maps.
                           Can only contain
    :return:
    """
    confound_masker = nid.NiftiLabelsMasker(labels_img=confound_image,
                                            mask_img=mask_image,
                                            standardize=False)
    voxel_masker = nid.NiftiMasker(mask_image, verbose=0, standardize=False)

    confound_ts = confound_masker.fit_transform(functional_image)
    resid_ts = voxel_masker.fit_transform(functional_image,
                                          confounds=confound_ts)
    resid_image = voxel_masker.inverse_transform(resid_ts)
    return seed_correlation(resid_image, atlas_image, mask_image)
Example #17
def extract_ts_parc(net_parcels_map_nifti, conf, func_file, coords, mask,
                    dir_path, ID, network, smooth, atlas_select, uatlas_select,
                    label_names):
    from nilearn import input_data
    # from pynets.graphestimation import extract_ts_parc_fast
    from pynets import utils
    #from sklearn.externals.joblib import Memory

    # if fast is True:
    #     ts_within_nodes = extract_ts_parc_fast(net_parcels_map_nifti, conf, func_file, dir_path)
    # else:
    detrending = True
    # parcel_masker = input_data.NiftiLabelsMasker(labels_img=net_parcels_map_nifti, background_label=0,
    #                                              standardize=True, smoothing_fwhm=float(smooth),
    #                                              detrend=detrending,
    #                                              memory=Memory(cachedir="%s%s%s" % (dir_path,
    #                                                                                 '/SpheresMasker_cache_',
    #                                                                                 str(ID)), verbose=2),
    #                                              memory_level=1)
    parcel_masker = input_data.NiftiLabelsMasker(
        labels_img=net_parcels_map_nifti,
        background_label=0,
        standardize=True,
        smoothing_fwhm=float(smooth),
        detrend=detrending,
        verbose=2)
    # parcel_masker = input_data.NiftiLabelsMasker(labels_img=net_parcels_map_nifti, background_label=0,
    #                                              standardize=True)
    ts_within_nodes = parcel_masker.fit_transform(func_file, confounds=conf)
    print("%s%s%d%s" %
          ('\nTime series has {0} samples'.format(ts_within_nodes.shape[0]),
           ' mean extracted from ', len(coords), ' volumetric ROI\'s'))
    print("%s%s%s" % ('Smoothing FWHM: ', smooth, ' mm\n'))
    # Save time series as txt file
    utils.save_ts_to_file(mask, network, ID, dir_path, ts_within_nodes)

    node_size = None
    return ts_within_nodes, node_size, smooth, dir_path, atlas_select, uatlas_select, label_names, coords
Example #18
import numpy as np
import pytest
from nibabel import Nifti1Image
from nilearn import input_data
from nilearn._utils import data_gen


def test_empty_report():
    # Data for NiftiMasker
    data = np.zeros((9, 9, 9))
    data[3:-3, 3:-3, 3:-3] = 10
    data_img_3d = Nifti1Image(data, np.eye(4))
    # Data for NiftiLabelsMasker
    shape = (13, 11, 12)
    affine = np.diag([2, 2, 2, 1])
    n_regions = 9
    labels_img = data_gen.generate_labeled_regions(shape,
                                                   affine=affine,
                                                   n_regions=n_regions)
    # turn off reporting
    maskers = [input_data.NiftiMasker(reports=False),
               input_data.NiftiLabelsMasker(labels_img, reports=False)]
    for masker in maskers:
        masker.fit(data_img_3d)
        assert masker._reporting_data is None
        assert masker._reporting() == [None]
        with pytest.warns(UserWarning,
                          match=("Report generation not enabled ! "
                                 "No visual outputs will be created.")):
            masker.generate_report()
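For contrast, with reporting left enabled (the default) the same maskers do produce a report; a brief sketch:

masker = input_data.NiftiMasker()  # reports=True by default
masker.fit(data_img_3d)
report = masker.generate_report()
# report.save_as_html('masker_report.html')  # write the HTML to disk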
Example #19
    def extract_ts_parc(self):
        """
        API for employing Nilearn's NiftiLabelsMasker to extract fMRI
        time-series data from spherical ROI's based on a given 3D atlas image
        of integer-based voxel intensities. The resulting time-series can then
        optionally be resampled using circular-block bootrapping. The final 2D
        m x n array is ultimately saved to file in .npy format.
        """
        import pandas as pd
        from nilearn import input_data
        from pynets.fmri.estimation import fill_confound_nans

        self._parcel_masker = input_data.NiftiLabelsMasker(
            labels_img=self._net_parcels_map_nifti,
            background_label=0,
            standardize=True,
            smoothing_fwhm=float(self.smooth),
            low_pass=self.low_pass,
            high_pass=self.hpass,
            detrend=self._detrending,
            t_r=self._t_r,
            verbose=2,
            resampling_target="labels",
            dtype="auto",
            mask_img=self._mask_img,
            strategy=self.signal)

        if self.conf is not None:
            import os

            confounds = pd.read_csv(self.conf, sep="\t")

            fixed_cols = {'framewise_displacement', 'white_matter', 'csf',
                          'std_dvars', 'rot_x', 'rot_y', 'rot_z',
                          'trans_x', 'trans_y', 'trans_z'}
            cols = [
                i for i in confounds.columns
                if 'motion_outlier' in i or 'non_steady_state_outlier' in i
                or i in fixed_cols
            ]

            if len(confounds.index) == self._func_img.shape[-1]:
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self.dir_path)
                    conf_corr_df = pd.read_csv(conf_corr, sep="\t")
                    cols = [i for i in cols if i in conf_corr_df.columns]
                    self.ts_within_nodes = self._parcel_masker.fit_transform(
                        self._func_img.slicer[:, :, :, 5:],
                        confounds=conf_corr_df.loc[5:][cols].values)
                    os.remove(conf_corr)
                else:
                    self.ts_within_nodes = self._parcel_masker.fit_transform(
                        self._func_img.slicer[:, :, :, 5:],
                        confounds=pd.read_csv(self.conf,
                                              sep="\t").loc[5:][cols].values)
            else:
                from nilearn.image import high_variance_confounds
                print(f"Shape of confounds ({len(confounds.index)}) does not"
                      f" equal the number of volumes "
                      f"({self._func_img.shape[-1]}) in the time-series")
                self.ts_within_nodes = self._parcel_masker.fit_transform(
                    self._func_img.slicer[:, :, :, 5:],
                    confounds=pd.DataFrame(
                        high_variance_confounds(self._func_img,
                                                percentile=1)).loc[5:].values)
        else:
            from nilearn.image import high_variance_confounds
            self.ts_within_nodes = self._parcel_masker.fit_transform(
                self._func_img.slicer[:, :, :, 5:],
                confounds=pd.DataFrame(
                    high_variance_confounds(self._func_img,
                                            percentile=1)).loc[5:].values)

        self._func_img.uncache()

        if self.ts_within_nodes is None:
            raise RuntimeError("\nTime-series extraction failed!")

        else:
            self.node_radius = "parc"

        return
Example #20
import nimare
from nilearn import datasets, image, input_data

# Use atlas for likelihood-based estimators
atlas = datasets.fetch_atlas_harvard_oxford("cort-maxprob-thr25-2mm")

# nilearn's NiftiLabelsMasker cannot handle NaNs at the moment,
# and some of the NIDM-Results packs' beta images have NaNs at the edge of the
# brain.
# So, we will create a reduced version of the atlas for this analysis.
nan_mask = image.math_img("~np.any(np.isnan(img), axis=3)",
                          img=img_dset.images["beta"].tolist())
nanmasked_atlas = image.math_img(
    "mask * atlas",
    mask=nan_mask,
    atlas=atlas["maps"],
)
masker = input_data.NiftiLabelsMasker(nanmasked_atlas)

meta = nimare.meta.ibma.VarianceBasedLikelihood(method="reml", mask=masker)
vbl_results = meta.fit(img_dset)

meta = nimare.meta.ibma.SampleSizeBasedLikelihood(method="reml", mask=masker)
ssbl_results = meta.fit(img_dset)

# Plot statistical maps from IBMAs
results = [
    dsl_results,
    stouffers_results,
    weighted_stouffers_results,
    fishers_results,
    ols_results,
    wls_results,
]
Example #21
    'Ventral Attention IPL L', 'Ventral Attention TPJ L',
    'Limbic Amyg TempPole OFC', 'Frontoparietal ITG R',
    'Frontoparietal Parietal R', 'Frontoparietal MFG R',
    'Frontoparietal AIns R', 'Frontoparietal Precuneus',
    'Frontoparietal A/MCC', 'Frontoparietal PCC', 'Frontoparietal SFG L',
    'Frontoparietal MFG L', 'Frontoparietal ITG L',
    'Frontoparietal Parietal L', 'Frontoparietal ITG R',
    'Default Mode Temporal R', 'Default Mode Angular R', 'Default Mode IFG R',
    'Default Mode SFG', 'Default Mode MedTemp R', 'Default Mode Precuneus',
    'Default Mode Med Temp L', 'Default Mode ITG Angular L'
]

laird_2011_icns = '/home/data/nbc/anxiety-physics/17-networks-combo-ccn-5.14.nii.gz'
yeo_7_networks = '/home/kbott006/nilearn_data/yeo_2011/Yeo_JNeurophysiol11_MNI152/Yeo2011_7Networks_MNI152_FreeSurferConformed1mm_LiberalMask.nii.gz'
yeo_7_regions = '/home/kbott006/nilearn_data/yeo_2011/Yeo_JNeurophysiol11_MNI152/Yeo2011_7Network_Regions_MNI152_FreeSurferConformed1mm_LiberalMask.nii.gz'
network_masker = input_data.NiftiLabelsMasker(laird_2011_icns,
                                              standardize=True)
yeo_ntwk_masker = input_data.NiftiLabelsMasker(yeo_7_networks,
                                               standardize=True)
yeo_regn_masker = input_data.NiftiLabelsMasker(yeo_7_regions, standardize=True)

connectivity_metric = 'correlation'
conn_meas = ConnectivityMeasure(kind=connectivity_metric)

#determine the range of thresholds across which graph theoretic measures will be integrated
#threshold of 0.1 means the top 10% of connections will be retained
thresh_range = np.arange(0.1, 1, 0.1)

colnames = []
for session in sessions:
    for ntwk1 in yeo_7_ntwk_labels:
        for ntwk2 in yeo_7_ntwk_labels:
Example #22
from os import makedirs
from os.path import basename, exists, join

import numpy as np
import pandas as pd
from nilearn import connectome, image, input_data, plotting

import idconn


def task_connectivity(layout,
                      subject,
                      session,
                      task,
                      atlas,
                      confounds,
                      connectivity_metric='correlation',
                      out_dir=None):
    """
    Makes connectivity matrices per subject per session per task per condition.
    Parameters
    ----------
    dset_dir : str
        BIDS-formatted dataset path (top-level, in which a 'derivatives/' directory will be made if one does not exist)
    subject : str
        Subject ID for which the networks will be calculated.
    session : str, optional
        Session of data collection. If there's only one session, we'll find it.
    task : str
        Name of task fMRI scan from which networks will be calculated.
    connectivity_metric : {"correlation", "partial correlation", "tangent",\
                           "covariance", "precision"}, optional
        The matrix kind. Passed to Nilearn's `ConnectivityMeasure`.
    space : str
        'native' if analyses will be performed in subjects' functional native space (atlas(es) should be transformed)
        'mni152-2mm' if analyses will be performed in MNI125 2mm isotropic space (fMRI data should already be transformed)
    atlas : str
        If you want to grab an atlas using Nilearn, this is the name of the atlas and 
        must match the corresponding function `fetch_atlas_[name]` in `nilearn.datasets`. 
        If you have your own atlas, this is the path to that nifti file.`
    confounds : list-like
        Filenames of confounds files.
    Returns
    -------
    confounds_file : str
        Filename of merged confounds .tsv file
    """
    #version = '0.1.1'
    try:
        version = idconn.__version__
    except AttributeError:
        version = 'test'
    if '.nii' in atlas:
        assert exists(atlas), f'Mask file does not exist at {atlas}'

    if not out_dir:
        deriv_dir = join(layout.root, 'derivatives', f'idconn-{version}')
    else:
        deriv_dir = out_dir
    space = 'MNI152NLin2009cAsym'
    atlas_name = basename(atlas).rsplit('.', 2)[0]
    # use pybids here to grab # of runs and preproc bold filenames
    connectivity_measure = connectome.ConnectivityMeasure(
        kind=connectivity_metric)
    bold_files = layout.get(
        scope='derivatives',
        return_type='file',
        suffix='bold',
        task=task,
        space=space,
        subject=subject,
        session=session,
        extension='nii.gz'
    )  # should be preprocessed BOLD file from fmriprep, grabbed with pybids
    print(f'BOLD files found at {bold_files}')

    runs = []
    if len(bold_files) > 1:
        for i in range(0, len(bold_files)):
            assert exists(
                bold_files[i]
            ), "Preprocessed bold file(s) does not exist at {0}".format(
                bold_files)
            runs.append(layout.parse_file_entities(bold_files[i])['run'])
    else:
        runs = None
    print(f'Found runs: {runs}')

    out = join(deriv_dir, f'sub-{subject}', f'ses-{session}', 'func')
    if not exists(out):
        makedirs(out)

    event_files = layout.get(return_type='filename',
                             suffix='events',
                             task=task,
                             subject=subject)
    timing = pd.read_csv(event_files[0], header=0, index_col=0, sep='\t')
    conditions = timing['trial_type'].unique()

    run_cond = {}
    corrmats = {}
    for run in runs:
        bold_file = layout.get(scope='derivatives',
                               return_type='file',
                               suffix='bold',
                               task=task,
                               space='MNI152NLin2009cAsym',
                               subject=subject,
                               session=session,
                               extension='nii.gz',
                               run=run)
        assert len(
            bold_file
        ) == 1, f'BOLD file improperly specified, more than one .nii.gz file with {subject}, {session}, {task}, {run}: {bold_file}'
        tr = layout.get_tr(bold_file)

        #load timing file
        #update to use pyBIDS + layout
        event_file = layout.get(return_type='filename',
                                suffix='events',
                                task=task,
                                subject=subject,
                                run=run,
                                session=session)
        print('# of event files =', len(event_file), '\nfilename = ',
              event_file[0])
        the_file = str(event_file[0])
        assert exists(the_file), f'Event file does not exist at {the_file}'
        timing = pd.read_csv(the_file, header=0, index_col=0, sep='\t')
        timing = timing.sort_values('onset')

        confounds_file = layout.get(scope='derivatives',
                                    return_type='file',
                                    desc='confounds',
                                    subject=subject,
                                    session=session,
                                    task=task,
                                    run=run,
                                    extension='tsv')
        print(f'Confounds file located at: {confounds_file}')
        confounds_df = pd.read_csv(confounds_file[0], header=0, sep='\t')
        confounds_df = confounds_df[confounds].fillna(0)
        confounds_fname = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-confounds_timeseries.tsv'
        )
        confounds_df.to_csv(confounds_fname, sep='\t')

        ex_bold = image.index_img(bold_file[0], 2)
        display = plotting.plot_epi(ex_bold)
        display.add_contours(atlas)
        display.savefig(
            join(
                deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-{atlas_name}_overlay.png'
            ))

        print(f'BOLD file located at {bold_file}\nTR = {tr}s')

        masker = input_data.NiftiLabelsMasker(atlas,
                                              standardize=True,
                                              t_r=tr,
                                              verbose=1)
        timeseries = masker.fit_transform(bold_file[0],
                                          confounds=confounds_fname)
        #load timing file
        #update to use pyBIDS + layout
        try:
            #and now we slice into conditions
            for condition in conditions:
                run_cond[condition] = {}
                corrmats[condition] = {}
                blocks = []
                cond_timing = timing[timing['trial_type'] == condition]
                for i in cond_timing.index:
                    blocks.append(
                        (cond_timing.loc[i]['onset'] / tr,
                         ((cond_timing.loc[i]['onset'] +
                           cond_timing.loc[i]['duration']) / tr) + 1))
                if len(blocks) == 1:
                    run_cond[condition][run] = timeseries[
                        int(blocks[0][0]):int(blocks[0][1]), :]
                if len(blocks) > 1:
                    run_cond[condition][run] = np.vstack(
                        (timeseries[int(blocks[0][0]):int(blocks[0][1]), :],
                         timeseries[int(blocks[1][0]):int(blocks[1][1]), :]))
                if len(blocks) > 2:
                    # stack each remaining block onto the running array
                    for i in np.arange(2, len(blocks)):
                        run_cond[condition][run] = np.vstack(
                            (run_cond[condition][run],
                             timeseries[int(blocks[i][0]):int(blocks[i][1]), :]))
                    #print('extracted signals for {0}, {1}, {2}'.format(task, run, condition), run_cond['{0}-{1}'.format(run, condition)].shape)
                print(f'Making correlation matrix for {run}, {condition}.')
                corrmats[condition][run] = connectivity_measure.fit_transform(
                    [run_cond[condition][run]])[0]
                print('And that correlation matrix is',
                      corrmats[condition][run].shape)
        except Exception as e:
            print('trying to slice and dice, but', e)
    #and paste together the timeseries from each run together per condition
    files = []
    avg_corrmats = {}
    print('Corrmats per run per condition have been made!')
    for condition in conditions:
        print(f'Merging corrmats for {task}-{condition}...')
        data = list(corrmats[condition].values())
        stacked_corrmats = np.array(data)
        print('Stacked corrmats have dimensions', stacked_corrmats.shape)
        avg_corrmat = np.mean(stacked_corrmats, axis=0)
        corrmat_df = pd.DataFrame(index=np.arange(1, avg_corrmat.shape[0] + 1),
                                  columns=np.arange(1,
                                                    avg_corrmat.shape[0] + 1),
                                  data=avg_corrmat)
        avg_corrmats[condition] = corrmat_df
        corrmat_file = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_condition-{condition}_desc-{atlas_name}_corrmat.tsv'
        )
        try:
            corrmat_df.to_csv(corrmat_file, sep='\t')
            files.append(corrmat_file)
        except Exception as e:
            print('saving corrmat...', e)
    return files, avg_corrmats
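A usage sketch for task_connectivity, assuming a pybids BIDSLayout over a dataset with fMRIPrep derivatives (the path, session, task, atlas file, and confound column names are all placeholders):

import bids

layout = bids.BIDSLayout('/data/my_bids_dataset', derivatives=True)
files, avg_corrmats = task_connectivity(
    layout=layout,
    subject='01',
    session='01',
    task='rest',
    atlas='/data/atlases/my_atlas.nii.gz',
    confounds=['trans_x', 'trans_y', 'trans_z',
               'rot_x', 'rot_y', 'rot_z'],
)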
Example #23
    "633", "634"
]

subjects = ['102']

data_dir = '/home/data/nbc/physics-learning/data/pre-processed'
pre_dir = '/home/data/nbc/anxiety-physics/pre'
post_dir = '/home/data/nbc/anxiety-physics/post'
sink_dir = '/home/data/nbc/anxiety-physics/output'
#data_dir = '/Users/Katie/Dropbox/Data'
#work_dir = '/Users/Katie/Dropbox/Data/salience-anxiety-graph-theory'
directories = [pre_dir, post_dir]
sessions = ['pre', 'post']

laird_2011_icns = '/home/data/nbc/anxiety-physics/17-networks-combo-ccn-5.14.nii.gz'
network_masker = input_data.NiftiLabelsMasker(laird_2011_icns,
                                              standardize=True)

connectivity_metric = 'correlation'

#determine the range of thresholds across which graph theoretic measures will be integrated
#threshold of 0.1 means the top 10% of connections will be retained
thresh_range = np.arange(0.1, 1, 0.1)

for i in range(len(sessions)):
    print(sessions[i])
    for s in subjects:
        if not exists(join(sink_dir, sessions[i], s)):
            makedirs(join(sink_dir, sessions[i], s))
        fmri_file = join(directories[i],
                         '{0}_filtered_func_data_mni.nii.gz'.format(s))
        print(fmri_file)
Example #24
def run_freesurfer_mask_connectivity(
    pop_name,
    population,
    freesurfer_dir,
    workspace_dir,
    mrs_datadir,
):

    df = pd.DataFrame(index=[population], columns=columnx)

    for subject in population:

        print('####################### Subject %s' % subject)

        subject_dir = os.path.join(workspace_dir, 'GluConnectivity', subject)
        outdir = os.path.join(subject_dir, 'RSFC_CONNECTIVITY')
        mkdir_path(outdir)

        func_pproc = os.path.join(
            subject_dir,
            'functional_native_brain_preproc_FWHM_AROMA_residual_bp/bandpassed_demeaned_filtered.nii.gz'
        )  # 2.3 mm
        func_mean = os.path.join(
            subject_dir,
            'functional_native_brain_preproc_mean/REST_calc_resample_corrected_volreg_maths_tstat.nii'
        )
        func_aroma = os.path.join(
            subject_dir,
            'functional_native_brain_preproc_FWHM_AROMA/denoised_func_data_nonaggr.nii.gz'
        )
        func_gm = os.path.join(
            subject_dir,
            'functional_native_gm/TISSUE_CLASS_1_GM_OPTIMIZED_resample_flirt_thresh_maths.nii.gz'
        )
        anat_func = os.path.join(
            subject_dir,
            'anatomical_FUNC2mm_brain/MP2RAGE_DESKULL_RPI_resample_ero_flirt_flirt.nii.gz'
        )  # 2.3mm
        anat_func_xfm = os.path.join(
            subject_dir,
            'anatomical_FUNC2mm_xfm/REST_calc_resample_corrected_volreg_maths_tstat_flirt_inv.mat'
        )
        mni2natwarp = os.path.join(
            subject_dir,
            'MNI2mm_ANAT_xfm/MP2RAGE_DESKULL_RPI_resample_ero_fieldwarp_inverse.nii.gz'
        )

        #######################################    Grab ATAG masks   #######################################

        STN_LEFT = os.path.join(outdir, 'ATAG_STN_LEFT.nii.gz')
        SN_LEFT = os.path.join(outdir, 'ATAG_SN_LEFT.nii.gz')
        GPe_LEFT = os.path.join(outdir, 'ATAG_GPE_left.nii.gz')
        GPi_LEFT = os.path.join(outdir, 'ATAG_GPi_left.nii.gz')

        os.system('applywarp -i %s -r %s -w %s --postmat=%s -o %s' %
                  (mni_stn_left_1mm, anat_func, mni2natwarp, anat_func_xfm,
                   STN_LEFT))
        os.system(
            'applywarp -i %s -r %s -w %s --postmat=%s -o %s' %
            (mni_sn_left_1mm, anat_func, mni2natwarp, anat_func_xfm, SN_LEFT))

        os.system('fslmaths %s -bin %s' % (STN_LEFT, STN_LEFT))
        os.system('fslmaths %s -bin %s' % (SN_LEFT, SN_LEFT))

        #######################################    Grab Subcortical masks   #######################################
        print('1. grabbing FIRST Subcortical masks')

        STR = os.path.join(subject_dir, 'functional_subcortical',
                           'left_str.nii.gz')
        CAUx = os.path.join(subject_dir, 'functional_subcortical',
                            'left_caudate.nii.gz')
        PUT = os.path.join(subject_dir, 'functional_subcortical',
                           'left_putamen.nii.gz')
        PAL = os.path.join(subject_dir, 'functional_subcortical',
                           'left_pallidum.nii.gz')
        NAC = os.path.join(subject_dir, 'functional_subcortical',
                           'left_nacc.nii.gz')
        HIP = os.path.join(subject_dir, 'functional_subcortical',
                           'left_hipoocampus.nii.gz')
        AMG = os.path.join(subject_dir, 'functional_subcortical',
                           'left_amygdala.nii.gz')

        THA = os.path.join(subject_dir, 'functional_subcortical',
                           'thalamus.nii.gz')
        lTHA = os.path.join(subject_dir, 'functional_subcortical',
                            'left_thalamus.nii.gz')
        rTHA = os.path.join(subject_dir, 'functional_subcortical',
                            'right_thalamus.nii.gz')

        #######################################    Fill Caudate Holes   #######################################

        CAU = os.path.join(subject_dir, 'functional_subcortical',
                           'left_caudate_fill.nii.gz')

        if not os.path.isfile(CAU):
            os.system('fslmaths %s -fillh %s' % (CAUx, CAU))

        #######################################    Grab svs masks   #######################################
        print('2. grabbing SVS masks')

        svs_acc_src = os.path.join(
            mrs_datadir, pop_name, subject, 'svs_voxel_mask',
            '%s%s_ACC_RDA_MASK.nii' % (subject, mrs_datadir[-1]))
        svs_tha_src = os.path.join(
            mrs_datadir, pop_name, subject, 'svs_voxel_mask',
            '%s%s_THA_RDA_MASK.nii' % (subject, mrs_datadir[-1]))
        svs_str_src = os.path.join(
            mrs_datadir, pop_name, subject, 'svs_voxel_mask',
            '%s%s_STR_RDA_MASK.nii' % (subject, mrs_datadir[-1]))

        svs_acc = os.path.join(outdir, 'svs_acc.nii.gz')
        svs_tha = os.path.join(outdir, 'svs_tha.nii.gz')
        svs_str = os.path.join(outdir, 'svs_str.nii.gz')

        svs_acc_func = os.path.join(outdir, 'svs_acc_func.nii.gz')
        svs_tha_func = os.path.join(outdir, 'svs_tha_func.nii.gz')
        svs_str_func = os.path.join(outdir, 'svs_str_func.nii.gz')

        if not os.path.isfile(svs_acc_func):
            os.system('fslswapdim %s RL PA IS %s' % (svs_acc_src, svs_acc))
            os.system('fslswapdim %s RL PA IS %s' % (svs_tha_src, svs_tha))
            os.system('fslswapdim %s RL PA IS %s' % (svs_str_src, svs_str))

            os.system('flirt -in %s -ref %s -init %s -applyxfm -out %s' %
                      (svs_acc, anat_func, anat_func_xfm, svs_acc_func))
            os.system('flirt -in %s -ref %s -init %s -applyxfm -out %s' %
                      (svs_tha, anat_func, anat_func_xfm, svs_tha_func))
            os.system('flirt -in %s -ref %s -init %s -applyxfm -out %s' %
                      (svs_str, anat_func, anat_func_xfm, svs_str_func))
            os.system('fslmaths %s -thr 0.5 -bin %s' %
                      (svs_acc_func, svs_acc_func))
            os.system('fslmaths %s -thr 0.5 -bin %s' %
                      (svs_tha_func, svs_tha_func))
            os.system('fslmaths %s -thr 0.5 -bin %s' %
                      (svs_str_func, svs_str_func))

        #######################################   Grab freesurfer masks  #######################################
        print('3. grabbing Freesurfer masks')

        os.environ['SUBJECTS_DIR'] = freesurfer_dir

        t1mgz = os.path.join(freesurfer_dir, subject, 'mri', 'T1.mgz')
        segmgz = os.path.join(freesurfer_dir, subject, 'mri',
                              'aparc.a2009s+aseg.mgz')
        t1nii = os.path.join(outdir, 'freesurfer_T1.nii.gz')
        segnii = os.path.join(outdir, 'freesurfer_seg.nii.gz')

        fs_la_acc = os.path.join(outdir, 'freesurfer_seg_la_MCC_11107.nii.gz'
                                 )  # 11107  ctx_lh_G_and_S_cingul-Mid-Ant
        fs_ra_acc = os.path.join(outdir, 'freesurfer_seg_ra_MCC_12107.nii.gz'
                                 )  # 12107  ctx_lh_G_and_S_cingul-Mid-Ant
        fs_acc = os.path.join(outdir, 'freesurfer_seg_aMCC_11107_12107.nii.gz')

        fs_la_insula = os.path.join(outdir,
                                    'freesurfer_seg_la_INS_11148.nii.gz'
                                    )  # 11148  ctx_lh_S_circular_insula_ant
        fs_ra_insula = os.path.join(outdir,
                                    'freesurfer_seg_ra_INS_12148.nii.gz'
                                    )  # 12148  ctx_lh_S_circular_insula_ant

        if not os.path.isfile(fs_acc):
            os.system('mri_convert %s %s' % (t1mgz, t1nii))
            os.system('mri_convert %s %s' % (segmgz, segnii))

            os.system('fslmaths %s -thr 11107 -uthr 11107 %s ' %
                      (segnii, fs_la_acc))
            os.system('fslmaths %s -thr 12107 -uthr 12107 %s ' %
                      (segnii, fs_ra_acc))
            os.system('fslmaths %s -add %s -dilM -bin %s' %
                      (fs_la_acc, fs_ra_acc, fs_acc))

            os.system('fslmaths %s -thr 11148 -uthr 11148 -dilM -bin %s' %
                      (segnii, fs_la_insula))
            os.system('fslmaths %s -thr 12148 -uthr 12148 -dilM -bin %s' %
                      (segnii, fs_ra_insula))

        labels_dir = os.path.join(freesurfer_dir, subject, 'label')
        fs_ba6_rh = os.path.join(outdir, 'freesurfer_seg_SMA_BA6_rh.nii.gz')
        fs_ba6_lh = os.path.join(outdir, 'freesurfer_seg_SMA_BA6_lh.nii.gz')
        fs_sma = os.path.join(outdir, 'freesurfer_seg_SMA_BA6.nii.gz')

        if not os.path.isfile(fs_sma):
            os.system(
                'mri_label2vol --label %s/rh.BA6.thresh.label --subject %s --temp %s --regheader %s --o %s'
                % (labels_dir, subject, t1mgz, t1mgz, fs_ba6_rh))
            os.system(
                'mri_label2vol --label %s/lh.BA6.thresh.label --subject %s --temp %s --regheader %s --o %s'
                % (labels_dir, subject, t1mgz, t1mgz, fs_ba6_lh))
            os.system('fslmaths  %s -add %s -dilM -dilM %s' %
                      (fs_ba6_rh, fs_ba6_lh, fs_sma))

        #######################################   TRANSFORM Freesurfer masks to native func space   #######################################
        print('4. Transforming Freesurfer masks to native func space')
        t1nii_rpi = os.path.join(outdir, 'freesurfer_T1_RPI.nii.gz')
        fs_acc_rpi = os.path.join(
            outdir, 'freesurfer_seg_aMCC_11107_12107_RPI.nii.gz')
        fs_la_insula_rpi = os.path.join(
            outdir, 'freesurfer_seg_la_INS_11148_RPI.nii.gz')
        fs_ra_insula_rpi = os.path.join(
            outdir, 'freesurfer_seg_ra_INS_12148_RPI.nii.gz')
        fs_sma_rpi = os.path.join(outdir, 'freesurfer_seg_SMA_BA6_RPI.nii.gz')

        fst1omat = os.path.join(outdir, 'freesurfer2func.mat')
        fst1func = os.path.join(outdir, 'freesurfer_T1_func.nii.gz')
        fs_acc_func = os.path.join(
            outdir, 'freesurfer_seg_aMCC_11107_12107_func.nii.gz')
        fs_la_insula_func = os.path.join(
            outdir, 'freesurfer_seg_la_INS_11148_func.nii.gz')
        fs_ra_insula_func = os.path.join(
            outdir, 'freesurfer_seg_ra_INS_11148_func.nii.gz')
        fs_sma_func = os.path.join(outdir,
                                   'freesurfer_seg_SMA_BA6_func.nii.gz')

        if not os.path.isfile(t1nii_rpi):
            os.system('fslswapdim %s RL PA IS %s' % (t1nii, t1nii_rpi))
            os.system('fslswapdim %s RL PA IS %s' % (fs_acc, fs_acc_rpi))
            os.system('fslswapdim %s RL PA IS %s' %
                      (fs_la_insula, fs_la_insula_rpi))
            os.system('fslswapdim %s RL PA IS %s' %
                      (fs_ra_insula, fs_ra_insula_rpi))
            os.system('fslswapdim %s RL PA IS %s' % (fs_sma, fs_sma_rpi))
            os.system(
                'flirt -in %s -ref %s -omat %s -dof 6 -out %s -cost mutualinfo'
                % (t1nii_rpi, anat_func, fst1omat, fst1func))
            os.system('flirt -in %s -ref %s -init %s -applyxfm -out %s' %
                      (fs_acc_rpi, anat_func, fst1omat, fs_acc_func))
            os.system(
                'flirt -in %s -ref %s -init %s -applyxfm -out %s' %
                (fs_la_insula_rpi, anat_func, fst1omat, fs_la_insula_func))
            os.system(
                'flirt -in %s -ref %s -init %s -applyxfm -out %s' %
                (fs_ra_insula_rpi, anat_func, fst1omat, fs_ra_insula_func))
            os.system('flirt -in %s -ref %s -init %s -applyxfm -out %s' %
                      (fs_sma_rpi, anat_func, fst1omat, fs_sma_func))

            os.system('fslmaths  %s -thr 0.5 -bin %s' %
                      (fs_acc_func, fs_acc_func))
            os.system('fslmaths  %s -thr 0.5 -bin %s' %
                      (fs_la_insula_func, fs_la_insula_func))
            os.system('fslmaths  %s -thr 0.5 -bin %s' %
                      (fs_ra_insula_func, fs_ra_insula_func))
            os.system('fslmaths  %s -thr 0.5 -bin %s' %
                      (fs_sma_func, fs_sma_func))

        if os.path.isfile(fs_sma_func):
            sma_load = np.asarray(nb.load(fs_sma_func).dataobj)
            x, y, z = center_of_mass(sma_load)
            sma_point = os.path.join(outdir, 'sma_point.nii.gz')
            fs_sma_optimized = os.path.join(
                outdir, 'freesurfer_seg_SMA_BA6_func_opt.nii.gz')

            os.system(
                'fslmaths %s -mul 0 -add 1 -roi %s 1 %s 1 %s 1 0 1 %s -odt float'
                % (func_mean, x, y, z, sma_point))
            os.system(
                'fslmaths %s -kernel sphere 10 -fmean -dilM -dilM -ero -ero %s -odt float'
                % (sma_point, fs_sma_optimized))
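
            # A pure-Python alternative to the point-and-sphere fslmaths trick
            # above (a hedged sketch, not part of the original pipeline; it
            # reuses this script's nb/np/center_of_mass imports and mirrors
            # the 10 mm kernel):
            def _sphere_roi_at_com(mask_path, out_path, radius_mm=10.0):
                """Write a spherical ROI centered on the mask's center of mass."""
                img = nb.load(mask_path)
                dat = img.get_fdata()
                com = np.array(center_of_mass(dat > 0))        # voxel coords
                zooms = np.array(img.header.get_zooms()[:3])   # voxel size (mm)
                grid = np.indices(dat.shape).astype(float)
                dist = np.sqrt((((grid.T - com) * zooms) ** 2).sum(axis=-1)).T
                sphere = (dist <= radius_mm).astype('uint8')
                nb.save(nb.Nifti1Image(sphere, img.affine), out_path)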

        #######################################   GET MOTION PARAMS   #######################################
        print('5. Grabbing motion parameters')

        motion = os.path.join(
            subject_dir,
            'functional_motion_statistics/motion_power_params.txt')
        if os.path.isfile(motion):
            power = pd.read_csv(motion)
            exclude = power.loc[subject][' FD_exclude']
            # NOTE: 'Subject' is unlikely to be the intended FD column;
            # verify the column names in motion_power_params.txt
            fd = power.loc[subject]['Subject']
        if os.path.isfile(func_aroma) and os.path.isfile(func_gm):
            dvars = np.mean(return_DVARS(func_aroma, func_gm))
            print(dvars)
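
        # return_DVARS is defined elsewhere in this script; the sketch below
        # is a hedged stand-in showing the usual reading of DVARS (RMS across
        # in-mask voxels of the frame-to-frame signal difference):
        def _dvars_sketch(func_path, mask_path):
            func_data = nb.load(func_path).get_fdata()      # (X, Y, Z, T)
            mask = nb.load(mask_path).get_fdata() > 0       # (X, Y, Z)
            ts = func_data[mask]                            # (n_voxels, T)
            return np.sqrt(np.mean(np.diff(ts, axis=1) ** 2, axis=0))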

        #######################################   GEN TIMESERIES OF ROIs   #######################################
        print('6. Extracting timeseries and calculating connectivity')

        if os.path.isfile(func_pproc):

            stn_timeseries = input_data.NiftiLabelsMasker(
                labels_img=STN_LEFT,
                standardize=True).fit_transform(func_pproc)
            sn_timeseries = input_data.NiftiLabelsMasker(
                labels_img=SN_LEFT, standardize=True).fit_transform(func_pproc)

            str_timeseries = input_data.NiftiLabelsMasker(
                labels_img=STR, standardize=True).fit_transform(func_pproc)
            tha_timeseries = input_data.NiftiLabelsMasker(
                labels_img=THA, standardize=True).fit_transform(func_pproc)
            thaL_timeseries = input_data.NiftiLabelsMasker(
                labels_img=lTHA, standardize=True).fit_transform(func_pproc)
            thaR_timeseries = input_data.NiftiLabelsMasker(
                labels_img=rTHA, standardize=True).fit_transform(func_pproc)

            cau_timeseries = input_data.NiftiLabelsMasker(
                labels_img=CAU, standardize=True).fit_transform(func_pproc)
            put_timeseries = input_data.NiftiLabelsMasker(
                labels_img=PUT, standardize=True).fit_transform(func_pproc)
            pal_timeseries = input_data.NiftiLabelsMasker(
                labels_img=PAL, standardize=True).fit_transform(func_pproc)
            nac_timeseries = input_data.NiftiLabelsMasker(
                labels_img=NAC, standardize=True).fit_transform(func_pproc)
            hip_timeseries = input_data.NiftiLabelsMasker(
                labels_img=HIP, standardize=True).fit_transform(func_pproc)
            amg_timeseries = input_data.NiftiLabelsMasker(
                labels_img=AMG, standardize=True).fit_transform(func_pproc)

            mACC_timeseries = input_data.NiftiLabelsMasker(
                labels_img=fs_acc_func,
                standardize=True).fit_transform(func_pproc)
            lINS_timeseries = input_data.NiftiLabelsMasker(
                labels_img=fs_la_insula_func,
                standardize=True).fit_transform(func_pproc)
            rINS_timeseries = input_data.NiftiLabelsMasker(
                labels_img=fs_ra_insula_func,
                standardize=True).fit_transform(func_pproc)
            SMA_timeseries = input_data.NiftiLabelsMasker(
                labels_img=fs_sma_optimized,
                standardize=True).fit_transform(func_pproc)

            mACCX_timeseries = input_data.NiftiLabelsMasker(
                labels_img=svs_acc_func,
                standardize=True).fit_transform(func_pproc)
            strX_timeseries = input_data.NiftiLabelsMasker(
                labels_img=svs_str_func,
                standardize=True).fit_transform(func_pproc)
            thaX_timeseries = input_data.NiftiLabelsMasker(
                labels_img=svs_tha_func,
                standardize=True).fit_transform(func_pproc)

            print('......calculating Subthalamic Nucleus connectivity')
            df.loc[subject]['stn_pal'] = float(
                pearsonr(stn_timeseries, pal_timeseries)[0])
            df.loc[subject]['stn_acc'] = float(
                pearsonr(stn_timeseries, mACC_timeseries)[0])
            df.loc[subject]['stn_tha'] = float(
                pearsonr(stn_timeseries, tha_timeseries)[0])
            df.loc[subject]['stn_thaX'] = float(
                pearsonr(stn_timeseries, thaX_timeseries)[0])
            df.loc[subject]['stn_thaL'] = float(
                pearsonr(stn_timeseries, thaL_timeseries)[0])
            df.loc[subject]['stn_thaR'] = float(
                pearsonr(stn_timeseries, thaR_timeseries)[0])
            df.loc[subject]['stn_hip'] = float(
                pearsonr(stn_timeseries, hip_timeseries)[0])
            df.loc[subject]['stn_amg'] = float(
                pearsonr(stn_timeseries, amg_timeseries)[0])
            df.loc[subject]['stn_accX'] = float(
                pearsonr(stn_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['stn_lins'] = float(
                pearsonr(stn_timeseries, lINS_timeseries)[0])
            df.loc[subject]['stn_rins'] = float(
                pearsonr(stn_timeseries, rINS_timeseries)[0])
            df.loc[subject]['stn_sma'] = float(
                pearsonr(stn_timeseries, SMA_timeseries)[0])
            df.loc[subject]['stn_strX'] = float(
                pearsonr(stn_timeseries, strX_timeseries)[0])
            df.loc[subject]['stn_str'] = float(
                pearsonr(stn_timeseries, str_timeseries)[0])
            df.loc[subject]['stn_cau'] = float(
                pearsonr(stn_timeseries, cau_timeseries)[0])
            df.loc[subject]['stn_put'] = float(
                pearsonr(stn_timeseries, put_timeseries)[0])
            df.loc[subject]['stn_nac'] = float(
                pearsonr(stn_timeseries, nac_timeseries)[0])

            print('......calculating Substantia Nigra connectivity')
            df.loc[subject]['sn_pal'] = float(
                pearsonr(sn_timeseries, pal_timeseries)[0])
            df.loc[subject]['sn_acc'] = float(
                pearsonr(sn_timeseries, mACC_timeseries)[0])
            df.loc[subject]['sn_tha'] = float(
                pearsonr(sn_timeseries, tha_timeseries)[0])
            df.loc[subject]['sn_thaX'] = float(
                pearsonr(sn_timeseries, thaX_timeseries)[0])
            df.loc[subject]['sn_thaL'] = float(
                pearsonr(sn_timeseries, thaL_timeseries)[0])
            df.loc[subject]['sn_thaR'] = float(
                pearsonr(sn_timeseries, thaR_timeseries)[0])
            df.loc[subject]['sn_hip'] = float(
                pearsonr(sn_timeseries, hip_timeseries)[0])
            df.loc[subject]['sn_amg'] = float(
                pearsonr(sn_timeseries, amg_timeseries)[0])
            df.loc[subject]['sn_accX'] = float(
                pearsonr(sn_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['sn_lins'] = float(
                pearsonr(sn_timeseries, lINS_timeseries)[0])
            df.loc[subject]['sn_rins'] = float(
                pearsonr(sn_timeseries, rINS_timeseries)[0])
            df.loc[subject]['sn_sma'] = float(
                pearsonr(sn_timeseries, SMA_timeseries)[0])
            df.loc[subject]['sn_strX'] = float(
                pearsonr(sn_timeseries, strX_timeseries)[0])
            df.loc[subject]['sn_str'] = float(
                pearsonr(sn_timeseries, str_timeseries)[0])
            df.loc[subject]['sn_cau'] = float(
                pearsonr(sn_timeseries, cau_timeseries)[0])
            df.loc[subject]['sn_put'] = float(
                pearsonr(sn_timeseries, put_timeseries)[0])
            df.loc[subject]['sn_nac'] = float(
                pearsonr(sn_timeseries, nac_timeseries)[0])

            print('......calculating STR_SVS connectivity')
            df.loc[subject]['strX_acc'] = float(
                pearsonr(strX_timeseries, mACC_timeseries)[0])
            df.loc[subject]['strX_tha'] = float(
                pearsonr(strX_timeseries, tha_timeseries)[0])
            df.loc[subject]['strX_thaX'] = float(
                pearsonr(strX_timeseries, thaX_timeseries)[0])
            df.loc[subject]['strX_thaL'] = float(
                pearsonr(strX_timeseries, thaL_timeseries)[0])
            df.loc[subject]['strX_thaR'] = float(
                pearsonr(strX_timeseries, thaR_timeseries)[0])
            df.loc[subject]['strX_hip'] = float(
                pearsonr(strX_timeseries, hip_timeseries)[0])
            df.loc[subject]['strX_amg'] = float(
                pearsonr(strX_timeseries, amg_timeseries)[0])
            df.loc[subject]['strX_accX'] = float(
                pearsonr(strX_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['strX_lins'] = float(
                pearsonr(strX_timeseries, lINS_timeseries)[0])
            df.loc[subject]['strX_rins'] = float(
                pearsonr(strX_timeseries, rINS_timeseries)[0])
            df.loc[subject]['strX_sma'] = float(
                pearsonr(strX_timeseries, SMA_timeseries)[0])

            print('......calculating STR connectivity')
            df.loc[subject]['str_acc'] = float(
                pearsonr(str_timeseries, mACC_timeseries)[0])
            df.loc[subject]['str_tha'] = float(
                pearsonr(str_timeseries, tha_timeseries)[0])
            df.loc[subject]['str_thaX'] = float(
                pearsonr(str_timeseries, thaX_timeseries)[0])
            df.loc[subject]['str_thaL'] = float(
                pearsonr(str_timeseries, thaL_timeseries)[0])
            df.loc[subject]['str_thaR'] = float(
                pearsonr(str_timeseries, thaR_timeseries)[0])
            df.loc[subject]['str_hip'] = float(
                pearsonr(str_timeseries, hip_timeseries)[0])
            df.loc[subject]['str_amg'] = float(
                pearsonr(str_timeseries, amg_timeseries)[0])
            df.loc[subject]['str_accX'] = float(
                pearsonr(str_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['str_lins'] = float(
                pearsonr(str_timeseries, lINS_timeseries)[0])
            df.loc[subject]['str_rins'] = float(
                pearsonr(str_timeseries, rINS_timeseries)[0])
            df.loc[subject]['str_sma'] = float(
                pearsonr(str_timeseries, SMA_timeseries)[0])

            print('......calculating CAUDATE connectivity')
            df.loc[subject]['cau_acc'] = float(
                pearsonr(cau_timeseries, mACC_timeseries)[0])
            df.loc[subject]['cau_tha'] = float(
                pearsonr(cau_timeseries, tha_timeseries)[0])
            df.loc[subject]['cau_thaX'] = float(
                pearsonr(cau_timeseries, thaX_timeseries)[0])
            df.loc[subject]['cau_thaL'] = float(
                pearsonr(cau_timeseries, thaL_timeseries)[0])
            df.loc[subject]['cau_thaR'] = float(
                pearsonr(cau_timeseries, thaR_timeseries)[0])
            df.loc[subject]['cau_pal'] = float(
                pearsonr(cau_timeseries, pal_timeseries)[0])
            df.loc[subject]['cau_hip'] = float(
                pearsonr(cau_timeseries, hip_timeseries)[0])
            df.loc[subject]['cau_amg'] = float(
                pearsonr(cau_timeseries, amg_timeseries)[0])
            df.loc[subject]['cau_accX'] = float(
                pearsonr(cau_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['cau_lins'] = float(
                pearsonr(cau_timeseries, lINS_timeseries)[0])
            df.loc[subject]['cau_rins'] = float(
                pearsonr(cau_timeseries, rINS_timeseries)[0])
            df.loc[subject]['cau_sma'] = float(
                pearsonr(cau_timeseries, SMA_timeseries)[0])

            print('......calculating PUTAMEN connectivity')
            df.loc[subject]['put_tha'] = float(
                pearsonr(put_timeseries, tha_timeseries)[0])
            df.loc[subject]['put_thaX'] = float(
                pearsonr(put_timeseries, thaX_timeseries)[0])
            df.loc[subject]['put_thaL'] = float(
                pearsonr(put_timeseries, thaL_timeseries)[0])
            df.loc[subject]['put_thaR'] = float(
                pearsonr(put_timeseries, thaR_timeseries)[0])
            df.loc[subject]['put_pal'] = float(
                pearsonr(put_timeseries, pal_timeseries)[0])
            df.loc[subject]['put_hip'] = float(
                pearsonr(put_timeseries, hip_timeseries)[0])
            df.loc[subject]['put_amg'] = float(
                pearsonr(put_timeseries, amg_timeseries)[0])
            df.loc[subject]['put_acc'] = float(
                pearsonr(put_timeseries, mACC_timeseries)[0])
            df.loc[subject]['put_accX'] = float(
                pearsonr(put_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['put_lins'] = float(
                pearsonr(put_timeseries, lINS_timeseries)[0])
            df.loc[subject]['put_rins'] = float(
                pearsonr(put_timeseries, rINS_timeseries)[0])
            df.loc[subject]['put_sma'] = float(
                pearsonr(put_timeseries, SMA_timeseries)[0])

            print('......calculating NUCLEUS ACCUMBENS connectivity')
            df.loc[subject]['nac_tha'] = float(
                pearsonr(nac_timeseries, tha_timeseries)[0])
            df.loc[subject]['nac_thaX'] = float(
                pearsonr(nac_timeseries, thaX_timeseries)[0])
            df.loc[subject]['nac_thaL'] = float(
                pearsonr(nac_timeseries, thaL_timeseries)[0])
            df.loc[subject]['nac_thaR'] = float(
                pearsonr(nac_timeseries, thaR_timeseries)[0])
            df.loc[subject]['nac_pal'] = float(
                pearsonr(nac_timeseries, pal_timeseries)[0])
            df.loc[subject]['nac_hip'] = float(
                pearsonr(nac_timeseries, hip_timeseries)[0])
            df.loc[subject]['nac_amg'] = float(
                pearsonr(nac_timeseries, amg_timeseries)[0])
            df.loc[subject]['nac_acc'] = float(
                pearsonr(nac_timeseries, mACC_timeseries)[0])
            df.loc[subject]['nac_accX'] = float(
                pearsonr(nac_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['nac_lins'] = float(
                pearsonr(nac_timeseries, lINS_timeseries)[0])
            df.loc[subject]['nac_rins'] = float(
                pearsonr(nac_timeseries, rINS_timeseries)[0])
            df.loc[subject]['nac_sma'] = float(
                pearsonr(nac_timeseries, SMA_timeseries)[0])

            print('......calculating PALLIDUM connectivity')
            df.loc[subject]['pal_tha'] = float(
                pearsonr(pal_timeseries, tha_timeseries)[0])
            df.loc[subject]['pal_thaX'] = float(
                pearsonr(pal_timeseries, thaX_timeseries)[0])
            df.loc[subject]['pal_thaL'] = float(
                pearsonr(pal_timeseries, thaL_timeseries)[0])
            df.loc[subject]['pal_thaR'] = float(
                pearsonr(pal_timeseries, thaR_timeseries)[0])
            df.loc[subject]['pal_hip'] = float(
                pearsonr(pal_timeseries, hip_timeseries)[0])
            df.loc[subject]['pal_amg'] = float(
                pearsonr(pal_timeseries, amg_timeseries)[0])
            df.loc[subject]['pal_acc'] = float(
                pearsonr(pal_timeseries, mACC_timeseries)[0])
            df.loc[subject]['pal_accX'] = float(
                pearsonr(pal_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['pal_lins'] = float(
                pearsonr(pal_timeseries, lINS_timeseries)[0])
            df.loc[subject]['pal_rins'] = float(
                pearsonr(pal_timeseries, rINS_timeseries)[0])
            df.loc[subject]['pal_sma'] = float(
                pearsonr(pal_timeseries, SMA_timeseries)[0])

            print('......calculating THA_SVS connectivity')
            df.loc[subject]['thaX_cau'] = float(
                pearsonr(thaX_timeseries, cau_timeseries)[0])
            df.loc[subject]['thaX_put'] = float(
                pearsonr(thaX_timeseries, put_timeseries)[0])
            df.loc[subject]['thaX_pal'] = float(
                pearsonr(thaX_timeseries, pal_timeseries)[0])
            df.loc[subject]['thaX_nac'] = float(
                pearsonr(thaX_timeseries, nac_timeseries)[0])
            df.loc[subject]['thaX_hip'] = float(
                pearsonr(thaX_timeseries, hip_timeseries)[0])
            df.loc[subject]['thaX_amg'] = float(
                pearsonr(thaX_timeseries, amg_timeseries)[0])
            df.loc[subject]['thaX_acc'] = float(
                pearsonr(thaX_timeseries, mACC_timeseries)[0])
            df.loc[subject]['thaX_accX'] = float(
                pearsonr(thaX_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['thaX_lins'] = float(
                pearsonr(thaX_timeseries, lINS_timeseries)[0])
            df.loc[subject]['thaX_rins'] = float(
                pearsonr(thaX_timeseries, rINS_timeseries)[0])
            df.loc[subject]['thaX_sma'] = float(
                pearsonr(thaX_timeseries, SMA_timeseries)[0])

            print('......calculating THALAMUS FULL connectivity')
            df.loc[subject]['tha_cau'] = float(
                pearsonr(tha_timeseries, cau_timeseries)[0])
            df.loc[subject]['tha_put'] = float(
                pearsonr(tha_timeseries, put_timeseries)[0])
            df.loc[subject]['tha_pal'] = float(
                pearsonr(tha_timeseries, pal_timeseries)[0])
            df.loc[subject]['tha_nac'] = float(
                pearsonr(tha_timeseries, nac_timeseries)[0])
            df.loc[subject]['tha_hip'] = float(
                pearsonr(tha_timeseries, hip_timeseries)[0])
            df.loc[subject]['tha_amg'] = float(
                pearsonr(tha_timeseries, amg_timeseries)[0])
            df.loc[subject]['tha_acc'] = float(
                pearsonr(tha_timeseries, mACC_timeseries)[0])
            df.loc[subject]['tha_accX'] = float(
                pearsonr(tha_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['tha_lins'] = float(
                pearsonr(tha_timeseries, lINS_timeseries)[0])
            df.loc[subject]['tha_rins'] = float(
                pearsonr(tha_timeseries, rINS_timeseries)[0])
            df.loc[subject]['tha_sma'] = float(
                pearsonr(tha_timeseries, SMA_timeseries)[0])

            print('......calculating THALAMUS RIGHT connectivity')
            df.loc[subject]['thaR_cau'] = float(
                pearsonr(thaR_timeseries, cau_timeseries)[0])
            df.loc[subject]['thaR_put'] = float(
                pearsonr(thaR_timeseries, put_timeseries)[0])
            df.loc[subject]['thaR_pal'] = float(
                pearsonr(thaR_timeseries, pal_timeseries)[0])
            df.loc[subject]['thaR_nac'] = float(
                pearsonr(thaR_timeseries, nac_timeseries)[0])
            df.loc[subject]['thaR_hip'] = float(
                pearsonr(thaR_timeseries, hip_timeseries)[0])
            df.loc[subject]['thaR_amg'] = float(
                pearsonr(thaR_timeseries, amg_timeseries)[0])
            df.loc[subject]['thaR_acc'] = float(
                pearsonr(thaR_timeseries, mACC_timeseries)[0])
            df.loc[subject]['thaR_accX'] = float(
                pearsonr(thaR_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['thaR_lins'] = float(
                pearsonr(thaR_timeseries, lINS_timeseries)[0])
            df.loc[subject]['thaR_rins'] = float(
                pearsonr(thaR_timeseries, rINS_timeseries)[0])
            df.loc[subject]['thaR_sma'] = float(
                pearsonr(thaR_timeseries, SMA_timeseries)[0])

            print('......calculating THALAMUS LEFT connectivity')
            df.loc[subject]['thaL_cau'] = float(
                pearsonr(thaL_timeseries, cau_timeseries)[0])
            df.loc[subject]['thaL_put'] = float(
                pearsonr(thaL_timeseries, put_timeseries)[0])
            df.loc[subject]['thaL_pal'] = float(
                pearsonr(thaL_timeseries, pal_timeseries)[0])
            df.loc[subject]['thaL_nac'] = float(
                pearsonr(thaL_timeseries, nac_timeseries)[0])
            df.loc[subject]['thaL_hip'] = float(
                pearsonr(thaL_timeseries, hip_timeseries)[0])
            df.loc[subject]['thaL_amg'] = float(
                pearsonr(thaL_timeseries, amg_timeseries)[0])
            df.loc[subject]['thaL_acc'] = float(
                pearsonr(thaL_timeseries, mACC_timeseries)[0])
            df.loc[subject]['thaL_accX'] = float(
                pearsonr(thaL_timeseries, mACCX_timeseries)[0])
            df.loc[subject]['thaL_lins'] = float(
                pearsonr(thaL_timeseries, lINS_timeseries)[0])
            df.loc[subject]['thaL_rins'] = float(
                pearsonr(thaL_timeseries, rINS_timeseries)[0])
            df.loc[subject]['thaL_sma'] = float(
                pearsonr(thaL_timeseries, SMA_timeseries)[0])

            print('......calculating ACC connectivity')
            df.loc[subject]['acc_lins'] = float(
                pearsonr(mACC_timeseries, lINS_timeseries)[0])
            df.loc[subject]['acc_rins'] = float(
                pearsonr(mACC_timeseries, rINS_timeseries)[0])
            df.loc[subject]['acc_sma'] = float(
                pearsonr(mACC_timeseries, SMA_timeseries)[0])
            df.loc[subject]['accX_lins'] = float(
                pearsonr(mACCX_timeseries, lINS_timeseries)[0])
            df.loc[subject]['accX_rins'] = float(
                pearsonr(mACCX_timeseries, rINS_timeseries)[0])
            df.loc[subject]['accX_sma'] = float(
                pearsonr(mACCX_timeseries, SMA_timeseries)[0])

            print('......calculating SMA connectivity')
            df.loc[subject]['sma_lins'] = float(
                pearsonr(SMA_timeseries, lINS_timeseries)[0])
            df.loc[subject]['sma_rins'] = float(
                pearsonr(SMA_timeseries, rINS_timeseries)[0])

            df.loc[subject]['fd'] = fd
            df.loc[subject]['exclude'] = exclude
            df.loc[subject]['dvars'] = dvars

    df.to_csv(
        os.path.join(workspace_dir, 'GluConnectivity',
                     'x4_RSFC_df_%s_%s.csv' % (pop_name, mrs_datadir[-1])))
    print('done')
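
The long correlation blocks above all follow one pattern: correlate a seed time series with a target time series and store the r value under a 'seed_target' column. A loop-based sketch that fills the same columns with far less repetition (the seed/target dictionaries are illustrative, and `df.loc[row, col]` sidesteps pandas chained-assignment pitfalls):

from scipy.stats import pearsonr

def fill_connectivity(df, subject, seeds, targets):
    """Store Pearson r for every seed/target pair as '<seed>_<target>'."""
    for seed_name, seed_ts in seeds.items():
        for target_name, target_ts in targets.items():
            r, _ = pearsonr(seed_ts.squeeze(), target_ts.squeeze())
            df.loc[subject, '%s_%s' % (seed_name, target_name)] = float(r)

# e.g. fill_connectivity(df, subject,
#                        seeds={'stn': stn_timeseries, 'sn': sn_timeseries},
#                        targets={'pal': pal_timeseries, 'acc': mACC_timeseries})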
Exemplo n.º 25
0
def connectivity(layout,
                 subject,
                 session,
                 task,
                 atlas,
                 connectivity_metric='correlation',
                 confounds=None,
                 out_dir=None):
    """
    Makes connectivity matrices per subject per session per task per condition.
    Parameters
    ----------
    layout : str
        BIDS layout with derivatives indexed from pyBIDS
    subject : str
        Subject ID for which the networks will be calculated.
    session : str, optional
        Session of data collection. If there's only one session, we'll find it.
    connectivity_metric : {"correlation", "partial correlation", "tangent",\
                           "covariance", "precision"}, optional
        The matrix kind. Passed to Nilearn's `ConnectivityMeasure`.
    space : str
        'native' if analyses will be performed in subjects' functional native space (atlas(es) should be transformed)
        'mni152-2mm' if analyses will be performed in MNI125 2mm isotropic space (fMRI data should already be transformed)
    atlas : str
        Name of atlas for parcellating voxels into nodes, must be in the same `space` given above.
    confounds : list-like
        Names of confounds (should be columns in fmriprep output confounds.tsv).
    Returns
    -------
    adjacency_matrix
    """
    try:
        version = idconn.__version__
    except Exception:
        version = 'test'
    if '.nii' in atlas:
        assert exists(atlas), f'Atlas file does not exist at {atlas}'

    if not out_dir:
        deriv_dir = join(layout.root, 'derivatives', f'idconn-{version}')
    else:
        deriv_dir = out_dir
    atlas_name = basename(atlas).rsplit('.', 2)[0]
    # use pybids here to grab # of runs and preproc bold filenames
    connectivity_measure = connectome.ConnectivityMeasure(
        kind=connectivity_metric)
    bold_files = layout.get(
        scope='derivatives',
        return_type='file',
        suffix='bold',
        task=task,
        space='MNI152NLin2009cAsym',
        subject=subject,
        session=session,
        extension='nii.gz'
    )  # should be preprocessed BOLD file from fmriprep, grabbed with pybids
    print(f'BOLD files found at {bold_files}')
    confounds_files = layout.get(scope='derivatives',
                                 return_type='file',
                                 desc='confounds',
                                 subject=subject,
                                 session=session,
                                 task=task)

    runs = []
    if len(bold_files) > 1:
        for i in range(0, len(bold_files)):
            assert exists(
                bold_files[i]
            ), "Preprocessed BOLD file(s) not found at {0}".format(
                bold_files)
            runs.append(layout.parse_file_entities(bold_files[i])['run'])
    else:
        runs = None
    print(f'Found runs: {runs}')

    out = join(deriv_dir, f'sub-{subject}', f'ses-{session}', 'func')
    if not exists(out):
        makedirs(out)

    #event_files = layout.get(return_type='filename', suffix='events', task=task, subject=subject)
    #timing = pd.read_csv(event_files[0], header=0, index_col=0, sep='\t')
    #conditions = timing['trial_type'].unique()

    if runs:
        corrmats = {}
        for run in runs:
            print('run = ', run)
            # read in events file for this subject, task, and run

            confounds_file = layout.get(scope='derivatives',
                                        return_type='file',
                                        desc='confounds',
                                        subject=subject,
                                        session=session,
                                        task=task,
                                        run=run,
                                        extension='tsv')
            print(f'Confounds file located at: {confounds_file}')
            confounds_df = pd.read_csv(confounds_file[0], header=0, sep='\t')
            confounds_df = confounds_df[confounds].fillna(0)
            confounds_fname = join(
                deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-confounds_timeseries.tsv'
            )
            confounds_df.to_csv(confounds_fname, sep='\t')

            bold_file = layout.get(scope='derivatives',
                                   return_type='file',
                                   suffix='bold',
                                   task=task,
                                   space='MNI152NLin2009cAsym',
                                   subject=subject,
                                   session=session,
                                   extension='nii.gz',
                                   run=run)
            assert len(
                bold_file
            ) == 1, f'BOLD file improperly specified, more than one .nii.gz file with {subject}, {session}, {task}, {run}: {bold_file}'
            tr = layout.get_tr(bold_file)
            masker = input_data.NiftiLabelsMasker(atlas,
                                                  standardize=True,
                                                  t_r=tr,
                                                  verbose=2)

            ex_bold = image.index_img(bold_file[0], 2)
            display = plotting.plot_epi(ex_bold)
            display.add_contours(atlas)
            display.savefig(
                join(
                    deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                    f'sub-{subject}_ses-{session}_task-{task}_run-{run}_desc-atlas_overlay.png'
                ))

            print(f'BOLD file located at {bold_file}\nTR = {tr}s')
            try:
                #for each parcellation, extract BOLD timeseries
                print(
                    f'Extracting bold signal for sub-{subject}, ses-{session}, run-{run}...'
                )
                timeseries = masker.fit_transform(bold_file[0],
                                                  confounds_fname)
            except Exception as e:
                print('ERROR: Trying to extract BOLD signals, but', e)
            try:
                print(
                    f'Making correlation matrix for sub-{subject}, ses-{session}, task-{task}, run-{run}...'
                )
                corrmats[run] = connectivity_measure.fit_transform(
                    [timeseries])[0]
            except Exception as e:
                print('ERROR: Trying to make corrmat, but', e)
        data = list(corrmats.values())
        stacked_corrmats = np.array(data)
        print('Stacked corrmats have dimensions', stacked_corrmats.shape)
        avg_corrmat = np.mean(stacked_corrmats, axis=0)
    else:
        confounds_file = layout.get(scope='derivatives',
                                    return_type='file',
                                    desc='confounds',
                                    subject=subject,
                                    session=session,
                                    task=task,
                                    extension='tsv')
        print(f'Confounds file located at: {confounds_file}')
        confounds_df = pd.read_csv(confounds_file[0], header=0, sep='\t')
        confounds_df = confounds_df[confounds].fillna(0)
        confounds_fname = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_desc-confounds_timeseries.tsv'
        )
        confounds_df.to_csv(confounds_fname, sep='\t')

        bold_file = layout.get(scope='derivatives',
                               return_type='file',
                               suffix='bold',
                               task=task,
                               space='MNI152NLin2009cAsym',
                               subject=subject,
                               session=session,
                               extension='nii.gz')
        assert len(
            bold_file
        ) == 1, f'BOLD file improperly specified, more than one .nii.gz file with {subject}, {session}, {task}: {bold_file}'
        tr = layout.get_tr(bold_file)
        masker = input_data.NiftiLabelsMasker(atlas,
                                              standardize=True,
                                              t_r=tr,
                                              verbose=2)

        ex_bold = image.index_img(bold_file[0], 2)
        display = plotting.plot_epi(ex_bold)
        display.add_contours(atlas)
        display.savefig(
            join(
                deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_task-{task}_desc-atlas_overlay.png'
            ))

        print(f'BOLD file located at {bold_file}\nTR = {tr}s')
        try:
            #for each parcellation, extract BOLD timeseries
            print(
                f'Extracting bold signal for sub-{subject}, ses-{session}...')
            timeseries = masker.fit_transform(bold_file[0], confounds_fname)
        except Exception as e:
            print('ERROR: Trying to extract BOLD signals, but', e)
        try:
            print(
                f'Making correlation matrix for sub-{subject}, ses-{session}...'
            )
            avg_corrmat = connectivity_measure.fit_transform([timeseries])[0]
        except Exception as e:
            print('ERROR: Trying to make corrmat, but', e)

    print('Correlation matrix created, dimensions:', avg_corrmat.shape)
    try:
        corrmat_df = pd.DataFrame(index=np.arange(1, avg_corrmat.shape[0] + 1),
                                  columns=np.arange(1,
                                                    avg_corrmat.shape[0] + 1),
                                  data=avg_corrmat)
        corrmat_file = join(
            deriv_dir, f'sub-{subject}', f'ses-{session}', 'func',
            f'sub-{subject}_ses-{session}_task-{task}_space-MNI152NLin2009cAsym_atlas-{atlas_name}_desc-corrmat_bold.tsv'
        )
        corrmat_df.to_csv(corrmat_file, sep='\t')
    except Exception as e:
        print('ERROR saving corrmat...', e)
    return corrmat_df, corrmat_file
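
A hedged usage sketch for the function above (paths, IDs, and confound names are illustrative; assumes fmriprep derivatives indexed by pyBIDS):

from bids import BIDSLayout

layout = BIDSLayout('/data/my_study', derivatives=True)  # illustrative path
corrmat_df, corrmat_file = connectivity(
    layout,
    subject='01',
    session='01',
    task='rest',
    atlas='/data/atlases/schaefer400_mni.nii.gz',  # illustrative atlas path
    connectivity_metric='correlation',
    confounds=['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z'])
print(corrmat_df.shape)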
Exemplo n.º 26
0

mdtb_dir = '/Shared/lss_kahwang_hpc/data/MDTB/'
tomoya_dir = '/Shared/lss_kahwang_hpc/data/Tomoya/'

mdtb_subs = [
    'sub-02', 'sub-04', 'sub-09', 'sub-14', 'sub-17', 'sub-19', 'sub-21',
    'sub-24', 'sub-26', 'sub-28', 'sub-31', 'sub-03', 'sub-06', 'sub-12',
    'sub-15', 'sub-18', 'sub-20', 'sub-22', 'sub-25', 'sub-27', 'sub-30'
]
tomoya_subs = ['sub-01', 'sub-02', 'sub-03', 'sub-04', 'sub-05', 'sub-06']

Schaefer400 = nib.load(
    '/Shared/lss_kahwang_hpc/ROIs/Schaefer2018_400Parcels_7Networks_order_FSLMNI152_2mm.nii.gz'
)
Schaefer400_masker = input_data.NiftiLabelsMasker(Schaefer400)
mni_thalamus_mask = nib.load(
    "/Shared/lss_kahwang_hpc/ROIs//mni_atlas/MNI_thalamus_2mm.nii.gz")
mni_thalamus_masker = input_data.NiftiMasker(mni_thalamus_mask)

mdtb_fcmats = np.zeros((2445, 400, len(mdtb_subs)))
for i, sub in enumerate(mdtb_subs):
    func_file = nib.load(mdtb_dir + "3dDeconvolve/" + sub +
                         "/FIRmodel_errts_block.nii.gz")

    cortical_ts = Schaefer400_masker.fit_transform(func_file)
    roi_ts = mni_thalamus_masker.fit_transform(
        func_file)  #roi_ts = data[np.nonzero(mni_thalamus_mask.get_fdata())]

    roi_ts = np.delete(roi_ts, np.where(roi_ts.mean(axis=1) == 0)[0], axis=0)
    cortical_ts = np.delete(cortical_ts,
                            np.where(cortical_ts.mean(axis=1) == 0)[0],
                            axis=0)
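
The snippet is cut off in the source; given that `mdtb_fcmats` is preallocated as (2445 presumed thalamic voxels, 400 parcels, subjects), the likely next step is a voxel-to-parcel correlation. A hedged sketch (`voxelwise_fc` is illustrative, not from the original):

def voxelwise_fc(roi_ts, cortical_ts):
    """Pearson r between every column pair of two (timepoints, units) arrays."""
    roi_z = (roi_ts - roi_ts.mean(axis=0)) / roi_ts.std(axis=0)
    ctx_z = (cortical_ts - cortical_ts.mean(axis=0)) / cortical_ts.std(axis=0)
    return roi_z.T.dot(ctx_z) / roi_ts.shape[0]   # (n_voxels, n_parcels)

# inside the loop above: mdtb_fcmats[:, :, i] = voxelwise_fc(roi_ts, cortical_ts)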
Exemplo n.º 27
0
    def transform(self, result):
        """Apply the analysis to a MetaResult.

        Parameters
        ----------
        result : :obj:`~nimare.results.MetaResult`
            A MetaResult produced by a coordinate- or image-based meta-analysis.

        Returns
        -------
        contribution_table : :obj:`pandas.DataFrame`
            A DataFrame with information about relative contributions of each experiment to each
            cluster in the thresholded map.
            There is one row for each experiment, as well as one more row at the top of the table
            (below the header), which has the center of mass of each cluster.
            The centers of mass are not guaranteed to fall within the actual clusters, but can
            serve as a useful heuristic for identifying them.
            There is one column for each cluster, with column names being integers indicating the
            cluster's associated value in the ``labeled_cluster_img`` output.
        labeled_cluster_img : :obj:`nibabel.nifti1.Nifti1Image`
            The labeled, thresholded map that is used to identify clusters characterized by this
            analysis.
            Each cluster in the map has a single value, which corresponds to the cluster's column
            name in ``contribution_table``.
        """
        if not hasattr(result.estimator, "dataset"):
            raise AttributeError(
                "MetaResult was not generated by an Estimator with a `dataset` attribute. "
                "This may be because the Estimator was a pairwise Estimator. "
                "The Jackknife method does not currently work with pairwise Estimators."
            )
        dset = result.estimator.dataset
        # We need to copy the estimator because it will otherwise overwrite the original version
        # with one missing a study in its inputs.
        estimator = copy.deepcopy(result.estimator)
        original_masker = estimator.masker

        # Collect the thresholded cluster map
        if self.target_image in result.maps:
            target_img = result.get_map(self.target_image, return_type="image")
        else:
            available_maps = [f"'{m}'" for m in result.maps.keys()]
            raise ValueError(
                f"Target image ('{self.target_image}') not present in result. "
                f"Available maps in result are: {', '.join(available_maps)}.")

        if self.voxel_thresh:
            thresh_img = image.threshold_img(target_img, self.voxel_thresh)
        else:
            thresh_img = target_img

        thresh_arr = thresh_img.get_fdata()

        # CBMAs have "stat" maps, while most IBMAs have "est" maps.
        # Fisher's and Stouffer's only have "z" maps though.
        if "est" in result.maps:
            target_value_map = "est"
        elif "stat" in result.maps:
            target_value_map = "stat"
        else:
            target_value_map = "z"

        stat_values = result.get_map(target_value_map, return_type="array")

        # Use study IDs in inputs_ instead of dataset, because we don't want to try fitting the
        # estimator to a study that might have been filtered out by the estimator's criteria.
        meta_ids = estimator.inputs_["id"]
        rows = ["Center of Mass"] + list(meta_ids)

        # Let's label the clusters in the thresholded map so we can use it as a NiftiLabelsMasker
        # This won't work when the Estimator's masker isn't a NiftiMasker... :(
        conn = ndimage.generate_binary_structure(3, 2)
        labeled_cluster_arr, n_clusters = ndimage.label(thresh_arr, conn)
        labeled_cluster_img = nib.Nifti1Image(
            labeled_cluster_arr,
            affine=target_img.affine,
            header=target_img.header,
        )

        if n_clusters == 0:
            LGR.warning("No clusters found")
            contribution_table = pd.DataFrame(index=rows)
            return contribution_table, labeled_cluster_img

        # Identify center of mass for each cluster
        # This COM may fall outside the cluster, but it is a useful heuristic for identifying them
        cluster_ids = list(range(1, n_clusters + 1))
        cluster_coms = ndimage.center_of_mass(
            labeled_cluster_arr,
            labeled_cluster_arr,
            cluster_ids,
        )
        cluster_coms = np.array(cluster_coms)
        cluster_coms = vox2mm(cluster_coms, target_img.affine)

        cluster_com_strs = []
        for i_peak in range(len(cluster_ids)):
            x, y, z = cluster_coms[i_peak, :].astype(int)
            xyz_str = f"({x}, {y}, {z})"
            cluster_com_strs.append(xyz_str)

        # Mask using a labels masker, so that we can easily get the mean value for each cluster
        cluster_masker = input_data.NiftiLabelsMasker(labeled_cluster_img)
        cluster_masker.fit(labeled_cluster_img)

        # Create empty contribution table
        contribution_table = pd.DataFrame(index=rows, columns=cluster_ids)
        contribution_table.index.name = "Cluster ID"
        contribution_table.loc["Center of Mass"] = cluster_com_strs

        with tqdm_joblib(tqdm(total=len(meta_ids))):
            jackknife_results = Parallel(n_jobs=self.n_cores)(
                delayed(self._transform)(
                    study_id,
                    all_ids=meta_ids,
                    dset=dset,
                    estimator=estimator,
                    target_value_map=target_value_map,
                    stat_values=stat_values,
                    original_masker=original_masker,
                    cluster_masker=cluster_masker,
                ) for study_id in meta_ids)

        # Add the results to the table
        for expid, stat_prop_values in jackknife_results:
            contribution_table.loc[expid] = stat_prop_values

        return contribution_table, labeled_cluster_img
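
A hedged usage sketch, assuming the surrounding class is NiMARE's `Jackknife` diagnostic (consistent with the `nimare` references in the docstring) and that its constructor exposes the attributes the method reads (`target_image`, `voxel_thresh`, `n_cores`); `meta_result` is a placeholder for a `MetaResult` from a fitted, non-pairwise estimator:

from nimare.diagnostics import Jackknife

jackknife = Jackknife(target_image='z', voxel_thresh=1.65, n_cores=1)
contribution_table, labeled_cluster_img = jackknife.transform(meta_result)
contribution_table.to_csv('jackknife_contributions.tsv', sep='\t')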
Exemplo n.º 28
0
low_wm = ['0bk', '0bk_body', '0bk_faces', '0bk_places', '0bk_tools']
high_wm = ['2bk', '2bk_body', '2bk_faces', '2bk_places', '2bk_tools']

conditions = {'0bk': 53, '2bk': 56,
              '0bk_body': 2, '0bk_faces': 5, '0bk_places': 8, '0bk_tools': 11,
              '2bk_body': 14, '2bk_faces': 17, '2bk_places': 20, '2bk_tools': 23,
              'body_others': 26, 'faces_others': 29, 'places_others': 32,
              'tools_others': 35}


thalamus_mask = nib.load('/Shared/lss_kahwang_hpc/ROIs/Thalamus_Morel_consolidated_mask_v3.nii.gz')
thalamus_mask_data = thalamus_mask.get_fdata()
thalamus_mask_data = thalamus_mask_data > 0
# despite the name, this is a binarized mask image, not a NiftiMasker
thalamus_masker = image.new_img_like(thalamus_mask, thalamus_mask_data)

cortex_mask = nib.load('/Shared/lss_kahwang_hpc/ROIs/Schaefer2018_400Parcels_17Networks_order_FSLMNI152_2mm.nii.gz')
cortex_masker = input_data.NiftiLabelsMasker(labels_img=cortex_mask, standardize=False)

other_rois = nib.load('/Shared/lss_kahwang_hpc/HCP_data/activity_flow/other_rois/Schaefer100+BG_2mm.nii.gz')
other_rois_data = other_rois.get_fdata()

def save_object(obj, filename):
	'''Simple function to write an object out to a pickle file.
	usage: save_object(obj, filename)
	'''
	with open(filename, 'wb') as output:
		pickle.dump(obj, output, pickle.HIGHEST_PROTOCOL)

def read_object(filename):
	'''Shorthand for reading a pickled object back in.'''
	with open(filename, 'rb') as f:
		return pickle.load(f)
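
A hedged sketch of how this setup is typically used downstream (the functional path is illustrative; `apply_mask` pulls voxelwise time series from the binary thalamus mask image built above):

from nilearn import masking

func_img = nib.load('/path/to/sub-01_task-wm_bold.nii.gz')    # illustrative

cortical_ts = cortex_masker.fit_transform(func_img)           # (T, 400)
thalamus_ts = masking.apply_mask(func_img, thalamus_masker)   # (T, n_voxels)

save_object({'cortex': cortical_ts, 'thalamus': thalamus_ts},
            'wm_timeseries.pkl')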
Exemplo n.º 29
0
def extract_timecourse_from_nii(atlas,
                                nii,
                                mask=None,
                                confounds=None,
                                atlas_type=None,
                                t_r=None,
                                low_pass=None,
                                high_pass=1. / 128,
                                *args,
                                **kwargs):
    """
    Extract time courses from a 4D `nii`, one for each label
    or map in `atlas`.

    This method extracts a set of time series from a 4D nifti file
    (usually BOLD fMRI), corresponding to the ROIs in `atlas`.
    It also performs some minimal preprocessing using 
    `nilearn.signal.clean`.
    It is especially convenient when using atlases from the
    `nilearn.datasets`-module.

    Parameters
    ----------

    atlas: sklearn.datasets.base.Bunch  
        This Bunch should contain at least a `maps`-attribute
        containing a label (3D) or probabilistic atlas (4D),
        as well as a `labels` attribute, with one label for
        every ROI in the atlas.
        The function automatically detects which of the two is
        provided. It extracts a (weighted) time course per ROI.
        In the case of the probabilistic atlas, the voxels are
        weighted by their probability (see also the Mappers in
        nilearn).

    nii: 4D niimg-like object
        This NiftiImage contains the time series that need to
        be extracted using `atlas`

    mask: 3D niimg-like object
        Before time series are extracted, this mask is applied;
        useful if you want to exclude non-gray matter.

    confounds: CSV file or array-like, optional
        This parameter is passed to nilearn.signal.clean. Please 
        see the related documentation for details.
        shape: (number of scans, number of confounds)

    atlas_type: str, optional
        Can be 'labels' or 'probabilistic'. A label atlas
        should be 3D and contain one unique number per ROI.
        A probabilistic atlas contains as many volumes as
        there are ROIs.
        Usually, `atlas_type` can be detected automatically.

    t_r : float, optional
        Repetition time of `nii`. Can be important for
        temporal filtering.

    low_pass: None or float, optional
        This parameter is passed to signal.clean. Please see the related
        documentation for details

    high_pass: None or float, optional
        This parameter is passed to signal.clean. Please see the related
        documentation for details

    Examples
    --------

    >>> from nilearn import datasets
    >>> data = '/data/ds001/derivatives/fmriprep/sub-01/func/sub-01_task-checkerboard_bold.nii.gz'
    >>> atlas = datasets.fetch_atlas_pauli_2017()
    >>> ts = extract_timecourse_from_nii(atlas,
                                         data,
                                         t_r=1.5)
    >>> ts.head()

    """

    standardize = kwargs.pop('standardize', False)
    detrend = kwargs.pop('detrend', False)

    if atlas_type is None:
        maps = check_niimg(atlas.maps)

        if len(maps.shape) == 3:
            atlas_type = 'labels'
        else:
            atlas_type = 'prob'

    if atlas_type == 'labels':
        masker = input_data.NiftiLabelsMasker(atlas.maps,
                                              mask_img=mask,
                                              standardize=standardize,
                                              detrend=detrend,
                                              t_r=t_r,
                                              low_pass=low_pass,
                                              high_pass=high_pass,
                                              *args,
                                              **kwargs)
    else:
        masker = input_data.NiftiMapsMasker(atlas.maps,
                                            mask_img=mask,
                                            standardize=standardize,
                                            detrend=detrend,
                                            t_r=t_r,
                                            low_pass=low_pass,
                                            high_pass=high_pass,
                                            *args,
                                            **kwargs)

    data = _make_psc(nii)

    results = masker.fit_transform(data, confounds=confounds)

    # For weird atlases that have a label for the background
    if len(atlas.labels) == results.shape[1] + 1:
        atlas.labels = atlas.labels[1:]

    if t_r is None:
        t_r = 1

    index = pd.Index(np.arange(0, t_r * data.shape[-1], t_r), name='time')

    columns = pd.Index(atlas.labels, name='roi')

    return pd.DataFrame(results, index=index, columns=columns)
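
`_make_psc` is not shown in this excerpt; a minimal stand-in under the usual reading (percent signal change around each voxel's temporal mean) might look like this:

import numpy as np
from nilearn import image

def _make_psc(nii):
    """Convert a 4D image to percent signal change around the voxelwise mean."""
    img = image.load_img(nii)
    data = img.get_fdata()
    mean = data.mean(axis=-1, keepdims=True)
    mean[mean == 0] = 1.0  # avoid dividing by zero outside the brain
    return image.new_img_like(img, 100.0 * (data - mean) / mean)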
from joblib import Parallel, delayed

haxby_dataset = datasets.fetch_haxby_simple()  # removed in modern nilearn; datasets.fetch_haxby() is the current equivalent
func_filename = haxby_dataset.func[0]
mask_filename = haxby_dataset.mask

atlas_filename, labels = datasets.fetch_harvard_oxford(
    'cort-maxprob-thr25-2mm', symmetric_split=True)

affine = load(mask_filename).affine
shape = load(mask_filename).shape
atlas = image.resample_img(atlas_filename,
                           target_affine=affine,
                           target_shape=shape,
                           interpolation='nearest')
roi_masker = input_data.NiftiLabelsMasker(labels_img=atlas,
                                          mask_img=mask_filename)
roi_masker.fit(mask_filename)  ## just to have it fitted

labels = np.recfromcsv(haxby_dataset.session_target[0], delimiter=" ")
target = labels['labels']
###################################################

y, session = np.loadtxt(haxby_dataset.session_target[0]).astype('int').T
conditions = np.recfromtxt(haxby_dataset.conditions_target[0])['f0']

# Remove the rest condition, it is not very interesting
non_rest = conditions != b'rest'
conditions = conditions[non_rest]
y = y[non_rest]

# Get the labels of the numerical conditions represented by the vector y