Code example #1
    def parcellate(self):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import gc
        import time
        import os
        import numpy as np
        import nibabel as nib
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == 'ward') and (self.local_corr != 'allcorr'):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')
            else:
                raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')

        self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize, detrend=self._detrending,
                                        n_parcels=int(self.k), mask=self._clust_mask_corr_img,
                                        connectivity=self._local_conn, mask_strategy='background', memory_level=2,
                                        smoothing_fwhm=2, random_state=42)

        if self.conf is not None:
            import pandas as pd
            confounds = pd.read_csv(self.conf, sep='\t')
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, self._dir_path)
                self._clust_est.fit(self._func_img, confounds=conf_corr)
            else:
                self._clust_est.fit(self._func_img, confounds=self.conf)
        else:
            self._clust_est.fit(self._func_img)

        self._clust_est.labels_img_.set_data_dtype(np.uint16)
        nib.save(self._clust_est.labels_img_, self.uatlas)

        print("%s%s%s" % (self.clust_type, self.k, " clusters: %.2fs" % (time.time() - start)))

        del self._clust_est
        self._func_img.uncache()
        self._clust_mask_corr_img.uncache()
        gc.collect()

        return self.uatlas
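These examples call `pynets.fmri.estimation.fill_confound_nans` whenever the confounds TSV contains NaNs, but its body is not shown in any excerpt here. A minimal sketch of the behavior the callers rely on (impute the NaN cells, write a cleaned TSV, return its path) might look like the following; the column-mean imputation and the output filename are assumptions, not PyNets' exact strategy.

import os
import pandas as pd

def fill_confound_nans_sketch(confounds, dir_path):
    # Hypothetical stand-in for pynets.fmri.estimation.fill_confound_nans:
    # impute NaN cells column-wise and return the path to a cleaned TSV
    # that can be passed to Parcellations.fit(confounds=...).
    confounds_filled = confounds.fillna(confounds.mean(numeric_only=True))
    out_path = os.path.join(dir_path, 'confounds_filled.tsv')
    confounds_filled.to_csv(out_path, sep='\t', index=False)
    return out_path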
Code example #2
File: clustools.py  Project: sparkler0323/PyNets
def nil_parcellate(func_file, clust_mask, k, clust_type, ID, dir_path, uatlas_select):
    import time
    import nibabel as nib
    from nilearn.regions import Parcellations
    from nilearn.regions import connected_label_regions
    detrending = True

    start = time.time()
    func_img = nib.load(func_file)
    mask_img = nib.load(clust_mask)
    clust_est = Parcellations(method=clust_type, detrend=detrending, n_parcels=int(k),
                              mask=mask_img)
    clust_est.fit(func_img)
    region_labels = connected_label_regions(clust_est.labels_img_)
    nib.save(region_labels, uatlas_select)
    print("%s%s%s" % (clust_type, k, " clusters: %.2fs" % (time.time() - start)))
    return
Code example #3
def clusterWard(img=None, nParcels=1024, standardize=False, smoothing=2):
    """
    Does brain parcellation using Ward clustering
    img -> nii image variable or path
    nParcels (optional, default 1024) -> number of parcels
    standardize (optional, default False) -> bool - whether to z-score each voxel time series
    smoothing (optional, default 2) -> int - the higher it is, the more smoothing is applied
    Returns a tuple containing:
        1 -> Float array of shape (nScans, nParcels) - contains the parcel signals
        2 -> The ward parcellation object
    """
    from nilearn.regions import Parcellations
    ward = Parcellations(method='ward',
                         n_parcels=nParcels,
                         standardize=standardize,
                         smoothing_fwhm=smoothing,
                         memory='nilearn_cache',
                         memory_level=1,
                         verbose=1)
    ward.fit(img)
    signals = ward.transform(img)
    return signals, ward
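A usage sketch, assuming a 4D functional NIfTI at a hypothetical path:

# Hypothetical input file; returns the parcel signals and the fitted object.
signals, ward = clusterWard('sub-01_task-rest_bold.nii.gz', nParcels=512)
print(signals.shape)  # (n_scans, 512)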
Code example #4
def fit_Wards(imgs, params):
    """Interface of Wards."""
    defaults = {
        'method': 'ward',
        'n_parcels': 10,
        'standardize': False,
        'smoothing_fwhm': 2.,
        'memory': 'nilearn_cache',
        'memory_level': 1,
        'verbose': 1,
        'n_jobs': -2
    }
    context = dict(defaults, **params)

    return Parcellations(**context).fit(imgs)
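Since `dict(defaults, **params)` lets the caller's keys override the defaults, usage might look like this (file paths hypothetical):

# Override only n_parcels and verbosity; everything else keeps its default.
ward = fit_Wards(['sub-01_bold.nii.gz', 'sub-02_bold.nii.gz'],
                 {'n_parcels': 100, 'verbose': 0})
labels_img = ward.labels_img_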
Code example #5
class NiParcellate(object):
    """
    Class for implementing various clustering routines.
    """
    def __init__(self, func_file, clust_mask, k, clust_type, local_corr, conf=None, mask=None):
        """
        Parameters
        ----------
        func_file : str
            File path to a 4D Nifti1Image containing fMRI data.
        clust_mask : str
            File path to a 3D NIFTI file containing a mask, which restricts the
            voxels used in the clustering.
        k : int
            Number of clusters that will be generated.
        clust_type : str
            Type of clustering to be performed (e.g. 'ward', 'kmeans', 'complete', 'average').
        local_corr : str
            Type of local connectivity to use as the basis for clustering methods. Options are tcorr or scorr.
            Default is tcorr.
        conf : str
            File path to a confound regressor file for reducing noise in the time-series when extracting signals from ROIs.
        mask : str
            File path to a 3D NIFTI file containing a mask, which restricts the
            voxels used in the analysis.
        """
        self.func_file = func_file
        self.clust_mask = clust_mask
        self.k = int(k)
        self.clust_type = clust_type
        self.conf = conf
        self.local_corr = local_corr
        self.uatlas = None
        self.atlas = None
        self._detrending = True
        self._standardize = True
        self._func_img = nib.load(self.func_file)
        self.mask = mask
        self._mask_img = None
        self._local_conn_mat_path = None
        self._dir_path = None
        self._clust_est = None
        self._local_conn = None
        self._clust_mask_corr_img = None
        self._func_img_data = None
        self._masked_fmri_vol = None
        self._conn_comps = None
        self.num_conn_comps = None

    def create_clean_mask(self, num_std_dev=1.5):
        """
        Create a subject-refined version of the clustering mask.
        """
        import os
        from pynets.core import utils
        from nilearn.masking import intersect_masks
        from nilearn.image import index_img, math_img, resample_img
        mask_name = os.path.basename(self.clust_mask).split('.nii')[0]
        self.atlas = "%s%s%s%s%s" % (mask_name, '_', self.clust_type, '_k', str(self.k))
        print("%s%s%s%s%s%s%s" % ('\nCreating atlas using ', self.clust_type, ' at cluster level ', str(self.k),
                                  ' for ', str(self.atlas), '...\n'))
        self._dir_path = utils.do_dir_path(self.atlas, self.func_file)
        self.uatlas = "%s%s%s%s%s%s%s%s" % (self._dir_path, '/', mask_name, '_clust-', self.clust_type, '_k',
                                            str(self.k), '.nii.gz')

        # Load clustering mask
        self._func_img.set_data_dtype(np.float32)
        func_vol_img = index_img(self._func_img, 1)
        func_vol_img.set_data_dtype(np.uint16)
        clust_mask_res_img = resample_img(nib.load(self.clust_mask), target_affine=func_vol_img.affine,
                                          target_shape=func_vol_img.shape, interpolation='nearest')
        clust_mask_res_img.set_data_dtype(np.uint16)
        func_data = np.asarray(func_vol_img.dataobj).astype('float32')
        func_int_thr = np.round(np.mean(func_data[func_data > 0]) - np.std(func_data[func_data > 0]) * num_std_dev, 3)
        if self.mask is not None:
            self._mask_img = nib.load(self.mask)
            self._mask_img.set_data_dtype(np.uint16)
            mask_res_img = resample_img(self._mask_img, target_affine=func_vol_img.affine,
                                        target_shape=func_vol_img.shape, interpolation='nearest')
            mask_res_img.set_data_dtype(np.uint16)
            self._clust_mask_corr_img = intersect_masks([math_img('img > ' + str(func_int_thr), img=func_vol_img),
                                                         math_img('img > 0.01', img=clust_mask_res_img),
                                                         math_img('img > 0.01', img=mask_res_img)],
                                                        threshold=1, connected=False)
            self._clust_mask_corr_img.set_data_dtype(np.uint16)
            self._mask_img.uncache()
            mask_res_img.uncache()
        else:
            self._clust_mask_corr_img = intersect_masks([math_img('img > ' + str(func_int_thr), img=func_vol_img),
                                                         math_img('img > 0.01', img=clust_mask_res_img)],
                                                        threshold=1, connected=False)
            self._clust_mask_corr_img.set_data_dtype(np.uint16)
        nib.save(self._clust_mask_corr_img, "%s%s%s%s" % (self._dir_path, '/', mask_name, '.nii.gz'))

        del func_data
        func_vol_img.uncache()
        clust_mask_res_img.uncache()

        return self.atlas

    def create_local_clustering(self, overwrite, r_thresh, min_region_size=80):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import os.path as op
        from scipy.sparse import save_npz, load_npz
        from nilearn.regions import connected_regions

        conn_comps = connected_regions(self._clust_mask_corr_img, extract_type='connected_components',
                                       min_region_size=min_region_size)
        self._conn_comps = conn_comps[0]
        self.num_conn_comps = len(conn_comps[1])

        if self.clust_type == 'complete' or self.clust_type == 'average' or self.clust_type == 'single':
            if self.num_conn_comps > 1:
                raise ValueError('Clustering method unstable with spatial constraints applied to multiple '
                                 'connected components.')

        if self.clust_type == 'ward' or self.clust_type == 'ncut':
            if self.k < self.num_conn_comps:
                raise ValueError('k must be at least the total number of connected components in '
                                 'the mask in the case of agglomerative clustering.')
            if self.local_corr == 'tcorr' or self.local_corr == 'scorr':
                self._local_conn_mat_path = "%s%s%s%s" % (self.uatlas.split('.nii')[0], '_', self.local_corr,
                                                          '_conn.npz')

                if (not op.isfile(self._local_conn_mat_path)) or (overwrite is True):
                    from pynets.fmri.clustools import make_local_connectivity_tcorr, make_local_connectivity_scorr
                    if self.local_corr == 'tcorr':
                        self._local_conn = make_local_connectivity_tcorr(self._func_img, self._clust_mask_corr_img,
                                                                         thresh=r_thresh)
                    elif self.local_corr == 'scorr':
                        self._local_conn = make_local_connectivity_scorr(self._func_img, self._clust_mask_corr_img,
                                                                         thresh=r_thresh)
                    else:
                        raise ValueError('Local connectivity type not available')

                    print("%s%s" % ('Saving spatially constrained connectivity structure to: ',
                                    self._local_conn_mat_path))
                    save_npz(self._local_conn_mat_path, self._local_conn)
                elif op.isfile(self._local_conn_mat_path):
                    self._local_conn = load_npz(self._local_conn_mat_path)
            elif self.local_corr == 'allcorr':
                self._local_conn = 'auto'
            else:
                raise ValueError('Local connectivity method not recognized. Only tcorr, scorr, and auto are currently '
                                 'supported')
        else:
            self._local_conn = 'auto'
        return

    def parcellate(self):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import gc
        import time
        import os
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == 'ward') and (self.local_corr != 'allcorr'):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')
            else:
                raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')

        if self.clust_type in ('complete', 'average', 'single', 'ward') or \
                (self.clust_type in ('rena', 'kmeans') and self.num_conn_comps == 1):
            self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize,
                                            detrend=self._detrending,
                                            n_parcels=self.k, mask=self._clust_mask_corr_img,
                                            connectivity=self._local_conn, mask_strategy='background', memory_level=2,
                                            random_state=42)

            if self.conf is not None:
                import pandas as pd
                confounds = pd.read_csv(self.conf, sep='\t')
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self._dir_path)
                    self._clust_est.fit(self._func_img, confounds=conf_corr)
                else:
                    self._clust_est.fit(self._func_img, confounds=self.conf)
            else:
                self._clust_est.fit(self._func_img)

            self._clust_est.labels_img_.set_data_dtype(np.uint16)
            nib.save(self._clust_est.labels_img_, self.uatlas)
        elif self.clust_type == 'ncut':
            out_img = parcellate_ncut(self._local_conn, self.k, self._clust_mask_corr_img)
            out_img.set_data_dtype(np.uint16)
            nib.save(out_img, self.uatlas)
        elif self.clust_type in ('rena', 'kmeans') and self.num_conn_comps > 1:
            from pynets.core import nodemaker
            from nilearn.regions import connected_regions, Parcellations
            from nilearn.image import iter_img, new_img_like
            from pynets.core.utils import flatten, proportional

            mask_img_list = []
            mask_voxels_dict = dict()
            for i, mask_img in enumerate(list(iter_img(self._conn_comps))):
                mask_voxels_dict[i] = int(np.sum(np.asarray(mask_img.dataobj)))
                mask_img_list.append(mask_img)

            # Allocate k across connected components using Hagenbach-Bischoff Quota based on number of voxels
            k_list = proportional(self.k, list(mask_voxels_dict.values()))

            conn_comp_atlases = []
            print("%s%s%s" % ('Building ', len(mask_img_list), ' separate atlases with voxel-proportional nclusters '
                                                               'for each connected component...'))
            for i, mask_img in enumerate(mask_img_list):
                if k_list[i] == 0:
                    # print('0 voxels in component. Discarding...')
                    continue
                self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize,
                                                detrend=self._detrending,
                                                n_parcels=k_list[i], mask=mask_img,
                                                mask_strategy='background',
                                                memory_level=2,
                                                random_state=42)
                if self.conf is not None:
                    import pandas as pd
                    confounds = pd.read_csv(self.conf, sep='\t')
                    if confounds.isnull().values.any():
                        conf_corr = fill_confound_nans(confounds, self._dir_path)
                        self._clust_est.fit(self._func_img, confounds=conf_corr)
                    else:
                        self._clust_est.fit(self._func_img, confounds=self.conf)
                else:
                    self._clust_est.fit(self._func_img)
                conn_comp_atlases.append(self._clust_est.labels_img_)

            # Then combine the multiple atlases, corresponding to each connected component, into a single atlas
            atlas_of_atlases = []
            for atlas in conn_comp_atlases:
                bna_data = np.around(np.asarray(atlas.dataobj)).astype('uint16')

                # Get an array of unique parcels
                bna_data_for_coords_uniq = np.unique(bna_data)

                # Number of parcels:
                par_max = len(bna_data_for_coords_uniq) - 1
                img_stack = []
                for idx in range(1, par_max + 1):
                    roi_img = bna_data == bna_data_for_coords_uniq[idx].astype('uint16')
                    img_stack.append(roi_img.astype('uint16'))
                img_stack = np.array(img_stack)

                img_list = []
                for idy in range(par_max):
                    img_list.append(new_img_like(atlas, img_stack[idy]))
                atlas_of_atlases.append(img_list)
                del img_list, img_stack, bna_data

            atlas_of_atlases = list(flatten(atlas_of_atlases))

            [super_atlas_ward, _] = nodemaker.create_parcel_atlas(atlas_of_atlases)
            super_atlas_ward.set_data_dtype(np.uint16)

            nib.save(super_atlas_ward, self.uatlas)
            del atlas_of_atlases, super_atlas_ward, conn_comp_atlases, mask_img_list, mask_voxels_dict

        print("%s%s%s" % (self.clust_type, self.k, " clusters: %.2fs" % (time.time() - start)))

        del self._clust_est
        self._func_img.uncache()
        self._clust_mask_corr_img.uncache()
        gc.collect()

        return self.uatlas
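The `proportional` helper from `pynets.core.utils`, used above to split `k` across connected components, is not shown in these excerpts. A minimal largest-remainder sketch using the Hagenbach-Bischoff quota, assuming that is all the helper does, would be:

def proportional_sketch(k, voxel_counts):
    # Allocate k parcels across components in proportion to their voxel
    # counts, using the Hagenbach-Bischoff quota (largest-remainder method).
    quota = sum(voxel_counts) / (k + 1)          # HB quota: total / (k + 1)
    shares = [v / quota for v in voxel_counts]
    alloc = [int(s) for s in shares]             # integer parts first
    leftover = k - sum(alloc)                    # parcels still unassigned
    # Hand out the remaining parcels to the largest fractional remainders.
    by_remainder = sorted(range(len(shares)),
                          key=lambda i: shares[i] - alloc[i], reverse=True)
    for i in by_remainder[:max(0, leftover)]:
        alloc[i] += 1
    return alloc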
Code example #6
def parcellate(func_boot_img, local_corr, clust_type, _local_conn_mat_path,
               num_conn_comps, _clust_mask_corr_img, _standardize,
               _detrending, k, _local_conn, conf, _dir_path, _conn_comps):
    """
    API for performing any of a variety of clustering routines available
    through NiLearn.
    """
    import time
    import os
    import numpy as np
    from nilearn.regions import Parcellations
    from pynets.fmri.estimation import fill_confound_nans
    # from joblib import Memory
    import tempfile

    cache_dir = tempfile.mkdtemp()
    # memory = Memory(cache_dir, verbose=0)

    start = time.time()

    if (clust_type == "ward") and (local_corr != "allcorr"):
        if _local_conn_mat_path is not None:
            if not os.path.isfile(_local_conn_mat_path):
                raise FileNotFoundError(
                    "File containing sparse matrix of local connectivity"
                    " structure not found."
                )
        else:
            raise FileNotFoundError(
                "File containing sparse matrix of local connectivity"
                " structure not found."
            )

    if (
        clust_type == "complete"
        or clust_type == "average"
        or clust_type == "single"
        or clust_type == "ward"
        or (clust_type == "rena" and num_conn_comps == 1)
        or (clust_type == "kmeans" and num_conn_comps == 1)
    ):
        _clust_est = Parcellations(
            method=clust_type,
            standardize=_standardize,
            detrend=_detrending,
            n_parcels=k,
            mask=_clust_mask_corr_img,
            connectivity=_local_conn,
            mask_strategy="background",
            random_state=42
        )

        if conf is not None:
            import pandas as pd
            import random
            from nipype.utils.filemanip import fname_presuffix, copyfile

            out_name_conf = fname_presuffix(
                conf, suffix=f"_tmp{random.randint(1, 1000)}",
                newpath=cache_dir
            )
            copyfile(
                conf,
                out_name_conf,
                copy=True,
                use_hardlink=False)

            confounds = pd.read_csv(out_name_conf, sep="\t")
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, _dir_path)
                try:
                    _clust_est.fit(func_boot_img, confounds=conf_corr)
                except UserWarning:
                    return None
                os.remove(conf_corr)
            else:
                try:
                    _clust_est.fit(func_boot_img, confounds=out_name_conf)
                except UserWarning:
                    return None
            os.remove(out_name_conf)
        else:
            try:
                _clust_est.fit(func_boot_img)
            except UserWarning:
                return None
        _clust_est.labels_img_.set_data_dtype(np.uint16)
        print(
            f"{clust_type}{k}"
            f"{(' clusters: %.2fs' % (time.time() - start))}"
        )

        return _clust_est.labels_img_

    elif clust_type == "ncut":
        out_img = parcellate_ncut(
            _local_conn, k, _clust_mask_corr_img
        )
        out_img.set_data_dtype(np.uint16)
        print(
            f"{clust_type}{k}"
            f"{(' clusters: %.2fs' % (time.time() - start))}"
        )
        return out_img

    elif (
        (clust_type == "rena" or clust_type == "kmeans")
        and num_conn_comps > 1
    ):
        from pynets.core import nodemaker
        from nilearn.regions import connected_regions, Parcellations
        from nilearn.image import iter_img, new_img_like
        from pynets.core.utils import flatten, proportional

        mask_img_list = []
        mask_voxels_dict = dict()
        for i, mask_img in enumerate(iter_img(_conn_comps)):
            mask_voxels_dict[i] = int(
                np.sum(np.asarray(mask_img.dataobj)))
            mask_img_list.append(mask_img)

        # Allocate k across connected components using Hagenbach-Bischoff
        # Quota based on number of voxels
        k_list = proportional(k, list(mask_voxels_dict.values()))

        conn_comp_atlases = []
        print(
            f"Building {len(mask_img_list)} separate atlases with "
            f"voxel-proportional k clusters for each "
            f"connected component...")
        for i, mask_img in enumerate(iter_img(mask_img_list)):
            if k_list[i] < 5:
                print(f"Only {k_list[i]} voxels in component. Discarding...")
                continue
            _clust_est = Parcellations(
                method=clust_type,
                standardize=_standardize,
                detrend=_detrending,
                n_parcels=k_list[i],
                mask=mask_img,
                mask_strategy="background",
                random_state=i
            )
            if conf is not None:
                import pandas as pd
                import random
                from nipype.utils.filemanip import fname_presuffix, copyfile

                out_name_conf = fname_presuffix(
                    conf, suffix=f"_tmp{random.randint(1, 1000)}",
                    newpath=cache_dir
                )
                copyfile(
                    conf,
                    out_name_conf,
                    copy=True,
                    use_hardlink=False)

                confounds = pd.read_csv(out_name_conf, sep="\t")
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(
                        confounds, _dir_path)
                    try:
                        _clust_est.fit(func_boot_img, confounds=conf_corr)
                    except UserWarning:
                        continue
                else:
                    try:
                        _clust_est.fit(func_boot_img, confounds=conf)
                    except UserWarning:
                        continue
            else:
                try:
                    _clust_est.fit(func_boot_img)
                except UserWarning:
                    continue
            conn_comp_atlases.append(_clust_est.labels_img_)

        # Then combine the multiple atlases, corresponding to each
        # connected component, into a single atlas
        atlas_of_atlases = []
        for atlas in iter_img(conn_comp_atlases):
            bna_data = np.around(
                np.asarray(
                    atlas.dataobj)).astype("uint16")

            # Get an array of unique parcels
            bna_data_for_coords_uniq = np.unique(bna_data)

            # Number of parcels:
            par_max = len(bna_data_for_coords_uniq) - 1
            img_stack = []
            for idx in range(1, par_max + 1):
                roi_img = bna_data == bna_data_for_coords_uniq[idx].astype(
                    "uint16")
                img_stack.append(roi_img.astype("uint16"))
            img_stack = np.array(img_stack)

            img_list = []
            for idy in range(par_max):
                img_list.append(new_img_like(atlas, img_stack[idy]))
            atlas_of_atlases.append(img_list)
            del img_list, img_stack, bna_data

        atlas_of_atlases = list(flatten(atlas_of_atlases))

        [super_atlas_ward, _] = nodemaker.create_parcel_atlas(
            atlas_of_atlases)
        super_atlas_ward.set_data_dtype(np.uint16)
        del atlas_of_atlases, conn_comp_atlases, mask_img_list, \
            mask_voxels_dict

        print(
            f"{clust_type}{k}"
            f"{(' clusters: %.2fs' % (time.time() - start))}"
        )

        # memory.clear(warn=False)

        return super_atlas_ward
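Note that the `except UserWarning:` blocks above only fire if warnings have been escalated to exceptions, since Python warnings do not propagate as exceptions by default. Presumably the surrounding pipeline does something like:

import warnings

# Turn nilearn's UserWarning (e.g. when k is too large for the mask)
# into a raised exception so the try/except blocks above can catch it.
warnings.filterwarnings("error", category=UserWarning)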
Code example #7
###############################################################################
# PARCELLATION METHOD
###############################################################################

from nilearn.regions import Parcellations
# Agglomerative Clustering: ward

# We build parameters of our own for this object. Parameters related to
# masking, caching and defining number of clusters and specific parcellations

ward = Parcellations(
    method='ward',
    n_parcels=1000,
    standardize=False,
    smoothing_fwhm=None,
    memory=r'D:\ROI Schizo\Nilearn_parcell\Nilearn_parcell_cache',
    memory_level=1)
# Call fit on functional dataset: single subject (fewer samples).

for bold in func:
    ward.fit(bold)
    print('bold is fitted')

ward_labels_img = ward.labels_img_
ward_mask = ward.masker_
# Now, ward_labels_img is a Nifti1Image object; it can be saved to a file
# with the following code:
ward_labels_img.to_filename(
    r'D:\ROI Schizo\Nilearn_parcell\schizo_90sub_1000_ward_parcel.nii')
Code example #8
for i, subject in enumerate(subjects):
    individual_components_img = masker.inverse_transform(
        individual_components[i])
    #plot_prob_atlas(individual_components_img,
    #                title='DictLearning components, subject %s' % subject)

###############################################################################
# Generate brain parcellations
from nilearn.regions import Parcellations
n_parcellations = 20
n_parcels = 256

ward = Parcellations(method='ward',
                     n_parcels=n_parcels,
                     standardize=False,
                     smoothing_fwhm=4.,
                     memory='nilearn_cache',
                     memory_level=1,
                     verbose=1,
                     mask=mask_gm)

make_parcellations = memory.cache(make_parcellations)

parcellations = make_parcellations(ward, rs_fmri, n_parcellations, n_jobs)

###############################################################################
# Cross-validated predictions
from sklearn.linear_model import RidgeCV
from sklearn.metrics import r2_score
from sklearn.model_selection import KFold
clf = RidgeCV()
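The excerpt stops right after constructing `RidgeCV`. A plausible continuation, assuming a design matrix `X` and targets `Y` (names not in the original), would score out-of-fold predictions using the imports already in place:

# Hypothetical continuation: out-of-fold R^2 for the ridge predictions.
from sklearn.model_selection import cross_val_predict

cv = KFold(n_splits=5, shuffle=True, random_state=0)
Y_pred = cross_val_predict(clf, X, Y, cv=cv)
print("out-of-fold r2: %.3f" % r2_score(Y, Y_pred))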
Code example #9
class NilParcellate(object):
    """
    Class for implementing various clustering routines.
    """
    def __init__(self, func_file, clust_mask, k, clust_type, local_corr, conf=None, mask=None):
        """
        Parameters
        ----------
        func_file : str
            File path to a 4D Nifti1Image containing fMRI data.
        clust_mask : str
            File path to a 3D NIFTI file containing a mask, which restricts the
            voxels used in the clustering.
        k : int
            Number of clusters that will be generated.
        clust_type : str
            Type of clustering to be performed (e.g. 'ward', 'kmeans', 'complete', 'average').
        local_corr : str
            Type of local connectivity to use as the basis for clustering methods. Options are tcorr or scorr.
            Default is tcorr.
        conf : str
            File path to a confound regressor file for reducing noise in the time-series when extracting signals from ROIs.
        mask : str
            File path to a 3D NIFTI file containing a mask, which restricts the
            voxels used in the analysis.
        """
        self.func_file = func_file
        self.clust_mask = clust_mask
        self.k = k
        self.clust_type = clust_type
        self.conf = conf
        self.local_corr = local_corr
        self.uatlas = None
        self.atlas = None
        self._detrending = True
        self._standardize = True
        self._func_img = nib.load(self.func_file)
        self.mask = mask
        self._mask_img = None
        self._local_conn_mat_path = None
        self._dir_path = None
        self._clust_est = None
        self._local_conn = None
        self._clust_mask_corr_img = None
        self._func_img_data = None
        self._masked_fmri_vol = None

    def create_clean_mask(self, num_std_dev=1.5):
        """
        Create a subject-refined version of the clustering mask.
        """
        import os
        from pynets.core import utils
        from nilearn.masking import intersect_masks
        from nilearn.image import index_img, math_img, resample_img
        mask_name = os.path.basename(self.clust_mask).split('.nii')[0]
        self.atlas = "%s%s%s%s%s" % (mask_name, '_', self.clust_type, '_k', str(self.k))
        print("%s%s%s%s%s%s%s" % ('\nCreating atlas using ', self.clust_type, ' at cluster level ', str(self.k),
                                  ' for ', str(self.atlas), '...\n'))
        self._dir_path = utils.do_dir_path(self.atlas, self.func_file)
        self.uatlas = "%s%s%s%s%s%s%s%s" % (self._dir_path, '/', mask_name, '_clust-', self.clust_type, '_k',
                                            str(self.k), '.nii.gz')

        # Load clustering mask
        self._func_img.set_data_dtype(np.float32)
        func_vol_img = index_img(self._func_img, 1)
        func_vol_img.set_data_dtype(np.uint16)
        clust_mask_res_img = resample_img(nib.load(self.clust_mask), target_affine=func_vol_img.affine,
                                          target_shape=func_vol_img.shape, interpolation='nearest')
        clust_mask_res_img.set_data_dtype(np.uint16)
        func_data = np.asarray(func_vol_img.dataobj).astype('float32')
        func_int_thr = np.round(np.mean(func_data[func_data > 0]) - np.std(func_data[func_data > 0]) * num_std_dev, 3)
        if self.mask is not None:
            self._mask_img = nib.load(self.mask)
            self._mask_img.set_data_dtype(np.uint16)
            mask_res_img = resample_img(self._mask_img, target_affine=func_vol_img.affine,
                                        target_shape=func_vol_img.shape, interpolation='nearest')
            mask_res_img.set_data_dtype(np.uint16)
            self._clust_mask_corr_img = intersect_masks([math_img('img > ' + str(func_int_thr), img=func_vol_img),
                                                         math_img('img > 0.01', img=clust_mask_res_img),
                                                         math_img('img > 0.01', img=mask_res_img)],
                                                        threshold=1, connected=False)
            self._clust_mask_corr_img.set_data_dtype(np.uint16)
            self._mask_img.uncache()
            mask_res_img.uncache()
        else:
            self._clust_mask_corr_img = intersect_masks([math_img('img > ' + str(func_int_thr), img=func_vol_img),
                                                         math_img('img > 0.01', img=clust_mask_res_img)],
                                                        threshold=1, connected=False)
            self._clust_mask_corr_img.set_data_dtype(np.uint16)
        nib.save(self._clust_mask_corr_img, "%s%s%s%s" % (self._dir_path, '/', mask_name, '.nii.gz'))

        del func_data
        func_vol_img.uncache()
        clust_mask_res_img.uncache()

        return self.atlas

    def create_local_clustering(self, overwrite, r_thresh):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import os.path as op
        from scipy.sparse import save_npz, load_npz
        from nilearn.regions import connected_regions
        from pynets.fmri.clustools import make_local_connectivity_tcorr, make_local_connectivity_scorr

        conn_comps = len(connected_regions(self._clust_mask_corr_img, extract_type='connected_components',
                                           min_region_size=1)[1])
        if self.clust_type == 'kmeans':
            if self.k > conn_comps:
                if conn_comps != 1:
                    raise ValueError('k must be less than or equal to the total number of connected components in '
                                     'the mask in the case of kmeans clustering.')

        if self.clust_type == 'complete' or self.clust_type == 'average' or self.clust_type == 'single':
            if conn_comps > 1:
                raise ValueError('Complete, Average, and Single linkage agglomerative clustering are unstable in the '
                                 'case of multiple connected components.')

        if self.clust_type == 'ward':
            if self.k < conn_comps:
                raise ValueError('k must be at least the total number of connected components in '
                                 'the mask in the case of agglomerative clustering.')
            if self.local_corr == 'tcorr' or self.local_corr == 'scorr':
                self._local_conn_mat_path = "%s%s%s%s" % (self.uatlas.split('.nii')[0], '_', self.local_corr,
                                                          '_conn.npz')

                if (not op.isfile(self._local_conn_mat_path)) or (overwrite is True):
                    if self.local_corr == 'tcorr':
                        self._local_conn = make_local_connectivity_tcorr(self._func_img, self._clust_mask_corr_img,
                                                                         thresh=r_thresh)
                    elif self.local_corr == 'scorr':
                        self._local_conn = make_local_connectivity_scorr(self._func_img, self._clust_mask_corr_img,
                                                                         thresh=r_thresh)
                    else:
                        raise ValueError('Local connectivity type not available')

                    print("%s%s" % ('Saving spatially constrained connectivity structure to: ',
                                    self._local_conn_mat_path))
                    save_npz(self._local_conn_mat_path, self._local_conn)
                elif op.isfile(self._local_conn_mat_path):
                    self._local_conn = load_npz(self._local_conn_mat_path)
            elif self.local_corr == 'allcorr':
                self._local_conn = 'auto'
            else:
                raise ValueError('Local connectivity method not recognized. Only tcorr, scorr, and auto are currently '
                                 'supported')
        else:
            self._local_conn = 'auto'
        return

    def parcellate(self):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import gc
        import time
        import os
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == 'ward') and (self.local_corr != 'allcorr'):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')
            else:
                raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')

        self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize, detrend=self._detrending,
                                        n_parcels=int(self.k), mask=self._clust_mask_corr_img,
                                        connectivity=self._local_conn, mask_strategy='background', memory_level=2,
                                        smoothing_fwhm=2, random_state=42)

        if self.conf is not None:
            import pandas as pd
            confounds = pd.read_csv(self.conf, sep='\t')
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, self._dir_path)
                self._clust_est.fit(self._func_img, confounds=conf_corr)
            else:
                self._clust_est.fit(self._func_img, confounds=self.conf)
        else:
            self._clust_est.fit(self._func_img)

        self._clust_est.labels_img_.set_data_dtype(np.uint16)
        nib.save(self._clust_est.labels_img_, self.uatlas)

        print("%s%s%s" % (self.clust_type, self.k, " clusters: %.2fs" % (time.time() - start)))

        del self._clust_est
        self._func_img.uncache()
        self._clust_mask_corr_img.uncache()
        gc.collect()

        return self.uatlas
Code example #10
schizo_series = list(chain.from_iterable(ts_schizo))
schizo_series = np.array(schizo_series)

healthy_series = list(chain.from_iterable(ts_healthy))
healthy_series = np.array(healthy_series)

###########################################################################################

import nibabel as nb
fname = atlas_harvard_oxford.maps
img = nb.load(fname)
nb.save(img, fname.replace('.img', '.nii'))

mask_parcel = Parcellations('rena',
                            n_parcels=100,
                            random_state=0,
                            mask=atlas_harvard_oxford.maps)

mask_parcel.fit(func[0][0])

rena_labels_img = mask_parcel.labels_img_

mean_func_image = r'D:\ROI Schizo\ROIs\Schizo-Healthy\single_subj_T1.nii'
plotting.plot_roi(rena_labels_img,
                  mean_func_image,
                  title="ReNA parcellation",
                  display_mode='xz')

rena_labels_img.to_filename('roi1_parcellation.nii')

# Import dictionary learning algorithm from decomposition module and call the
Code example #11
from nilearn.regions import Parcellations

# Computing the Ward parcellation for the first time will be slow; we time it
# so the speed-up from caching is visible on the second run.
import time
start = time.time()

# Agglomerative Clustering: ward

# We build parameters of our own for this object. Parameters related to
# masking, caching and defining number of clusters and specific parcellations
# method.
ward = Parcellations(method='ward',
                     n_parcels=1000,
                     standardize=False,
                     smoothing_fwhm=2.,
                     memory='nilearn_cache',
                     memory_level=1,
                     verbose=1)
# Call fit on functional dataset: single subject (fewer samples).
ward.fit(dataset.func)
print("Ward agglomeration 1000 clusters: %.2fs" % (time.time() - start))

# Now we compute Ward clustering with 2000 clusters and compare the runtime
# with the 1000-cluster run, to see the benefit of caching the second time.

# We initialize class again with n_parcels=2000 this time.
start = time.time()
ward = Parcellations(method='ward',
                     n_parcels=2000,
                     standardize=False,
                     smoothing_fwhm=2.,
                     memory='nilearn_cache',
                     memory_level=1,
                     verbose=1)
ward.fit(dataset.func)
Code example #12
               (df.acquisition == 'ffx').values
        paths.append(df[mask].path.values[-1])
        subjects.append(subject)
        contrasts.append(contrast)
        tasks.append(df[mask].task.values[-1])

tasks = np.array(tasks)[:len(np.unique(contrasts))]
subjects = np.array(subjects)
paths = np.array(paths)
n_contrasts = len(df.contrast.unique())
n_parcels = 100

ward = Parcellations(method='ward',
                     n_parcels=n_parcels,
                     mask=mask_gm,
                     standardize=False,
                     memory_level=1,
                     memory=cache,
                     verbose=1)
# Call fit on the functional dataset (contrast images pooled across subjects).
ward.fit(paths)


def _scorer(clf, X, Y):
    """ Custom scorer"""
    if Y.ndim > 1:
        return 1 - np.sum((Y - clf.predict(X))**2, 1) / np.sum((Y)**2, 1)
    else:
        return 1 - (Y - clf.predict(X))**2 / Y**2

Code example #13
## Parcellation

# Computing the Ward parcellation for the first time will be slow; we time it
# so the speed-up from caching is visible on the second run.
start = time.time()

# Agglomerative Clustering: ward

# We build parameters of our own for this object. Parameters related to
# masking, caching and defining number of clusters and specific parcellations
# method.
ward = Parcellations(method='ward',
                     n_parcels=n_clusters,
                     standardize=False,
                     smoothing_fwhm=2.,
                     memory='nilearn_cache',
                     memory_level=1,
                     verbose=1)
# Call fit on functional dataset: single subject (fewer samples).
ward.fit(dataset)
print("Ward agglomeration 1000 clusters: %.2fs" % (time.time() - start))

#%%

ward_labels_img = ward.labels_img_

# Now, ward_labels_img is a Nifti1Image object; it can be saved to a file
# with the following code:
ward_labels_img.to_filename(
    os.path.join(outdir, f'ward_parcellation_k{n_clusters}.nii.gz'))
Code example #14
    def parcellate(self, func_boot_img):
        """
        API for performing any of a variety of clustering routines available
        through NiLearn.
        """
        import time
        import os
        import numpy as np
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == "ward") and (self.local_corr != "allcorr"):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError(
                        "File containing sparse matrix of local connectivity"
                        " structure not found.")
            else:
                raise FileNotFoundError(
                    "File containing sparse matrix of local connectivity"
                    " structure not found.")

        if (self.clust_type == "complete" or self.clust_type == "average"
                or self.clust_type == "single" or self.clust_type == "ward"
                or (self.clust_type == "rena" and self.num_conn_comps == 1)
                or (self.clust_type == "kmeans" and self.num_conn_comps == 1)):
            _clust_est = Parcellations(
                method=self.clust_type,
                standardize=self._standardize,
                detrend=self._detrending,
                n_parcels=self.k,
                mask=self._clust_mask_corr_img,
                connectivity=self._local_conn,
                mask_strategy="background",
                memory_level=2,
                random_state=42,
            )

            if self.conf is not None:
                import pandas as pd

                confounds = pd.read_csv(self.conf, sep="\t")
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self._dir_path)
                    _clust_est.fit(func_boot_img, confounds=conf_corr)
                else:
                    _clust_est.fit(func_boot_img, confounds=self.conf)
            else:
                _clust_est.fit(func_boot_img)

            _clust_est.labels_img_.set_data_dtype(np.uint16)
            print(f"{self.clust_type}{self.k}"
                  f"{(' clusters: %.2fs' % (time.time() - start))}")
            return _clust_est.labels_img_

        elif self.clust_type == "ncut":
            out_img = parcellate_ncut(self._local_conn, self.k,
                                      self._clust_mask_corr_img)
            out_img.set_data_dtype(np.uint16)
            print(f"{self.clust_type}{self.k}"
                  f"{(' clusters: %.2fs' % (time.time() - start))}")
            return out_img

        elif (self.clust_type == "rena"
              or self.clust_type == "kmeans" and self.num_conn_comps > 1):
            from pynets.core import nodemaker
            from nilearn.regions import connected_regions, Parcellations
            from nilearn.image import iter_img, new_img_like
            from pynets.core.utils import flatten, proportional

            mask_img_list = []
            mask_voxels_dict = dict()
            for i, mask_img in enumerate(list(iter_img(self._conn_comps))):
                mask_voxels_dict[i] = int(
                    np.sum(np.asarray(mask_img.dataobj)))
                mask_img_list.append(mask_img)

            # Allocate k across connected components using Hagenbach-Bischoff
            # Quota based on number of voxels
            k_list = proportional(self.k, list(mask_voxels_dict.values()))

            conn_comp_atlases = []
            print(f"Building {len(mask_img_list)} separate atlases with "
                  f"voxel-proportional k clusters for each "
                  f"connected component...")
            for i, mask_img in enumerate(mask_img_list):
                if k_list[i] == 0:
                    # print('0 voxels in component. Discarding...')
                    continue
                _clust_est = Parcellations(
                    method=self.clust_type,
                    standardize=self._standardize,
                    detrend=self._detrending,
                    n_parcels=k_list[i],
                    mask=mask_img,
                    mask_strategy="background",
                    memory_level=2,
                    random_state=42,
                )
                if self.conf is not None:
                    import pandas as pd

                    confounds = pd.read_csv(self.conf, sep="\t")
                    if confounds.isnull().values.any():
                        conf_corr = fill_confound_nans(confounds,
                                                       self._dir_path)
                        _clust_est.fit(func_boot_img, confounds=conf_corr)
                    else:
                        _clust_est.fit(func_boot_img, confounds=self.conf)
                else:
                    _clust_est.fit(func_boot_img)
                conn_comp_atlases.append(_clust_est.labels_img_)

            # Then combine the multiple atlases, corresponding to each
            # connected component, into a single atlas
            atlas_of_atlases = []
            for atlas in conn_comp_atlases:
                bna_data = np.around(np.asarray(
                    atlas.dataobj)).astype("uint16")

                # Get an array of unique parcels
                bna_data_for_coords_uniq = np.unique(bna_data)

                # Number of parcels:
                par_max = len(bna_data_for_coords_uniq) - 1
                img_stack = []
                for idx in range(1, par_max + 1):
                    roi_img = bna_data == bna_data_for_coords_uniq[idx].astype(
                        "uint16")
                    img_stack.append(roi_img.astype("uint16"))
                img_stack = np.array(img_stack)

                img_list = []
                for idy in range(par_max):
                    img_list.append(new_img_like(atlas, img_stack[idy]))
                atlas_of_atlases.append(img_list)
                del img_list, img_stack, bna_data

            atlas_of_atlases = list(flatten(atlas_of_atlases))

            [super_atlas_ward,
             _] = nodemaker.create_parcel_atlas(atlas_of_atlases)
            super_atlas_ward.set_data_dtype(np.uint16)
            del atlas_of_atlases, conn_comp_atlases, mask_img_list, \
                mask_voxels_dict

            print(f"{self.clust_type}{self.k}"
                  f"{(' clusters: %.2fs' % (time.time() - start))}")
            return super_atlas_ward
Code example #15
    def parcellate(self):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import gc
        import time
        import os
        import numpy as np
        import nibabel as nib
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == 'ward') and (self.local_corr != 'allcorr'):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')
            else:
                raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')

        if self.clust_type in ('complete', 'average', 'single', 'ward') or \
                (self.clust_type in ('rena', 'kmeans') and self.num_conn_comps == 1):
            self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize,
                                            detrend=self._detrending,
                                            n_parcels=self.k, mask=self._clust_mask_corr_img,
                                            connectivity=self._local_conn, mask_strategy='background', memory_level=2,
                                            random_state=42)

            if self.conf is not None:
                import pandas as pd
                confounds = pd.read_csv(self.conf, sep='\t')
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self._dir_path)
                    self._clust_est.fit(self._func_img, confounds=conf_corr)
                else:
                    self._clust_est.fit(self._func_img, confounds=self.conf)
            else:
                self._clust_est.fit(self._func_img)

            self._clust_est.labels_img_.set_data_dtype(np.uint16)
            nib.save(self._clust_est.labels_img_, self.uatlas)
        elif self.clust_type == 'ncut':
            out_img = parcellate_ncut(self._local_conn, self.k, self._clust_mask_corr_img)
            out_img.set_data_dtype(np.uint16)
            nib.save(out_img, self.uatlas)
        elif self.clust_type in ('rena', 'kmeans') and self.num_conn_comps > 1:
            from pynets.core import nodemaker
            from nilearn.regions import connected_regions, Parcellations
            from nilearn.image import iter_img, new_img_like
            from pynets.core.utils import flatten, proportional

            mask_img_list = []
            mask_voxels_dict = dict()
            for i, mask_img in enumerate(list(iter_img(self._conn_comps))):
                mask_voxels_dict[i] = int(np.sum(np.asarray(mask_img.dataobj)))
                mask_img_list.append(mask_img)

            # Allocate k across connected components using Hagenbach-Bischoff Quota based on number of voxels
            k_list = proportional(self.k, list(mask_voxels_dict.values()))

            conn_comp_atlases = []
            print("%s%s%s" % ('Building ', len(mask_img_list), ' separate atlases with voxel-proportional nclusters '
                                                               'for each connected component...'))
            for i, mask_img in enumerate(mask_img_list):
                if k_list[i] == 0:
                    # print('0 voxels in component. Discarding...')
                    continue
                self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize,
                                                detrend=self._detrending,
                                                n_parcels=k_list[i], mask=mask_img,
                                                mask_strategy='background',
                                                memory_level=2,
                                                random_state=42)
                if self.conf is not None:
                    import pandas as pd
                    confounds = pd.read_csv(self.conf, sep='\t')
                    if confounds.isnull().values.any():
                        conf_corr = fill_confound_nans(confounds, self._dir_path)
                        self._clust_est.fit(self._func_img, confounds=conf_corr)
                    else:
                        self._clust_est.fit(self._func_img, confounds=self.conf)
                else:
                    self._clust_est.fit(self._func_img)
                conn_comp_atlases.append(self._clust_est.labels_img_)

            # Then combine the multiple atlases, corresponding to each connected component, into a single atlas
            atlas_of_atlases = []
            for atlas in conn_comp_atlases:
                bna_data = np.around(np.asarray(atlas.dataobj)).astype('uint16')

                # Get an array of unique parcels
                bna_data_for_coords_uniq = np.unique(bna_data)

                # Number of parcels:
                par_max = len(bna_data_for_coords_uniq) - 1
                img_stack = []
                for idx in range(1, par_max + 1):
                    roi_img = bna_data == bna_data_for_coords_uniq[idx].astype('uint16')
                    img_stack.append(roi_img.astype('uint16'))
                img_stack = np.array(img_stack)

                img_list = []
                for idy in range(par_max):
                    img_list.append(new_img_like(atlas, img_stack[idy]))
                atlas_of_atlases.append(img_list)
                del img_list, img_stack, bna_data

            atlas_of_atlases = list(flatten(atlas_of_atlases))

            [super_atlas_ward, _] = nodemaker.create_parcel_atlas(atlas_of_atlases)
            super_atlas_ward.set_data_dtype(np.uint16)

            nib.save(super_atlas_ward, self.uatlas)
            del atlas_of_atlases, super_atlas_ward, conn_comp_atlases, mask_img_list, mask_voxels_dict

        print("%s%s%s" % (self.clust_type, self.k, " clusters: %.2fs" % (time.time() - start)))

        del self._clust_est
        self._func_img.uncache()
        self._clust_mask_corr_img.uncache()
        gc.collect()

        return self.uatlas
Code example #16
# All of this can be done at once using the `Parcellations` object.

from nilearn.regions import Parcellations

# Computing the Ward parcellation for the first time will be slow; we time it
# so the speed-up from caching is visible on the second run.
import time
start = time.time()

# Agglomerative Clustering: ward

# We build parameters of our own for this object. Parameters related to
# masking, caching and defining number of clusters and specific parcellations
# method.
ward = Parcellations(method='ward', n_parcels=1000,
                     standardize=False, smoothing_fwhm=2.,
                     memory='nilearn_cache', memory_level=1,
                     verbose=1)
# Call fit on functional dataset: single subject (fewer samples).
ward.fit(dataset.func)
print("Ward agglomeration 1000 clusters: %.2fs" % (time.time() - start))

# Now we compute Ward clustering with 2000 clusters and compare the runtime
# with the 1000-cluster run, to see the benefit of caching the second time.

# We initialize class again with n_parcels=2000 this time.
start = time.time()
ward = Parcellations(method='ward', n_parcels=2000,
                     standardize=False, smoothing_fwhm=2.,
                     memory='nilearn_cache', memory_level=1,
                     verbose=1)
ward.fit(dataset.func)
Code example #17
#
# Transforming a list of images to a data matrix and building brain
# parcellations can all be done at once using the `Parcellations` object.


# Computing ward for the first time, will be long... This can be seen by
# measuring using time
start = time.time()

# Agglomerative Clustering: ward

# We build parameters of our own for this object. Parameters related to
# masking, caching and defining number of clusters and specific parcellations
# method.
ward = Parcellations(method='ward', n_parcels=1000,
                     standardize=False, smoothing_fwhm=2.,
                     memory='nilearn_cache', memory_level=1,
                     verbose=1)
# Call fit on functional dataset: single subject (less samples).
ward.fit(dataset.func)
print("Ward agglomeration 1000 clusters: %.2fs" % (time.time() - start))

# We compute now ward clustering with 2000 clusters and compare
# time with 1000 clusters. To see the benefits of caching for second time.

# We initialize class again with n_parcels=2000 this time.
start = time.time()
ward = Parcellations(method='ward', n_parcels=2000,
                     standardize=False, smoothing_fwhm=2.,
                     memory='nilearn_cache', memory_level=1,
                     verbose=1)
ward.fit(dataset.func)
Code example #18
File: clustools.py Project: neurolibre/PyNets
class NiParcellate(object):
    """
    Class for implementing various clustering routines.
    """
    def __init__(self,
                 func_file,
                 clust_mask,
                 k,
                 clust_type,
                 local_corr,
                 outdir,
                 conf=None,
                 mask=None):
        """
        Parameters
        ----------
        func_file : str
            File path to a 4D Nifti1Image containing fMRI data.
        clust_mask : str
            File path to a 3D NIFTI file containing a mask, which restricts the
            voxels used in the clustering.
        k : int
            Number of clusters that will be generated.
        clust_type : str
            Type of clustering to be performed (e.g. 'ward', 'kmeans', 'complete', 'average').
        local_corr : str
            Type of local connectivity to use as the basis for clustering methods. Options are 'tcorr',
            'scorr', or 'allcorr'. Default is 'tcorr'.
        outdir : str
            Path to base derivatives directory.
        conf : str
            File path to a confound regressor file for reducing noise in the
            time-series when extracting signals from ROIs.
        mask : str
            File path to a 3D NIFTI file containing a mask, which restricts the
            voxels used in the analysis.

        References
        ----------
        .. [1] Thirion, B., Varoquaux, G., Dohmatob, E., & Poline, J. B. (2014).
          Which fMRI clustering gives good brain parcellations?
          Frontiers in Neuroscience. https://doi.org/10.3389/fnins.2014.00167
        .. [2] Bellec, P., Rosa-Neto, P., Lyttelton, O. C., Benali, H., & Evans, A. C. (2010).
          Multi-level bootstrap analysis of stable clusters in resting-state fMRI.
          NeuroImage. https://doi.org/10.1016/j.neuroimage.2010.02.082
        .. [3] Garcia-Garcia, M., Nikolaidis, A., Bellec, P., Craddock, R. C., Cheung, B.,
          Castellanos, F. X., & Milham, M. P. (2018). Detecting stable individual
          differences in the functional organization of the human basal ganglia.
          NeuroImage. https://doi.org/10.1016/j.neuroimage.2017.07.029

        """
        self.func_file = func_file
        self.clust_mask = clust_mask
        self.k = int(k)
        self.clust_type = clust_type
        self.conf = conf
        self.local_corr = local_corr
        self.uatlas = None
        self.atlas = None
        self._detrending = True
        self._standardize = True
        self._func_img = nib.load(self.func_file)
        self.mask = mask
        self._mask_img = None
        self._local_conn_mat_path = None
        self._dir_path = None
        self._clust_est = None
        self._local_conn = None
        self._clust_mask_corr_img = None
        self._func_img_data = None
        self._masked_fmri_vol = None
        self._conn_comps = None
        self.num_conn_comps = None
        self.outdir = outdir

    def create_clean_mask(self, num_std_dev=1.5):
        """
        Create a subject-refined version of the clustering mask.
        """
        import os
        from pynets.core import utils
        from nilearn.masking import intersect_masks
        from nilearn.image import index_img, math_img, resample_img
        mask_name = os.path.basename(self.clust_mask).split('.nii')[0]
        self.atlas = f"{mask_name}_{self.clust_type}_k{self.k}"
        print(
            f"\nCreating atlas using {self.clust_type} at cluster level {str(self.k)} for {str(self.atlas)}...\n"
        )
        self._dir_path = utils.do_dir_path(self.atlas, self.outdir)
        self.uatlas = f"{self._dir_path}/{mask_name}_clust-{self.clust_type}_k{str(self.k)}.nii.gz"

        # Load clustering mask
        self._func_img.set_data_dtype(np.float32)
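        # Use a single functional volume (index 1) as the intensity and grid
        # reference for refining the clustering mask.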
        func_vol_img = index_img(self._func_img, 1)
        func_vol_img.set_data_dtype(np.uint16)
        clust_mask_res_img = resample_img(nib.load(self.clust_mask),
                                          target_affine=func_vol_img.affine,
                                          target_shape=func_vol_img.shape,
                                          interpolation='nearest')
        clust_mask_res_img.set_data_dtype(np.uint16)
        func_data = np.asarray(func_vol_img.dataobj).astype('float32')
        func_int_thr = np.round(
            np.mean(func_data[func_data > 0]) -
            np.std(func_data[func_data > 0]) * num_std_dev, 3)
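        # Voxels dimmer than (mean - num_std_dev * std) of the nonzero
        # functional signal are treated as background in the intersections
        # below.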
        if self.mask is not None:
            self._mask_img = nib.load(self.mask)
            self._mask_img.set_data_dtype(np.uint16)
            mask_res_img = resample_img(self._mask_img,
                                        target_affine=func_vol_img.affine,
                                        target_shape=func_vol_img.shape,
                                        interpolation='nearest')
            mask_res_img.set_data_dtype(np.uint16)
            self._clust_mask_corr_img = intersect_masks([
                math_img('img > ' + str(func_int_thr), img=func_vol_img),
                math_img('img > 0.01', img=clust_mask_res_img),
                math_img('img > 0.01', img=mask_res_img)
            ],
                                                        threshold=1,
                                                        connected=False)
            self._clust_mask_corr_img.set_data_dtype(np.uint16)
            self._mask_img.uncache()
            mask_res_img.uncache()
        else:
            self._clust_mask_corr_img = intersect_masks([
                math_img('img > ' + str(func_int_thr), img=func_vol_img),
                math_img('img > 0.01', img=clust_mask_res_img)
            ],
                                                        threshold=1,
                                                        connected=False)
            self._clust_mask_corr_img.set_data_dtype(np.uint16)
        nib.save(self._clust_mask_corr_img,
                 f"{self._dir_path}/{mask_name}.nii.gz")

        del func_data
        func_vol_img.uncache()
        clust_mask_res_img.uncache()

        return self.atlas

    def create_local_clustering(self, overwrite, r_thresh, min_region_size=80):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import os.path as op
        from scipy.sparse import save_npz, load_npz
        from nilearn.regions import connected_regions

        try:
            conn_comps = connected_regions(self._clust_mask_corr_img,
                                           extract_type='connected_components',
                                           min_region_size=min_region_size)
            self._conn_comps = conn_comps[0]
            self.num_conn_comps = len(conn_comps[1])
        except Exception as e:
            raise ValueError('Clustering mask is empty!') from e

        if not self._conn_comps:
            if np.sum(np.asarray(self._clust_mask_corr_img.dataobj)) == 0:
                raise ValueError('Clustering mask is empty!')
            else:
                self._conn_comps = self._clust_mask_corr_img
                self.num_conn_comps = 1
        print(
            f"Detected {self.num_conn_comps} connected components in clustering mask with a minimum region "
            f"size of {min_region_size}")
        if self.clust_type in ('complete', 'average', 'single'):
            if self.num_conn_comps > 1:
                raise ValueError(
                    'Clustering method unstable with spatial constraints applied to multiple '
                    'connected components.')

        if (self.clust_type == 'ward'
                and self.num_conn_comps > 1) or self.clust_type == 'ncut':
            if self.k < self.num_conn_comps:
                raise ValueError(
                    'k must be at least the total number of connected components in '
                    'the mask in the case of agglomerative clustering.')
            if self.local_corr in ('tcorr', 'scorr'):
                self._local_conn_mat_path = f"{self.uatlas.split('.nii')[0]}_{self.local_corr}_conn.npz"

                if (not op.isfile(
                        self._local_conn_mat_path)) or (overwrite is True):
                    from pynets.fmri.clustools import make_local_connectivity_tcorr, make_local_connectivity_scorr
                    if self.local_corr == 'tcorr':
                        self._local_conn = make_local_connectivity_tcorr(
                            self._func_img,
                            self._clust_mask_corr_img,
                            thresh=r_thresh)
                    elif self.local_corr == 'scorr':
                        self._local_conn = make_local_connectivity_scorr(
                            self._func_img,
                            self._clust_mask_corr_img,
                            thresh=r_thresh)
                    else:
                        raise ValueError(
                            'Local connectivity type not available')

                    print(
                        f"Saving spatially constrained connectivity structure to: {self._local_conn_mat_path}"
                    )
                    save_npz(self._local_conn_mat_path, self._local_conn)
                elif op.isfile(self._local_conn_mat_path):
                    self._local_conn = load_npz(self._local_conn_mat_path)
            elif self.local_corr == 'allcorr':
                if self.clust_type == 'ncut':
                    raise ValueError(
                        'Must select either `tcorr` or `scorr` local connectivity option if you are using '
                        '`ncut` clustering method')
                self._local_conn = 'auto'
            else:
                raise ValueError(
                    'Local connectivity method not recognized. Only tcorr, scorr, and allcorr are currently '
                    'supported')
        else:
            self._local_conn = 'auto'
        return

    def prep_boot(self, blocklength=1):
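        """
        Apply the refined clustering mask to the functional image and return
        the 2D time-series array along with a block length for
        block-bootstrap resampling (the common sqrt(T) heuristic, scaled by
        `blocklength`).
        """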
        from nilearn.masking import apply_mask
        ts_data = apply_mask(self._func_img, self._clust_mask_corr_img)
        return ts_data, int(int(np.sqrt(ts_data.shape[0])) * blocklength)

    def parcellate(self, func_boot_img):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import time
        import os
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == 'ward') and (self.local_corr != 'allcorr'):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError(
                        'File containing sparse matrix of local connectivity structure not found.'
                    )
            else:
                raise FileNotFoundError(
                    'File containing sparse matrix of local connectivity structure not found.'
                )

        if self.clust_type in ('complete', 'average', 'single', 'ward') or \
                (self.clust_type in ('rena', 'kmeans') and
                 self.num_conn_comps == 1):
            self._clust_est = Parcellations(method=self.clust_type,
                                            standardize=self._standardize,
                                            detrend=self._detrending,
                                            n_parcels=self.k,
                                            mask=self._clust_mask_corr_img,
                                            connectivity=self._local_conn,
                                            mask_strategy='background',
                                            memory_level=2,
                                            random_state=42)

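            # Regress confounds out during fitting, filling any NaNs first
            # (via fill_confound_nans), since nilearn rejects NaN confounds.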
            if self.conf is not None:
                import pandas as pd
                confounds = pd.read_csv(self.conf, sep='\t')
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self._dir_path)
                    self._clust_est.fit(func_boot_img, confounds=conf_corr)
                else:
                    self._clust_est.fit(func_boot_img, confounds=self.conf)
            else:
                self._clust_est.fit(func_boot_img)

            self._clust_est.labels_img_.set_data_dtype(np.uint16)
            print(
                f"{self.clust_type}{self.k}{(' clusters: %.2fs' % (time.time() - start))}"
            )
            return self._clust_est.labels_img_

        elif self.clust_type == 'ncut':
            out_img = parcellate_ncut(self._local_conn, self.k,
                                      self._clust_mask_corr_img)
            out_img.set_data_dtype(np.uint16)
            print(
                f"{self.clust_type}{self.k}{(' clusters: %.2fs' % (time.time() - start))}"
            )
            return out_img

        elif self.clust_type in ('rena', 'kmeans') and self.num_conn_comps > 1:
            from pynets.core import nodemaker
            from nilearn.regions import connected_regions, Parcellations
            from nilearn.image import iter_img, new_img_like
            from pynets.core.utils import flatten, proportional

            mask_img_list = []
            mask_voxels_dict = dict()
            for i, mask_img in enumerate(list(iter_img(self._conn_comps))):
                mask_voxels_dict[i] = int(
                    np.sum(np.asarray(mask_img.dataobj)))
                mask_img_list.append(mask_img)

            # Allocate k across connected components using the Hagenbach-Bischoff quota, proportional to voxel counts
            k_list = proportional(self.k, list(mask_voxels_dict.values()))
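            # For example (hypothetical numbers): with k=10 and component
            # sizes [600, 300, 100], the quota is 1000 / (10 + 1) ~= 90.9,
            # so largest-remainder allocation would yield k_list == [6, 3, 1].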

            conn_comp_atlases = []
            print(
                f"Building {len(mask_img_list)} separate atlases with voxel-proportional k clusters for each "
                f"connected component...")
            for i, mask_img in enumerate(mask_img_list):
                if k_list[i] == 0:
                    # Skip components that were allocated zero clusters.
                    continue
                self._clust_est = Parcellations(method=self.clust_type,
                                                standardize=self._standardize,
                                                detrend=self._detrending,
                                                n_parcels=k_list[i],
                                                mask=mask_img,
                                                mask_strategy='background',
                                                memory_level=2,
                                                random_state=42)
                if self.conf is not None:
                    import pandas as pd
                    confounds = pd.read_csv(self.conf, sep='\t')
                    if confounds.isnull().values.any():
                        conf_corr = fill_confound_nans(confounds,
                                                       self._dir_path)
                        self._clust_est.fit(func_boot_img, confounds=conf_corr)
                    else:
                        self._clust_est.fit(func_boot_img, confounds=self.conf)
                else:
                    self._clust_est.fit(func_boot_img)
                conn_comp_atlases.append(self._clust_est.labels_img_)

            # Then combine the multiple atlases, corresponding to each connected component, into a single atlas
            atlas_of_atlases = []
            for atlas in conn_comp_atlases:
                bna_data = np.around(np.asarray(
                    atlas.dataobj)).astype('uint16')

                # Get an array of unique parcels
                bna_data_for_coords_uniq = np.unique(bna_data)

                # Number of parcels:
                par_max = len(bna_data_for_coords_uniq) - 1
                img_stack = []
                for idx in range(1, par_max + 1):
                    roi_img = bna_data == bna_data_for_coords_uniq[idx].astype(
                        'uint16')
                    img_stack.append(roi_img.astype('uint16'))
                img_stack = np.array(img_stack)

                img_list = []
                for idy in range(par_max):
                    img_list.append(new_img_like(atlas, img_stack[idy]))
                atlas_of_atlases.append(img_list)
                del img_list, img_stack, bna_data

            atlas_of_atlases = list(flatten(atlas_of_atlases))

            [super_atlas_ward,
             _] = nodemaker.create_parcel_atlas(atlas_of_atlases)
            super_atlas_ward.set_data_dtype(np.uint16)
            del atlas_of_atlases, conn_comp_atlases, mask_img_list, mask_voxels_dict

            print(
                f"{self.clust_type}{self.k}{(' clusters: %.2fs' % (time.time() - start))}"
            )
            return super_atlas_ward
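
For orientation, a hypothetical end-to-end use of this class might look like the sketch below (paths, k, and thresholds are illustrative placeholders, not values from the source):

# Illustrative workflow only; file paths and parameters are hypothetical.
nip = NiParcellate(func_file='/data/sub-01_bold.nii.gz',
                   clust_mask='/data/clust_mask.nii.gz',
                   k=100,
                   clust_type='ward',
                   local_corr='tcorr',
                   outdir='/data/derivatives')
atlas = nip.create_clean_mask(num_std_dev=1.5)
nip.create_local_clustering(overwrite=False, r_thresh=0.5)
ts_data, block_size = nip.prep_boot()
labels_img = nip.parcellate(nip._func_img)  # or a block-bootstrapped image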