Code example #1
    def extract_ts_parc(self):
        """
        API for employing Nilearn's NiftiLabelsMasker to extract fMRI
        time-series data from parcel ROI's defined by a given 3D atlas image
        of integer-based voxel intensities. The resulting time-series can then
        optionally be resampled using circular-block bootstrapping. The final
        2D m x n array is ultimately saved to file in .npy format.
        """
        import numpy as np
        import nibabel as nib
        from nilearn import input_data
        from pynets.fmri.estimation import fill_confound_nans

        self._net_parcels_map_nifti = nib.load(self.net_parcels_nii_path,
                                               mmap=True)
        self._net_parcels_map_nifti.set_data_dtype(np.int16)
        self._parcel_masker = input_data.NiftiLabelsMasker(
            labels_img=self._net_parcels_map_nifti,
            background_label=0,
            standardize=True,
            smoothing_fwhm=float(self.smooth),
            low_pass=self.low_pass,
            high_pass=self.hpass,
            detrend=self._detrending,
            t_r=self._t_r,
            verbose=2,
            resampling_target="labels",
            dtype="auto",
            mask_img=self._mask_img,
            strategy=self.extract_strategy)

        if self.conf is not None:
            import pandas as pd
            import os

            confounds = pd.read_csv(self.conf, sep="\t")
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, self.dir_path)
                self.ts_within_nodes = self._parcel_masker.fit_transform(
                    self._func_img, confounds=conf_corr)
                os.remove(conf_corr)
            else:
                self.ts_within_nodes = self._parcel_masker.fit_transform(
                    self._func_img, confounds=self.conf)
        else:
            self.ts_within_nodes = self._parcel_masker.fit_transform(
                self._func_img)

        self._func_img.uncache()

        if self.ts_within_nodes is None:
            try:
                raise RuntimeError("\nTime-series extraction failed!")
            except RuntimeError:
                import sys
                sys.exit(1)
        else:
            self.node_size = "parc"

        return
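
For context, a minimal, self-contained sketch of the same NiftiLabelsMasker pattern outside of PyNets is shown below; the file paths and parameter values are placeholders, not part of the original code.

from nilearn import input_data

# Placeholder paths -- substitute a real integer-valued atlas and a 4D BOLD image
atlas_path = "atlas_labels.nii.gz"
func_path = "func_preproc.nii.gz"

masker = input_data.NiftiLabelsMasker(
    labels_img=atlas_path,       # 3D parcellation with integer voxel intensities
    background_label=0,
    standardize=True,
    detrend=True,
    t_r=2.0,
    resampling_target="labels",
)
# time_series = masker.fit_transform(func_path, confounds="confounds.tsv")
# -> 2D array of shape (n_volumes, n_parcels), which extract_ts_parc saves as .npy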
Code example #2
File: test_estimation.py  Project: dPys/PyNets
def test_fill_confound_nans():
    """ Testing filling pd dataframe np.nan values with mean."""

    confounds = pd.DataFrame({'Column1': [np.nan, 2, 4]})
    with tempfile.TemporaryDirectory() as dir_path:
        conf_corr = fill_confound_nans(confounds, dir_path)
        conf_corr = np.genfromtxt(conf_corr, delimiter='\t', skip_header=True)

    assert not np.isnan(conf_corr).any()
    assert conf_corr[0] == np.mean(conf_corr[1:])
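
The helper fill_confound_nans itself is not shown on this page. Based on the test above (mean imputation, tab-separated output read back from a file path), a plausible minimal sketch might look like the following; the function body and file name are assumptions, not the actual PyNets implementation.

import os
import pandas as pd

def fill_confound_nans_sketch(confounds, dir_path):
    """Replace NaNs in a confounds DataFrame with column means, write a .tsv,
    and return the path to the corrected file (hypothetical re-implementation)."""
    conf_corr = confounds.fillna(confounds.mean())
    out_path = os.path.join(dir_path, "confounds_mean_imputed.tsv")
    conf_corr.to_csv(out_path, sep="\t", index=False)
    return out_path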
Code example #3
    def extract_ts_coords(self):
        """
        API for employing Nilearn's NiftiSpheresMasker to extract fMRI
        time-series data from spherical ROI's based on a given list of seed
        coordinates. The resulting time-series can then optionally be
        resampled using circular-block bootstrapping. The final 2D m x n
        array is ultimately saved to file in .npy format.
        """
        from nilearn import input_data
        from pynets.fmri.estimation import fill_confound_nans

        print("%s%s%s" % ('Using node radius: ', self.node_size, ' mm'))
        self._spheres_masker = input_data.NiftiSpheresMasker(
            seeds=self.coords,
            radius=float(self.node_size),
            allow_overlap=True,
            standardize=True,
            smoothing_fwhm=float(self.smooth),
            high_pass=self.hpass,
            detrend=self._detrending,
            t_r=self._t_r,
            verbose=2,
            dtype='auto',
            mask_img=self._mask_img)
        if self.conf is not None:
            import pandas as pd
            confounds = pd.read_csv(self.conf, sep='\t')
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, self.dir_path)
                self.ts_within_nodes = self._spheres_masker.fit_transform(
                    self._func_img, confounds=conf_corr)
            else:
                self.ts_within_nodes = self._spheres_masker.fit_transform(
                    self._func_img, confounds=self.conf)
        else:
            self.ts_within_nodes = self._spheres_masker.fit_transform(
                self._func_img)

        self._func_img.uncache()

        if self.ts_within_nodes is None:
            raise RuntimeError('\nERROR: Time-series extraction failed!')
        else:
            print("%s%s%d%s" % ('\nTime series has {0} samples'.format(
                self.ts_within_nodes.shape[0]), ' mean extracted from ',
                                len(self.coords), ' coordinate ROI\'s'))

        return
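
A minimal, self-contained version of the same NiftiSpheresMasker pattern is sketched below; the seed coordinates, radius, and file names are illustrative placeholders.

from nilearn import input_data

# Example seeds in MNI space (mm); PyNets passes self.coords here
coords = [(0, -52, 18), (-46, -68, 32), (46, -68, 32)]

masker = input_data.NiftiSpheresMasker(
    seeds=coords,
    radius=6.0,          # sphere radius in mm (self.node_size above)
    allow_overlap=True,
    standardize=True,
    detrend=True,
    t_r=2.0,
)
# time_series = masker.fit_transform("func_preproc.nii.gz", confounds="confounds.tsv")
# -> 2D array of shape (n_volumes, n_seeds)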
Code example #4
    def parcellate(self):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import gc
        import time
        import os
        import numpy as np
        import nibabel as nib
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == 'ward') and (self.local_corr != 'allcorr'):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')
            else:
                raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')

        self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize, detrend=self._detrending,
                                        n_parcels=int(self.k), mask=self._clust_mask_corr_img,
                                        connectivity=self._local_conn, mask_strategy='background', memory_level=2,
                                        smoothing_fwhm=2, random_state=42)

        if self.conf is not None:
            import pandas as pd
            confounds = pd.read_csv(self.conf, sep='\t')
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, self._dir_path)
                self._clust_est.fit(self._func_img, confounds=conf_corr)
            else:
                self._clust_est.fit(self._func_img, confounds=self.conf)
        else:
            self._clust_est.fit(self._func_img)

        self._clust_est.labels_img_.set_data_dtype(np.uint16)
        nib.save(self._clust_est.labels_img_, self.uatlas)

        print("%s%s%s" % (self.clust_type, self.k, " clusters: %.2fs" % (time.time() - start)))

        del self._clust_est
        self._func_img.uncache()
        self._clust_mask_corr_img.uncache()
        gc.collect()

        return self.uatlas
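
The clustering itself is delegated to nilearn.regions.Parcellations; a minimal standalone sketch of that call is shown below, with placeholder inputs.

from nilearn.regions import Parcellations

clust_est = Parcellations(
    method="ward",       # other options include 'kmeans', 'rena', 'complete', 'average'
    n_parcels=200,       # the k requested above
    standardize=True,
    detrend=True,
    mask_strategy="background",
    random_state=42,
)
# clust_est.fit("func_preproc.nii.gz")          # optionally confounds=...
# clust_est.labels_img_ is the fitted integer-labelled parcellation image,
# which parcellate() saves to self.uatlas with nib.save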
Code example #5
File: interfaces.py  Project: dPys/PyNets
    def extract_ts_parc(self):
        """
        API for employing Nilearn's NiftiLabelsMasker to extract fMRI
        time-series data from parcel ROI's defined by a given 3D atlas image
        of integer-based voxel intensities. The resulting time-series can then
        optionally be resampled using circular-block bootstrapping. The final
        2D m x n array is ultimately saved to file in .npy format.
        """
        import pandas as pd
        from nilearn import input_data
        from pynets.fmri.estimation import fill_confound_nans

        self._parcel_masker = input_data.NiftiLabelsMasker(
            labels_img=self._net_parcels_map_nifti,
            background_label=0,
            standardize=True,
            smoothing_fwhm=float(self.smooth),
            low_pass=self.low_pass,
            high_pass=self.hpass,
            detrend=self._detrending,
            t_r=self._t_r,
            verbose=2,
            resampling_target="labels",
            dtype="auto",
            mask_img=self._mask_img,
            strategy=self.signal)

        if self.conf is not None:
            import os

            confounds = pd.read_csv(self.conf, sep="\t")

            cols = [
                i for i in confounds.columns
                if 'motion_outlier' in i or i == 'framewise_displacement'
                or i == 'white_matter' or i == 'csf' or i == 'std_dvars'
                or i == 'rot_z' or i == 'rot_y' or i == 'rot_x'
                or i == 'trans_z' or i == 'trans_y' or i == 'trans_x'
                or 'non_steady_state_outlier' in i
            ]

            if len(confounds.index) == self._func_img.shape[-1]:
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self.dir_path)
                    conf_corr_df = pd.read_csv(conf_corr, sep="\t")
                    cols = [i for i in cols if i in conf_corr_df.columns]
                    self.ts_within_nodes = self._parcel_masker.fit_transform(
                        self._func_img.slicer[:, :, :, 5:],
                        confounds=conf_corr_df.loc[5:][cols].values)
                    os.remove(conf_corr)
                else:
                    self.ts_within_nodes = self._parcel_masker.fit_transform(
                        self._func_img.slicer[:, :, :, 5:],
                        confounds=pd.read_csv(self.conf,
                                              sep="\t").loc[5:][cols].values)
            else:
                from nilearn.image import high_variance_confounds
                print(f"Shape of confounds ({len(confounds.index)}) does not"
                      f" equal the number of volumes "
                      f"({self._func_img.shape[-1]}) in the time-series")
                self.ts_within_nodes = self._parcel_masker.fit_transform(
                    self._func_img.slicer[:, :, :, 5:],
                    confounds=pd.DataFrame(
                        high_variance_confounds(self._func_img,
                                                percentile=1)).loc[5:].values)
        else:
            from nilearn.image import high_variance_confounds
            self.ts_within_nodes = self._parcel_masker.fit_transform(
                self._func_img.slicer[:, :, :, 5:],
                confounds=pd.DataFrame(
                    high_variance_confounds(self._func_img,
                                            percentile=1)).loc[5:].values)

        self._func_img.uncache()

        if self.ts_within_nodes is None:
            raise RuntimeError("\nTime-series extraction failed!")

        else:
            self.node_radius = "parc"

        return
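
When no usable confounds file is available, this version falls back to data-driven confounds from nilearn.image.high_variance_confounds and drops the first five volumes from both the image and the confounds. A small runnable sketch of that fallback on synthetic data follows; the array sizes are arbitrary.

import numpy as np
import nibabel as nib
import pandas as pd
from nilearn.image import high_variance_confounds

# Synthetic 4D "functional" image: 6 x 6 x 6 voxels, 40 volumes
rng = np.random.default_rng(0)
func_img = nib.Nifti1Image(
    rng.standard_normal((6, 6, 6, 40)).astype("float32"), affine=np.eye(4))

# Data-driven confounds from the highest-variance voxel time courses
hv = pd.DataFrame(high_variance_confounds(func_img, percentile=1))

# Drop the same leading volumes as func_img.slicer[:, :, :, 5:] so that the
# confounds stay aligned with the truncated time-series
print(hv.loc[5:].values.shape)  # (35, n_confounds)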
Code example #6
    def parcellate(self):
        """
        API for performing any of a variety of clustering routines available through NiLearn.
        """
        import gc
        import time
        import os
        import numpy as np
        import nibabel as nib
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == 'ward') and (self.local_corr != 'allcorr'):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')
            else:
                raise FileNotFoundError('File containing sparse matrix of local connectivity structure not found.')

        if self.clust_type == 'complete' or self.clust_type == 'average' or self.clust_type == 'single' or \
            self.clust_type == 'ward' or (self.clust_type == 'rena' and self.num_conn_comps == 1) or \
                (self.clust_type == 'kmeans' and self.num_conn_comps == 1):
            self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize,
                                            detrend=self._detrending,
                                            n_parcels=self.k, mask=self._clust_mask_corr_img,
                                            connectivity=self._local_conn, mask_strategy='background', memory_level=2,
                                            random_state=42)

            if self.conf is not None:
                import pandas as pd
                confounds = pd.read_csv(self.conf, sep='\t')
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self._dir_path)
                    self._clust_est.fit(self._func_img, confounds=conf_corr)
                else:
                    self._clust_est.fit(self._func_img, confounds=self.conf)
            else:
                self._clust_est.fit(self._func_img)

            self._clust_est.labels_img_.set_data_dtype(np.uint16)
            nib.save(self._clust_est.labels_img_, self.uatlas)
        elif self.clust_type == 'ncut':
            out_img = parcellate_ncut(self._local_conn, self.k, self._clust_mask_corr_img)
            out_img.set_data_dtype(np.uint16)
            nib.save(out_img, self.uatlas)
        elif self.clust_type == 'rena' or self.clust_type == 'kmeans' and self.num_conn_comps > 1:
            from pynets.core import nodemaker
            from nilearn.regions import connected_regions, Parcellations
            from nilearn.image import iter_img, new_img_like
            from pynets.core.utils import flatten, proportional

            mask_img_list = []
            mask_voxels_dict = dict()
            for i, mask_img in enumerate(list(iter_img(self._conn_comps))):
                mask_voxels_dict[i] = int(np.sum(np.asarray(mask_img.dataobj)))
                mask_img_list.append(mask_img)

            # Allocate k across connected components using Hagenbach-Bischoff Quota based on number of voxels
            k_list = proportional(self.k, list(mask_voxels_dict.values()))

            conn_comp_atlases = []
            print("%s%s%s" % ('Building ', len(mask_img_list), ' separate atlases with voxel-proportional nclusters '
                                                               'for each connected component...'))
            for i, mask_img in enumerate(mask_img_list):
                if k_list[i] == 0:
                    # print('0 voxels in component. Discarding...')
                    continue
                self._clust_est = Parcellations(method=self.clust_type, standardize=self._standardize,
                                                detrend=self._detrending,
                                                n_parcels=k_list[i], mask=mask_img,
                                                mask_strategy='background',
                                                memory_level=2,
                                                random_state=42)
                if self.conf is not None:
                    import pandas as pd
                    confounds = pd.read_csv(self.conf, sep='\t')
                    if confounds.isnull().values.any():
                        conf_corr = fill_confound_nans(confounds, self._dir_path)
                        self._clust_est.fit(self._func_img, confounds=conf_corr)
                    else:
                        self._clust_est.fit(self._func_img, confounds=self.conf)
                else:
                    self._clust_est.fit(self._func_img)
                conn_comp_atlases.append(self._clust_est.labels_img_)

            # Then combine the multiple atlases, corresponding to each connected component, into a single atlas
            atlas_of_atlases = []
            for atlas in conn_comp_atlases:
                bna_data = np.around(np.asarray(atlas.dataobj)).astype('uint16')

                # Get an array of unique parcels
                bna_data_for_coords_uniq = np.unique(bna_data)

                # Number of parcels:
                par_max = len(bna_data_for_coords_uniq) - 1
                img_stack = []
                for idx in range(1, par_max + 1):
                    roi_img = bna_data == bna_data_for_coords_uniq[idx].astype('uint16')
                    img_stack.append(roi_img.astype('uint16'))
                img_stack = np.array(img_stack)

                img_list = []
                for idy in range(par_max):
                    img_list.append(new_img_like(atlas, img_stack[idy]))
                atlas_of_atlases.append(img_list)
                del img_list, img_stack, bna_data

            atlas_of_atlases = list(flatten(atlas_of_atlases))

            [super_atlas_ward, _] = nodemaker.create_parcel_atlas(atlas_of_atlases)
            super_atlas_ward.set_data_dtype(np.uint16)

            nib.save(super_atlas_ward, self.uatlas)
            del atlas_of_atlases, super_atlas_ward, conn_comp_atlases, mask_img_list, mask_voxels_dict

        print("%s%s%s" % (self.clust_type, self.k, " clusters: %.2fs" % (time.time() - start)))

        del self._clust_est
        self._func_img.uncache()
        self._clust_mask_corr_img.uncache()
        gc.collect()

        return self.uatlas
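
The k-allocation step relies on pynets.core.utils.proportional, which is not shown on this page. The comment above describes it as a Hagenbach-Bischoff quota; the sketch below is a hypothetical largest-remainder implementation of that idea for illustration only, not the actual PyNets helper.

import math

def proportional_sketch(k, voxel_counts):
    """Split k clusters across components in proportion to their voxel counts
    using the Hagenbach-Bischoff quota (hypothetical illustration)."""
    quota = sum(voxel_counts) / (k + 1)
    shares = [count / quota for count in voxel_counts]
    allocation = [math.floor(s) for s in shares]
    # Hand out any remaining clusters to the largest fractional remainders
    remainders = [s - a for s, a in zip(shares, allocation)]
    for idx in sorted(range(len(remainders)), key=remainders.__getitem__,
                      reverse=True):
        if sum(allocation) >= k:
            break
        allocation[idx] += 1
    return allocation

# e.g. proportional_sketch(100, [5000, 3000, 200]) -> [61, 37, 2]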
Code example #7
def parcellate(func_boot_img, local_corr, clust_type, _local_conn_mat_path,
               num_conn_comps, _clust_mask_corr_img, _standardize,
               _detrending, k, _local_conn, conf, _dir_path, _conn_comps):
    """
    API for performing any of a variety of clustering routines available
    through NiLearn.
    """
    import time
    import os
    import numpy as np
    from nilearn.regions import Parcellations
    from pynets.fmri.estimation import fill_confound_nans
    # from joblib import Memory
    import tempfile

    cache_dir = tempfile.mkdtemp()
    # memory = Memory(cache_dir, verbose=0)

    start = time.time()

    if (clust_type == "ward") and (local_corr != "allcorr"):
        if _local_conn_mat_path is not None:
            if not os.path.isfile(_local_conn_mat_path):
                raise FileNotFoundError(
                    "File containing sparse matrix of local connectivity"
                    " structure not found."
                )
        else:
            raise FileNotFoundError(
                "File containing sparse matrix of local connectivity"
                " structure not found."
            )

    if (
        clust_type == "complete"
        or clust_type == "average"
        or clust_type == "single"
        or clust_type == "ward"
        or (clust_type == "rena" and num_conn_comps == 1)
        or (clust_type == "kmeans" and num_conn_comps == 1)
    ):
        _clust_est = Parcellations(
            method=clust_type,
            standardize=_standardize,
            detrend=_detrending,
            n_parcels=k,
            mask=_clust_mask_corr_img,
            connectivity=_local_conn,
            mask_strategy="background",
            random_state=42
        )

        if conf is not None:
            import pandas as pd
            import random
            from nipype.utils.filemanip import fname_presuffix, copyfile

            out_name_conf = fname_presuffix(
                conf, suffix=f"_tmp{random.randint(1, 1000)}",
                newpath=cache_dir
            )
            copyfile(
                conf,
                out_name_conf,
                copy=True,
                use_hardlink=False)

            confounds = pd.read_csv(out_name_conf, sep="\t")
            if confounds.isnull().values.any():
                conf_corr = fill_confound_nans(confounds, _dir_path)
                try:
                    _clust_est.fit(func_boot_img, confounds=conf_corr)
                except UserWarning:
                    return None
                os.remove(conf_corr)
            else:
                try:
                    _clust_est.fit(func_boot_img, confounds=out_name_conf)
                except UserWarning:
                    return None
            os.remove(out_name_conf)
        else:
            try:
                _clust_est.fit(func_boot_img)
            except UserWarning:
                return None
        _clust_est.labels_img_.set_data_dtype(np.uint16)
        print(
            f"{clust_type}{k}"
            f"{(' clusters: %.2fs' % (time.time() - start))}"
        )

        return _clust_est.labels_img_

    elif clust_type == "ncut":
        out_img = parcellate_ncut(
            _local_conn, k, _clust_mask_corr_img
        )
        out_img.set_data_dtype(np.uint16)
        print(
            f"{clust_type}{k}"
            f"{(' clusters: %.2fs' % (time.time() - start))}"
        )
        return out_img

    elif (
        clust_type == "rena"
        or clust_type == "kmeans"
        and num_conn_comps > 1
    ):
        from pynets.core import nodemaker
        from nilearn.regions import connected_regions, Parcellations
        from nilearn.image import iter_img, new_img_like
        from pynets.core.utils import flatten, proportional

        mask_img_list = []
        mask_voxels_dict = dict()
        for i, mask_img in enumerate(iter_img(_conn_comps)):
            mask_voxels_dict[i] = int(
                np.sum(np.asarray(mask_img.dataobj)))
            mask_img_list.append(mask_img)

        # Allocate k across connected components using Hagenbach-Bischoff
        # Quota based on number of voxels
        k_list = proportional(k, list(mask_voxels_dict.values()))

        conn_comp_atlases = []
        print(
            f"Building {len(mask_img_list)} separate atlases with "
            f"voxel-proportional k clusters for each "
            f"connected component...")
        for i, mask_img in enumerate(iter_img(mask_img_list)):
            if k_list[i] < 5:
                print(f"Only {k_list[i]} voxels in component. Discarding...")
                continue
            _clust_est = Parcellations(
                method=clust_type,
                standardize=_standardize,
                detrend=_detrending,
                n_parcels=k_list[i],
                mask=mask_img,
                mask_strategy="background",
                random_state=i
            )
            if conf is not None:
                import pandas as pd
                import random
                from nipype.utils.filemanip import fname_presuffix, copyfile

                out_name_conf = fname_presuffix(
                    conf, suffix=f"_tmp{random.randint(1, 1000)}",
                    newpath=cache_dir
                )
                copyfile(
                    conf,
                    out_name_conf,
                    copy=True,
                    use_hardlink=False)

                confounds = pd.read_csv(out_name_conf, sep="\t")
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(
                        confounds, _dir_path)
                    try:
                        _clust_est.fit(func_boot_img, confounds=conf_corr)
                    except UserWarning:
                        continue
                else:
                    try:
                        _clust_est.fit(func_boot_img, confounds=conf)
                    except UserWarning:
                        continue
            else:
                try:
                    _clust_est.fit(func_boot_img)
                except UserWarning:
                    continue
            conn_comp_atlases.append(_clust_est.labels_img_)

        # Then combine the multiple atlases, corresponding to each
        # connected component, into a single atlas
        atlas_of_atlases = []
        for atlas in iter_img(conn_comp_atlases):
            bna_data = np.around(
                np.asarray(
                    atlas.dataobj)).astype("uint16")

            # Get an array of unique parcels
            bna_data_for_coords_uniq = np.unique(bna_data)

            # Number of parcels:
            par_max = len(bna_data_for_coords_uniq) - 1
            img_stack = []
            for idx in range(1, par_max + 1):
                roi_img = bna_data == bna_data_for_coords_uniq[idx].astype(
                    "uint16")
                img_stack.append(roi_img.astype("uint16"))
            img_stack = np.array(img_stack)

            img_list = []
            for idy in range(par_max):
                img_list.append(new_img_like(atlas, img_stack[idy]))
            atlas_of_atlases.append(img_list)
            del img_list, img_stack, bna_data

        atlas_of_atlases = list(flatten(atlas_of_atlases))

        [super_atlas_ward, _] = nodemaker.create_parcel_atlas(
            atlas_of_atlases)
        super_atlas_ward.set_data_dtype(np.uint16)
        del atlas_of_atlases, conn_comp_atlases, mask_img_list, \
            mask_voxels_dict

        print(
            f"{clust_type}{k}"
            f"{(' clusters: %.2fs' % (time.time() - start))}"
        )

        # memory.clear(warn=False)

        return super_atlas_ward
Code example #8
    def parcellate(self, func_boot_img):
        """
        API for performing any of a variety of clustering routines available
        through NiLearn.
        """
        import time
        import os
        import numpy as np
        from nilearn.regions import Parcellations
        from pynets.fmri.estimation import fill_confound_nans

        start = time.time()

        if (self.clust_type == "ward") and (self.local_corr != "allcorr"):
            if self._local_conn_mat_path is not None:
                if not os.path.isfile(self._local_conn_mat_path):
                    raise FileNotFoundError(
                        "File containing sparse matrix of local connectivity"
                        " structure not found.")
            else:
                raise FileNotFoundError(
                    "File containing sparse matrix of local connectivity"
                    " structure not found.")

        if (self.clust_type == "complete" or self.clust_type == "average"
                or self.clust_type == "single" or self.clust_type == "ward"
                or (self.clust_type == "rena" and self.num_conn_comps == 1)
                or (self.clust_type == "kmeans" and self.num_conn_comps == 1)):
            _clust_est = Parcellations(
                method=self.clust_type,
                standardize=self._standardize,
                detrend=self._detrending,
                n_parcels=self.k,
                mask=self._clust_mask_corr_img,
                connectivity=self._local_conn,
                mask_strategy="background",
                memory_level=2,
                random_state=42,
            )

            if self.conf is not None:
                import pandas as pd

                confounds = pd.read_csv(self.conf, sep="\t")
                if confounds.isnull().values.any():
                    conf_corr = fill_confound_nans(confounds, self._dir_path)
                    _clust_est.fit(func_boot_img, confounds=conf_corr)
                else:
                    _clust_est.fit(func_boot_img, confounds=self.conf)
            else:
                _clust_est.fit(func_boot_img)

            _clust_est.labels_img_.set_data_dtype(np.uint16)
            print(f"{self.clust_type}{self.k}"
                  f"{(' clusters: %.2fs' % (time.time() - start))}")
            return _clust_est.labels_img_

        elif self.clust_type == "ncut":
            out_img = parcellate_ncut(self._local_conn, self.k,
                                      self._clust_mask_corr_img)
            out_img.set_data_dtype(np.uint16)
            print(f"{self.clust_type}{self.k}"
                  f"{(' clusters: %.2fs' % (time.time() - start))}")
            return out_img

        elif (self.clust_type == "rena"
              or self.clust_type == "kmeans" and self.num_conn_comps > 1):
            from pynets.core import nodemaker
            from nilearn.regions import connected_regions, Parcellations
            from nilearn.image import iter_img, new_img_like
            from pynets.core.utils import flatten, proportional

            mask_img_list = []
            mask_voxels_dict = dict()
            for i, mask_img in enumerate(list(iter_img(self._conn_comps))):
                mask_voxels_dict[i] = int(
                    np.sum(np.asarray(mask_img.dataobj)))
                mask_img_list.append(mask_img)

            # Allocate k across connected components using Hagenbach-Bischoff
            # Quota based on number of voxels
            k_list = proportional(self.k, list(mask_voxels_dict.values()))

            conn_comp_atlases = []
            print(f"Building {len(mask_img_list)} separate atlases with "
                  f"voxel-proportional k clusters for each "
                  f"connected component...")
            for i, mask_img in enumerate(mask_img_list):
                if k_list[i] == 0:
                    # print('0 voxels in component. Discarding...')
                    continue
                _clust_est = Parcellations(
                    method=self.clust_type,
                    standardize=self._standardize,
                    detrend=self._detrending,
                    n_parcels=k_list[i],
                    mask=mask_img,
                    mask_strategy="background",
                    memory_level=2,
                    random_state=42,
                )
                if self.conf is not None:
                    import pandas as pd

                    confounds = pd.read_csv(self.conf, sep="\t")
                    if confounds.isnull().values.any():
                        conf_corr = fill_confound_nans(confounds,
                                                       self._dir_path)
                        _clust_est.fit(func_boot_img, confounds=conf_corr)
                    else:
                        _clust_est.fit(func_boot_img, confounds=self.conf)
                else:
                    _clust_est.fit(func_boot_img)
                conn_comp_atlases.append(_clust_est.labels_img_)

            # Then combine the multiple atlases, corresponding to each
            # connected component, into a single atlas
            atlas_of_atlases = []
            for atlas in conn_comp_atlases:
                bna_data = np.around(np.asarray(
                    atlas.dataobj)).astype("uint16")

                # Get an array of unique parcels
                bna_data_for_coords_uniq = np.unique(bna_data)

                # Number of parcels:
                par_max = len(bna_data_for_coords_uniq) - 1
                img_stack = []
                for idx in range(1, par_max + 1):
                    roi_img = bna_data == bna_data_for_coords_uniq[idx].astype(
                        "uint16")
                    img_stack.append(roi_img.astype("uint16"))
                img_stack = np.array(img_stack)

                img_list = []
                for idy in range(par_max):
                    img_list.append(new_img_like(atlas, img_stack[idy]))
                atlas_of_atlases.append(img_list)
                del img_list, img_stack, bna_data

            atlas_of_atlases = list(flatten(atlas_of_atlases))

            [super_atlas_ward,
             _] = nodemaker.create_parcel_atlas(atlas_of_atlases)
            super_atlas_ward.set_data_dtype(np.uint16)
            del atlas_of_atlases, conn_comp_atlases, mask_img_list, \
                mask_voxels_dict

            print(f"{self.clust_type}{self.k}"
                  f"{(' clusters: %.2fs' % (time.time() - start))}")
            return super_atlas_ward