Example #1
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.
    """
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed

    out_paths = []
    for file_ in list(flatten(est_path_iterlist)):
        mat = np.load(file_)
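        # The path parsing below assumes a (hypothetical) layout such as
        # .../<atlas>/<subdir>/sub-01_modality-func_model-corr_est.npy:
        # the directory two levels above the file names the atlas, and the
        # filename tokens after 'modality' encode the resolution.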
        atlas = prune_suffices(file_.split('/')[-3])
        res = prune_suffices('_'.join(
            file_.split('/')[-1].split('modality')[1].split('_')[1:]).split(
                '_est')[0])
        if 'rsn' in res:
            subgraph = res.split('rsn-')[1]
        else:
            subgraph = 'whole_brain'
        out_path = _ase_embed(mat, atlas, file_, ID, subgraph_name=subgraph)
        out_paths.append(out_path)

    return out_paths
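
A minimal usage sketch (the path and subject id below are hypothetical; in
practice these arguments come from the upstream pynets workflow):

    est_paths = ['/out/sub-01/atlas-a/graphs/'
                 'sub-01_modality-func_model-corr_est.npy']
    embedding_paths = build_asetomes(est_paths, 'sub-01')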
Example #2
def build_masetome(est_path_iterlist, ID):
    """
    Embeds structural-functional graph pairs into a common invariant subspace.

    Parameters
    ----------
    est_path_iterlist : list
        List of list of pairs of file paths (.npy) corresponding to
        structural and functional connectomes matched at a given node
        resolution.
    ID : str
        A subject id or other unique identifier.

    References
    ----------
    .. [1] Rosenthal, G., Váša, F., Griffa, A., Hagmann, P., Amico, E., Goñi, J.,
      Sporns, O. (2018). Mapping higher-order relations between brain structure
      and function with embedded vector representations of connectomes.
      Nature Communications. https://doi.org/10.1038/s41467-018-04614-w

    """
    import numpy as np
    from pynets.core.utils import prune_suffices
    from pynets.stats.embeddings import _mase_embed
    from pynets.core.utils import load_runconfig

    # Available functional and structural connectivity models
    hardcoded_params = load_runconfig()
    try:
        n_components = hardcoded_params["gradients"]["n_components"][0]
    except KeyError:
        import sys
        print("ERROR: available gradient dimensionality presets not "
              "sucessfully extracted from runconfig.yaml")
        sys.exit(1)
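
    # For reference, the lookup above assumes a runconfig.yaml entry shaped
    # roughly like the following (values illustrative, not shipped defaults):
    #
    #   gradients:
    #     n_components:
    #       - 8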

    out_paths = []
    for pairs in est_path_iterlist:
        pop_list = []
        for _file in pairs:
            pop_list.append(np.load(_file))
        atlas = prune_suffices(pairs[0].split("/")[-3])
        res = prune_suffices("_".join(pairs[0].split("/")[-1].split("modality")
                                      [1].split("_")[1:]).split("_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1].split('_')[0]
        else:
            subgraph = "all_nodes"
        out_path = _mase_embed(pop_list,
                               atlas,
                               pairs[0],
                               ID,
                               subgraph_name=subgraph,
                               n_components=n_components)
        out_paths.append(out_path)

    return out_paths
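
A minimal usage sketch (paths and subject id are hypothetical; each inner
list pairs a structural connectome with its matched functional connectome):

    pairs = [['/out/sub-01/atlas-a/graphs/'
              'sub-01_modality-dwi_model-csa_est.npy',
              '/out/sub-01/atlas-a/graphs/'
              'sub-01_modality-func_model-corr_est.npy']]
    embedding_paths = build_masetome(pairs, 'sub-01')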
Example #3
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.

    """
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed
    import yaml
    import pkg_resources

    # Available functional and structural connectivity models
    with open(pkg_resources.resource_filename("pynets", "runconfig.yaml"),
              "r") as stream:
        hardcoded_params = yaml.safe_load(stream)
        try:
            n_components = hardcoded_params["gradients"]["n_components"][0]
        except KeyError:
            import sys
            print("ERROR: available gradient dimensionality presets not "
                  "sucessfully extracted from runconfig.yaml")
            sys.exit(1)

    if isinstance(est_path_iterlist, list):
        est_path_iterlist = list(flatten(est_path_iterlist))
    else:
        est_path_iterlist = [est_path_iterlist]

    out_paths = []
    for file_ in est_path_iterlist:
        mat = np.load(file_)
        atlas = prune_suffices(file_.split("/")[-3])
        res = prune_suffices("_".join(
            file_.split("/")[-1].split("modality")[1].split("_")[1:]).split(
                "_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1]
        else:
            subgraph = "all_nodes"
        out_path = _ase_embed(mat,
                              atlas,
                              file_,
                              ID,
                              subgraph_name=subgraph,
                              n_components=n_components)
        out_paths.append(out_path)

    return out_paths
Example #4
def build_masetome(est_path_iterlist, ID):
    """
    Embeds structural-functional graph pairs into a common invariant subspace.

    Parameters
    ----------
    est_path_iterlist : list
        List of list of pairs of file paths (.npy) corresponding to
        structural and functional connectomes matched at a given node
        resolution.
    ID : str
        A subject id or other unique identifier.

    References
    ----------
    .. [1] Rosenthal, G., Váša, F., Griffa, A., Hagmann, P., Amico, E., Goñi, J.,
      Sporns, O. (2018). Mapping higher-order relations between brain structure
      and function with embedded vector representations of connectomes.
      Nature Communications. https://doi.org/10.1038/s41467-018-04614-w

    """
    import numpy as np
    from pynets.core.utils import prune_suffices
    from pynets.stats.embeddings import _mase_embed

    out_paths = []
    for pairs in est_path_iterlist:
        pop_list = []
        for _file in pairs:
            pop_list.append(np.load(_file))
        atlas = prune_suffices(pairs[0].split("/")[-3])
        res = prune_suffices("_".join(pairs[0].split("/")[-1].split("modality")
                                      [1].split("_")[1:]).split("_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1]
        else:
            subgraph = "whole_brain"
        out_path = _mase_embed(pop_list,
                               atlas,
                               pairs[0],
                               ID,
                               subgraph_name=subgraph)
        out_paths.append(out_path)

    return out_paths
Example #5
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.

    """
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed

    if isinstance(est_path_iterlist, list):
        est_path_iterlist = list(flatten(est_path_iterlist))
    else:
        est_path_iterlist = [est_path_iterlist]

    out_paths = []
    for file_ in est_path_iterlist:
        mat = np.load(file_)
        atlas = prune_suffices(file_.split("/")[-3])
        res = prune_suffices("_".join(
            file_.split("/")[-1].split("modality")[1].split("_")[1:]).split(
                "_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1]
        else:
            subgraph = "whole_brain"
        out_path = _ase_embed(mat, atlas, file_, ID, subgraph_name=subgraph)
        out_paths.append(out_path)

    return out_paths
Example #6
def build_masetome(est_path_iterlist, ID):
    """
    Embeds structural-functional graph pairs into a common invariant subspace.

    Parameters
    ----------
    est_path_iterlist : list
        List of list of pairs of file paths (.npy) corresponding to
        structural and functional connectomes matched at a given node
        resolution.
    ID : str
        A subject id or other unique identifier.

    References
    ----------
    .. [1] Rosenthal, G., Váša, F., Griffa, A., Hagmann, P., Amico, E., Goñi,
      J., Sporns, O. (2018). Mapping higher-order relations between brain
      structure and function with embedded vector representations of
      connectomes. Nature Communications.
      https://doi.org/10.1038/s41467-018-04614-w

    """
    from pathlib import Path
    import os
    import numpy as np
    from pynets.core.utils import prune_suffices
    from pynets.stats.embeddings import _mase_embed
    from pynets.core.utils import load_runconfig

    # Available functional and structural connectivity models
    hardcoded_params = load_runconfig()
    try:
        n_components = hardcoded_params["gradients"][
            "n_components"][0]
    except KeyError:
        import sys
        print(
            "ERROR: available gradient dimensionality presets not "
            "sucessfully extracted from runconfig.yaml"
        )
        sys.exit(1)

    out_paths = []
    for pairs in est_path_iterlist:
        pop_list = []
        for _file in pairs:
            mat = np.load(_file)
            if np.isfinite(mat).all():
                pop_list.append(mat)
        if len(pop_list) != len(pairs):
            continue
        atlas = prune_suffices(pairs[0].split("/")[-3])
        res = prune_suffices("_".join(pairs[0].split(
            "/")[-1].split("modality")[1].split("_")[1:]).split("_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1].split('_')[0]
        else:
            subgraph = "all_nodes"
        out_path = _mase_embed(
            pop_list,
            atlas,
            pairs[0],
            ID,
            subgraph_name=subgraph, n_components=n_components)

        if out_path is not None:
            out_paths.append(out_path)
        else:
            # Add a null tmp file to prevent pool from breaking
            dir_path = str(Path(os.path.dirname(pairs[0])))
            namer_dir = f"{dir_path}/mplx_embeddings"
            os.makedirs(namer_dir, exist_ok=True)

            out_path = (
                f"{namer_dir}/gradient-MASE_{atlas}_{subgraph}"
                f"_{os.path.basename(pairs[0])}_NULL"
            )
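            # NB: os.mknod may require elevated privileges on some platforms
            # (e.g. macOS); open(out_path, 'w').close(), as in Example #8
            # below, is a portable alternative.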
            if not os.path.exists(out_path):
                os.mknod(out_path)
            out_paths.append(out_path)

    return out_paths
Example #7
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.

    """
    from pathlib import Path
    import os
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed
    from pynets.core.utils import load_runconfig

    # Available functional and structural connectivity models
    hardcoded_params = load_runconfig()
    try:
        n_components = hardcoded_params["gradients"][
            "n_components"][0]
    except KeyError:
        import sys
        print(
            "ERROR: available gradient dimensionality presets not "
            "sucessfully extracted from runconfig.yaml"
        )
        sys.exit(1)

    if isinstance(est_path_iterlist, list):
        est_path_iterlist = list(flatten(est_path_iterlist))
    else:
        est_path_iterlist = [est_path_iterlist]

    out_paths = []
    for file_ in est_path_iterlist:
        mat = np.load(file_)
        if not np.isfinite(mat).all():
            continue

        atlas = prune_suffices(file_.split("/")[-3])
        res = prune_suffices("_".join(file_.split(
            "/")[-1].split("modality")[1].split("_")[1:]).split("_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1].split('_')[0]
        else:
            subgraph = "all_nodes"
        out_path = _ase_embed(mat, atlas, file_, ID, subgraph_name=subgraph,
                              n_components=n_components)
        if out_path is not None:
            out_paths.append(out_path)
        else:
            # Add a null tmp file to prevent pool from breaking
            dir_path = str(Path(os.path.dirname(file_)).parent)
            namer_dir = f"{dir_path}/embeddings"
            os.makedirs(namer_dir, exist_ok=True)
            out_path = f"{namer_dir}/gradient-ASE" \
                       f"_{atlas}_{subgraph}_{os.path.basename(file_)}_NULL"
            if not os.path.exists(out_path):
                os.mknod(out_path)
            out_paths.append(out_path)

    return out_paths
Example #8
def build_asetomes(est_path_iterlist):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.

    """
    from pathlib import Path
    import os
    import numpy as np
    from pynets.statistics.individual.spectral import _ase_embed
    from pynets.core.utils import prune_suffices, flatten, load_runconfig

    # Available functional and structural connectivity models
    hardcoded_params = load_runconfig()
    try:
        n_components = hardcoded_params["gradients"][
            "n_components"][0]
    except KeyError:
        import sys
        print(
            "ERROR: available gradient dimensionality presets not "
            "sucessfully extracted from advanced.yaml"
        )
        sys.exit(1)

    if isinstance(est_path_iterlist, list):
        est_path_iterlist = list(flatten(est_path_iterlist))
    else:
        est_path_iterlist = [est_path_iterlist]

    out_paths = []
    for file_ in est_path_iterlist:
        mat = np.load(file_)
        if not np.isfinite(mat).all():
            continue

        atlas = prune_suffices(file_.split("/")[-3])
        res = prune_suffices("_".join(file_.split(
            "/")[-1].split("modality")[1].split("_")[1:]).split("_est")[0])
        if "subnet" in res:
            subgraph = res.split("subnet-")[1].split('_')[0]
        else:
            subgraph = "all_nodes"

        out_path = _ase_embed(mat, atlas, file_, subgraph_name=subgraph,
                              n_components=n_components, prune=0, norm=1)

        if out_path is not None:
            out_paths.append(out_path)
        else:
            # Add a null tmp file to prevent pool from breaking
            dir_path = str(Path(os.path.dirname(file_)).parent)
            namer_dir = f"{dir_path}/embeddings"
            os.makedirs(namer_dir, exist_ok=True)
            out_path = f"{namer_dir}/gradient-ASE" \
                       f"_subnet-{atlas}_granularity-{subgraph}_" \
                       f"{os.path.basename(file_)}_NULL"
            # TODO: Replace this band-aid solution with the real fix
            out_path = out_path.replace('subnet-subnet-',
                                        'subnet-').replace(
                'granularity-granularity-', 'granularity-')
            if not os.path.exists(out_path):
                open(out_path, 'w').close()
            out_paths.append(out_path)

    return out_paths
Example #9
    def _run_interface(self, runtime):
        from pynets.core import utils, nodemaker
        from nipype.utils.filemanip import fname_presuffix, copyfile
        from nilearn.image import concat_imgs
        import numpy as np
        import nibabel as nib
        import indexed_gzip
        import pandas as pd
        import time
        import textwrap
        from pathlib import Path
        import os.path as op
        import glob

        base_path = utils.get_file()
        # Test if atlas is a nilearn atlas. If so, fetch coords, labels, and/or
        # networks.
        nilearn_parc_atlases = [
            "atlas_harvard_oxford",
            "atlas_aal",
            "atlas_destrieux_2009",
            "atlas_talairach_gyrus",
            "atlas_talairach_ba",
            "atlas_talairach_lobe",
        ]
        nilearn_coords_atlases = ["coords_power_2011", "coords_dosenbach_2010"]
        nilearn_prob_atlases = ["atlas_msdl", "atlas_pauli_2017"]
        local_atlases = [
            op.basename(i).split(".nii")[0]
            for i in glob.glob(f"{str(Path(base_path).parent.parent)}"
                               f"/templates/atlases/*.nii.gz")
            if "_4d" not in i
        ]

        if self.inputs.parcellation is None and self.inputs.atlas in \
                nilearn_parc_atlases:
            [labels, networks_list, parcellation
             ] = nodemaker.nilearn_atlas_helper(self.inputs.atlas,
                                                self.inputs.parc)
            if parcellation:
                if not isinstance(parcellation, str):
                    nib.save(
                        parcellation, f"{runtime.cwd}"
                        f"{self.inputs.atlas}{'.nii.gz'}")
                    parcellation = f"{runtime.cwd}" \
                                   f"{self.inputs.atlas}{'.nii.gz'}"
                if self.inputs.clustering is False:
                    [parcellation,
                     labels] = \
                        nodemaker.enforce_hem_distinct_consecutive_labels(
                        parcellation, label_names=labels)
                [coords, atlas, par_max, label_intensities] = \
                    nodemaker.get_names_and_coords_of_parcels(parcellation)
                if self.inputs.parc is True:
                    parcels_4d_img = nodemaker.three_to_four_parcellation(
                        parcellation)
                else:
                    parcels_4d_img = None
            else:
                raise FileNotFoundError(
                    f"\nAtlas file for {self.inputs.atlas} not found!")

            atlas = self.inputs.atlas
        elif (self.inputs.parcellation is None and self.inputs.parc is False
              and self.inputs.atlas in nilearn_coords_atlases):
            print("Fetching coords and labels from nilearn coordinate-based"
                  " atlas library...")
            # Fetch nilearn atlas coords
            [coords, _, networks_list,
             labels] = nodemaker.fetch_nilearn_atlas_coords(self.inputs.atlas)
            parcels_4d_img = None
            par_max = None
            atlas = self.inputs.atlas
            parcellation = None
            label_intensities = None
        elif (self.inputs.parcellation is None and self.inputs.parc is False
              and self.inputs.atlas in nilearn_prob_atlases):
            import matplotlib
            matplotlib.use("agg")
            from nilearn.plotting import find_probabilistic_atlas_cut_coords

            print("Fetching coords and labels from nilearn probabilistic atlas"
                  " library...")
            # Fetch nilearn atlas coords
            [labels, networks_list, parcellation
             ] = nodemaker.nilearn_atlas_helper(self.inputs.atlas,
                                                self.inputs.parc)
            coords = find_probabilistic_atlas_cut_coords(maps_img=parcellation)
            if parcellation:
                if not isinstance(parcellation, str):
                    nib.save(
                        parcellation, f"{runtime.cwd}"
                        f"{self.inputs.atlas}{'.nii.gz'}")
                    parcellation = f"{runtime.cwd}" \
                                   f"{self.inputs.atlas}{'.nii.gz'}"
                if self.inputs.clustering is False:
                    [parcellation,
                     labels] = \
                        nodemaker.enforce_hem_distinct_consecutive_labels(
                        parcellation, label_names=labels)
                if self.inputs.parc is True:
                    parcels_4d_img = nodemaker.three_to_four_parcellation(
                        parcellation)
                else:
                    parcels_4d_img = None
            else:
                raise FileNotFoundError(
                    f"\nAtlas file for {self.inputs.atlas} not found!")

            par_max = None
            atlas = self.inputs.atlas
            label_intensities = None
        elif self.inputs.parcellation is None and self.inputs.atlas in \
                local_atlases:
            parcellation_pre = (
                f"{str(Path(base_path).parent.parent)}/templates/atlases/"
                f"{self.inputs.atlas}.nii.gz")
            parcellation = fname_presuffix(parcellation_pre,
                                           newpath=runtime.cwd)
            copyfile(parcellation_pre,
                     parcellation,
                     copy=True,
                     use_hardlink=False)
            try:
                par_img = nib.load(parcellation)
            except indexed_gzip.ZranError as e:
                print(
                    e, "\nCannot load subnetwork reference image. "
                    "Do you have git-lfs installed?")
            try:
                if self.inputs.clustering is False:
                    [parcellation, _] = \
                        nodemaker.enforce_hem_distinct_consecutive_labels(
                            parcellation)

                # Fetch user-specified atlas coords
                [coords, _, par_max, label_intensities] = \
                    nodemaker.get_names_and_coords_of_parcels(parcellation)
                if self.inputs.parc is True:
                    parcels_4d_img = nodemaker.three_to_four_parcellation(
                        parcellation)
                else:
                    parcels_4d_img = None
                # Describe user atlas coords
                print(f"\n{self.inputs.atlas} comes with {par_max} parcels\n")
            except ValueError as e:
                print(
                    e, "Either you have specified the name of an atlas that "
                    "does not exist in the nilearn or local repository or "
                    "you have not supplied a 3d atlas parcellation image!")
            labels = None
            networks_list = None
            atlas = self.inputs.atlas
        elif self.inputs.parcellation:
            if self.inputs.clustering is True:
                while True:
                    if op.isfile(self.inputs.parcellation):
                        break
                    else:
                        print("Waiting for atlas file...")
                        time.sleep(5)

            try:
                parcellation_tmp_path = fname_presuffix(
                    self.inputs.parcellation, newpath=runtime.cwd)
                copyfile(self.inputs.parcellation,
                         parcellation_tmp_path,
                         copy=True,
                         use_hardlink=False)
                # Fetch user-specified atlas coords
                if self.inputs.clustering is False:
                    [parcellation,
                     _] = nodemaker.enforce_hem_distinct_consecutive_labels(
                         parcellation_tmp_path)
                else:
                    parcellation = parcellation_tmp_path
                [coords, atlas, par_max, label_intensities] = \
                    nodemaker.get_names_and_coords_of_parcels(parcellation)
                if self.inputs.parc is True:
                    parcels_4d_img = nodemaker.three_to_four_parcellation(
                        parcellation)
                else:
                    parcels_4d_img = None

                atlas = utils.prune_suffices(atlas)

                # Describe user atlas coords
                print(f"\n{atlas} comes with {par_max} parcels\n")
            except ValueError as e:
                print(
                    e, "Either you have specified the name of an atlas that "
                    "does not exist in the nilearn or local repository or "
                    "you have not supplied a 3d atlas parcellation image!")
            labels = None
            networks_list = None
        else:
            raise ValueError(
                "Either you have specified the name of an atlas that does"
                " not exist in the nilearn or local repository or you have"
                " not supplied a 3d atlas parcellation image!")

        # Labels prep
        if atlas and not labels:
            if (self.inputs.ref_txt is not None) and (op.exists(
                    self.inputs.ref_txt)):
                labels = pd.read_csv(self.inputs.ref_txt,
                                     sep=" ",
                                     header=None,
                                     names=["Index",
                                            "Region"])["Region"].tolist()
            else:
                if atlas in local_atlases:
                    ref_txt = (
                        f"{str(Path(base_path).parent.parent)}/templates/"
                        f"labels/"
                        f"{atlas}.txt")
                else:
                    ref_txt = self.inputs.ref_txt
                if ref_txt is not None:
                    try:
                        labels = pd.read_csv(ref_txt,
                                             sep=" ",
                                             header=None,
                                             names=["Index", "Region"
                                                    ])["Region"].tolist()
                    except BaseException:
                        if self.inputs.use_parcel_naming is True:
                            try:
                                labels = nodemaker.parcel_naming(
                                    coords, self.inputs.vox_size)
                            except BaseException:
                                print("AAL reference labeling failed!")
                                labels = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print("Using generic index labels...")
                            labels = np.arange(len(coords) +
                                               1)[np.arange(len(coords) +
                                                            1) != 0].tolist()
                else:
                    if self.inputs.use_parcel_naming is True:
                        try:
                            labels = nodemaker.parcel_naming(
                                coords, self.inputs.vox_size)
                        except BaseException:
                            print("AAL reference labeling failed!")
                            labels = np.arange(len(coords) +
                                               1)[np.arange(len(coords) +
                                                            1) != 0].tolist()
                    else:
                        print("Using generic index labels...")
                        labels = np.arange(len(coords) +
                                           1)[np.arange(len(coords) +
                                                        1) != 0].tolist()

        dir_path = utils.do_dir_path(atlas, self.inputs.outdir)

        if len(coords) != len(labels):
            labels = [
                i for i in labels if (i != 'Unknown' and i != 'Background')
            ]
            if len(coords) != len(labels):
                print("Length of coordinates is not equal to length of "
                      "label names...")
                if self.inputs.use_parcel_naming is True:
                    try:
                        print("Attempting consensus parcel naming instead...")
                        labels = nodemaker.parcel_naming(
                            coords, self.inputs.vox_size)
                    except BaseException:
                        print("Reverting to integer labels instead...")
                        labels = np.arange(len(coords) +
                                           1)[np.arange(len(coords) +
                                                        1) != 0].tolist()
                else:
                    print("Reverting to integer labels instead...")
                    labels = np.arange(len(coords) +
                                       1)[np.arange(len(coords) +
                                                    1) != 0].tolist()

        print(f"Coordinates:\n{coords}")
        print(f"Labels:\n"
              f"{textwrap.shorten(str(labels), width=1000, placeholder='...')}"
              f"")

        assert len(coords) == len(labels)

        if label_intensities is not None:
            self._results["labels"] = list(zip(labels, label_intensities))
        else:
            self._results["labels"] = labels
        self._results["coords"] = coords
        self._results["atlas"] = atlas
        self._results["networks_list"] = networks_list
        # TODO: Optimize this with 4d array concatenation and .npyz

        out_path = f"{runtime.cwd}/parcels_4d.nii.gz"
        nib.save(parcels_4d_img, out_path)
        self._results["parcels_4d"] = out_path
        self._results["par_max"] = par_max
        self._results["parcellation"] = parcellation
        self._results["dir_path"] = dir_path

        return runtime
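
A minimal sketch of running this interface inside a nipype workflow. The
class name FetchNodesLabels and the input values are assumptions for
illustration; consult the pynets interfaces module for the actual API:

    import nipype.pipeline.engine as pe
    from pynets.core.interfaces import FetchNodesLabels

    node = pe.Node(FetchNodesLabels(), name='fetch_nodes_labels')
    node.inputs.atlas = 'atlas_harvard_oxford'  # a nilearn parcellation atlas
    node.inputs.parcellation = None
    node.inputs.parc = True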