Example #1
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.
    """
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed

    out_paths = []
    for file_ in list(flatten(est_path_iterlist)):
        mat = np.load(file_)
        atlas = prune_suffices(file_.split('/')[-3])
        res = prune_suffices('_'.join(
            file_.split('/')[-1].split('modality')[1].split('_')[1:]).split(
                '_est')[0])
        if 'rsn' in res:
            subgraph = res.split('rsn-')[1]
        else:
            subgraph = 'whole_brain'
        out_path = _ase_embed(mat, atlas, file_, ID, subgraph_name=subgraph)
        out_paths.append(out_path)

    return out_paths
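
A minimal usage sketch for the variants on this page, assuming the directory
layout the path parsing expects (the atlas label three levels up from the .npy
file, and a "...modality-<mod>_rsn-<name>_est-..." filename); the paths and
subject ID below are purely illustrative:

est_paths = [
    "/out/sub-01/atlas-DesikanKlein2012/graphs/"
    "graph_modality-func_rsn-Default_est-corr.npy"
]
out_paths = build_asetomes(est_paths, "sub-01")
# The atlas label is taken from the third-to-last path component and the
# subgraph name from the "rsn-" token (both cleaned by prune_suffices).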
Example #2
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.

    """
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed
    import yaml
    import pkg_resources

    # Load the gradient dimensionality preset from runconfig.yaml
    with open(pkg_resources.resource_filename("pynets", "runconfig.yaml"),
              "r") as stream:
        hardcoded_params = yaml.load(stream, Loader=yaml.FullLoader)
        try:
            n_components = hardcoded_params["gradients"]["n_components"][0]
        except KeyError:
            import sys
            print("ERROR: available gradient dimensionality presets not "
                  "successfully extracted from runconfig.yaml")
            sys.exit(1)

    if isinstance(est_path_iterlist, list):
        est_path_iterlist = list(flatten(est_path_iterlist))
    else:
        est_path_iterlist = [est_path_iterlist]

    out_paths = []
    for file_ in est_path_iterlist:
        mat = np.load(file_)
        atlas = prune_suffices(file_.split("/")[-3])
        res = prune_suffices("_".join(
            file_.split("/")[-1].split("modality")[1].split("_")[1:]).split(
                "_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1]
        else:
            subgraph = "all_nodes"
        out_path = _ase_embed(mat,
                              atlas,
                              file_,
                              ID,
                              subgraph_name=subgraph,
                              n_components=n_components)
        out_paths.append(out_path)

    return out_paths
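
For reference, the try-block in this variant only succeeds if the parsed
runconfig.yaml yields a "gradients" section containing an "n_components" list;
a minimal sketch of the structure it expects (the value 2 is illustrative, not
the actual pynets default):

hardcoded_params = {"gradients": {"n_components": [2]}}  # illustrative preset
n_components = hardcoded_params["gradients"]["n_components"][0]  # -> 2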
Example #3
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.

    """
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed

    if isinstance(est_path_iterlist, list):
        est_path_iterlist = list(flatten(est_path_iterlist))
    else:
        est_path_iterlist = [est_path_iterlist]

    out_paths = []
    for file_ in est_path_iterlist:
        mat = np.load(file_)
        atlas = prune_suffices(file_.split("/")[-3])
        res = prune_suffices("_".join(
            file_.split("/")[-1].split("modality")[1].split("_")[1:]).split(
                "_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1]
        else:
            subgraph = "whole_brain"
        out_path = _ase_embed(mat, atlas, file_, ID, subgraph_name=subgraph)
        out_paths.append(out_path)

    return out_paths
Example #4
def build_asetomes(est_path_iterlist, ID):
    """
    Embeds single graphs using the ASE algorithm.

    Parameters
    ----------
    est_path_iterlist : list
        List of file paths to .npy files, each containing a graph.
    ID : str
        A subject id or other unique identifier.

    """
    from pathlib import Path
    import os
    import numpy as np
    from pynets.core.utils import prune_suffices, flatten
    from pynets.stats.embeddings import _ase_embed
    from pynets.core.utils import load_runconfig

    # Available functional and structural connectivity models
    hardcoded_params = load_runconfig()
    try:
        n_components = hardcoded_params["gradients"][
            "n_components"][0]
    except KeyError:
        import sys
        print(
            "ERROR: available gradient dimensionality presets not "
            "sucessfully extracted from runconfig.yaml"
        )
        sys.exit(1)

    if isinstance(est_path_iterlist, list):
        est_path_iterlist = list(flatten(est_path_iterlist))
    else:
        est_path_iterlist = [est_path_iterlist]

    out_paths = []
    for file_ in est_path_iterlist:
        mat = np.load(file_)
        # Skip graphs containing non-finite values (NaN/Inf)
        if not np.isfinite(mat).all():
            continue

        atlas = prune_suffices(file_.split("/")[-3])
        res = prune_suffices("_".join(file_.split(
            "/")[-1].split("modality")[1].split("_")[1:]).split("_est")[0])
        if "rsn" in res:
            subgraph = res.split("rsn-")[1].split('_')[0]
        else:
            subgraph = "all_nodes"
        out_path = _ase_embed(mat, atlas, file_, ID, subgraph_name=subgraph,
                              n_components=n_components)
        if out_path is not None:
            out_paths.append(out_path)
        else:
            # Add a null tmp file to prevent pool from breaking
            dir_path = str(Path(os.path.dirname(file_)).parent)
            namer_dir = f"{dir_path}/embeddings"
            if os.path.isdir(namer_dir) is False:
                os.makedirs(namer_dir, exist_ok=True)
            out_path = f"{namer_dir}/gradient-ASE" \
                       f"_{atlas}_{subgraph}_{os.path.basename(file_)}_NULL"
            if not os.path.exists(out_path):
                os.mknod(out_path)
            out_paths.append(out_path)

    return out_paths
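
This variant writes a placeholder file whose name ends in "_NULL" whenever
_ase_embed returns None, so a caller may want to filter those entries out of
the result. A hypothetical post-processing sketch (not part of pynets itself):

out_paths = build_asetomes(est_path_iterlist, ID)
# Drop the "_NULL" placeholder paths appended for failed embeddings.
valid_paths = [p for p in out_paths if not p.endswith("_NULL")]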