Example #1
def test_nodemaker_tools_masking_coords_WB():
    # Set example inputs
    base_dir = str(Path(__file__).parent / "examples")
    #base_dir = '/Users/rxh180012/PyNets-development/tests/examples'
    dir_path = base_dir + '/997'
    mask = dir_path + '/pDMN_3_bin.nii.gz'
    atlas_select = 'coords_dosenbach_2010'
    error = 2

    start_time = time.time()
    [WB_coords, _, _,
     WB_label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
    print("%s%s%s" % (
        'fetch_nilearn_atlas_coords (Masking whole-brain coords version) --> finished: ',
        str(np.round(time.time() - start_time, 1)), 's'))

    start_time = time.time()
    [WB_coords_masked,
     WB_label_names_masked] = nodemaker.coord_masker(mask, WB_coords,
                                                     WB_label_names, error)
    print("%s%s%s" %
          ('coord_masker (Masking whole-brain coords version) --> finished: ',
           str(np.round(time.time() - start_time, 1)), 's'))

    assert WB_coords is not None
    assert WB_coords_masked is not None
    assert WB_label_names is not None
    assert WB_label_names_masked is not None
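Based on the unpacking in the tests above, fetch_nilearn_atlas_coords appears to return a four-element sequence: the node coordinates, the atlas name, an optional per-node network list, and the node labels. A minimal sketch under that assumption (requires pynets and a working nilearn atlas download):

from pynets import nodemaker

# Fetch the Dosenbach 2010 coordinate atlas and unpack the four return values
coords, atlas_name, networks_list, labels = nodemaker.fetch_nilearn_atlas_coords(
    'coords_dosenbach_2010')
print(atlas_name, len(coords), len(labels))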
Example #2
def test_nodemaker_tools_nilearn_coords_RSN():
    # Set example inputs
    base_dir = str(Path(__file__).parent / "examples")
    #base_dir = '/Users/rxh180012/PyNets-development/tests/examples'
    dir_path = base_dir + '/997'
    func_file = dir_path + '/sub-997_ses-01_task-REST_run-01_bold_space-MNI152NLin2009cAsym_preproc_masked.nii.gz'
    network = 'Default'
    #can't find original atlas_select
    atlas_select = 'coords_dosenbach_2010'
    #atlas_select = dir_path + '/coords_dosenbach_2010.nii.gz'
    parc = False
    parcel_list = None

    start_time = time.time()
    [coords, _, _,
     label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
    print("%s%s%s" % ('fetch_nilearn_atlas_coords --> finished: ',
                      str(np.round(time.time() - start_time, 1)), 's'))

    start_time = time.time()
    [net_coords, _, net_label_names,
     network] = nodemaker.get_node_membership(network, func_file, coords,
                                              label_names, parc, parcel_list)
    print("%s%s%s" % ('get_node_membership --> finished: ',
                      str(np.round(time.time() - start_time, 1)), 's'))

    assert coords is not None
    assert label_names is not None
    assert net_coords is not None
    assert net_label_names is not None
    assert network is not None
Example #3
def test_nodemaker_tools_nilearn_coords_RSN():
    # Set example inputs
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/002/fmri'
    func_file = dir_path + '/002.nii.gz'
    network = 'Default'
    atlas = 'coords_dosenbach_2010'
    parc = False
    parcel_list = None

    start_time = time.time()
    [coords, _, _, labels] = nodemaker.fetch_nilearn_atlas_coords(atlas)
    print("%s%s%s" % ('fetch_nilearn_atlas_coords --> finished: ',
                      str(np.round(time.time() - start_time, 1)), 's'))

    start_time = time.time()
    [net_coords, _, net_labels,
     network] = nodemaker.get_node_membership(network, func_file, coords,
                                              labels, parc, parcel_list)
    print("%s%s%s" % ('get_node_membership --> finished: ',
                      str(np.round(time.time() - start_time, 1)), 's'))

    assert coords is not None
    assert labels is not None
    assert net_coords is not None
    assert net_labels is not None
    assert network is not None
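The two RSN tests above follow the same pattern: fetch whole-brain coordinates, then restrict them to one resting-state network with get_node_membership. A condensed sketch of that pattern; the functional image path below is hypothetical and only supplies the sampling reference:

from pynets import nodemaker

func_file = '/path/to/func_preproc_mni.nii.gz'  # hypothetical preprocessed 4D BOLD image
coords, _, _, labels = nodemaker.fetch_nilearn_atlas_coords('coords_dosenbach_2010')
# Keep only the nodes assigned to the 'Default' network (parc=False, no parcel list)
net_coords, _, net_labels, network = nodemaker.get_node_membership(
    'Default', func_file, coords, labels, False, None)
print(network, len(net_coords), 'of', len(coords), 'nodes retained')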
Example #4
def test_nodemaker_tools_masking_coords_WB():
    # Set example inputs
    base_dir = str(Path(__file__).parent / "examples")
    roi = base_dir + '/pDMN_3_bin.nii.gz'
    atlas = 'coords_dosenbach_2010'
    error = 2

    start_time = time.time()
    [WB_coords, _, _, WB_labels] = nodemaker.fetch_nilearn_atlas_coords(atlas)
    print("%s%s%s" % (
        'fetch_nilearn_atlas_coords (Masking whole-brain coords version) --> finished: ',
        str(np.round(time.time() - start_time, 1)), 's'))

    start_time = time.time()
    [WB_coords_masked,
     WB_labels_masked] = nodemaker.coords_masker(roi, WB_coords, WB_labels,
                                                 error)
    print("%s%s%s" %
          ('coords_masker (Masking whole-brain coords version) --> finished: ',
           str(np.round(time.time() - start_time, 1)), 's'))

    assert WB_coords is not None
    assert WB_coords_masked is not None
    assert WB_labels is not None
    assert WB_labels_masked is not None
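Depending on the PyNets version, the coordinate-masking helper is spelled coord_masker (as in Example #1) or coords_masker (as in this example), and the trailing argument (error=2 in these tests) appears to be a tolerance added in the newer signature; that reading is an assumption. A sketch of the masking step using the newer name; the ROI path is hypothetical:

from pynets import nodemaker

roi = '/path/to/pDMN_3_bin.nii.gz'  # hypothetical binary ROI mask in MNI space
coords, _, _, labels = nodemaker.fetch_nilearn_atlas_coords('coords_dosenbach_2010')
# Drop coordinates (and their labels) that fall outside the ROI, with error=2 tolerance
coords_masked, labels_masked = nodemaker.coords_masker(roi, coords, labels, 2)
print(len(coords_masked), 'of', len(coords), 'coordinates fall inside the ROI')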
Example #5
def test_nodemaker_tools_masking_coords_RSN():
    ##Set example inputs##
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/997'
    func_file = dir_path + '/sub-997_ses-01_task-REST_run-01_bold_space-MNI152NLin2009cAsym_preproc_masked.nii.gz'
    mask = dir_path + '/pDMN_3_bin.nii.gz'
    atlas_select = 'coords_dosenbach_2010'
    network = 'Default'
    parc = False
    parcel_list = None

    [coords, atlas_select, networks_list,
     label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    [net_coords, net_parcel_list, net_label_names,
     network] = nodemaker.get_node_membership(network, func_file, coords,
                                              label_names, parc, parcel_list)

    [net_coords_masked,
     net_label_names_masked] = nodemaker.coord_masker(mask, net_coords,
                                                      net_label_names)

    assert coords is not None
    assert net_coords is not None
    assert net_coords_masked is not None
    assert net_label_names is not None
    assert net_label_names_masked is not None
    assert network is not None
Example #6
def test_nodemaker_tools_nilearn_coords_WB():
    ##Set example inputs##
    atlas_select = 'coords_dosenbach_2010'

    [WB_coords, atlas_select, networks_list,
     WB_label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    assert WB_coords is not None
    assert WB_label_names is not None
Example #7
def RSN_fetch_nodes_and_labels(atlas_select, parlistfile, ref_txt, parc,
                               func_file):
    from pynets import utils, nodemaker
    import pandas as pd
    import numpy as np
    import os
    ##Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]
    if atlas_select in nilearn_atlases:
        [label_names, networks_list,
         parlistfile] = utils.nilearn_atlas_helper(atlas_select)

    ##Get coordinates and/or parcels from atlas
    if parlistfile is None and parc == False:
        print(
            'Fetching coordinates and labels from nilearn coordinate-based atlases'
        )
        ##Fetch nilearn atlas coords
        [coords, atlas_name, networks_list,
         label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    else:
        ##Fetch user-specified atlas coords
        [coords, atlas_select, par_max,
         parcel_list] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
        networks_list = None

    ##Labels prep
    try:
        label_names
    except:
        if ref_txt is not None and os.path.exists(ref_txt):
            atlas_select = os.path.basename(ref_txt).split('.txt')[0]
            dict_df = pd.read_csv(ref_txt,
                                  sep=" ",
                                  header=None,
                                  names=["Index", "Region"])
            label_names = dict_df['Region'].tolist()
        else:
            label_names = np.arange(len(coords) +
                                    1)[np.arange(len(coords) +
                                                 1) != 0].tolist()
    if label_names is None:
        label_names = np.arange(len(coords) +
                                1)[np.arange(len(coords) + 1) != 0].tolist()
    try:
        atlas_name
    except:
        atlas_name = atlas_select

    dir_path = utils.do_dir_path(atlas_select, func_file)
    return (label_names, coords, atlas_name, networks_list, parcel_list,
            par_max, parlistfile, dir_path)
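When no reference text is available, the fallback above builds generic labels with np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist(), which is simply the integers 1 through len(coords). A small worked check of that equivalence:

import numpy as np

coords = [(0, 0, 0), (10, -20, 30), (-5, 15, 25)]  # toy coordinate list
labels_numpy = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
labels_simple = list(range(1, len(coords) + 1))
assert labels_numpy == labels_simple == [1, 2, 3]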
Example #8
def test_nodemaker_tools_nilearn_coords_WB():
    # Set example inputs
    atlas_select = 'coords_dosenbach_2010'

    start_time = time.time()
    [WB_coords, _, _, WB_label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
    print("%s%s%s" % ('fetch_nilearn_atlas_coords (Whole-brain version) --> finished: ',
    str(np.round(time.time() - start_time, 1)), 's'))

    assert WB_coords is not None
    assert WB_label_names is not None
Example #9
def WB_fetch_nodes_and_labels(atlas_select, parlistfile, ref_txt, parc, func_file):
    from pynets import utils, nodemaker
    import pandas as pd
    import numpy as np
    import os
    from pathlib import Path
    ##Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = ['atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009']
    nilearn_coord_atlases = ['harvard_oxford', 'msdl', 'coords_power_2011', 'smith_2009',
                             'basc_multiscale_2015', 'allen_2011', 'coords_dosenbach_2010']
    if atlas_select in nilearn_parc_atlases:
        [label_names, networks_list, parlistfile] = utils.nilearn_atlas_helper(atlas_select)

    ##Get coordinates and/or parcels from atlas
    if parlistfile is None and parc == False and atlas_select in nilearn_coord_atlases:
        print('Fetching coordinates and labels from nilearn coordinate-based atlases')
        ##Fetch nilearn atlas coords
        [coords, atlas_name, networks_list, label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    else:
        try:
            ##Fetch user-specified atlas coords
            [coords, atlas_select, par_max, parcel_list] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
            networks_list = None
            ##Describe user atlas coords
            print('\n' + str(atlas_select) + ' comes with {0} '.format(par_max) + 'parcels' + '\n')
        except:
            raise ValueError('\n\nError: Either you have specified the name of a nilearn atlas that does not exist or you have not supplied a 3d atlas parcellation image!\n\n')

    ##Labels prep
    try:
        label_names
    except:
        if ref_txt is not None and os.path.exists(ref_txt):
            atlas_select = os.path.basename(ref_txt).split('.txt')[0]
            dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
            label_names = dict_df['Region'].tolist()
        else:
            try:
                atlas_ref_txt = atlas_select + '.txt'
                ref_txt = Path(__file__)/'atlases'/atlas_ref_txt
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
                label_names = dict_df['Region'].tolist()
            except:
                label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    if label_names is None:
        label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    try:
        atlas_name
    except:
        atlas_name = atlas_select
        
    dir_path = utils.do_dir_path(atlas_select, func_file)
    return(label_names, coords, atlas_name, networks_list, parcel_list, par_max, parlistfile, dir_path)
Example #10
def test_nodemaker_tools():
    ##Set example inputs##
    NETWORK = 'DMN'
    mask = Path(__file__).parent / "examples" / "997" / "pDMN_3_bin.nii.gz"
    parlistfile = Path(
        __file__
    ).parent / "examples" / "whole_brain_cluster_labels_PCA100.nii.gz"
    atlas_select = 'coords_power_2011'
    [coords, atlas_name, networks_list,
     label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    if atlas_name == 'Power 2011 atlas':
        network_coords_ref = NETWORK + '_coords.csv'
        atlas_coords = pkgutil.get_data("pynets",
                                        "rsnrefs/" + network_coords_ref)
        # pandas removed DataFrame.ix; use position-based .iloc instead
        df = pd.read_csv(io.BytesIO(atlas_coords)).iloc[:, 0:4]
        net_coords = []
        ix_labels = []
        for i in range(len(df)):
            x = int(df.iloc[i, 1])
            y = int(df.iloc[i, 2])
            z = int(df.iloc[i, 3])
            net_coords.append((x, y, z))
            ix_labels.append(i)
        label_names = ix_labels

    if label_names != ix_labels:
        try:
            label_names = label_names.tolist()
        except:
            pass
        label_names = [label_names[i] for i in ix_labels]

    ##Test 1
    [net_coords,
     label_names_out] = nodemaker.coord_masker(str(mask), net_coords,
                                               label_names)
    ##Test 2
    [coords, atlas_name,
     par_max] = nodemaker.get_names_and_coords_of_parcels(str(parlistfile))
    ##Test 3
    #out_path = nodemaker.gen_network_parcels(str(parlistfile), NETWORK, label_names)

    assert net_coords is not None
    assert label_names_out is not None
    assert mask is not None
    assert coords is not None
    assert atlas_name is not None
    assert par_max is not None
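This test reads a packaged RSN coordinate CSV positionally, taking x, y, and z from columns 1-3 (a layout assumed from the code above). A self-contained toy version of that parsing step, using an in-memory CSV in place of the pkgutil resource:

import io
import pandas as pd

csv_bytes = b"roi,x,y,z\n1,0,-52,26\n2,-2,50,2\n"  # made-up stand-in for the packaged CSV
df = pd.read_csv(io.BytesIO(csv_bytes)).iloc[:, 0:4]
net_coords = [(int(df.iloc[i, 1]), int(df.iloc[i, 2]), int(df.iloc[i, 3]))
              for i in range(len(df))]
ix_labels = list(range(len(df)))
print(net_coords)  # [(0, -52, 26), (-2, 50, 2)]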
Example #11
def test_nodemaker_tools_masking_coords_WB():
    ##Set example inputs##
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/997'
    mask = dir_path + '/pDMN_3_bin.nii.gz'
    atlas_select = 'coords_dosenbach_2010'

    [WB_coords, atlas_select, networks_list,
     WB_label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    [WB_coords_masked,
     WB_label_names_masked] = nodemaker.coord_masker(mask, WB_coords,
                                                     WB_label_names)

    assert WB_coords is not None
    assert WB_coords_masked is not None
    assert WB_label_names is not None
    assert WB_label_names_masked is not None
Example #12
def test_nodemaker_tools_masking_coords_RSN():
    # Set example inputs
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/997'
    func_file = dir_path + '/sub-997_ses-01_task-REST_run-01_bold_space-MNI152NLin2009cAsym_preproc_masked.nii.gz'
    mask = dir_path + '/pDMN_3_bin.nii.gz'
    atlas_select = 'coords_dosenbach_2010'
    network = 'Default'
    parc = False
    parcel_list = None
    error = 2

    start_time = time.time()
    [coords, _, _,
     label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
    print("%s%s%s" %
          ('fetch_nilearn_atlas_coords (Masking RSN version) --> finished: ',
           str(np.round(time.time() - start_time, 1)), 's'))

    start_time = time.time()
    [net_coords, _, net_label_names,
     network] = nodemaker.get_node_membership(network, func_file, coords,
                                              label_names, parc, parcel_list)
    print("%s%s%s" %
          ('get_node_membership (Masking RSN version) --> finished: ',
           str(np.round(time.time() - start_time, 1)), 's'))

    start_time = time.time()
    [net_coords_masked,
     net_label_names_masked] = nodemaker.coord_masker(mask, net_coords,
                                                      net_label_names, error)
    print("%s%s%s" % ('coord_masker (Masking RSN version) --> finished: ',
                      str(np.round(time.time() - start_time, 1)), 's'))

    assert coords is not None
    assert net_coords is not None
    assert net_coords_masked is not None
    assert net_label_names is not None
    assert net_label_names_masked is not None
    assert network is not None
Example #13
def fetch_nodes_and_labels(atlas_select,
                           uatlas_select,
                           ref_txt,
                           parc,
                           in_file,
                           use_AAL_naming,
                           clustering=False):
    from pynets import utils, nodemaker
    import pandas as pd
    import numpy as np
    import nibabel as nib
    import time
    from pathlib import Path
    import os.path as op

    base_path = utils.get_file()
    # Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas_select is None and atlas_select in nilearn_parc_atlases:
        [label_names, networks_list,
         uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                nib.save(uatlas_select,
                         "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            [coords, _, par_max
             ] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" %
                ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
    elif uatlas_select is None and parc is False and atlas_select in nilearn_coords_atlases:
        print(
        'Fetching coords and labels from nilearn coordinate-based atlas library...'
        )
        # Fetch nilearn atlas coords
        [coords, _, networks_list,
         label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    elif uatlas_select is None and parc is False and atlas_select in nilearn_prob_atlases:
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print(
            'Fetching coords and labels from nilearn probabilistic atlas library...'
        )
        # Fetch nilearn atlas coords
        [label_names, networks_list,
         uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas_select)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                nib.save(uatlas_select,
                         "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" %
                ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
        par_max = None
    elif uatlas_select:
        if clustering is True:
            while True:
                if op.isfile(uatlas_select):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        atlas_select = uatlas_select.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas_select, par_max
             ] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
            # Describe user atlas coords
            print("%s%s%s%s" %
                  ('\n', atlas_select, ' comes with {0} '.format(par_max),
                   'parcels\n'))
        except ValueError:
            print(
                '\n\nError: Either you have specified the name of a nilearn atlas that does not exist or you have not '
                'supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
        label_names = None
        networks_list = None
    else:
        networks_list = None
        label_names = None
        parcel_list = None
        par_max = None
        coords = None

    # Labels prep
    if atlas_select:
        if label_names:
            pass
        else:
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt,
                                      sep=" ",
                                      header=None,
                                      names=["Index", "Region"])
                label_names = dict_df['Region'].tolist()
            else:
                try:
                    ref_txt = "%s%s%s%s" % (str(
                        Path(base_path).parent), '/labelcharts/', atlas_select,
                                            '.txt')
                    if op.exists(ref_txt):
                        try:
                            dict_df = pd.read_csv(ref_txt,
                                                  sep="\t",
                                                  header=None,
                                                  names=["Index", "Region"])
                            label_names = dict_df['Region'].tolist()
                            #print(label_names)
                        except:
                            print(
                                "WARNING: label names from label reference file failed to populate or are invalid. "
                                "Attempting AAL naming...")
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                label_names = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                label_names = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            label_names = np.arange(len(coords) + 1)[
                                np.arange(len(coords) + 1) != 0].tolist()
                except:
                    print(
                        "Label reference file not found. Attempting AAL naming..."
                    )
                    if use_AAL_naming is True:
                        try:
                            label_names = nodemaker.AAL_naming(coords)
                            #print(label_names)
                        except:
                            print('AAL reference labeling failed!')
                            label_names = np.arange(len(coords) + 1)[
                                np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        label_names = np.arange(len(coords) +
                                                1)[np.arange(len(coords) +
                                                             1) != 0].tolist()
    else:
        print(
            'WARNING: No labels available since atlas name is not specified!')

    print("%s%s" % ('Labels:\n', label_names))
    atlas_name = atlas_select
    dir_path = utils.do_dir_path(atlas_select, in_file)

    return label_names, coords, atlas_name, networks_list, parcel_list, par_max, uatlas_select, dir_path
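Putting the branches together, a call to this function might look as follows. This is only an illustrative sketch that assumes the function above is importable from wherever it is defined; the functional image path is a placeholder:

# Hypothetical invocation of fetch_nodes_and_labels as defined above
func_file = '/path/to/func_preproc_mni.nii.gz'  # placeholder input image
[label_names, coords, atlas_name, networks_list, parcel_list, par_max,
 uatlas_select, dir_path] = fetch_nodes_and_labels(atlas_select='coords_dosenbach_2010',
                                                   uatlas_select=None, ref_txt=None,
                                                   parc=False, in_file=func_file,
                                                   use_AAL_naming=False)
print(atlas_name, len(coords), par_max)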
Example #14
def network_connectome(input_file, ID, atlas_select, NETWORK, node_size, mask,
                       thr, parlistfile, all_nets, conn_model, dens_thresh,
                       conf, adapt_thresh, plot_switch, bedpostx_dir):
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]

    ##Input is nifti file
    func_file = input_file

    ##Test if atlas_select is a nilearn atlas
    if atlas_select in nilearn_atlases:
        atlas = getattr(datasets, 'fetch_%s' % atlas_select)()
        try:
            parlistfile = atlas.maps
            try:
                label_names = atlas.labels
            except:
                label_names = None
            try:
                networks_list = atlas.networks
            except:
                networks_list = None
        except RuntimeError:
            print('Error, atlas fetching failed.')
            sys.exit()

    if parlistfile is None and atlas_select not in nilearn_atlases:
        ##Fetch nilearn atlas coords
        [coords, atlas_name, networks_list,
         label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

        if atlas_name == 'Power 2011 atlas':
            ##Reference RSN list
            import pkgutil
            import io
            network_coords_ref = NETWORK + '_coords.csv'
            atlas_coords = pkgutil.get_data("pynets",
                                            "rsnrefs/" + network_coords_ref)
            # pandas removed DataFrame.ix; use position-based .iloc instead
            df = pd.read_csv(io.BytesIO(atlas_coords)).iloc[:, 0:4]
            net_coords = []
            ix_labels = []
            for i in range(len(df)):
                x = int(df.iloc[i, 1])
                y = int(df.iloc[i, 2])
                z = int(df.iloc[i, 3])
                net_coords.append((x, y, z))
                ix_labels.append(i)
            label_names = ix_labels
        elif atlas_name == 'Dosenbach 2010 atlas':
            coords = list(tuple(x) for x in coords)

            ##Get coord membership dictionary
            [membership, membership_plotting
             ] = nodemaker.get_mem_dict(func_file, coords, networks_list)

            ##Convert to membership dataframe
            mem_df = membership.to_frame().reset_index()

            nets_avail = list(set(list(mem_df['index'])))
            ##Get network name equivalents
            if NETWORK == 'DMN':
                NETWORK = 'default'
            elif NETWORK == 'FPTC':
                NETWORK = 'fronto-parietal'
            elif NETWORK == 'CON':
                NETWORK = 'cingulo-opercular'
            elif NETWORK not in nets_avail:
                print('Error: ' + NETWORK + ' not available with this atlas!')
                sys.exit()

            ##Get coords for network-of-interest
            mem_df.loc[mem_df['index'] == NETWORK]
            net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:,
                                                                            0]
            net_coords = list(tuple(x) for x in net_coords)
            ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values
            ####Add code for any special RSN reference lists for the nilearn atlases here#####
            ##If labels_names are not indices and NETWORK is specified, sub-list label names

        if label_names != ix_labels:
            try:
                label_names = label_names.tolist()
            except:
                pass
            label_names = [label_names[i] for i in ix_labels]

        ##Get subject directory path
        dir_path = os.path.dirname(
            os.path.realpath(func_file)) + '/' + atlas_select
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        ##If masking, remove those coords that fall outside of the mask
        if mask != None:
            [net_coords,
             label_names] = nodemaker.coord_masker(mask, net_coords,
                                                   label_names)

        ##Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)

        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(
            thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            FSLDIR = os.environ['FSLDIR']
            try:
                FSLDIR
            except NameError:
                print('FSLDIR environment variable not set!')
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                           NETWORK, net_coords, node_size)

    else:
        ##Fetch user-specified atlas coords
        [coords_all, atlas_name,
         par_max] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
        coords = list(tuple(x) for x in coords_all)

        ##Get subject directory path
        dir_path = os.path.dirname(
            os.path.realpath(func_file)) + '/' + atlas_name
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        ##Get coord membership dictionary
        try:
            networks_list
        except:
            networks_list = None
        [membership,
         membership_plotting] = nodemaker.get_mem_dict(func_file, coords,
                                                       networks_list)

        ##Convert to membership dataframe
        mem_df = membership.to_frame().reset_index()

        ##Get coords for network-of-interest
        mem_df.loc[mem_df['index'] == NETWORK]
        net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:, 0]
        net_coords = list(tuple(x) for x in net_coords)
        ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values
        try:
            label_names = [label_names[i] for i in ix_labels]
        except:
            label_names = ix_labels

        if mask != None:
            [net_coords,
             label_names] = nodemaker.coord_masker(mask, net_coords,
                                                   label_names)

        ##Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)

        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(
            thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            FSLDIR = os.environ['FSLDIR']
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                           NETWORK, net_coords, node_size)

        ##Generate network parcels image (through refinement, this could be used
        ##in place of the 3 lines above)
        #net_parcels_img_path = gen_network_parcels(parlistfile, NETWORK, labels)
        #parcellation = nib.load(net_parcels_img_path)
        #parcel_masker = input_data.NiftiLabelsMasker(labels_img=parcellation, background_label=0, memory='nilearn_cache', memory_level=5, standardize=True)
        #ts_within_parcels = parcel_masker.fit_transform(func_file)
        #net_ts = ts_within_parcels

    ##Grow ROIs
    masker = input_data.NiftiSpheresMasker(seeds=net_coords,
                                           radius=float(node_size),
                                           allow_overlap=True,
                                           memory_level=5,
                                           memory='nilearn_cache',
                                           verbose=2,
                                           standardize=True)
    ts_within_spheres = masker.fit_transform(func_file, confounds=conf)
    net_ts = ts_within_spheres

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_' + NETWORK + '_net_ts.txt'
    np.savetxt(out_path_ts, net_ts)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2) == True:
            [conn_matrix, est_path, edge_threshold,
             thr] = thresholding.adaptive_thresholding(ts_within_spheres,
                                                       conn_model, NETWORK, ID,
                                                       est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix,
         est_path] = graphestimation.get_conn_matrix(ts_within_spheres,
                                                     conn_model, NETWORK, ID,
                                                     dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(
            conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold,
         thr] = thresholding.density_thresholding(ts_within_spheres,
                                                  conn_model, NETWORK, ID,
                                                  dens_thresh, dir_path)

    if plot_switch == True:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name,
                                   dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path,
                               ID, NETWORK, label_names, mask)

        ##Plot network time-series
        plotting.plot_timeseries(net_ts, NETWORK, ID, dir_path, atlas_name,
                                 label_names)

        ##Plot connectome viz for specific Yeo networks
        title = "Connectivity Projected on the " + NETWORK
        out_path_fig = dir_path + '/' + ID + '_' + NETWORK + '_connectome_plot.png'
        niplot.plot_connectome(conn_matrix,
                               net_coords,
                               edge_threshold=edge_threshold,
                               title=title,
                               display_mode='lyrz',
                               output_file=out_path_fig)
    return est_path, thr
Example #15
def wb_connectome_with_nl_atlas_coords(input_file, ID, atlas_select, NETWORK,
                                       node_size, mask, thr, all_nets,
                                       conn_model, dens_thresh, conf,
                                       adapt_thresh, plot_switch,
                                       bedpostx_dir):
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]

    ##Input is nifti file
    func_file = input_file

    ##Fetch nilearn atlas coords
    [coords, atlas_name, networks_list,
     label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    ##Get subject directory path
    dir_path = os.path.dirname(
        os.path.realpath(func_file)) + '/' + atlas_select
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    ##Get coord membership dictionary if all_nets option triggered
    if all_nets != False:
        try:
            networks_list
        except:
            networks_list = None
        [membership,
         membership_plotting] = nodemaker.get_mem_dict(func_file, coords,
                                                       networks_list)

    ##Mask coordinates
    if mask is not None:
        [coords, label_names] = nodemaker.coord_masker(mask, coords,
                                                       label_names)

    ##Save coords and label_names to pickles
    coord_path = dir_path + '/coords_wb_' + str(thr) + '.pkl'
    with open(coord_path, 'wb') as f:
        pickle.dump(coords, f)

    labels_path = dir_path + '/labelnames_wb_' + str(thr) + '.pkl'
    with open(labels_path, 'wb') as f:
        pickle.dump(label_names, f)

    if bedpostx_dir is not None:
        from pynets.diffconnectometry import run_struct_mapping
        FSLDIR = os.environ['FSLDIR']
        try:
            FSLDIR
        except NameError:
            print('FSLDIR environment variable not set!')
        est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                       NETWORK, coords, node_size)

    ##Extract within-spheres time-series from funct file
    spheres_masker = input_data.NiftiSpheresMasker(seeds=coords,
                                                   radius=float(node_size),
                                                   memory='nilearn_cache',
                                                   memory_level=5,
                                                   verbose=2,
                                                   standardize=True)
    ts_within_spheres = spheres_masker.fit_transform(func_file, confounds=conf)
    print('\n' +
          'Time series has {0} samples'.format(ts_within_spheres.shape[0]) +
          '\n')

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_whole_brain_ts_within_spheres.txt'
    np.savetxt(out_path_ts, ts_within_spheres)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2) == True:
            [conn_matrix, est_path, edge_threshold,
             thr] = thresholding.adaptive_thresholding(ts_within_spheres,
                                                       conn_model, NETWORK, ID,
                                                       est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix,
         est_path] = graphestimation.get_conn_matrix(ts_within_spheres,
                                                     conn_model, NETWORK, ID,
                                                     dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(
            conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold,
         thr] = thresholding.density_thresholding(ts_within_spheres,
                                                  conn_model, NETWORK, ID,
                                                  dens_thresh, dir_path)

    if plot_switch == True:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name,
                                   dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path,
                               ID, NETWORK, label_names, mask)

        ##Plot connectome viz for all Yeo networks
        if all_nets != False:
            plotting.plot_membership(membership_plotting, conn_matrix,
                                     conn_model, coords, edge_threshold,
                                     atlas_name, dir_path)
        else:
            out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_connectome_viz.png'
            niplot.plot_connectome(conn_matrix,
                                   coords,
                                   title=atlas_name,
                                   edge_threshold=edge_threshold,
                                   node_size=20,
                                   colorbar=True,
                                   output_file=out_path_fig)
    return est_path, thr
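Both connectome functions above extract node time series with nilearn's NiftiSpheresMasker. A minimal sketch of that extraction step in isolation; the image path and radius are placeholders, and in recent nilearn releases the class lives in nilearn.maskers rather than nilearn.input_data:

from nilearn.maskers import NiftiSpheresMasker

func_file = '/path/to/func_preproc_mni.nii.gz'   # placeholder 4D BOLD image
coords = [(0, -52, 26), (-2, 50, 2)]             # example MNI seed coordinates
masker = NiftiSpheresMasker(seeds=coords, radius=4.0, allow_overlap=True,
                            standardize=True)
ts_within_spheres = masker.fit_transform(func_file)  # array of shape (n_volumes, n_seeds)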
Example #16
def fetch_nodes_and_labels(atlas,
                           uatlas,
                           ref_txt,
                           parc,
                           in_file,
                           use_AAL_naming,
                           clustering=False):
    """
    General API for fetching, identifying, and defining atlas nodes based on coordinates and/or labels.

    Parameters
    ----------
    atlas : str
        Name of a Nilearn-hosted coordinate or parcellation/label-based atlas supported for fetching.
        See Nilearn's datasets.atlas module for more detailed reference.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    ref_txt : str
        Path to an atlas reference .txt file that maps labels to intensities corresponding to uatlas.
    parc : bool
        Indicates whether to use parcels instead of coordinates as ROI nodes.
    in_file : str
        File path to Nifti1Image object whose affine will provide sampling reference for fetching.
    use_AAL_naming : bool
        Indicates whether to perform Automated Anatomical Labeling of each coordinate from a list of voxel
        coordinates.
    clustering : bool
        Indicates whether clustering was performed. Default is False.

    Returns
    -------
    labels : list
        List of string labels corresponding to ROI nodes.
    coords : list
        List of (x, y, z) tuples in mm-space, either taken from the coordinate atlas used or
        representing the center-of-mass of each parcellation node.
    atlas_name : str
        Name of atlas parcellation (can differ slightly from fetch API string).
    networks_list : list
        List of RSNs and their associated coordinates, if predefined uniquely for a given atlas.
    parcel_list : list
        List of 3D boolean numpy arrays or binarized Nifti1Images corresponding to ROI masks.
    par_max : int
        The maximum label intensity in the parcellation image.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    """
    from pynets import utils, nodemaker
    import pandas as pd
    import numpy as np
    import nibabel as nib
    import time
    from pathlib import Path
    import os.path as op

    base_path = utils.get_file()
    # Test if atlas is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas is None and atlas in nilearn_parc_atlases:
        [labels, networks_list,
         uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        if uatlas:
            if not isinstance(uatlas, str):
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            [coords, _,
             par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
    elif uatlas is None and parc is False and atlas in nilearn_coords_atlases:
        print(
            'Fetching coords and labels from nilearn coordinate-based atlas library...'
        )
        # Fetch nilearn atlas coords
        [coords, _, networks_list,
         labels] = nodemaker.fetch_nilearn_atlas_coords(atlas)
        parcel_list = None
        par_max = None
    elif uatlas is None and parc is False and atlas in nilearn_prob_atlases:
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print(
            'Fetching coords and labels from nilearn probabilistic atlas library...'
        )
        # Fetch nilearn atlas coords
        [labels, networks_list,
         uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas)
        if uatlas:
            if not isinstance(uatlas, str):
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
        par_max = None
    elif uatlas:
        if clustering is True:
            while True:
                if op.isfile(uatlas):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        atlas = uatlas.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas,
             par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
            # Describe user atlas coords
            print(
                "%s%s%s%s" %
                ('\n', atlas, ' comes with {0} '.format(par_max), 'parcels\n'))
        except ValueError:
            print(
                '\n\nError: Either you have specified the name of a nilearn atlas that does not exist or '
                'you have not supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
        labels = None
        networks_list = None
    else:
        networks_list = None
        labels = None
        parcel_list = None
        par_max = None
        coords = None

    # Labels prep
    if atlas:
        if labels:
            pass
        else:
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt,
                                      sep=" ",
                                      header=None,
                                      names=["Index", "Region"])
                labels = dict_df['Region'].tolist()
            else:
                try:
                    ref_txt = "%s%s%s%s" % (str(Path(base_path).parent),
                                            '/labelcharts/', atlas, '.txt')
                    if op.exists(ref_txt):
                        try:
                            dict_df = pd.read_csv(ref_txt,
                                                  sep="\t",
                                                  header=None,
                                                  names=["Index", "Region"])
                            labels = dict_df['Region'].tolist()
                        except:
                            print(
                                "WARNING: label names from label reference file failed to populate or are invalid. "
                                "Attempting AAL naming...")
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                labels = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                labels = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            labels = np.arange(len(coords) +
                                               1)[np.arange(len(coords) +
                                                            1) != 0].tolist()
                except:
                    print(
                        "Label reference file not found. Attempting AAL naming..."
                    )
                    if use_AAL_naming is True:
                        try:
                            labels = nodemaker.AAL_naming(coords)
                        except:
                            print('AAL reference labeling failed!')
                            labels = np.arange(len(coords) +
                                               1)[np.arange(len(coords) +
                                                            1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        labels = np.arange(len(coords) +
                                           1)[np.arange(len(coords) +
                                                        1) != 0].tolist()
    else:
        print(
            'WARNING: No labels available since atlas name is not specified!')

    print("%s%s" % ('Labels:\n', labels))
    atlas_name = atlas
    dir_path = utils.do_dir_path(atlas, in_file)

    if len(coords) != len(labels):
        labels = len(coords) * [np.nan]
        if len(coords) != len(labels):
            raise ValueError(
                'ERROR: length of coordinates is not equal to length of label names'
            )

    return labels, coords, atlas_name, networks_list, parcel_list, par_max, uatlas, dir_path