def test_nodemaker_tools_parlistfile_WB():
    """Smoke-test whole-brain node generation from a user-supplied parcellation.

    Runs parcel coordinate extraction and parcel-atlas creation on the bundled
    example parcellation and asserts every output is non-None.
    """
    # Example inputs
    base_dir = str(Path(__file__).parent / "examples")
    #base_dir = '/Users/rxh180012/PyNets-development/tests/examples'
    parlistfile = base_dir + '/whole_brain_cluster_labels_PCA200.nii.gz'

    t0 = time.time()
    WB_coords, _, _ = nodemaker.get_names_and_coords_of_parcels(parlistfile)
    print("%s%s%s" % (
        'get_names_and_coords_of_parcels (User-atlas whole-brain version) --> finished: ',
        str(np.round(time.time() - t0, 1)), 's'))

    # Generic 1..N integer labels (the 0 background index is excluded).
    WB_label_names = list(range(1, len(WB_coords) + 1))

    t0 = time.time()
    WB_parcel_list = nodemaker.gen_img_list(parlistfile)
    WB_parcels_map_nifti, parcel_list_exp = nodemaker.create_parcel_atlas(WB_parcel_list)
    print("%s%s%s" % (
        'create_parcel_atlas (User-atlas whole-brain version) --> finished: ',
        str(np.round(time.time() - t0, 1)), 's'))

    assert WB_coords is not None
    assert WB_label_names is not None
    assert WB_parcel_list is not None
    assert WB_parcels_map_nifti is not None
    assert parcel_list_exp is not None
def prep_nodes(node_size, parc, parcel_list, net_parcels_map_nifti, network, dir_path, mask, atlas_select):
    """
    Prepare a fresh per-run seeds directory and (when using parcels as nodes)
    save the parcel volumes into it.

    Parameters
    ----------
    node_size : int or str
        Node size; replaced with the string 'parc' when parc is True.
    parc : bool
        Whether parcels (rather than spheres) are used as nodes.
    parcel_list : list
        List of parcel images; regenerated from net_parcels_map_nifti when
        a mask or network restriction was applied.
    net_parcels_map_nifti : Nifti1Image
        Combined parcellation image used to regenerate the reduced parcel list.
    network : str
        RSN name used in the seeds directory name (may be falsy).
    dir_path : str
        Subject derivative directory in which the seeds directory is created.
    mask : str
        Mask file path (or None); presence triggers parcel-list regeneration.
    atlas_select : str
        Atlas name used in the seeds directory name.

    Returns
    -------
    tuple
        (parcel_list, seeds_dir, node_size, seeds_list)
    """
    import shutil
    from pynets import diffconnectometry, nodemaker

    if parc is True:
        # Parcels are the nodes, so node_size is the sentinel label 'parc'.
        node_size = 'parc'

    if network:
        seeds_dir = "%s%s%s%s%s%s%s" % (dir_path, '/seeds_', network, '_', atlas_select, '_', str(node_size))
    else:
        seeds_dir = "%s%s%s%s%s" % (dir_path, '/seeds_', atlas_select, '_', str(node_size))

    # Always start from a clean seeds directory: remove any stale copy, then
    # recreate it. (The original code re-tested existence after rmtree, which
    # is redundant -- after a successful rmtree the path never exists.)
    if os.path.exists(seeds_dir):
        shutil.rmtree(seeds_dir)
    os.mkdir(seeds_dir)

    if parc is True:
        # If masking/RSN restriction was performed, regenerate the reduced list
        # from the combined parcellation image.
        if mask or network:
            parcel_list = nodemaker.gen_img_list(net_parcels_map_nifti)
        diffconnectometry.save_parcel_vols(parcel_list, seeds_dir)

    seeds_list = get_seeds_list(seeds_dir)
    return parcel_list, seeds_dir, node_size, seeds_list
def test_nodemaker_tools_masking_parlistfile_RSN():
    """Smoke-test RSN-restricted, mask-reduced node generation.

    Chains get_node_membership, parcel_masker, create_parcel_atlas and
    gen_network_parcels on the bundled example data and asserts every
    output is non-None.
    """
    # Example inputs
    base_dir = str(Path(__file__).parent / "examples")
    #base_dir = '/Users/rxh180012/PyNets-development/tests/examples'
    dir_path = base_dir + '/997'
    func_file = dir_path + '/sub-997_ses-01_task-REST_run-01_bold_space-MNI152NLin2009cAsym_preproc_masked.nii.gz'
    parlistfile = base_dir + '/whole_brain_cluster_labels_PCA200.nii.gz'
    mask = dir_path + '/pDMN_3_bin.nii.gz'
    network = 'Default'
    ID = '997'
    perc_overlap = 0.10
    parc = True

    t0 = time.time()
    coords, _, _ = nodemaker.get_names_and_coords_of_parcels(parlistfile)
    print("%s%s%s" % ('get_names_and_coords_of_parcels --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    # Generic 1..N integer labels (the 0 background index is excluded).
    label_names = list(range(1, len(coords) + 1))

    t0 = time.time()
    parcel_list = nodemaker.gen_img_list(parlistfile)
    (net_coords, net_parcel_list, net_label_names,
     network) = nodemaker.get_node_membership(network, func_file, coords, label_names, parc, parcel_list)
    print("%s%s%s" % ('get_node_membership --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    (net_coords_masked, net_label_names_masked,
     net_parcel_list_masked) = nodemaker.parcel_masker(mask, net_coords, net_parcel_list, net_label_names,
                                                       dir_path, ID, perc_overlap)
    print("%s%s%s" % ('parcel_masker --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    net_parcels_map_nifti, parcel_list_exp = nodemaker.create_parcel_atlas(net_parcel_list_masked)
    print("%s%s%s" % ('create_parcel_atlas --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    out_path = nodemaker.gen_network_parcels(parlistfile, network, net_label_names_masked, dir_path)
    print("%s%s%s" % ('gen_network_parcels --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    assert coords is not None
    assert net_coords is not None
    assert net_label_names is not None
    assert net_parcel_list is not None
    assert net_coords_masked is not None
    assert net_label_names_masked is not None
    assert net_parcel_list_masked is not None
    assert out_path is not None
    assert net_parcels_map_nifti is not None
    assert parcel_list_exp is not None
    assert network is not None
def test_nodemaker_tools_masking_parlistfile_WB():
    """Smoke-test masked whole-brain node generation.

    Exercises parcel_masker, create_parcel_atlas, node_gen and
    node_gen_masking on the bundled example data and asserts every
    output is non-None.
    """
    # Example inputs
    base_dir = str(Path(__file__).parent / "examples")
    #base_dir = '/Users/rxh180012/PyNets-development/tests/examples'
    dir_path = base_dir + '/997'
    parlistfile = base_dir + '/whole_brain_cluster_labels_PCA200.nii.gz'
    atlas_select = 'whole_brain_cluster_labels_PCA200'
    roi = dir_path + '/pDMN_3_bin.nii.gz'
    mask = None
    ID = '997'
    parc = True
    perc_overlap = 0.10

    t0 = time.time()
    WB_coords, _, _ = nodemaker.get_names_and_coords_of_parcels(parlistfile)
    print("%s%s%s" % (
        'get_names_and_coords_of_parcels (Masking whole-brain version) --> finished: ',
        str(np.round(time.time() - t0, 1)), 's'))

    # Generic 1..N integer labels (the 0 background index is excluded).
    WB_label_names = list(range(1, len(WB_coords) + 1))

    t0 = time.time()
    WB_parcel_list = nodemaker.gen_img_list(parlistfile)
    _, _, WB_parcel_list_masked = nodemaker.parcel_masker(roi, WB_coords, WB_parcel_list, WB_label_names,
                                                          dir_path, ID, mask, perc_overlap)
    print("%s%s%s" % (
        'parcel_masker (Masking whole-brain version) --> finished: ',
        str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    WB_parcels_map_nifti, parcel_list_exp = nodemaker.create_parcel_atlas(WB_parcel_list_masked)
    print("%s%s%s" % (
        'create_parcel_atlas (Masking whole-brain version) --> finished: ',
        str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    (WB_net_parcels_map_nifti_unmasked, WB_coords_unmasked, _,
     WB_atlas_select, WB_uatlas_select) = nodemaker.node_gen(WB_coords, WB_parcel_list, WB_label_names,
                                                             dir_path, ID, parc, atlas_select, parlistfile)
    print("%s%s%s" % (
        'node_gen (Masking whole-brain version) --> finished: ',
        str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    (WB_net_parcels_map_nifti_masked, WB_coords_masked, WB_label_names_masked,
     WB_atlas_select, WB_uatlas_select) = nodemaker.node_gen_masking(roi, WB_coords, WB_parcel_list,
                                                                     WB_label_names, dir_path, ID, parc,
                                                                     atlas_select, parlistfile, mask)
    print("%s%s%s" % (
        'node_gen_masking (Masking whole-brain version) --> finished: ',
        str(np.round(time.time() - t0, 1)), 's'))

    assert WB_coords is not None
    assert WB_label_names is not None
    assert WB_parcel_list is not None
    assert WB_coords_masked is not None
    assert WB_label_names_masked is not None
    assert WB_parcel_list_masked is not None
    assert WB_parcels_map_nifti is not None
    assert parcel_list_exp is not None
    assert WB_net_parcels_map_nifti_unmasked is not None
    assert WB_coords_unmasked is not None
    assert WB_net_parcels_map_nifti_masked is not None
    assert WB_coords_masked is not None
def test_nodemaker_tools_masking_parlistfile_RSN():
    """Smoke-test RSN-restricted, ROI-masked node generation (subject 002).

    NOTE(review): a function with this same name is defined earlier in this
    file; under pytest the later definition shadows the earlier one -- confirm
    whether one of the two should be renamed.
    """
    # Example inputs
    base_dir = str(Path(__file__).parent / "examples")
    dir_path = base_dir + '/002/fmri'
    func_file = dir_path + '/002.nii.gz'
    parlistfile = base_dir + '/whole_brain_cluster_labels_PCA200.nii.gz'
    roi = base_dir + '/pDMN_3_bin.nii.gz'
    network = 'Default'
    ID = '002'
    perc_overlap = 0.10
    parc = True

    t0 = time.time()
    coords, _, _ = nodemaker.get_names_and_coords_of_parcels(parlistfile)
    print("%s%s%s" % ('get_names_and_coords_of_parcels --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    # Generic 1..N integer labels (the 0 background index is excluded).
    labels = list(range(1, len(coords) + 1))

    t0 = time.time()
    parcel_list = nodemaker.gen_img_list(parlistfile)
    (net_coords, net_parcel_list, net_labels,
     network) = nodemaker.get_node_membership(network, func_file, coords, labels, parc, parcel_list)
    print("%s%s%s" % ('get_node_membership --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    (net_coords_masked, net_labels_masked,
     net_parcel_list_masked) = nodemaker.parcel_masker(roi, net_coords, net_parcel_list, net_labels,
                                                       dir_path, ID, perc_overlap)
    print("%s%s%s" % ('parcel_masker --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    net_parcels_map_nifti, parcel_list_exp = nodemaker.create_parcel_atlas(net_parcel_list_masked)
    print("%s%s%s" % ('create_parcel_atlas --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    t0 = time.time()
    out_path = nodemaker.gen_network_parcels(parlistfile, network, net_labels_masked, dir_path)
    print("%s%s%s" % ('gen_network_parcels --> finished: ',
                      str(np.round(time.time() - t0, 1)), 's'))

    assert coords is not None
    assert net_coords is not None
    assert net_labels is not None
    assert net_parcel_list is not None
    assert net_coords_masked is not None
    assert net_labels_masked is not None
    assert net_parcel_list_masked is not None
    assert out_path is not None
    assert net_parcels_map_nifti is not None
    assert parcel_list_exp is not None
    assert network is not None
def gen_network_parcels(uatlas_select, network, labels, dir_path):
    """
    Write an RSN-filtered copy of an atlas parcellation.

    Keeps only the parcels whose positional index appears in `labels`,
    re-numbers them 1..N, collapses them into a single 3D label image, and
    saves it to disk.

    Parameters
    ----------
    uatlas_select : str
        File path to an atlas parcellation Nifti1Image.
    network : str
        RSN name used in the output filename and log message.
    labels : list
        Positional indices of the parcels to retain.
    dir_path : str
        Directory in which the filtered parcellation is written.

    Returns
    -------
    out_path : str
        File path to the new, RSN-filtered atlas parcellation image.
    """
    from nilearn.image import concat_imgs
    from pynets import nodemaker
    import os.path as op
    # Fail fast on a missing input, consistent with the other variants of
    # this function in the codebase.
    if not op.isfile(uatlas_select):
        raise ValueError(
            '\nERROR: User-specified atlas input not found! Check that the file(s) specified with the -ua '
            'flag exist(s)')
    img_list = nodemaker.gen_img_list(uatlas_select)
    print("%s%s%s" % ('\nExtracting parcels associated with ', network, ' network locations...\n'))
    net_parcels = [i for j, i in enumerate(img_list) if j in labels]
    # get_fdata() replaces the deprecated get_data(), which was removed in
    # nibabel 5.0.
    bna_4D = concat_imgs(net_parcels).get_fdata()
    index_vec = np.array(range(len(net_parcels))) + 1
    net_parcels_sum = np.sum(index_vec * bna_4D, axis=3)
    net_parcels_map_nifti = nib.Nifti1Image(net_parcels_sum, affine=np.eye(4))
    out_path = "%s%s%s%s" % (dir_path, '/', network, '_parcels.nii.gz')
    nib.save(net_parcels_map_nifti, out_path)
    # Drop the cached data array now that the image is on disk.
    net_parcels_map_nifti.uncache()
    return out_path
def gen_network_parcels(uatlas, network, labels, dir_path):
    """
    Write an RSN-filtered copy of an atlas parcellation to disk.

    Parcels whose positional index appears in `labels` are kept, re-numbered
    1..N, collapsed into a single 3D label image, and saved.

    Parameters
    ----------
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming
        (e.g. 'Default') used to filter nodes in the study of brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    dir_path : str
        Path to directory containing subject derivative data for given run.

    Returns
    -------
    out_path : str
        File path to a new, RSN-filtered atlas parcellation Nifti1Image.
    """
    from nilearn.image import concat_imgs
    from pynets import nodemaker
    import os.path as op

    # Fail fast if the user-specified parcellation does not exist.
    if not op.isfile(uatlas):
        raise ValueError(
            '\nERROR: User-specified atlas input not found! Check that the file(s) specified with the -ua '
            'flag exist(s)')

    parcel_imgs = nodemaker.gen_img_list(uatlas)
    print("%s%s%s" % ('\nExtracting parcels associated with ', network, ' network locations...\n'))

    # Retain only the parcels whose positional index is listed in `labels`.
    kept = []
    for idx, img in enumerate(parcel_imgs):
        if idx in labels:
            kept.append(img)

    stacked = concat_imgs(kept).get_fdata()
    intensities = np.arange(1, len(kept) + 1)
    # Weight each kept parcel by its new 1-based intensity and collapse the
    # 4th dimension into a single labeled volume.
    collapsed = np.sum(intensities * stacked, axis=3)

    net_parcels_map_nifti = nib.Nifti1Image(collapsed, affine=np.eye(4))
    out_path = "%s%s%s%s" % (dir_path, '/', network, '_parcels.nii.gz')
    nib.save(net_parcels_map_nifti, out_path)
    net_parcels_map_nifti.uncache()
    return out_path
def gen_network_parcels(uatlas_select, network, labels, dir_path):
    """Write an RSN-filtered copy of an atlas parcellation and return its path.

    Parcels whose positional index appears in `labels` are kept, re-numbered
    1..N, collapsed into one 3D label image, and saved under `dir_path`.
    """
    from nilearn.image import concat_imgs
    from pynets import nodemaker
    import os.path as op

    # Fail fast if the user-specified parcellation does not exist.
    if not op.isfile(uatlas_select):
        raise ValueError(
            '\nERROR: User-specified atlas input not found! Check that the file(s) specified with the -ua '
            'flag exist(s)')

    parcel_imgs = nodemaker.gen_img_list(uatlas_select)
    print("%s%s%s" % ('\nExtracting parcels associated with ', network, ' network locations...\n'))

    # Retain only the parcels whose positional index is listed in `labels`.
    kept = []
    for idx, img in enumerate(parcel_imgs):
        if idx in labels:
            kept.append(img)

    stacked = concat_imgs(kept).get_fdata()
    intensities = np.arange(1, len(kept) + 1)
    # Weight each kept parcel by its new 1-based intensity and collapse the
    # 4th dimension into a single labeled volume.
    collapsed = np.sum(intensities * stacked, axis=3)

    net_parcels_map_nifti = nib.Nifti1Image(collapsed, affine=np.eye(4))
    out_path = "%s%s%s%s" % (dir_path, '/', network, '_parcels.nii.gz')
    nib.save(net_parcels_map_nifti, out_path)
    net_parcels_map_nifti.uncache()
    return out_path
def fetch_nodes_and_labels(atlas_select, uatlas_select, ref_txt, parc, in_file, use_AAL_naming, clustering=False):
    """
    Fetch and define atlas nodes: coordinates, labels, and parcel images.

    Nodes are resolved from one of four sources: a Nilearn-hosted
    parcellation atlas, a Nilearn coordinate-based atlas, a Nilearn
    probabilistic atlas, or a user-supplied parcellation file
    (`uatlas_select`). Labels are then populated from `ref_txt`, a bundled
    label chart, AAL naming, or generic 1..N numbering, in that order of
    preference.

    Parameters
    ----------
    atlas_select : str
        Name of a Nilearn-supported atlas; for a user atlas it is replaced
        with the basename of `uatlas_select`.
    uatlas_select : str
        File path to a user-supplied atlas parcellation image, if any.
    ref_txt : str
        Path to a reference .txt file mapping label indices to region names.
    parc : bool
        Whether parcels (rather than coordinates) are used as nodes.
    in_file : str
        File path passed to utils.do_dir_path to build the output directory.
    use_AAL_naming : bool
        Whether to attempt Automated Anatomical Labeling of coordinates when
        no label reference file is available.
    clustering : bool
        If True, block (poll every 15 s) until `uatlas_select` exists on
        disk. Default is False.

    Returns
    -------
    tuple
        (label_names, coords, atlas_name, networks_list, parcel_list,
        par_max, uatlas_select, dir_path)
    """
    from pynets import utils, nodemaker
    import pandas as pd
    import time
    from pathlib import Path
    import os.path as op
    base_path = utils.get_file()
    # Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas_select is None and atlas_select in nilearn_parc_atlases:
        # Case 1: Nilearn parcellation atlas.
        [label_names, networks_list,
         uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                # Helper returned an in-memory image; persist it so downstream
                # consumers can work from a file path.
                nib.save(uatlas_select, "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            [coords, _, par_max
             ] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
    elif uatlas_select is None and parc is False and atlas_select in nilearn_coords_atlases:
        # Case 2: Nilearn coordinate-based atlas (no parcel images).
        print(
            'Fetching coords and labels from nilearn coordsinate-based atlas library...'
        )
        # Fetch nilearn atlas coords
        [coords, _, networks_list,
         label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    elif uatlas_select is None and parc is False and atlas_select in nilearn_prob_atlases:
        # Case 3: Nilearn probabilistic atlas; cut coords come from the maps.
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print(
            'Fetching coords and labels from nilearn probabilistic atlas library...'
        )
        # Fetch nilearn atlas coords
        [label_names, networks_list,
         uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas_select)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                nib.save(uatlas_select, "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
        par_max = None
    elif uatlas_select:
        # Case 4: user-supplied parcellation file.
        if clustering is True:
            # Clustering output may still be in flight -- poll until the file
            # appears.
            while True:
                if op.isfile(uatlas_select):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        # Derive the atlas name from the file's basename.
        atlas_select = uatlas_select.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas_select, par_max
             ] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
            # Describe user atlas coords
            print("%s%s%s%s" % ('\n', atlas_select, ' comes with {0} '.format(par_max), 'parcels\n'))
        except ValueError:
            print(
                '\n\nError: Either you have specified the name of a nilearn atlas that does not exist or you have not '
                'supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
        # User atlases carry no label/network metadata of their own.
        label_names = None
        networks_list = None
    else:
        # No atlas source at all: every output placeholder is None.
        networks_list = None
        label_names = None
        parcel_list = None
        par_max = None
        coords = None
    # Labels prep
    if atlas_select:
        if label_names:
            pass
        else:
            # Preference order: user-supplied ref_txt, bundled label chart,
            # AAL naming, generic numbering.
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
                label_names = dict_df['Region'].tolist()
            else:
                try:
                    ref_txt = "%s%s%s%s" % (str(
                        Path(base_path).parent), '/labelcharts/', atlas_select, '.txt')
                    if op.exists(ref_txt):
                        # NOTE(review): bare except clauses below swallow all
                        # errors (including KeyboardInterrupt); consider
                        # narrowing to Exception.
                        try:
                            dict_df = pd.read_csv(ref_txt, sep="\t", header=None, names=["Index", "Region"])
                            label_names = dict_df['Region'].tolist()
                            #print(label_names)
                        except:
                            print(
                                "WARNING: label names from label reference file failed to populate or are invalid. "
                                "Attempting AAL naming...")
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                label_names = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                label_names = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            label_names = np.arange(len(coords) + 1)[
                                np.arange(len(coords) + 1) != 0].tolist()
                except:
                    print(
                        "Label reference file not found. Attempting AAL naming..."
                    )
                    if use_AAL_naming is True:
                        try:
                            label_names = nodemaker.AAL_naming(coords)
                            #print(label_names)
                        except:
                            print('AAL reference labeling failed!')
                            label_names = np.arange(len(coords) + 1)[
                                np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    else:
        print(
            'WARNING: No labels available since atlas name is not specified!')
    print("%s%s" % ('Labels:\n', label_names))
    atlas_name = atlas_select
    dir_path = utils.do_dir_path(atlas_select, in_file)
    return label_names, coords, atlas_name, networks_list, parcel_list, par_max, uatlas_select, dir_path
def fetch_nodes_and_labels(atlas, uatlas, ref_txt, parc, in_file, use_AAL_naming, clustering=False):
    """
    General API for fetching, identifying, and defining atlas nodes based on coordinates and/or labels.

    Parameters
    ----------
    atlas : str
        Name of a Nilearn-hosted coordinate or parcellation/label-based atlas supported for fetching.
        See Nilearn's datasets.atlas module for more detailed reference.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    ref_txt : str
        Path to an atlas reference .txt file that maps labels to intensities corresponding to uatlas.
    parc : bool
        Indicates whether to use parcels instead of coordinates as ROI nodes.
    in_file : str
        File path to Nifti1Image object whose affine will provide sampling reference for fetching.
    use_AAL_naming : bool
        Indicates whether to perform Automated-Anatomical Labeling of each coordinate from a list of
        a voxel coordinates.
    clustering : bool
        Indicates whether clustering was performed. Default is False.

    Returns
    -------
    labels : list
        List of string labels corresponding to ROI nodes.
    coords : list
        List of (x, y, z) tuples in mm-space corresponding to a coordinate atlas used or
        which represent the center-of-mass of each parcellation node.
    atlas_name : str
        Name of atlas parcellation (can differ slightly from fetch API string).
    networks_list : list
        List of RSN's and their associated coordinates, if predefined uniquely for a given atlas.
    parcel_list : list
        List of 3D boolean numpy arrays or binarized Nifti1Images corresponding to ROI masks.
    par_max : int
        The maximum label intensity in the parcellation image.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    """
    from pynets import utils, nodemaker
    import pandas as pd
    import time
    from pathlib import Path
    import os.path as op
    base_path = utils.get_file()
    # Test if atlas is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas is None and atlas in nilearn_parc_atlases:
        # Case 1: Nilearn parcellation atlas.
        [labels, networks_list,
         uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        if uatlas:
            if not isinstance(uatlas, str):
                # Helper returned an in-memory image; persist it so downstream
                # consumers can work from a file path.
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            [coords, _, par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
    elif uatlas is None and parc is False and atlas in nilearn_coords_atlases:
        # Case 2: Nilearn coordinate-based atlas (no parcel images).
        print(
            'Fetching coords and labels from nilearn coordinate-based atlas library...'
        )
        # Fetch nilearn atlas coords
        [coords, _, networks_list,
         labels] = nodemaker.fetch_nilearn_atlas_coords(atlas)
        parcel_list = None
        par_max = None
    elif uatlas is None and parc is False and atlas in nilearn_prob_atlases:
        # Case 3: Nilearn probabilistic atlas; cut coords come from the maps.
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print(
            'Fetching coords and labels from nilearn probabilistic atlas library...'
        )
        # Fetch nilearn atlas coords
        [labels, networks_list,
         uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas)
        if uatlas:
            if not isinstance(uatlas, str):
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
        par_max = None
    elif uatlas:
        # Case 4: user-supplied parcellation file.
        if clustering is True:
            # Clustering output may still be in flight -- poll until the file
            # appears.
            while True:
                if op.isfile(uatlas):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        # Derive the atlas name from the file's basename.
        atlas = uatlas.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas, par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
            # Describe user atlas coords
            print(
                "%s%s%s%s" % ('\n', atlas, ' comes with {0} '.format(par_max), 'parcels\n'))
        except ValueError:
            print(
                '\n\nError: Either you have specified the name of a nilearn atlas that does not exist or '
                'you have not supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
        # User atlases carry no label/network metadata of their own.
        labels = None
        networks_list = None
    else:
        # No atlas source at all: every output placeholder is None.
        networks_list = None
        labels = None
        parcel_list = None
        par_max = None
        coords = None
    # Labels prep
    if atlas:
        if labels:
            pass
        else:
            # Preference order: user-supplied ref_txt, bundled label chart,
            # AAL naming, generic numbering.
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
                labels = dict_df['Region'].tolist()
            else:
                try:
                    ref_txt = "%s%s%s%s" % (str(Path(base_path).parent), '/labelcharts/', atlas, '.txt')
                    if op.exists(ref_txt):
                        # NOTE(review): bare except clauses below swallow all
                        # errors (including KeyboardInterrupt); consider
                        # narrowing to Exception.
                        try:
                            dict_df = pd.read_csv(ref_txt, sep="\t", header=None, names=["Index", "Region"])
                            labels = dict_df['Region'].tolist()
                        except:
                            print(
                                "WARNING: label names from label reference file failed to populate or are invalid. "
                                "Attempting AAL naming...")
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                labels = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                labels = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                except:
                    print(
                        "Label reference file not found. Attempting AAL naming..."
                    )
                    if use_AAL_naming is True:
                        try:
                            labels = nodemaker.AAL_naming(coords)
                        except:
                            print('AAL reference labeling failed!')
                            labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    else:
        print(
            'WARNING: No labels available since atlas name is not specified!')
    print("%s%s" % ('Labels:\n', labels))
    atlas_name = atlas
    dir_path = utils.do_dir_path(atlas, in_file)
    if len(coords) != len(labels):
        # Pad/replace labels with NaN so lengths always match.
        labels = len(coords) * [np.nan]
    # NOTE(review): the preceding assignment forces len(labels) == len(coords),
    # so this raise is unreachable -- confirm whether the intent was to warn
    # before padding instead.
    if len(coords) != len(labels):
        raise ValueError(
            'ERROR: length of coordinates is not equal to length of label names'
        )
    return labels, coords, atlas_name, networks_list, parcel_list, par_max, uatlas, dir_path