def individual_tcorr_clustering(func_file, clust_mask, ID, k, thresh=0.5):
    """
    Build an individual-level parcellation by temporal-correlation (tcorr)
    clustering of func_file within clust_mask at resolution k.

    Writes intermediate connectivity/parcellation files next to func_file and
    the final parcellation Nifti under the atlas-specific derivative directory.

    Returns
    -------
    (parlistfile, atlas_select, dir_path) : paths/name of the new parcellation.
    """
    import os
    from pynets import utils
    from pynets.clustools import (make_image_from_bin_renum,
                                  binfile_parcellate,
                                  make_local_connectivity_tcorr)

    # Atlas name is derived from the mask filename plus the cluster level.
    mask_name = os.path.basename(clust_mask).split('.nii.gz')[0]
    atlas_select = '{0}_k{1}'.format(mask_name, k)
    print('\nCreating atlas at cluster level {0} for {1}...\n'.format(k, atlas_select))

    # Working/intermediate file locations (alongside the functional image).
    working_dir = '{0}/{1}'.format(os.path.dirname(func_file), atlas_select)
    outfile = '{0}/rm_tcorr_conn_{1}.npy'.format(working_dir, ID)
    outfile_parc = '{0}/rm_tcorr_indiv_cluster_{1}'.format(working_dir, ID)
    binfile = '{0}/rm_tcorr_indiv_cluster_{1}_{2}.npy'.format(working_dir, ID, k)

    dir_path = utils.do_dir_path(atlas_select, func_file)
    parlistfile = '{0}/{1}_k{2}.nii.gz'.format(dir_path, mask_name, k)

    # tcorr connectivity -> ncut parcellation -> relabeled Nifti image.
    make_local_connectivity_tcorr(func_file, clust_mask, outfile, thresh)
    binfile_parcellate(outfile, outfile_parc, int(k))
    # Write out for group mean clustering.
    make_image_from_bin_renum(parlistfile, binfile, clust_mask)
    return parlistfile, atlas_select, dir_path
def individual_tcorr_clustering(func_file, clust_mask, ID, k, clust_type, thresh=0.5):
    """
    Build an individual-level parcellation of func_file within clust_mask at
    resolution k, using either a nilearn clustering method or legacy 'ncut'.

    Returns
    -------
    (uatlas_select, atlas_select, clustering, clust_mask, k, clust_type)
    where clustering is always True (flags downstream that clustering ran).
    """
    import os
    from pynets import utils, clustools

    # Methods delegated to nilearn-based parcellation.
    nilearn_clust_list = ['kmeans', 'ward', 'complete', 'average']

    mask_name = os.path.basename(clust_mask).split('.nii.gz')[0]
    atlas_select = '{0}_{1}_k{2}'.format(mask_name, clust_type, k)
    print('\nCreating atlas using {0} at cluster level {1} for {2}...\n'.format(
        clust_type, k, atlas_select))

    dir_path = utils.do_dir_path(atlas_select, func_file)
    uatlas_select = '{0}/{1}_{2}_k{3}.nii.gz'.format(dir_path, mask_name, clust_type, k)

    if clust_type in nilearn_clust_list:
        # Nilearn clustering writes the parcellation directly.
        clustools.nil_parcellate(func_file, clust_mask, k, clust_type, ID, dir_path, uatlas_select)
    elif clust_type == 'ncut':
        # Legacy ncut pipeline: tcorr connectivity -> parcellation -> relabeled image.
        working_dir = '{0}/{1}'.format(os.path.dirname(func_file), atlas_select)
        outfile = '{0}/rm_tcorr_conn_{1}.npy'.format(working_dir, ID)
        outfile_parc = '{0}/rm_tcorr_indiv_cluster_{1}'.format(working_dir, ID)
        binfile = '{0}/rm_tcorr_indiv_cluster_{1}_{2}.npy'.format(working_dir, ID, k)
        clustools.make_local_connectivity_tcorr(func_file, clust_mask, outfile, thresh)
        clustools.binfile_parcellate(outfile, outfile_parc, int(k))
        clustools.make_image_from_bin_renum(uatlas_select, binfile, clust_mask)

    clustering = True
    return uatlas_select, atlas_select, clustering, clust_mask, k, clust_type
def individual_tcorr_clustering(func_file, clust_mask, ID, k, thresh=0.5):
    """
    Cluster func_file within clust_mask at level k via temporal correlation
    (tcorr) and write out an individual parcellation image.

    Returns
    -------
    (parlistfile, atlas_select, dir_path) for the generated parcellation.
    """
    import os
    from pynets import utils
    from pynets.clustools import (make_image_from_bin_renum,
                                  binfile_parcellate,
                                  make_local_connectivity_tcorr)

    mask_name = os.path.basename(clust_mask).split('.nii.gz')[0]
    atlas_select = mask_name + '_k' + str(k)
    print('\nCreating atlas at cluster level ' + str(k) + ' for ' + atlas_select + '...\n')

    # Intermediate outputs live beside the functional image.
    working_dir = os.path.dirname(func_file) + '/' + atlas_select
    outfile = working_dir + '/rm_tcorr_conn_' + str(ID) + '.npy'
    outfile_parc = working_dir + '/rm_tcorr_indiv_cluster_' + str(ID)
    binfile = working_dir + '/rm_tcorr_indiv_cluster_' + str(ID) + '_' + str(k) + '.npy'

    dir_path = utils.do_dir_path(atlas_select, func_file)
    parlistfile = dir_path + '/' + mask_name + '_k' + str(k) + '.nii.gz'

    make_local_connectivity_tcorr(func_file, clust_mask, outfile, thresh)
    binfile_parcellate(outfile, outfile_parc, int(k))
    # Write out for group mean clustering.
    make_image_from_bin_renum(parlistfile, binfile, clust_mask)
    return parlistfile, atlas_select, dir_path
def build_multi_net_paths(multi_nets, atlas_select, input_file, multi_thr, min_thr, max_thr, step_thr, ID, network,
                          conn_model, thr, mask, dir_path, est_path_list, node_size_list, node_size):
    """
    Build the list of estimation output paths, either once per RSN in
    multi_nets or a single time for the given network.

    Parameters are passed straight through to utils.build_est_path_list;
    dir_path is (re)derived from atlas_select/input_file.

    Returns
    -------
    (iter_thresh, est_path_list, num_networks, dir_path)
    """
    from pynets import utils
    # BUG FIX: guard with truthiness rather than `is not None`. An empty
    # multi_nets list would previously enter this branch, skip the loop body,
    # and leave iter_thresh/est_path_list unbound -> NameError at the return.
    if multi_nets:
        num_networks = len(multi_nets)
        for network in multi_nets:
            # NOTE: dir_path depends only on atlas_select/input_file, so this is
            # loop-invariant; kept inside the loop in case do_dir_path has
            # per-call side effects (e.g. directory creation) — confirm.
            dir_path = utils.do_dir_path(atlas_select, input_file)
            [iter_thresh, est_path_list] = utils.build_est_path_list(multi_thr, min_thr, max_thr, step_thr, ID,
                                                                     network, conn_model, thr, mask, dir_path,
                                                                     est_path_list, node_size_list, node_size)
    else:
        num_networks = 1
        dir_path = utils.do_dir_path(atlas_select, input_file)
        [iter_thresh, est_path_list] = utils.build_est_path_list(multi_thr, min_thr, max_thr, step_thr, ID, network,
                                                                 conn_model, thr, mask, dir_path, est_path_list,
                                                                 node_size_list, node_size)
    return iter_thresh, est_path_list, num_networks, dir_path
def RSN_fetch_nodes_and_labels(atlas_select, parlistfile, ref_txt, parc, func_file):
    """
    Resolve an RSN atlas (nilearn-hosted name or user parcellation file) into
    node coordinates, labels, parcels, and a derivative directory path.

    Parameters
    ----------
    atlas_select : name of a nilearn atlas, or an arbitrary label when
        parlistfile is supplied.
    parlistfile : path to a user parcellation image, or None.
    ref_txt : optional path to a "Index Region" label reference .txt file.
    parc : whether parcels (vs. coordinates) are used as nodes.
    func_file : functional image path used to derive dir_path.

    Returns
    -------
    (label_names, coords, atlas_name, networks_list, parcel_list, par_max,
     parlistfile, dir_path)
    """
    # FIX: import os/numpy locally — both are used below but were not imported
    # in this function (previously relied on unseen module-level imports).
    import os
    import numpy as np
    import pandas as pd
    from pynets import utils, nodemaker

    # Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_atlases = ['atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009']
    if atlas_select in nilearn_atlases:
        [label_names, networks_list, parlistfile] = utils.nilearn_atlas_helper(atlas_select)

    # Get coordinates and/or parcels from atlas
    if parlistfile is None and parc == False:
        print('Fetching coordinates and labels from nilearn coordinate-based atlases')
        # Fetch nilearn atlas coords
        [coords, atlas_name, networks_list, label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    else:
        # Fetch user-specified atlas coords
        [coords, atlas_select, par_max, parcel_list] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
        networks_list = None

    # Labels prep. FIX: probe for the possibly-unbound name with a narrow
    # `except NameError` instead of a bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit).
    try:
        label_names
    except NameError:
        if ref_txt is not None and os.path.exists(ref_txt):
            atlas_select = os.path.basename(ref_txt).split('.txt')[0]
            dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
            label_names = dict_df['Region'].tolist()
        else:
            # Fall back to generic 1..N numbering.
            label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    if label_names is None:
        label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    try:
        atlas_name
    except NameError:
        atlas_name = atlas_select
    dir_path = utils.do_dir_path(atlas_select, func_file)
    return label_names, coords, atlas_name, networks_list, parcel_list, par_max, parlistfile, dir_path
def WB_fetch_nodes_and_labels(atlas_select, parlistfile, ref_txt, parc, func_file):
    """
    Resolve a whole-brain atlas (nilearn-hosted name or user parcellation
    file) into node coordinates, labels, parcels, and a derivative directory.

    Parameters
    ----------
    atlas_select : nilearn atlas name, or an arbitrary label when parlistfile
        is supplied.
    parlistfile : path to a user parcellation image, or None.
    ref_txt : optional path to a "Index Region" label reference .txt file.
    parc : whether parcels (vs. coordinates) are used as nodes.
    func_file : functional image path used to derive dir_path.

    Returns
    -------
    (label_names, coords, atlas_name, networks_list, parcel_list, par_max,
     parlistfile, dir_path)
    """
    # FIX: import os/numpy locally — both are used below but were not imported
    # in this function (previously relied on unseen module-level imports).
    import os
    import numpy as np
    import pandas as pd
    from pathlib import Path
    from pynets import utils, nodemaker

    # Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = ['atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009']
    nilearn_coord_atlases = ['harvard_oxford', 'msdl', 'coords_power_2011', 'smith_2009', 'basc_multiscale_2015',
                             'allen_2011', 'coords_dosenbach_2010']
    if atlas_select in nilearn_parc_atlases:
        [label_names, networks_list, parlistfile] = utils.nilearn_atlas_helper(atlas_select)

    # Get coordinates and/or parcels from atlas
    if parlistfile is None and parc == False and atlas_select in nilearn_coord_atlases:
        print('Fetching coordinates and labels from nilearn coordinate-based atlases')
        # Fetch nilearn atlas coords
        [coords, atlas_name, networks_list, label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    else:
        try:
            # Fetch user-specified atlas coords
            [coords, atlas_select, par_max, parcel_list] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
            networks_list = None
            # Describe user atlas coords
            print('\n' + str(atlas_select) + ' comes with {0} '.format(par_max) + 'parcels' + '\n')
        except Exception:
            raise ValueError('\n\nError: Either you have specified the name of a nilearn atlas that does not exist or you have not supplied a 3d atlas parcellation image!\n\n')

    # Labels prep. FIX: probe the possibly-unbound name with a narrow
    # `except NameError` rather than a bare `except:`.
    try:
        label_names
    except NameError:
        if ref_txt is not None and os.path.exists(ref_txt):
            atlas_select = os.path.basename(ref_txt).split('.txt')[0]
            dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
            label_names = dict_df['Region'].tolist()
        else:
            try:
                atlas_ref_txt = atlas_select + '.txt'
                # BUG FIX: Path(__file__) is the module file itself, so
                # Path(__file__)/'atlases'/... produced a path *under* the .py
                # file that can never exist; the bundled reference must be
                # resolved from the module's parent directory.
                ref_txt = Path(__file__).parent/'atlases'/atlas_ref_txt
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
                label_names = dict_df['Region'].tolist()
            except Exception:
                # Fall back to generic 1..N numbering.
                label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    if label_names is None:
        label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    try:
        atlas_name
    except NameError:
        atlas_name = atlas_select
    dir_path = utils.do_dir_path(atlas_select, func_file)
    return label_names, coords, atlas_name, networks_list, parcel_list, par_max, parlistfile, dir_path
def fetch_nodes_and_labels(atlas_select, uatlas_select, ref_txt, parc, in_file, use_AAL_naming, clustering=False):
    """
    Fetch, identify, and define atlas nodes based on coordinates and/or labels.

    Resolves atlas_select (a nilearn atlas name) and/or uatlas_select (a
    user-supplied parcellation file path) into node coordinates, labels,
    optional parcel masks, and a derivative directory path.

    Parameters
    ----------
    atlas_select : nilearn atlas name, or None.
    uatlas_select : path to a user parcellation Nifti image, or None.
    ref_txt : optional "Index Region" label reference .txt file.
    parc : whether parcels (vs. coordinates) are used as nodes.
    in_file : file path used to derive dir_path.
    use_AAL_naming : whether to attempt AAL naming of coordinates when no
        label reference is available.
    clustering : when True, uatlas_select may still be in production by a
        clustering job; this function blocks until the file exists.

    Returns
    -------
    (label_names, coords, atlas_name, networks_list, parcel_list, par_max,
     uatlas_select, dir_path)
    """
    from pynets import utils, nodemaker
    import pandas as pd
    import time
    from pathlib import Path
    import os.path as op
    # NOTE(review): `nib` (nibabel) and `np` (numpy) are referenced below but
    # not imported here — presumably module-level imports; confirm.
    base_path = utils.get_file()
    # Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas_select is None and atlas_select in nilearn_parc_atlases:
        # Parcellation-based nilearn atlas: fetch the label image itself.
        [label_names, networks_list, uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                # In-memory image returned; persist it so downstream consumers get a path.
                nib.save(uatlas_select, "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            [coords, _, par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError("%s%s%s" % ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
    elif uatlas_select is None and parc is False and atlas_select in nilearn_coords_atlases:
        # NOTE(review): typo "coordsinate" in the user-facing message below —
        # left unchanged here (runtime string).
        print('Fetching coords and labels from nilearn coordsinate-based atlas library...')
        # Fetch nilearn atlas coords
        [coords, _, networks_list, label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    elif uatlas_select is None and parc is False and atlas_select in nilearn_prob_atlases:
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print('Fetching coords and labels from nilearn probabilistic atlas library...')
        # Fetch nilearn atlas coords
        [label_names, networks_list, uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas_select)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                nib.save(uatlas_select, "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError("%s%s%s" % ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
        par_max = None
    elif uatlas_select:
        if clustering is True:
            # Block until the clustering output exists (polls every 15 s).
            # NOTE(review): no timeout — hangs forever if the file never appears.
            while True:
                if op.isfile(uatlas_select):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        # Derive the atlas name from the file's basename (sans extension).
        atlas_select = uatlas_select.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas_select, par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
            # Describe user atlas coords
            print("%s%s%s%s" % ('\n', atlas_select, ' comes with {0} '.format(par_max), 'parcels\n'))
        except ValueError:
            # Best-effort: report the problem but continue with empty node info.
            print('\n\nError: Either you have specified the name of a nilearn atlas that does not exist or you have not '
                  'supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
            label_names = None
            networks_list = None
    else:
        # No atlas at all: everything is undefined.
        networks_list = None
        label_names = None
        parcel_list = None
        par_max = None
        coords = None
    # Labels prep: fill in label_names from (in order of preference) the
    # user-supplied ref_txt, a bundled labelchart, AAL naming, or generic numbering.
    if atlas_select:
        if label_names:
            pass
        else:
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
                label_names = dict_df['Region'].tolist()
            else:
                try:
                    # Bundled label chart shipped next to the package sources.
                    ref_txt = "%s%s%s%s" % (str(Path(base_path).parent), '/labelcharts/', atlas_select, '.txt')
                    if op.exists(ref_txt):
                        try:
                            dict_df = pd.read_csv(ref_txt, sep="\t", header=None, names=["Index", "Region"])
                            label_names = dict_df['Region'].tolist()
                            #print(label_names)
                        except:
                            print("WARNING: label names from label reference file failed to populate or are invalid. "
                                  "Attempting AAL naming...")
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                except:
                    print("Label reference file not found. Attempting AAL naming...")
                    if use_AAL_naming is True:
                        try:
                            label_names = nodemaker.AAL_naming(coords)
                            #print(label_names)
                        except:
                            print('AAL reference labeling failed!')
                            label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    else:
        print('WARNING: No labels available since atlas name is not specified!')
    print("%s%s" % ('Labels:\n', label_names))
    atlas_name = atlas_select
    dir_path = utils.do_dir_path(atlas_select, in_file)
    return label_names, coords, atlas_name, networks_list, parcel_list, par_max, uatlas_select, dir_path
def fetch_nodes_and_labels(atlas, uatlas, ref_txt, parc, in_file, use_AAL_naming, clustering=False):
    """
    General API for fetching, identifying, and defining atlas nodes based on coordinates and/or labels.

    Parameters
    ----------
    atlas : str
        Name of a Nilearn-hosted coordinate or parcellation/label-based atlas supported for fetching.
        See Nilearn's datasets.atlas module for more detailed reference.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    ref_txt : str
        Path to an atlas reference .txt file that maps labels to intensities corresponding to uatlas.
    parc : bool
        Indicates whether to use parcels instead of coordinates as ROI nodes.
    in_file : str
        File path to Nifti1Image object whose affine will provide sampling reference for fetching.
    use_AAL_naming : bool
        Indicates whether to perform Automated-Anatomical Labeling of each coordinate from a list of
        a voxel coordinates.
    clustering : bool
        Indicates whether clustering was performed. Default is False.

    Returns
    -------
    labels : list
        List of string labels corresponding to ROI nodes.
    coords : list
        List of (x, y, z) tuples in mm-space corresponding to a coordinate atlas used or
        which represent the center-of-mass of each parcellation node.
    atlas_name : str
        Name of atlas parcellation (can differ slightly from fetch API string).
    networks_list : list
        List of RSN's and their associated cooordinates, if predefined uniquely for a given atlas.
    parcel_list : list
        List of 3D boolean numpy arrays or binarized Nifti1Images corresponding to ROI masks.
    par_max : int
        The maximum label intensity in the parcellation image.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    """
    from pynets import utils, nodemaker
    import pandas as pd
    import time
    from pathlib import Path
    import os.path as op
    # NOTE(review): `nib` (nibabel) and `np` (numpy) are referenced below but
    # not imported here — presumably module-level imports; confirm.
    base_path = utils.get_file()
    # Test if atlas is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas is None and atlas in nilearn_parc_atlases:
        # Parcellation-based nilearn atlas: fetch the label image itself.
        [labels, networks_list, uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        if uatlas:
            if not isinstance(uatlas, str):
                # In-memory image returned; persist it so downstream consumers get a path.
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            [coords, _, par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError("%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
    elif uatlas is None and parc is False and atlas in nilearn_coords_atlases:
        print('Fetching coords and labels from nilearn coordinate-based atlas library...')
        # Fetch nilearn atlas coords
        [coords, _, networks_list, labels] = nodemaker.fetch_nilearn_atlas_coords(atlas)
        parcel_list = None
        par_max = None
    elif uatlas is None and parc is False and atlas in nilearn_prob_atlases:
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print('Fetching coords and labels from nilearn probabilistic atlas library...')
        # Fetch nilearn atlas coords
        [labels, networks_list, uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas)
        if uatlas:
            if not isinstance(uatlas, str):
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError("%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
        par_max = None
    elif uatlas:
        if clustering is True:
            # Block until the clustering output exists (polls every 15 s).
            # NOTE(review): no timeout — hangs forever if the file never appears.
            while True:
                if op.isfile(uatlas):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        # Derive the atlas name from the file's basename (sans extension).
        atlas = uatlas.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas, par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
            # Describe user atlas coords
            print("%s%s%s%s" % ('\n', atlas, ' comes with {0} '.format(par_max), 'parcels\n'))
        except ValueError:
            # Best-effort: report the problem but continue with empty node info.
            print('\n\nError: Either you have specified the name of a nilearn atlas that does not exist or '
                  'you have not supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
            labels = None
            networks_list = None
    else:
        # No atlas at all: everything is undefined.
        networks_list = None
        labels = None
        parcel_list = None
        par_max = None
        coords = None
    # Labels prep: fill in labels from (in order of preference) the
    # user-supplied ref_txt, a bundled labelchart, AAL naming, or generic numbering.
    if atlas:
        if labels:
            pass
        else:
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None, names=["Index", "Region"])
                labels = dict_df['Region'].tolist()
            else:
                try:
                    # Bundled label chart shipped next to the package sources.
                    ref_txt = "%s%s%s%s" % (str(Path(base_path).parent), '/labelcharts/', atlas, '.txt')
                    if op.exists(ref_txt):
                        try:
                            dict_df = pd.read_csv(ref_txt, sep="\t", header=None, names=["Index", "Region"])
                            labels = dict_df['Region'].tolist()
                        except:
                            print("WARNING: label names from label reference file failed to populate or are invalid. "
                                  "Attempting AAL naming...")
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                except:
                    print("Label reference file not found. Attempting AAL naming...")
                    if use_AAL_naming is True:
                        try:
                            labels = nodemaker.AAL_naming(coords)
                        except:
                            print('AAL reference labeling failed!')
                            labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    else:
        print('WARNING: No labels available since atlas name is not specified!')
    print("%s%s" % ('Labels:\n', labels))
    atlas_name = atlas
    dir_path = utils.do_dir_path(atlas, in_file)
    # Pad labels with NaN when counts disagree so downstream indexing stays aligned.
    if len(coords) != len(labels):
        labels = len(coords) * [np.nan]
    # NOTE(review): the padding above guarantees equal lengths, so this raise is
    # unreachable as written — confirm whether the check was meant to precede the pad.
    if len(coords) != len(labels):
        raise ValueError('ERROR: length of coordinates is not equal to length of label names')
    return labels, coords, atlas_name, networks_list, parcel_list, par_max, uatlas, dir_path
if (k_min is not None and k_max is not None) and k is None and clust_mask_list is not None: k_clustering = 4 elif (k_min is not None and k_max is not None) and k is None and clust_mask_list is None: k_clustering = 2 elif k is not None and (k_min is None and k_max is None) and clust_mask_list is not None: k_clustering = 3 elif k is not None and (k_min is None and k_max is None) and clust_mask_list is None: k_clustering = 1 else: k_clustering = 0 if input_file: if parlistfile is not None and k_clustering == 0 and user_atlas_list is None: atlas_select = parlistfile.split('/')[-1].split('.')[0] dir_path = do_dir_path(atlas_select, input_file) print("%s%s" % ("ATLAS: ", atlas_select)) elif parlistfile is not None and user_atlas_list is None and k_clustering == 0: atlas_select = parlistfile.split('/')[-1].split('.')[0] dir_path = do_dir_path(atlas_select, input_file) print("%s%s" % ("ATLAS: ", atlas_select)) elif user_atlas_list is not None: parlistfile = user_atlas_list[0] print('Iterating across multiple user atlases...') for parlistfile in user_atlas_list: atlas_select = parlistfile.split('/')[-1].split('.')[0] dir_path = do_dir_path(atlas_select, input_file) atlas_select = None elif multi_atlas is not None: print('Iterating across multiple nilearn atlases...') for atlas_select in multi_atlas: