msdl = datasets.fetch_atlas_msdl() ########################################################################## # Iterate over fetched atlases to extract coordinates - probabilistic # ------------------------------------------------------------------- from nilearn.input_data import NiftiMapsMasker # create masker to extract functional data within atlas parcels masker = NiftiMapsMasker(maps_img=msdl['maps'], standardize=True, memory='nilearn_cache') # extract time series from all subjects and concatenate them time_series = [] for func, confounds in zip(data.func, data.confounds): time_series.append(masker.fit_transform(func, confounds=confounds)) # calculate correlation matrices across subjects and display correlation_matrices = connectome_measure.fit_transform(time_series) # Mean correlation matrix across 10 subjects can be grabbed like this, # using connectome measure object mean_correlation_matrix = connectome_measure.mean_ # grab center coordinates for probabilistic atlas coordinates = plotting.find_probabilistic_atlas_cut_coords(maps_img=msdl['maps']) # plot connectome with 80% edge strength in the connectivity plotting.plot_connectome(mean_correlation_matrix, coordinates, edge_threshold="80%", title='MSDL (probabilistic)') plotting.show()
# Fit the group-sparse covariance estimator created earlier in the file.
gsc.fit(subject_time_series)

try:
    from sklearn.covariance import GraphicalLassoCV
except ImportError:
    # for Scikit-Learn < v0.20.0, where the estimator was named GraphLassoCV
    from sklearn.covariance import GraphLassoCV as GraphicalLassoCV

# Fit a single graphical lasso on all subjects' time series concatenated
# along the time axis.
gl = GraphicalLassoCV(verbose=2)
gl.fit(np.concatenate(subject_time_series))

##############################################################################
# Displaying results
# -------------------
atlas_img = msdl_atlas_dataset.maps
atlas_region_coords = plotting.find_probabilistic_atlas_cut_coords(atlas_img)
labels = msdl_atlas_dataset.labels

# Dense covariance: keep only the strongest 10% of edges for readability.
plotting.plot_connectome(gl.covariance_,
                         atlas_region_coords, edge_threshold='90%',
                         title="Covariance",
                         display_mode="lzr")
# Negated precision matrix approximates partial correlations (sparse).
plotting.plot_connectome(-gl.precision_, atlas_region_coords,
                         edge_threshold='90%',
                         title="Sparse inverse covariance (GraphicalLasso)",
                         display_mode="lzr",
                         edge_vmax=.5, edge_vmin=-.5)
plot_matrices(gl.covariance_, gl.precision_, "GraphicalLasso", labels)
from nilearn.maskers import NiftiMapsMasker # create masker to extract functional data within atlas parcels masker = NiftiMapsMasker(maps_img=difumo.maps, standardize=True, memory='nilearn_cache') # extract time series from all subjects and concatenate them time_series = [] for func, confounds in zip(data.func, data.confounds): time_series.append(masker.fit_transform(func, confounds=confounds)) # calculate correlation matrices across subjects and display correlation_matrices = connectome_measure.fit_transform(time_series) # Mean correlation matrix across 10 subjects can be grabbed like this, # using connectome measure object mean_correlation_matrix = connectome_measure.mean_ # grab center coordinates for probabilistic atlas coordinates = plotting.find_probabilistic_atlas_cut_coords( maps_img=difumo.maps) # plot connectome with 85% edge strength in the connectivity plotting.plot_connectome( mean_correlation_matrix, coordinates, edge_threshold="85%", title='DiFuMo with {0} dimensions (probabilistic)'.format(dim)) plotting.show()
# NOTE(review): the line below is the tail of an assignment whose start is
# outside this chunk (presumably the reshape of mean_correlations_abide).
                                 subjects_timeseries[subject_id[0]].shape[-1])

# Mean correlation matrix across all control subjects, reshaped to
# (n_regions, n_regions) using the region count of the first subject.
mean_correlations_control = np.mean(control_correlations,
                                    axis=0).reshape(subjects_timeseries[subject_id[0]].shape[-1],
                                                    subjects_timeseries[subject_id[0]].shape[-1])

# Visualize both mean correlation matrices side by side.
plotting.plot_matrix(mean_correlations_abide, vmax=1, vmin=-1, colorbar=True,
                     title='Correlation between 20 regions for Abide')
plotting.plot_matrix(mean_correlations_control, vmax=1, vmin=-1, colorbar=True,
                     title='Correlation between 20 regions for controls')

# In[ ]:

# Center coordinates from the component decomposition, used as node positions.
coords = plotting.find_probabilistic_atlas_cut_coords(components_img)

# Plot each group's connectome keeping only the strongest 20% of edges.
plotting.plot_connectome(mean_correlations_abide, coords,
                         edge_threshold="80%",
                         title='Correlation between 20 regions for Abide')
plotting.plot_connectome(mean_correlations_control, coords,
                         edge_threshold="80%",
                         title='Correlation between 20 regions for controls')
plotting.show()

# In[ ]:

# Notebook-style cell: inspect the shape of the stacked correlation matrices.
correlation_matrices.shape

# In[ ]:
# ------------------------------------------------------------------- from nilearn.input_data import NiftiMapsMasker # create masker to extract functional data within atlas parcels masker = NiftiMapsMasker(maps_img=msdl['maps'], standardize=True, memory='nilearn_cache') # extract time series from all subjects and concatenate them time_series = [] for func, confounds in zip(data.func, data.confounds): time_series.append(masker.fit_transform(func, confounds=confounds)) # calculate correlation matrices across subjects and display correlation_matrices = connectome_measure.fit_transform(time_series) # Mean correlation matrix across 10 subjects can be grabbed like this, # using connectome measure object mean_correlation_matrix = connectome_measure.mean_ # grab center coordinates for probabilistic atlas coordinates = plotting.find_probabilistic_atlas_cut_coords( maps_img=msdl['maps']) # plot connectome with 80% edge strength in the connectivity plotting.plot_connectome(mean_correlation_matrix, coordinates, edge_threshold="80%", title='MSDL (probabilistic)') plotting.show()
mean_correlations = np.mean(correlations, axis=0).reshape(n_regions_extracted, n_regions_extracted) ############################################################################### # Plot resulting connectomes # ---------------------------- title = 'Correlation between %d regions' % n_regions_extracted # First plot the matrix display = plotting.plot_matrix(mean_correlations, vmax=1, vmin=-1, colorbar=True, title=title) # Then find the center of the regions and plot a connectome regions_img = regions_extracted_img coords_connectome = plotting.find_probabilistic_atlas_cut_coords(regions_img) plotting.plot_connectome(mean_correlations, coords_connectome, edge_threshold='90%', title=title) ################################################################################ # Plot regions extracted for only one specific network # ---------------------------------------------------- # First, we plot a network of index=4 without region extraction (left plot) from nilearn import image img = image.index_img(components_img, 4) coords = plotting.find_xyz_cut_coords(img) display = plotting.plot_stat_map(img, cut_coords=coords, colorbar=False, title='Showing one specific network')
# call transform from RegionExtractor object to extract timeseries signals timeseries_each_subject = extractor.transform(filename, confounds=confound) # call fit_transform from ConnectivityMeasure object correlation = connectome_measure.fit_transform([timeseries_each_subject]) # saving each subject correlation to correlations correlations.append(correlation) # Mean of all correlations mean_correlations = np.mean(correlations, axis=0).reshape(n_regions_extracted, n_regions_extracted) title = 'Correlation between %d regions' % n_regions_extracted # First plot the matrix display = plotting.plot_matrix(mean_correlations, vmax=1, vmin=-1, colorbar=True, title=title) # Then find the center of the regions and plot a connectome regions_img = regions_extracted_img coords_connectome = plotting.find_probabilistic_atlas_cut_coords(regions_img) plotting.plot_connectome(mean_correlations, coords_connectome, edge_threshold='90%', title=title)
def AtlasParcellation(path, is_probabilistic=False):
    """Return node coordinates for the atlas image at *path*.

    Dispatches to nilearn's probabilistic-map helper when
    *is_probabilistic* is true, otherwise to the label-image helper.
    """
    if not is_probabilistic:
        return plotting.find_parcellation_cut_coords(labels_img=path)
    return plotting.find_probabilistic_atlas_cut_coords(maps_img=path)
def fetch_nodes_and_labels(atlas_select, uatlas_select, ref_txt, parc, in_file, use_AAL_naming, clustering=False):
    """Fetch and define atlas nodes (coordinates, labels, parcels).

    Resolves `atlas_select`/`uatlas_select` against nilearn's parcellation-,
    coordinate-, and probabilistic-atlas libraries, or a user-supplied
    parcellation file, then derives region coordinates, labels, and (if
    `parc` is True) per-region parcel images.

    Returns a tuple:
    (label_names, coords, atlas_name, networks_list, parcel_list, par_max,
     uatlas_select, dir_path)
    """
    from pynets import utils, nodemaker
    import pandas as pd
    import time
    from pathlib import Path
    import os.path as op
    base_path = utils.get_file()
    # Test if atlas_select is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas_select is None and atlas_select in nilearn_parc_atlases:
        [label_names, networks_list,
         uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                # NOTE(review): in-memory image is persisted to /tmp so later
                # steps can work from a file path — not unique per run.
                nib.save(uatlas_select, "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            [coords, _, par_max
             ] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
    elif uatlas_select is None and parc is False and atlas_select in nilearn_coords_atlases:
        print(
            'Fetching coords and labels from nilearn coordsinate-based atlas library...'
        )
        # Fetch nilearn atlas coords
        [coords, _, networks_list,
         label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)
        parcel_list = None
        par_max = None
    elif uatlas_select is None and parc is False and atlas_select in nilearn_prob_atlases:
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print(
            'Fetching coords and labels from nilearn probabilistic atlas library...'
        )
        # Fetch nilearn atlas coords
        [label_names, networks_list,
         uatlas_select] = nodemaker.nilearn_atlas_helper(atlas_select, parc)
        # Coordinates are the centers of mass of each probabilistic map.
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas_select)
        if uatlas_select:
            if not isinstance(uatlas_select, str):
                nib.save(uatlas_select, "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz'))
                uatlas_select = "%s%s%s" % ('/tmp/', atlas_select, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas_select, ' not found!'))
        par_max = None
    elif uatlas_select:
        if clustering is True:
            # Block (polling every 15 s) until the clustered atlas file exists.
            while True:
                if op.isfile(uatlas_select):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        # Atlas name is derived from the file name stem.
        atlas_select = uatlas_select.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas_select, par_max
             ] = nodemaker.get_names_and_coords_of_parcels(uatlas_select)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas_select)
            else:
                parcel_list = None
            # Describe user atlas coords
            print("%s%s%s%s" % ('\n', atlas_select, ' comes with {0} '.format(par_max), 'parcels\n'))
        except ValueError:
            print(
                '\n\nError: Either you have specified the name of a nilearn atlas that does not exist or you have not '
                'supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
            label_names = None
            networks_list = None
    else:
        networks_list = None
        label_names = None
        parcel_list = None
        par_max = None
        coords = None
    # Labels prep: fall back from ref_txt -> bundled labelcharts file ->
    # AAL naming -> generic integer labels.
    if atlas_select:
        if label_names:
            pass
        else:
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None,
                                      names=["Index", "Region"])
                label_names = dict_df['Region'].tolist()
            else:
                try:
                    ref_txt = "%s%s%s%s" % (str(
                        Path(base_path).parent), '/labelcharts/', atlas_select, '.txt')
                    if op.exists(ref_txt):
                        try:
                            dict_df = pd.read_csv(ref_txt, sep="\t", header=None,
                                                  names=["Index", "Region"])
                            label_names = dict_df['Region'].tolist()
                            #print(label_names)
                        # NOTE(review): bare except silently swallows all
                        # errors, including KeyboardInterrupt.
                        except:
                            print(
                                "WARNING: label names from label reference file failed to populate or are invalid. "
                                "Attempting AAL naming...")
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                # Generic 1..N integer labels (0 excluded).
                                label_names = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                label_names = nodemaker.AAL_naming(coords)
                                # print(label_names)
                            except:
                                print('AAL reference labeling failed!')
                                label_names = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            label_names = np.arange(len(coords) + 1)[
                                np.arange(len(coords) + 1) != 0].tolist()
                except:
                    print(
                        "Label reference file not found. Attempting AAL naming..."
                    )
                    if use_AAL_naming is True:
                        try:
                            label_names = nodemaker.AAL_naming(coords)
                            #print(label_names)
                        except:
                            print('AAL reference labeling failed!')
                            label_names = np.arange(len(coords) + 1)[
                                np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        label_names = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    else:
        print(
            'WARNING: No labels available since atlas name is not specified!')
    print("%s%s" % ('Labels:\n', label_names))
    atlas_name = atlas_select
    dir_path = utils.do_dir_path(atlas_select, in_file)
    return label_names, coords, atlas_name, networks_list, parcel_list, par_max, uatlas_select, dir_path
# Computing group-sparse precision matrices # ------------------------------------------ from nilearn.connectome import GroupSparseCovarianceCV gsc = GroupSparseCovarianceCV(verbose=2) gsc.fit(subject_time_series) from sklearn import covariance gl = covariance.GraphLassoCV(verbose=2) gl.fit(np.concatenate(subject_time_series)) ############################################################################## # Displaying results # ------------------- atlas_img = msdl_atlas_dataset.maps atlas_region_coords = plotting.find_probabilistic_atlas_cut_coords(atlas_img) labels = msdl_atlas_dataset.labels plotting.plot_connectome(gl.covariance_, atlas_region_coords, edge_threshold='90%', title="Covariance", display_mode="lzr") plotting.plot_connectome(-gl.precision_, atlas_region_coords, edge_threshold='90%', title="Sparse inverse covariance (GraphLasso)", display_mode="lzr", edge_vmax=.5, edge_vmin=-.5) plot_matrices(gl.covariance_, gl.precision_, "GraphLasso", labels) title = "GroupSparseCovariance" plotting.plot_connectome(-gsc.precisions_[..., 0],
def fetch_nodes_and_labels(atlas, uatlas, ref_txt, parc, in_file, use_AAL_naming, clustering=False):
    """
    General API for fetching, identifying, and defining atlas nodes based on coordinates and/or labels.

    Parameters
    ----------
    atlas : str
        Name of a Nilearn-hosted coordinate or parcellation/label-based atlas supported for fetching.
        See Nilearn's datasets.atlas module for more detailed reference.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    ref_txt : str
        Path to an atlas reference .txt file that maps labels to intensities corresponding to uatlas.
    parc : bool
        Indicates whether to use parcels instead of coordinates as ROI nodes.
    in_file : str
        File path to Nifti1Image object whose affine will provide sampling reference for fetching.
    use_AAL_naming : bool
        Indicates whether to perform Automated-Anatomical Labeling of each coordinate from a list of
        voxel coordinates.
    clustering : bool
        Indicates whether clustering was performed. Default is False.

    Returns
    -------
    labels : list
        List of string labels corresponding to ROI nodes.
    coords : list
        List of (x, y, z) tuples in mm-space corresponding to a coordinate atlas used or which represent the
        center-of-mass of each parcellation node.
    atlas_name : str
        Name of atlas parcellation (can differ slightly from fetch API string).
    networks_list : list
        List of RSN's and their associated coordinates, if predefined uniquely for a given atlas.
    parcel_list : list
        List of 3D boolean numpy arrays or binarized Nifti1Images corresponding to ROI masks.
    par_max : int
        The maximum label intensity in the parcellation image.
    uatlas : str
        File path to atlas parcellation Nifti1Image in MNI template space.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    """
    from pynets import utils, nodemaker
    import pandas as pd
    import time
    from pathlib import Path
    import os.path as op
    base_path = utils.get_file()
    # Test if atlas is a nilearn atlas. If so, fetch coords, labels, and/or networks.
    nilearn_parc_atlases = [
        'atlas_harvard_oxford', 'atlas_aal', 'atlas_destrieux_2009',
        'atlas_talairach_gyrus', 'atlas_talairach_ba', 'atlas_talairach_lobe'
    ]
    nilearn_coords_atlases = ['coords_power_2011', 'coords_dosenbach_2010']
    nilearn_prob_atlases = ['atlas_msdl', 'atlas_pauli_2017']
    if uatlas is None and atlas in nilearn_parc_atlases:
        [labels, networks_list,
         uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        if uatlas:
            if not isinstance(uatlas, str):
                # NOTE(review): in-memory image is persisted to /tmp so later
                # steps can work from a file path — not unique per run.
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            [coords, _,
             par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
    elif uatlas is None and parc is False and atlas in nilearn_coords_atlases:
        print(
            'Fetching coords and labels from nilearn coordinate-based atlas library...'
        )
        # Fetch nilearn atlas coords
        [coords, _, networks_list,
         labels] = nodemaker.fetch_nilearn_atlas_coords(atlas)
        parcel_list = None
        par_max = None
    elif uatlas is None and parc is False and atlas in nilearn_prob_atlases:
        from nilearn.plotting import find_probabilistic_atlas_cut_coords
        print(
            'Fetching coords and labels from nilearn probabilistic atlas library...'
        )
        # Fetch nilearn atlas coords
        [labels, networks_list,
         uatlas] = nodemaker.nilearn_atlas_helper(atlas, parc)
        # Coordinates are the centers of mass of each probabilistic map.
        coords = find_probabilistic_atlas_cut_coords(maps_img=uatlas)
        if uatlas:
            if not isinstance(uatlas, str):
                nib.save(uatlas, "%s%s%s" % ('/tmp/', atlas, '.nii.gz'))
                uatlas = "%s%s%s" % ('/tmp/', atlas, '.nii.gz')
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
        else:
            raise ValueError(
                "%s%s%s" % ('\nERROR: Atlas file for ', atlas, ' not found!'))
        par_max = None
    elif uatlas:
        if clustering is True:
            # Block (polling every 15 s) until the clustered atlas file exists.
            while True:
                if op.isfile(uatlas):
                    break
                else:
                    print('Waiting for atlas file...')
                    time.sleep(15)
        # Atlas name is derived from the file name stem.
        atlas = uatlas.split('/')[-1].split('.')[0]
        try:
            # Fetch user-specified atlas coords
            [coords, atlas,
             par_max] = nodemaker.get_names_and_coords_of_parcels(uatlas)
            if parc is True:
                parcel_list = nodemaker.gen_img_list(uatlas)
            else:
                parcel_list = None
            # Describe user atlas coords
            print(
                "%s%s%s%s" % ('\n', atlas, ' comes with {0} '.format(par_max),
                              'parcels\n'))
        except ValueError:
            print(
                '\n\nError: Either you have specified the name of a nilearn atlas that does not exist or '
                'you have not supplied a 3d atlas parcellation image!\n\n')
            parcel_list = None
            par_max = None
            coords = None
            labels = None
            networks_list = None
    else:
        networks_list = None
        labels = None
        parcel_list = None
        par_max = None
        coords = None
    # Labels prep: fall back from ref_txt -> bundled labelcharts file ->
    # AAL naming -> generic integer labels.
    if atlas:
        if labels:
            pass
        else:
            if ref_txt is not None and op.exists(ref_txt):
                dict_df = pd.read_csv(ref_txt, sep=" ", header=None,
                                      names=["Index", "Region"])
                labels = dict_df['Region'].tolist()
            else:
                try:
                    ref_txt = "%s%s%s%s" % (str(Path(base_path).parent),
                                            '/labelcharts/', atlas, '.txt')
                    if op.exists(ref_txt):
                        try:
                            dict_df = pd.read_csv(ref_txt, sep="\t", header=None,
                                                  names=["Index", "Region"])
                            labels = dict_df['Region'].tolist()
                        # NOTE(review): bare except silently swallows all
                        # errors, including KeyboardInterrupt.
                        except:
                            print(
                                "WARNING: label names from label reference file failed to populate or are invalid. "
                                "Attempting AAL naming...")
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                # Generic 1..N integer labels (0 excluded).
                                labels = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        if use_AAL_naming is True:
                            try:
                                labels = nodemaker.AAL_naming(coords)
                            except:
                                print('AAL reference labeling failed!')
                                labels = np.arange(len(coords) + 1)[
                                    np.arange(len(coords) + 1) != 0].tolist()
                        else:
                            print('Using generic numbering labels...')
                            labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                except:
                    print(
                        "Label reference file not found. Attempting AAL naming..."
                    )
                    if use_AAL_naming is True:
                        try:
                            labels = nodemaker.AAL_naming(coords)
                        except:
                            print('AAL reference labeling failed!')
                            labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print('Using generic numbering labels...')
                        labels = np.arange(len(coords) + 1)[np.arange(len(coords) + 1) != 0].tolist()
    else:
        print(
            'WARNING: No labels available since atlas name is not specified!')
    print("%s%s" % ('Labels:\n', labels))
    atlas_name = atlas
    dir_path = utils.do_dir_path(atlas, in_file)
    if len(coords) != len(labels):
        labels = len(coords) * [np.nan]
    # NOTE(review): after the pad above, len(labels) == len(coords) always,
    # so this raise is unreachable dead code.
    if len(coords) != len(labels):
        raise ValueError(
            'ERROR: length of coordinates is not equal to length of label names'
        )
    return labels, coords, atlas_name, networks_list, parcel_list, par_max, uatlas, dir_path
def _run_interface(self, runtime):
    """Resolve an atlas (nilearn-hosted, local template, or user file) into
    node coordinates, labels, parcel images, and output paths, storing all
    results in ``self._results``.
    """
    from pynets.core import utils, nodemaker
    from nipype.utils.filemanip import fname_presuffix, copyfile
    from nilearn.image import concat_imgs
    import pandas as pd
    import time
    import textwrap
    from pathlib import Path
    import os.path as op
    import glob

    base_path = utils.get_file()
    # Test if atlas is a nilearn atlas. If so, fetch coords, labels, and/or
    # networks.
    nilearn_parc_atlases = [
        "atlas_harvard_oxford",
        "atlas_aal",
        "atlas_destrieux_2009",
        "atlas_talairach_gyrus",
        "atlas_talairach_ba",
        "atlas_talairach_lobe",
    ]
    nilearn_coords_atlases = ["coords_power_2011", "coords_dosenbach_2010"]
    nilearn_prob_atlases = ["atlas_msdl", "atlas_pauli_2017"]
    # Atlases shipped with the package under templates/atlases (3D only).
    local_atlases = [
        op.basename(i).split(".nii")[0]
        for i in glob.glob(f"{str(Path(base_path).parent.parent)}"
                           f"/templates/atlases/*.nii.gz")
        if "_4d" not in i
    ]

    if self.inputs.parcellation is None and self.inputs.atlas in \
            nilearn_parc_atlases:
        [labels, networks_list, parcellation
         ] = nodemaker.nilearn_atlas_helper(self.inputs.atlas,
                                            self.inputs.parc)
        if parcellation:
            if not isinstance(parcellation, str):
                # Persist the in-memory image so later steps have a path.
                nib.save(
                    parcellation, f"{runtime.cwd}"
                    f"{self.inputs.atlas}{'.nii.gz'}")
                parcellation = f"{runtime.cwd}" \
                    f"{self.inputs.atlas}{'.nii.gz'}"
            if self.inputs.clustering is False:
                [parcellation, labels] = \
                    nodemaker.enforce_hem_distinct_consecutive_labels(
                        parcellation, label_names=labels)
            [coords, atlas, par_max, label_intensities] = \
                nodemaker.get_names_and_coords_of_parcels(parcellation)
            if self.inputs.parc is True:
                parcels_4d_img = nodemaker.three_to_four_parcellation(
                    parcellation)
            else:
                parcels_4d_img = None
        else:
            raise FileNotFoundError(
                f"\nAtlas file for {self.inputs.atlas} not found!")
        atlas = self.inputs.atlas
    elif (self.inputs.parcellation is None and self.inputs.parc is False
          and self.inputs.atlas in nilearn_coords_atlases):
        print("Fetching coords and labels from nilearn coordinate-based"
              " atlas library...")
        # Fetch nilearn atlas coords
        [coords, _, networks_list, labels] = \
            nodemaker.fetch_nilearn_atlas_coords(self.inputs.atlas)
        # NOTE(review): this branch sets `parcels_4d` while every other
        # branch sets `parcels_4d_img` (which the epilogue saves) —
        # looks like a naming bug; confirm against upstream pynets.
        parcels_4d = None
        par_max = None
        atlas = self.inputs.atlas
        parcellation = None
        label_intensities = None
    elif (self.inputs.parcellation is None and self.inputs.parc is False
          and self.inputs.atlas in nilearn_prob_atlases):
        # Headless backend: find_probabilistic_atlas_cut_coords pulls in
        # matplotlib via nilearn.plotting.
        import matplotlib
        matplotlib.use("agg")
        from nilearn.plotting import find_probabilistic_atlas_cut_coords

        print("Fetching coords and labels from nilearn probabilistic atlas"
              " library...")
        # Fetch nilearn atlas coords
        [labels, networks_list, parcellation
         ] = nodemaker.nilearn_atlas_helper(self.inputs.atlas,
                                            self.inputs.parc)
        # Coordinates are the centers of mass of each probabilistic map.
        coords = find_probabilistic_atlas_cut_coords(maps_img=parcellation)
        if parcellation:
            if not isinstance(parcellation, str):
                nib.save(
                    parcellation, f"{runtime.cwd}"
                    f"{self.inputs.atlas}{'.nii.gz'}")
                parcellation = f"{runtime.cwd}" \
                    f"{self.inputs.atlas}{'.nii.gz'}"
            if self.inputs.clustering is False:
                [parcellation, labels] = \
                    nodemaker.enforce_hem_distinct_consecutive_labels(
                        parcellation, label_names=labels)
            if self.inputs.parc is True:
                parcels_4d_img = nodemaker.three_to_four_parcellation(
                    parcellation)
            else:
                parcels_4d_img = None
        else:
            raise FileNotFoundError(
                f"\nAtlas file for {self.inputs.atlas} not found!")
        par_max = None
        atlas = self.inputs.atlas
        label_intensities = None
    elif self.inputs.parcellation is None and self.inputs.atlas in \
            local_atlases:
        # Copy the bundled template atlas into the working directory.
        parcellation_pre = (
            f"{str(Path(base_path).parent.parent)}/templates/atlases/"
            f"{self.inputs.atlas}.nii.gz")
        parcellation = fname_presuffix(parcellation_pre,
                                       newpath=runtime.cwd)
        copyfile(parcellation_pre, parcellation, copy=True,
                 use_hardlink=False)
        try:
            par_img = nib.load(parcellation)
        except indexed_gzip.ZranError as e:
            # A git-lfs pointer file fails to gunzip — hint at the cause.
            print(e,
                  "\nCannot load subnetwork reference image. "
                  "Do you have git-lfs installed?")
        try:
            if self.inputs.clustering is False:
                [parcellation, _] = \
                    nodemaker.enforce_hem_distinct_consecutive_labels(
                        parcellation)
            # Fetch user-specified atlas coords
            [coords, _, par_max, label_intensities] = \
                nodemaker.get_names_and_coords_of_parcels(parcellation)
            if self.inputs.parc is True:
                parcels_4d_img = nodemaker.three_to_four_parcellation(
                    parcellation)
            else:
                parcels_4d_img = None
            # Describe user atlas coords
            print(f"\n{self.inputs.atlas} comes with {par_max} parcels\n")
        except ValueError as e:
            print(e,
                  "Either you have specified the name of an atlas that "
                  "does not exist in the nilearn or local repository or "
                  "you have not supplied a 3d atlas parcellation image!")
            labels = None
            networks_list = None
        atlas = self.inputs.atlas
    elif self.inputs.parcellation:
        if self.inputs.clustering is True:
            # Block (polling every 5 s) until the clustered file exists.
            while True:
                if op.isfile(self.inputs.parcellation):
                    break
                else:
                    print("Waiting for atlas file...")
                    time.sleep(5)
        try:
            parcellation_tmp_path = fname_presuffix(
                self.inputs.parcellation, newpath=runtime.cwd)
            copyfile(self.inputs.parcellation, parcellation_tmp_path,
                     copy=True, use_hardlink=False)
            # Fetch user-specified atlas coords
            if self.inputs.clustering is False:
                [parcellation, _] = \
                    nodemaker.enforce_hem_distinct_consecutive_labels(
                        parcellation_tmp_path)
            else:
                parcellation = parcellation_tmp_path
            [coords, atlas, par_max, label_intensities] = \
                nodemaker.get_names_and_coords_of_parcels(parcellation)
            if self.inputs.parc is True:
                parcels_4d_img = nodemaker.three_to_four_parcellation(
                    parcellation)
            else:
                parcels_4d_img = None
            atlas = utils.prune_suffices(atlas)
            # Describe user atlas coords
            print(f"\n{atlas} comes with {par_max} parcels\n")
        except ValueError as e:
            print(e,
                  "Either you have specified the name of an atlas that "
                  "does not exist in the nilearn or local repository or "
                  "you have not supplied a 3d atlas parcellation image!")
            labels = None
            networks_list = None
    else:
        raise ValueError(
            "Either you have specified the name of an atlas that does"
            " not exist in the nilearn or local repository or you have"
            " not supplied a 3d atlas parcellation image!")

    # Labels prep: fall back from ref_txt -> bundled labels file ->
    # consensus parcel naming -> generic integer labels.
    if atlas and not labels:
        if (self.inputs.ref_txt is not None) and (op.exists(
                self.inputs.ref_txt)):
            labels = pd.read_csv(self.inputs.ref_txt,
                                 sep=" ",
                                 header=None,
                                 names=["Index",
                                        "Region"])["Region"].tolist()
        else:
            if atlas in local_atlases:
                ref_txt = (
                    f"{str(Path(base_path).parent.parent)}/templates/"
                    f"labels/"
                    f"{atlas}.txt")
            else:
                ref_txt = self.inputs.ref_txt
            if ref_txt is not None:
                try:
                    labels = pd.read_csv(ref_txt,
                                         sep=" ",
                                         header=None,
                                         names=["Index", "Region"
                                                ])["Region"].tolist()
                except BaseException:
                    if self.inputs.use_parcel_naming is True:
                        try:
                            labels = nodemaker.parcel_naming(
                                coords, self.inputs.vox_size)
                        except BaseException:
                            print("AAL reference labeling failed!")
                            # Generic 1..N integer labels (0 excluded).
                            labels = np.arange(len(coords) + 1)[
                                np.arange(len(coords) + 1) != 0].tolist()
                    else:
                        print("Using generic index labels...")
                        labels = np.arange(len(coords) + 1)[
                            np.arange(len(coords) + 1) != 0].tolist()
            else:
                if self.inputs.use_parcel_naming is True:
                    try:
                        labels = nodemaker.parcel_naming(
                            coords, self.inputs.vox_size)
                    except BaseException:
                        print("AAL reference labeling failed!")
                        labels = np.arange(len(coords) + 1)[
                            np.arange(len(coords) + 1) != 0].tolist()
                else:
                    print("Using generic index labels...")
                    labels = np.arange(len(coords) + 1)[
                        np.arange(len(coords) + 1) != 0].tolist()

    dir_path = utils.do_dir_path(atlas, self.inputs.outdir)

    # Reconcile label/coordinate counts before asserting equality below.
    if len(coords) != len(labels):
        labels = [
            i for i in labels if (i != 'Unknown' and i != 'Background')
        ]
        if len(coords) != len(labels):
            print("Length of coordinates is not equal to length of "
                  "label names...")
            if self.inputs.use_parcel_naming is True:
                try:
                    print("Attempting consensus parcel naming instead...")
                    labels = nodemaker.parcel_naming(
                        coords, self.inputs.vox_size)
                except BaseException:
                    print("Reverting to integer labels instead...")
                    labels = np.arange(len(coords) + 1)[
                        np.arange(len(coords) + 1) != 0].tolist()
            else:
                print("Reverting to integer labels instead...")
                labels = np.arange(len(coords) + 1)[
                    np.arange(len(coords) + 1) != 0].tolist()

    print(f"Coordinates:\n{coords}")
    print(f"Labels:\n"
          f"{textwrap.shorten(str(labels), width=1000, placeholder='...')}"
          f"")

    assert len(coords) == len(labels)

    if label_intensities is not None:
        self._results["labels"] = list(zip(labels, label_intensities))
    else:
        self._results["labels"] = labels
    self._results["coords"] = coords
    self._results["atlas"] = atlas
    self._results["networks_list"] = networks_list
    # TODO: Optimize this with 4d array concatenation and .npyz
    out_path = f"{runtime.cwd}/parcels_4d.nii.gz"
    nib.save(parcels_4d_img, out_path)
    self._results["parcels_4d"] = out_path
    self._results["par_max"] = par_max
    self._results["parcellation"] = parcellation
    self._results["dir_path"] = dir_path

    return runtime