def get_structures(self, file_name=None):
    """
    Read the list of adult mouse structures and return a Pandas DataFrame.

    Parameters
    ----------
    file_name: string
        File name to save/read the structures table.  If file_name is None,
        the file_name will be pulled out of the manifest.  If caching
        is disabled, no file will be saved. Default is None.
    """
    file_name = self.get_cache_path(file_name, self.STRUCTURES_KEY)

    if os.path.exists(file_name):
        # BUG FIX: pd.DataFrame.from_csv was deprecated in pandas 0.21 and
        # removed in 1.0; read_csv(index_col=0) is the documented replacement.
        structures = pd.read_csv(file_name, index_col=0)
    else:
        # 1 is the id of the adult mouse structure graph
        structures = OntologiesApi(base_uri=self.api.api_url).get_structures(1)
        structures = pd.DataFrame(structures)

        if self.cache:
            Manifest.safe_make_parent_dirs(file_name)
            structures.to_csv(file_name)

    # index by structure id but keep the column available too
    structures.set_index(["id"], inplace=True, drop=False)
    return structures
class AllenBrainReference:
    """In-memory index of an Allen Brain Atlas structure graph.

    Downloads every structure of the selected graph via ``OntologiesApi``,
    wraps each record in an ``AllenBrainStructure`` keyed by its id, and
    links the structures together (``connect``).
    """

    # structure-graph ids in the Allen API: 1 = adult mouse, 17 = developing mouse
    _GRAPH_IDS = {"adult": 1, "development": 17}

    def __init__(self, graph: str = "adult") -> None:
        """
        :param graph: which structure graph to load, ``"adult"`` or
            ``"development"``.
        :raises ValueError: for any other ``graph`` value.
        """
        self._onto = OntologiesApi()
        if graph not in self._GRAPH_IDS:
            # BUG FIX: an unknown graph used to leave _struct_dicts_list unset,
            # producing a confusing AttributeError further down; fail fast instead.
            raise ValueError(
                "Unknown graph {!r}; expected one of {}".format(
                    graph, sorted(self._GRAPH_IDS)))
        self._struct_dicts_list = self._onto.get_structures(
            structure_graph_ids=self._GRAPH_IDS[graph], num_rows='all')
        self._structures = {
            record["id"]: AllenBrainStructure(record, self)
            for record in self._struct_dicts_list
        }
        self._link()

    def __getitem__(self, key: int) -> Any:
        """Return the structure with Allen id *key*."""
        return self._structures[key]

    def __iter__(self) -> Any:
        """Iterate over the ``AllenBrainStructure`` objects (not the ids)."""
        yield from self._structures.values()

    def _link(self) -> None:
        # let every structure resolve references to its relatives
        for structure in self._structures.values():
            structure.connect()
def allen_volume_from_structures(structures=selection_dorsal_cortex,
                                 resolution=10,
                                 version='annotation/ccf_2017'):
    '''
    Gets specific regions from an annotation volume from the allen atlas.

    Parameters
    ----------
    structures : list of str
        Acronyms of the structures to extract masks for.
    resolution : int
        Voxel size of the annotation volume, in microns.
    version : str
        CCF annotation version key to download.

    Returns
    -------
    mask_volume : np.ndarray (int16)
        Volume where voxels of the i-th requested structure are labelled i+1.
    areas : list of dict
        Structure records, each augmented with a 'mask_volume_id' field.
    '''
    from allensdk.api.queries.ontologies_api import OntologiesApi
    from allensdk.core.structure_tree import StructureTree
    from allensdk.core.reference_space import ReferenceSpace

    # the annotation download writes a file, so we will need somewhere to put it
    # NOTE(review): annotation_dir is taken from module scope — confirm it is
    # defined and created before this function runs.
    # BUG FIX: the download used a hard-coded resolution=10, silently ignoring
    # the `resolution` parameter while the ReferenceSpace below honoured it.
    annotation, meta = allen_get_raw_annotation(annotation_dir,
                                                version=version,
                                                resolution=resolution)
    oapi = OntologiesApi()
    structure_graph = oapi.get_structures_with_sets(
        [1])  # 1 is the id of the adult mouse structure graph
    # This removes some unused fields returned by the query
    structure_graph = StructureTree.clean_structures(structure_graph)
    tree = StructureTree(structure_graph)
    rsp = ReferenceSpace(tree, annotation, [resolution] * 3)

    areas = rsp.structure_tree.get_structures_by_acronym(structures)
    ids = [st['id'] for st in areas]
    mask_volume = np.zeros_like(annotation, dtype='int16')
    # label each structure's voxels with a small sequential id (i + 1)
    for i, sid in tqdm(enumerate(ids)):
        masks = rsp.make_structure_mask([sid])
        mask_volume[masks == 1] = i + 1
        areas[i]['mask_volume_id'] = i + 1
    return mask_volume, areas
def oapi():
    """Fixture: OntologiesApi with its query methods stubbed out."""
    api = OntologiesApi()
    api.get_structures = mock.MagicMock(
        return_value=[{'id': 1, 'structure_id_path': '1'}])
    api.get_structure_set_map = mock.MagicMock(return_value={1: [2, 3]})
    return api
def Ref():
    """Tabulate per-structure voxel counts from a local annotation volume.

    Reads 'annotation_2017_25.nrrd' and 'ontology_170731.csv' from the
    working directory and writes a dated summary CSV ('s2_YYMMDD.csv').
    """
    ontologies = OntologiesApi()
    graph = ontologies.get_structures_with_sets([1])
    graph = StructureTree.clean_structures(graph)
    struct_tree = StructureTree(graph)
    # "need to download annotation the first time"
    # annotation_dir = 'annotation'
    # Manifest.safe_mkdir(annotation_dir)
    # annotation_path = os.path.join(annotation_dir, 'annotation.nrrd')
    annotation_path = 'annotation_2017_25.nrrd'
    # mcapi = MouseConnectivityApi()
    # mcapi.download_annotation_volume('annotation/ccf_2016', 25, annotation_path)
    annotation, meta = nrrd.read(annotation_path)
    ref_space = ReferenceSpace(struct_tree, annotation, [25, 25, 25])

    import pandas as pd
    ontology = pd.read_csv('ontology_170731.csv')
    id_col = ontology['id']
    acronym_col = ontology['acronym']
    name_col = ontology['name']
    # voxel count for every structure id listed in the ontology table
    voxel_counts = [ref_space.total_voxel_map[int(struct_id)]
                    for struct_id in ontology['id']]
    summary = pd.DataFrame(
        np.column_stack([id_col, acronym_col, name_col, voxel_counts]),
        columns=['ID', 'Acronym', 'Name', 'Total Voxel'])
    summary.to_csv('s2_{}.csv'.format(time.strftime('%y%m%d')))
def getFullStructureTree():
    """Return a StructureTree for the adult mouse structure graph (graph id 1)."""
    api = OntologiesApi()
    # 1 is the id of the adult mouse structure graph
    raw_graph = api.get_structures_with_sets([1])
    # structure_graph = oapi.get_structures(structure_graph_ids=1)  # (Alternative method)
    # drop unused fields returned by the query before building the tree
    cleaned_graph = StructureTree.clean_structures(raw_graph)
    return StructureTree(cleaned_graph)
def getMsTree():
    """Build and return the adult mouse brain StructureTree."""
    ontologies = OntologiesApi()
    # The 1 refers to the adult mouse brain atlas
    graph = StructureTree.clean_structures(
        ontologies.get_structures_with_sets([1]))  # clean_structures() removes unused fields
    # a class with methods for accessing and using ontologies data
    return StructureTree(graph)
def __init__(self, projection_metric="projection_energy", base_dir=None, **kwargs):
    """
    Set up file paths and Allen SDKs.

    :param base_dir: path to directory to use for saving data (default value None)
    :param projection_metric: str, metric to quantify the strength of projections
        from the Allen Connectome. (default value 'projection_energy')
    :param kwargs: can be used to pass path to individual data folders. See
        brainrender/Utils/paths_manager.py

    NOTE(review): assumes `self.mouse_connectivity_cache`, `self.annotated_volume`
    and `self.resolution` are provided by the `Paths` base class — confirm.
    """
    Paths.__init__(self, base_dir=base_dir, **kwargs)
    self.projection_metric = projection_metric

    # get mouse connectivity cache and structure tree
    self.mcc = MouseConnectivityCache(manifest_file=os.path.join(
        self.mouse_connectivity_cache, "manifest.json"))
    self.structure_tree = self.mcc.get_structure_tree()

    # get ontologies API and brain structures sets
    self.oapi = OntologiesApi()
    self.get_structures_sets()

    # get reference space
    self.space = ReferenceSpaceApi()
    self.spacecache = ReferenceSpaceCache(
        manifest=os.path.join(
            self.annotated_volume, "manifest.json"
        ),  # downloaded files are stored relative to here
        resolution=self.resolution,
        reference_space_key=
        "annotation/ccf_2017"  # use the latest version of the CCF
    )
    # NOTE(review): self.annotated_volume is used above as a directory path
    # and is overwritten here with the annotation array itself — confirm this
    # re-purposing is intentional.
    self.annotated_volume, _ = self.spacecache.get_annotation_volume()

    # mouse connectivity API [used for tractography]
    self.mca = MouseConnectivityApi()

    # Get tree search api
    self.tree_search = TreeSearchApi()

    # Get some metadata about experiments
    self.all_experiments = self.mcc.get_experiments(dataframe=True)
    self.strains = sorted(
        [x for x in set(self.all_experiments.strain) if x is not None])
    self.transgenic_lines = sorted(
        set([
            x for x in set(self.all_experiments.transgenic_line)
            if x is not None
        ]))
def __init__(self, graph: str = "adult") -> None:
    """Load an Allen structure graph and index its structures by id.

    :param graph: ``"adult"`` (graph id 1) or ``"development"`` (graph id 17).
    :raises ValueError: for any other ``graph`` value.
    """
    self._onto = OntologiesApi()
    # structure-graph ids in the Allen API: 1 = adult mouse, 17 = developing mouse
    if graph == "adult":
        graph_id = 1
    elif graph == "development":
        graph_id = 17
    else:
        # BUG FIX: an unknown graph used to fall through silently, leaving
        # _struct_dicts_list unset and raising AttributeError below instead.
        raise ValueError(
            "Unknown graph {!r}; expected 'adult' or 'development'".format(graph))
    self._struct_dicts_list = self._onto.get_structures(
        structure_graph_ids=graph_id, num_rows='all')
    self._structures = {
        i["id"]: AllenBrainStructure(i, self)
        for i in self._struct_dicts_list
    }
    self._link()
def __init__(self):
    """Instantiate the Allen SDK caches and query APIs used by this object.

    NOTE(review): assumes `self.mouse_connectivity_cache`,
    `self.annotated_volume_fld` and `self.resolution` (indexable, e.g. a
    sequence) are set by a base class before this runs — confirm.
    """
    # get mouse connectivity cache and structure tree
    self.mcc = MouseConnectivityCache(manifest_file=os.path.join(
        self.mouse_connectivity_cache, "manifest.json"))
    self.structure_tree = self.mcc.get_structure_tree()

    # get ontologies API and brain structures sets
    self.oapi = OntologiesApi()

    # get reference space
    self.space = ReferenceSpaceApi()
    self.spacecache = ReferenceSpaceCache(
        manifest=os.path.join(
            self.annotated_volume_fld, "manifest.json"
        ),  # downloaded files are stored relative to here
        resolution=int(self.resolution[0]),
        reference_space_key=
        "annotation/ccf_2017",  # use the latest version of the CCF
    )
    self.annotated_volume, _ = self.spacecache.get_annotation_volume()

    # mouse connectivity API [used for tractography]
    self.mca = MouseConnectivityApi()

    # Get tree search api
    self.tree_search = TreeSearchApi()
def get_structure_tree(self, file_name=None, structure_graph_id=1):
    """
    Read the list of adult mouse structures and return an StructureTree instance.

    Parameters
    ----------
    file_name: string
        File name to save/read the structures table.  If file_name is None,
        the file_name will be pulled out of the manifest.  If caching
        is disabled, no file will be saved. Default is None.
    structure_graph_id: int
        Build a tree using structure only from the identified structure graph.
    """
    file_name = self.get_cache_path(file_name, self.STRUCTURE_TREE_KEY)

    # 'lazy' strategy: fetch from the API only when `path` is missing,
    # otherwise load from the cached JSON file.
    # NOTE(review): clean_structures is applied both before caching (pre=)
    # and again when building the tree (post=) — presumably idempotent; confirm.
    return OntologiesApi(self.api.api_url).get_structures_with_sets(
        strategy='lazy',
        path=file_name,
        pre=StructureTree.clean_structures,
        post=lambda x: StructureTree(StructureTree.clean_structures(x)),
        structure_graph_ids=structure_graph_id,
        **Cache.cache_json())
def writeStructDFs(df, structName_list, filename, IDheader='id', exclude=None):
    """Write one TSV per requested structure, extracted from *df*.

    Each output file is named '<structName><filename>' and is produced by
    getStructDF() against the adult mouse brain structure tree.
    """
    api = OntologiesApi()
    # The 1 refers to the adult mouse brain atlas
    raw_graph = api.get_structures_with_sets([1])
    # clean_structures() removes unused fields
    tree = StructureTree(StructureTree.clean_structures(raw_graph))
    # a class with methods for accessing and using ontologies data
    for structName in structName_list:
        subset = getStructDF(df, structName, tree,
                             IDheader=IDheader, exclude=exclude)
        subset.to_csv(structName + filename, sep='\t')
def __init__(self, projection_metric="projection_energy", paths_file=None):
    """
    Set up file paths and Allen SDKs.

    :param projection_metric: str, metric to quantify the strength of
        projections from the Allen Connectome.
        (default: {'projection_energy'})
    :param paths_file: [str] -- [Path to a YAML file specifying paths to data
        folders, to replace default paths] (default: {None})

    NOTE(review): assumes `self.mouse_connectivity_cache` and
    `self.resolution` come from the `Paths` base class — confirm.
    """
    Paths.__init__(self, paths_file=paths_file)
    self.projection_metric = projection_metric

    # get mouse connectivity cache and structure tree
    self.mcc = MouseConnectivityCache(manifest_file=os.path.join(
        self.mouse_connectivity_cache, "manifest.json"))
    self.structure_tree = self.mcc.get_structure_tree()

    # get ontologies API and brain structures sets
    self.oapi = OntologiesApi()
    self.get_structures_sets()

    # get reference space
    self.space = ReferenceSpaceApi()
    self.spacecache = ReferenceSpaceCache(
        manifest=os.path.join(
            "Data/ABA", "manifest.json"
        ),  # downloaded files are stored relative to here
        resolution=self.resolution,
        reference_space_key=
        "annotation/ccf_2017"  # use the latest version of the CCF
    )
    self.annotated_volume, _ = self.spacecache.get_annotation_volume()

    # mouse connectivity API [used for tractography]
    self.mca = MouseConnectivityApi()

    # Get tree search api
    self.tree_search = TreeSearchApi()

    # Get some metadata about experiments
    self.all_experiments = self.mcc.get_experiments(dataframe=True)
    self.strains = sorted(
        [x for x in set(self.all_experiments.strain) if x is not None])
    self.transgenic_lines = sorted(
        set([
            x for x in set(self.all_experiments.transgenic_line)
            if x is not None
        ]))
def get_structures(self, file_name=None):
    """
    Read the list of adult mouse structures and return a Pandas DataFrame.

    Parameters
    ----------
    file_name: string
        File name to save/read the structures table.  If file_name is None,
        the file_name will be pulled out of the manifest.  If caching
        is disabled, no file will be saved. Default is None.
    """
    file_name = self.get_cache_path(file_name, self.STRUCTURES_KEY)

    if os.path.exists(file_name):
        # BUG FIX: pd.DataFrame.from_csv was deprecated in pandas 0.21 and
        # removed in 1.0; read_csv(index_col=0) is the documented replacement.
        structures = pd.read_csv(file_name, index_col=0)
    else:
        # 1 is the id of the adult mouse structure graph
        structures = OntologiesApi().get_structures(1)
        structures = pd.DataFrame(structures)

        if self.cache:
            Manifest.safe_mkdir(os.path.dirname(file_name))
            structures.to_csv(file_name)

    # index by structure id but keep the column available too
    structures.set_index(['id'], inplace=True, drop=False)
    return structures
def __init__(self, base_dir=None, **kwargs):
    """
    Set up file paths and Allen SDKs.

    :param base_dir: path to directory to use for saving data (default value None)
    :param kwargs: can be used to pass path to individual data folders. See
        brainrender/Utils/paths_manager.py

    NOTE(review): assumes `self.mouse_meshes`, `self.mouse_connectivity_cache`,
    `self.annotated_volume_fld`, `self.resolution` and `self.other_sets` are
    provided by `Atlas`/its bases or by `get_structures_sets()` — confirm.
    """
    Atlas.__init__(self, base_dir=base_dir, **kwargs)
    self.meshes_folder = self.mouse_meshes  # where the .obj mesh for each region is saved

    # get mouse connectivity cache and structure tree
    self.mcc = MouseConnectivityCache(manifest_file=os.path.join(
        self.mouse_connectivity_cache, "manifest.json"))
    self.structure_tree = self.mcc.get_structure_tree()

    # get ontologies API and brain structures sets
    self.oapi = OntologiesApi()
    self.get_structures_sets()

    # get reference space
    self.space = ReferenceSpaceApi()
    self.spacecache = ReferenceSpaceCache(
        manifest=os.path.join(self.annotated_volume_fld, "manifest.json"),  # downloaded files are stored relative to here
        resolution=self.resolution,
        reference_space_key="annotation/ccf_2017"  # use the latest version of the CCF
    )
    self.annotated_volume, _ = self.spacecache.get_annotation_volume()

    # mouse connectivity API [used for tractography]
    self.mca = MouseConnectivityApi()

    # Get tree search api
    self.tree_search = TreeSearchApi()

    # Store all regions metadata [If there's internet connection]
    if self.other_sets is not None:
        self.regions = self.other_sets[
            "Structures whose surfaces are represented by a precomputed mesh"].sort_values('acronym')
        self.region_acronyms = list(self.other_sets[
            "Structures whose surfaces are represented by a precomputed mesh"].sort_values(
                'acronym').acronym.values)
def __init__(self): SvgApi.__init__( self ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/svg_api.py ImageDownloadApi.__init__( self ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/image_download_api.py self.annsetsapi = AnnotatedSectionDataSetsApi( ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/annotated_section_data_sets_api.py self.oapi = OntologiesApi( ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/ontologies_api.py # Get metadata about atlases self.atlases = pd.DataFrame(self.oapi.get_atlases_table()) self.atlases_names = sorted(list(self.atlases['name'].values)) self.mouse_coronal_atlas_id = int(self.atlases.loc[ self.atlases['name'] == "Mouse, P56, Coronal"].id.values[0]) self.mouse_sagittal_atlas_id = int(self.atlases.loc[ self.atlases['name'] == "Mouse, P56, Sagittal"].id.values[0]) self.mouse_3D_atlas_id = int(self.atlases.loc[ self.atlases['name'] == "Mouse, Adult, 3D Coronal"].id.values[0]) # Get metadata about products if connected_to_internet(): self.products = pd.DataFrame( send_query( "http://api.brain-map.org/api/v2/data/query.json?criteria=model::Product" )) self.mouse_brain_reference_product_id = 12 self.mouse_brain_ish_data_product_id = 1 self.products_names = sorted(list(self.products["name"].values)) self.mouse_products_names = sorted( list(self.products.loc[self.products.species == "Mouse"] ["name"].values)) else: raise ConnectionError( "It seems that you are not connected to the internet, you won't be able to download stuff." )
def __init__(self): SvgApi.__init__( self ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/svg_api.py ImageDownloadApi.__init__( self ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/image_download_api.py self.annsetsapi = ( AnnotatedSectionDataSetsApi() ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/annotated_section_data_sets_api.py self.oapi = ( OntologiesApi() ) # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/ontologies_api.py # Get metadata about atlases self.atlases = pd.DataFrame(self.oapi.get_atlases_table()) self.atlases_names = sorted(list(self.atlases["name"].values)) self.mouse_coronal_atlas_id = int(self.atlases.loc[ self.atlases["name"] == self.mouse_coronal].id.values[0]) self.mouse_sagittal_atlas_id = int(self.atlases.loc[ self.atlases["name"] == self.mouse_sagittal].id.values[0]) self.mouse_3D_atlas_id = int(self.atlases.loc[ self.atlases["name"] == self.mouse3d].id.values[0]) # Get metadata about products self.products = pd.DataFrame( send_query( "http://api.brain-map.org/api/v2/data/query.json?criteria=model::Product" )) self.mouse_brain_reference_product_id = 12 self.mouse_brain_ish_data_product_id = 1 self.products_names = sorted(list(self.products["name"].values)) self.mouse_products_names = sorted( list(self.products.loc[self.products.species == "Mouse"] ["name"].values))
class ABA(Paths):
    """
    This class handles interaction with the Allen Brain Atlas datasets and APIs to get
    structure trees, experimental metadata and results, tractography data etc.
    """
    # useful vars for analysis
    excluded_regions = ["fiber tracts"]
    resolution = 25  # um; must match the CCF annotation volume below

    def __init__(self, projection_metric="projection_energy", base_dir=None, **kwargs):
        """
        Set up file paths and Allen SDKs.

        :param base_dir: path to directory to use for saving data (default value None)
        :param projection_metric: str, metric to quantify the strength of projections
            from the Allen Connectome. (default value 'projection_energy')
        :param kwargs: can be used to pass path to individual data folders. See
            brainrender/Utils/paths_manager.py
        """
        Paths.__init__(self, base_dir=base_dir, **kwargs)
        self.projection_metric = projection_metric

        # get mouse connectivity cache and structure tree
        self.mcc = MouseConnectivityCache(manifest_file=os.path.join(
            self.mouse_connectivity_cache, "manifest.json"))
        self.structure_tree = self.mcc.get_structure_tree()

        # get ontologies API and brain structures sets
        self.oapi = OntologiesApi()
        self.get_structures_sets()

        # get reference space
        self.space = ReferenceSpaceApi()
        # NOTE(review): self.annotated_volume is used here as a folder path from
        # Paths and is overwritten below with the annotation array — confirm.
        self.spacecache = ReferenceSpaceCache(
            manifest=os.path.join(self.annotated_volume, "manifest.json"),  # downloaded files are stored relative to here
            resolution=self.resolution,
            reference_space_key="annotation/ccf_2017"  # use the latest version of the CCF
        )
        self.annotated_volume, _ = self.spacecache.get_annotation_volume()

        # mouse connectivity API [used for tractography]
        self.mca = MouseConnectivityApi()

        # Get tree search api
        self.tree_search = TreeSearchApi()

        # Get some metadata about experiments
        self.all_experiments = self.mcc.get_experiments(dataframe=True)
        self.strains = sorted(
            [x for x in set(self.all_experiments.strain) if x is not None])
        self.transgenic_lines = sorted(
            set([x for x in set(self.all_experiments.transgenic_line)
                 if x is not None]))

    ####### GET EXPERIMENTS DATA
    def get_structures_sets(self):
        """ Get the Allen's structure sets. """
        summary_structures = self.structure_tree.get_structures_by_set_id(
            [167587189])  # main summary structures
        summary_structures = [
            s for s in summary_structures
            if s["acronym"] not in self.excluded_regions
        ]
        self.structures = pd.DataFrame(summary_structures)

        # Other structures sets
        try:
            all_sets = pd.DataFrame(self.oapi.get_structure_sets())
        except Exception:
            # best-effort: offline use is allowed, just without the extra sets
            print("Could not retrieve data, possibly because there is no internet connection.")
        else:
            sets = ["Summary structures of the pons",
                    "Summary structures of the thalamus",
                    "Summary structures of the hypothalamus",
                    "List of structures for ABA Fine Structure Search",
                    "Structures representing the major divisions of the mouse brain",
                    "Summary structures of the midbrain",
                    "Structures whose surfaces are represented by a precomputed mesh"]
            self.other_sets = {}
            for set_name in sets:
                set_id = all_sets.loc[all_sets.description == set_name].id.values[0]
                self.other_sets[set_name] = pd.DataFrame(
                    self.structure_tree.get_structures_by_set_id([set_id]))

            self.all_avaliable_meshes = sorted(
                self.other_sets["Structures whose surfaces are represented by a precomputed mesh"].acronym.values)

    def print_structures_list_to_text(self):
        """ Saves the name of every brain structure for which a 3d mesh (.obj file) is available in a text file. """
        s = self.other_sets["Structures whose surfaces are represented by a precomputed mesh"].sort_values('acronym')
        with open('all_regions.txt', 'w') as o:
            for acr, name in zip(s.acronym.values, s['name'].values):
                o.write("({}) -- {}\n".format(acr, name))

    def load_all_experiments(self, cre=False):
        """
        This function downloads all the experimental data from the MouseConnectivityCache
        and saves the unionized results as pickled pandas dataframes. The process is slow,
        but the ammount of disk space necessary to save the data is small, so it's worth
        downloading all the experiments at once to speed up subsequent analysis.

        :param cre: Bool - data from either wild time or cre mice lines (Default value = False)
        """
        # BUG FIX: the guard was inverted (`if not cre:`), which raised on the
        # default wild-type call path that the message says IS supported.
        if cre:
            raise NotImplementedError("Only works for wild type sorry")

        # Downloads all experiments from allen brain atlas and saves the results as an easy to read pkl file
        for acronym in self.structures.acronym.values:
            print("Fetching experiments for : {}".format(acronym))

            structure = self.structure_tree.get_structures_by_acronym([acronym])[0]
            experiments = self.mcc.get_experiments(
                cre=cre, injection_structure_ids=[structure['id']])

            print("     found {} experiments".format(len(experiments)))

            # BUG FIX: on failure structure_unionizes was unbound but still
            # used below; skip this structure instead of crashing.
            try:
                structure_unionizes = self.mcc.get_structure_unionizes(
                    [e['id'] for e in experiments],
                    is_injection=False,
                    structure_ids=self.structures.id.values,
                    include_descendants=False)
            except Exception:
                continue
            structure_unionizes.to_pickle(
                os.path.join(self.output_data, "{}.pkl".format(acronym)))

    def print_structures(self):
        """ Prints the name of every structure in the structure tree to the console. """
        acronyms, names = self.structures.acronym.values, self.structures['name'].values
        sort_idx = np.argsort(acronyms)
        acronyms, names = acronyms[sort_idx], names[sort_idx]
        [print("({}) - {}".format(a, n)) for a, n in zip(acronyms, names)]

    def experiments_source_search(self, SOI, *args, source=True, **kwargs):
        """
        Returns data about experiments whose injection was in the SOI, structure of interest.

        :param SOI: str, structure of interest. Acronym of structure to use as seed for teh search
        :param source: if True SOI is the injection structure, else the target (Default value = True)

        list of possible kwargs:
            injection_structures : list of integers or strings
                Integer Structure.id or String Structure.acronym.
            target_domain : list of integers or strings, optional
                Integer Structure.id or String Structure.acronym.
            injection_hemisphere : string, optional
                'right' or 'left', Defaults to both hemispheres.
            target_hemisphere : string, optional
                'right' or 'left', Defaults to both hemispheres.
            transgenic_lines : list of integers or strings, optional
                Integer TransgenicLine.id or String TransgenicLine.name.
                Specify ID 0 to exclude all TransgenicLines.
            injection_domain : list of integers or strings, optional
                Integer Structure.id or String Structure.acronym.
            primary_structure_only : boolean, optional
            product_ids : list of integers, optional
                Integer Product.id
            start_row : integer, optional
                For paging purposes. Defaults to 0.
            num_rows : integer, optional
                For paging purposes. Defaults to 2000.
        """
        transgenic_id = kwargs.pop('transgenic_id', 0)  # id = 0 means use only wild type
        primary_structure_only = kwargs.pop('primary_structure_only', True)

        if not isinstance(SOI, list):
            SOI = [SOI]

        if source:
            injection_structures = SOI
            target_domain = None
        else:
            injection_structures = None
            target_domain = SOI

        return pd.DataFrame(
            self.mca.experiment_source_search(
                injection_structures=injection_structures,
                target_domain=target_domain,
                transgenic_lines=transgenic_id,
                primary_structure_only=primary_structure_only))

    def experiments_target_search(self, *args, **kwargs):
        """Same as experiments_source_search but the SOI is the target structure."""
        return self.experiments_source_search(*args, source=False, **kwargs)

    def fetch_experiments_data(self, experiments_id, *args, average_experiments=False, **kwargs):
        """
        Get data and metadata for expeirments in the Allen Mouse Connectome project.

        :param experiments_id: int, list, np.ndarray with ID of experiments whose data need to be fetched
        :param average_experiments: (Default value = False)
        """
        if isinstance(experiments_id, np.ndarray):
            experiments_id = [int(x) for x in experiments_id]
        elif not isinstance(experiments_id, list):
            experiments_id = [experiments_id]
        if [x for x in experiments_id if not isinstance(x, int)]:
            raise ValueError("Invalid experiments_id argument: {}".format(experiments_id))

        default_structures_ids = self.structures.id.values

        is_injection = kwargs.pop('is_injection', False)  # Include only structures that are not injection
        structure_ids = kwargs.pop('structure_ids', default_structures_ids)  # Pass IDs of structures of interest
        hemisphere_ids = kwargs.pop('hemisphere_ids', None)  # 1 left, 2 right, 3 both

        if not average_experiments:
            return pd.DataFrame(
                self.mca.get_structure_unionizes(experiments_id,
                                                 is_injection=is_injection,
                                                 structure_ids=structure_ids,
                                                 hemisphere_ids=hemisphere_ids))
        else:
            raise NotImplementedError("Need to find a way to average across experiments")
            # NOTE(review): unreachable sketch of the averaging logic, kept for reference
            unionized = pd.DataFrame(
                self.mca.get_structure_unionizes(experiments_id,
                                                 is_injection=is_injection,
                                                 structure_ids=structure_ids,
                                                 hemisphere_ids=hemisphere_ids))
            for regionid in list(set(unionized.structure_id)):
                region_avg = unionized.loc[unionized.structure_id == regionid].mean(axis=1)

    ####### ANALYSIS ON EXPERIMENTAL DATA
    def analyze_efferents(self, ROI, projection_metric=None):
        """
        Loads the experiments on ROI and looks at average statistics of efferent projections.

        :param ROI: str, acronym of brain region of interest
        :param projection_metric: if None, the default projection metric is used, otherwise
            pass a string with metric to use (Default value = None)
        """
        if projection_metric is None:
            projection_metric = self.projection_metric

        experiment_data = pd.read_pickle(
            os.path.join(self.output_data, "{}.pkl".format(ROI)))
        experiment_data = experiment_data.loc[experiment_data.volume > self.volume_threshold]

        # Loop over all structures and get the injection density
        results = {"left": [], "right": [], "both": [], "id": [], "acronym": [], "name": []}
        for target in self.structures.id.values:
            target_acronym = self.structures.loc[self.structures.id == target].acronym.values[0]
            target_name = self.structures.loc[self.structures.id == target].name.values[0]

            exp_target = experiment_data.loc[experiment_data.structure_id == target]

            # NOTE(review): self.hemispheres looks like a namedtuple factory
            # (left, right, both) — confirm against its definition.
            exp_target_hemi = self.hemispheres(
                exp_target.loc[exp_target.hemisphere_id == 1],
                exp_target.loc[exp_target.hemisphere_id == 2],
                exp_target.loc[exp_target.hemisphere_id == 3])
            proj_energy = self.hemispheres(
                np.nanmean(exp_target_hemi.left[projection_metric].values),
                np.nanmean(exp_target_hemi.right[projection_metric].values),
                np.nanmean(exp_target_hemi.both[projection_metric].values))

            for hemi in self.hemispheres_names:
                results[hemi].append(proj_energy._asdict()[hemi])
            results["id"].append(target)
            results["acronym"].append(target_acronym)
            results["name"].append(target_name)

        results = pd.DataFrame.from_dict(results).sort_values("right", na_position="first")
        return results

    def analyze_afferents(self, ROI, projection_metric=None):
        """[Loads the experiments on ROI and looks at average statistics of afferent projections]

        :param ROI: str, acronym of region of itnerest
        :param projection_metric: if None, the default projection metric is used, otherwise
            pass a string with metric to use (Default value = None)
        """
        if projection_metric is None:
            projection_metric = self.projection_metric
        ROI_id = self.structure_tree.get_structures_by_acronym([ROI])[0]["id"]

        # Loop over all strctures and get projection towards SOI
        results = {"left": [], "right": [], "both": [], "id": [], "acronym": [], "name": []}
        for origin in self.structures.id.values:
            origin_acronym = self.structures.loc[self.structures.id == origin].acronym.values[0]
            origin_name = self.structures.loc[self.structures.id == origin].name.values[0]

            experiment_data = pd.read_pickle(
                os.path.join(self.output_data, "{}.pkl".format(origin_acronym)))
            experiment_data = experiment_data.loc[experiment_data.volume > self.volume_threshold]

            # BUG FIX: this used an undefined name `SOI_id`; the region of
            # interest id computed above is `ROI_id`.
            exp_target = experiment_data.loc[experiment_data.structure_id == ROI_id]
            exp_target_hemi = self.hemispheres(
                exp_target.loc[exp_target.hemisphere_id == 1],
                exp_target.loc[exp_target.hemisphere_id == 2],
                exp_target.loc[exp_target.hemisphere_id == 3])
            proj_energy = self.hemispheres(
                np.nanmean(exp_target_hemi.left[projection_metric].values),
                np.nanmean(exp_target_hemi.right[projection_metric].values),
                np.nanmean(exp_target_hemi.both[projection_metric].values))

            for hemi in self.hemispheres_names:
                results[hemi].append(proj_energy._asdict()[hemi])
            results["id"].append(origin)
            results["acronym"].append(origin_acronym)
            results["name"].append(origin_name)

        results = pd.DataFrame.from_dict(results).sort_values("right", na_position="first")
        return results

    ####### GET TRACTOGRAPHY AND SPATIAL DATA
    def get_projection_tracts_to_target(self, p0=None, **kwargs):
        """
        Gets tractography data for all experiments whose projections reach the brain region
        or location of iterest.

        :param p0: list of 3 floats with XYZ coordinates of point to be used as seed (Default value = None)
        """
        # check args
        if p0 is None:
            raise ValueError("Please pass coordinates")
        elif isinstance(p0, np.ndarray):
            p0 = list(p0)
        elif not isinstance(p0, (list, tuple)):
            raise ValueError("Invalid argument passed (p0): {}".format(p0))

        tract = self.mca.experiment_spatial_search(seed_point=p0, **kwargs)

        if isinstance(tract, str):
            # the API reports failures by returning an error message string
            raise ValueError('Something went wrong with query, query error message:\n{}'.format(tract))
        else:
            return tract

    ### OPERATIONS ON STRUCTURE TREES
    def get_structure_ancestors(self, regions, ancestors=True, descendants=False):
        """
        Get's the ancestors of the region(s) passed as arguments.

        :param regions: str, list of str with acronums of regions of interest
        :param ancestors: if True, returns the ancestors of the region (Default value = True)
        :param descendants: if True, returns the descendants of the region (Default value = False)
        """
        if not isinstance(regions, list):
            struct_id = self.structure_tree.get_structures_by_acronym([regions])[0]['id']
            return pd.DataFrame(
                self.tree_search.get_tree('Structure', struct_id,
                                          ancestors=ancestors,
                                          descendants=descendants))
        else:
            # BUG FIX: the accumulator was named `ancestors`, shadowing the
            # boolean parameter and passing a growing list to get_tree.
            trees = []
            for region in regions:
                struct_id = self.structure_tree.get_structures_by_acronym([region])[0]['id']
                trees.append(pd.DataFrame(
                    self.tree_search.get_tree('Structure', struct_id,
                                              ancestors=ancestors,
                                              descendants=descendants)))
            return trees

    def get_structure_descendants(self, regions):
        """Convenience wrapper: descendants instead of ancestors."""
        return self.get_structure_ancestors(regions, ancestors=False, descendants=True)

    def get_structure_from_coordinates(self, p0):
        """
        Given a point in the Allen Mouse Brain reference space, returns the brain
        region that the point is in.

        :param p0: list of floats with XYZ coordinates.
        """
        voxel = np.round(np.array(p0) / self.resolution).astype(int)
        try:
            structure_id = self.annotated_volume[voxel[0], voxel[1], voxel[2]]
        except IndexError:
            # point falls outside the annotation volume
            return None

        # Each voxel in the annotation volume is annotated as specifically as possible
        structure = self.structure_tree.get_structures_by_id([structure_id])[0]
        return structure
resolution=RES_UM, reference_space_key="annotation/ccf_2017" # use the latest version of the CCF ) # Download annotated_volume, _ = spacecache.get_annotation_volume() template_volume, _ = spacecache.get_template_volume() # Save tiff stacks: tifffile.imsave(str(save_dir / "reference.tiff"), template_volume) tifffile.imsave(str(save_dir / "annotated.tiff"), annotated_volume) # Download structures tree and meshes: ###################################### oapi = OntologiesApi() # ontologies struct_tree = spacecache.get_structure_tree() # structures tree # Find id of set of regions with mesh: select_set = "Structures whose surfaces are represented by a precomputed mesh" all_sets = pd.DataFrame(oapi.get_structure_sets()) mesh_set_id = all_sets[all_sets.description == select_set].id.values[0] structs_with_mesh = struct_tree.get_structures_by_set_id([mesh_set_id]) meshes_dir = (save_dir / "meshes") # directory to save meshes into space = ReferenceSpaceApi() for s in structs_with_mesh: name = s["id"] try:
class ImageDownload(SvgApi, ImageDownloadApi):
    """ Handles query to the Allen ImageDownloadApi and saves the data """

    # useful tutorial: https://allensdk.readthedocs.io/en/latest/_static/examples/nb/image_download.html
    def __init__(self):
        # Initialise both Allen API base classes this class mixes together.
        SvgApi.__init__(
            self
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/svg_api.py
        ImageDownloadApi.__init__(
            self
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/image_download_api.py
        self.annsetsapi = AnnotatedSectionDataSetsApi(
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/annotated_section_data_sets_api.py
        self.oapi = OntologiesApi(
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/ontologies_api.py

        # Get metadata about atlases
        self.atlases = pd.DataFrame(self.oapi.get_atlases_table())
        self.atlases_names = sorted(list(self.atlases['name'].values))

        # Cache the ids of the three mouse atlases looked up by their display name.
        self.mouse_coronal_atlas_id = int(self.atlases.loc[
            self.atlases['name'] == "Mouse, P56, Coronal"].id.values[0])
        self.mouse_sagittal_atlas_id = int(self.atlases.loc[
            self.atlases['name'] == "Mouse, P56, Sagittal"].id.values[0])
        self.mouse_3D_atlas_id = int(self.atlases.loc[
            self.atlases['name'] == "Mouse, Adult, 3D Coronal"].id.values[0])

        # Get metadata about products
        if connected_to_internet():
            self.products = pd.DataFrame(
                send_query(
                    "http://api.brain-map.org/api/v2/data/query.json?criteria=model::Product"
                ))
            self.mouse_brain_reference_product_id = 12
            self.mouse_brain_ish_data_product_id = 1
            self.products_names = sorted(list(self.products["name"].values))
            self.mouse_products_names = sorted(
                list(self.products.loc[self.products.species == "Mouse"]
                     ["name"].values))
        else:
            # product metadata can only come from the web API, so fail loudly offline
            raise ConnectionError(
                "It seems that you are not connected to the internet, you won't be able to download stuff."
            )

    # UTILS
    def get_atlas_by_name(self, atlas_name):
        """
        Get a brain atlas in the Allen's database given it's name

        :param atlas_name: str with atlas name
        """
        if not atlas_name in self.atlases_names:
            raise ValueError("Available atlases: {}".format(
                self.atlases_names))
        return self.atlases.loc[self.atlases['name'] ==
                                atlas_name].id.values[0]

    def get_products_by_species(self, species):
        """
        Get all 'products' in the Allen Database for a given species

        :param species: str
        """
        return self.products.loc[self.products.species == species]

    def get_experimentsid_by_productid(self, productid, **kwargs):
        """
        Get the experiment's ID that belong to the same project (product).

        :param productid: int with product ID number
        :param **kwargs:
        """
        # for more details: https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/image_download_api.py
        return pd.DataFrame(
            self.get_section_data_sets_by_product([productid], **kwargs))

    def get_experimentimages_by_expid(self, expid):
        """
        Get's images that belong to an experiment

        :param expid: int with experiment name.
        """
        # expid should be a section dataset id
        return pd.DataFrame(self.section_image_query(expid))

    def get_atlasimages_by_atlasid(self, atlasid):
        """
        Get the metadata of images that belong to an atlas.

        :param atlasid: int with atlas number
        """
        if not isinstance(atlasid, int):
            raise ValueError(
                "Atlas id should be an integer not: {}".format(atlasid))
        return pd.DataFrame(self.atlas_image_query(atlasid))

    def download_images_by_imagesid(self,
                                    savedir,
                                    imagesids,
                                    downsample=0,
                                    annotated=True,
                                    snames=None,
                                    atlas_svg=True):
        """
        Downloads and saves images given a list of images IDs.

        :param savedir: str, folder in which to save the image
        :param imagesids: list of int with images IDs
        :param downsample: downsample factor, to reduce the image size and resolution (Default value = 0)
        :param annotated: if True the images are overlayed with annotations (Default value = True)
        :param snames: if True the images are overlayed with the structures names (Default value = None)
        :param atlas_svg: if True fetches the images as SVG, otherwise as PNG (Default value = True)
        """
        if not os.path.isdir(savedir):
            os.mkdir(savedir)

        # downloads happen in savedir, so remember where we were to restore it after
        curdir = os.getcwd()
        os.chdir(savedir)
        for i, imgid in tqdm(enumerate(imagesids)):
            # choose the filename extension/suffix matching the requested format
            if not atlas_svg and not annotated:
                savename = str(imgid) + ".jpg"
            elif not atlas_svg and annotated:
                savename = str(imgid) + "_annotated.jpg"
            else:
                savename = str(imgid) + ".svg"

            if snames is not None:
                sname, ext = savename.split(".")
                savename = sname + "_sect{}_img{}.".format(snames[i],
                                                           i + 1) + ext

            # skip files that were already downloaded on a previous run
            if os.path.isfile(savename):
                continue

            if not atlas_svg and not annotated:
                self.download_section_image(imgid,
                                            file_path=savename,
                                            downsample=downsample)
            elif not atlas_svg and annotated:
                self.download_atlas_image(imgid,
                                          file_path=savename,
                                          annotation=True,
                                          downsample=downsample)
            else:
                self.download_svg(imgid, file_path=savename)

        file_names = os.listdir(savedir)
        print("Downloaded {} images".format(len(file_names)))
        os.chdir(curdir)

    def download_images_by_atlasid(self, savedir, atlasid, **kwargs):
        """
        Downloads all the images that belong to an altlas

        :param savedir: str, folder in which to save the images
        :param atlasid: int, ID of the atlas to use
        :param **kwargs: keyword arguments for self.download_images_by_imagesid
        """
        imgsids = self.get_atlasimages_by_atlasid(atlasid)['id']
        imgs_secs_n = self.get_atlasimages_by_atlasid(
            atlasid)['section_number']
        self.download_images_by_imagesid(savedir,
                                         imgsids,
                                         snames=imgs_secs_n,
                                         **kwargs)
def test_notebook(fn_temp_dir):
    """Smoke-test of the "Reference Space" example notebook: builds a
    StructureTree, downloads an annotation volume, constructs a
    ReferenceSpace and exercises mask generation and downsampling.
    NOTE(review): hits the Allen web API and writes files to disk."""

    # coding: utf-8

    # # Reference Space
    #
    # This notebook contains example code demonstrating the use of the StructureTree and ReferenceSpace classes. These classes provide methods for interacting with the 3d spaces to which Allen Institute data and atlases are registered.
    #
    # Unlike the AllenSDK cache classes, StructureTree and ReferenceSpace operate entirely in memory. We recommend using json files to store text and nrrd files to store volumetric images.
    #
    # The MouseConnectivityCache class has methods for downloading, storing, and constructing StructureTrees and ReferenceSpaces. Please see [here](https://alleninstitute.github.io/AllenSDK/_static/examples/nb/mouse_connectivity.html) for examples.

    # ## Constructing a StructureTree
    #
    # A StructureTree object is a wrapper around a structure graph - a list of dictionaries documenting brain structures and their containment relationships. To build a structure tree, you will first need to obtain a structure graph.
    #
    # For a list of atlases and corresponding structure graph ids, see [here](http://help.brain-map.org/display/api/Atlas+Drawings+and+Ontologies).

    # In[1]:

    from allensdk.api.queries.ontologies_api import OntologiesApi
    from allensdk.core.structure_tree import StructureTree

    oapi = OntologiesApi()
    structure_graph = oapi.get_structures_with_sets(
        [1])  # 1 is the id of the adult mouse structure graph

    # This removes some unused fields returned by the query
    structure_graph = StructureTree.clean_structures(structure_graph)

    tree = StructureTree(structure_graph)

    # In[2]:

    # now let's take a look at a structure
    tree.get_structures_by_name(['Dorsal auditory area'])

    # The fields are:
    # * acronym: a shortened name for the structure
    # * rgb_triplet: each structure is assigned a consistent color for visualizations
    # * graph_id: the structure graph to which this structure belongs
    # * graph_order: each structure is assigned a consistent position in the flattened graph
    # * id: a unique integer identifier
    # * name: the full name of the structure
    # * structure_id_path: traces a path from the root node of the tree to this structure
    # * structure_set_ids: the structure belongs to these predefined groups

    # ## Using a StructureTree

    # In[3]:

    # get a structure's parent
    tree.parent([1011])

    # In[4]:

    # get a dictionary mapping structure ids to names
    name_map = tree.get_name_map()
    name_map[247]

    # In[5]:

    # ask whether one structure is contained within another
    strida = 385
    stridb = 247

    is_desc = '' if tree.structure_descends_from(385, 247) else ' not'
    print('{0} is{1} in {2}'.format(name_map[strida], is_desc,
                                    name_map[stridb]))

    # In[6]:

    # build a custom map that looks up acronyms by ids
    # the syntax here is just a pair of node-wise functions.
    # The first one returns keys while the second one returns values
    acronym_map = tree.value_map(lambda x: x['id'], lambda y: y['acronym'])
    print(acronym_map[385])

    # ## Downloading an annotation volume
    #
    # This code snippet will download and store a nrrd file containing the Allen Common Coordinate Framework annotation. We have requested an annotation with 25-micron isometric spacing.
    # The orientation of this space is:
    # * Anterior -> Posterior
    # * Superior -> Inferior
    # * Left -> Right
    # This is the no-frills way to download an annotation volume. See the <a href='_static/examples/nb/mouse_connectivity.html#Manipulating-Grid-Data'>mouse connectivity</a> examples if you want to properly cache the downloaded data.

    # In[7]:

    import os
    import nrrd
    from allensdk.api.queries.mouse_connectivity_api import MouseConnectivityApi
    from allensdk.config.manifest import Manifest

    # the annotation download writes a file, so we will need somwhere to put it
    annotation_dir = 'annotation'
    Manifest.safe_mkdir(annotation_dir)

    annotation_path = os.path.join(annotation_dir, 'annotation.nrrd')

    mcapi = MouseConnectivityApi()
    mcapi.download_annotation_volume('annotation/ccf_2016', 25,
                                     annotation_path)

    annotation, meta = nrrd.read(annotation_path)

    # ## Constructing a ReferenceSpace

    # In[8]:

    from allensdk.core.reference_space import ReferenceSpace

    # build a reference space from a StructureTree and annotation volume, the third argument is
    # the resolution of the space in microns
    rsp = ReferenceSpace(tree, annotation, [25, 25, 25])

    # ## Using a ReferenceSpace

    # #### making structure masks
    #
    # The simplest use of a Reference space is to build binary indicator masks for structures or groups of structures.

    # In[9]:

    # A complete mask for one structure
    whole_cortex_mask = rsp.make_structure_mask([315])

    # view in coronal section

    # What if you want a mask for a whole collection of ontologically disparate structures?
    # Just pass more structure ids to make_structure_masks:

    # In[10]:

    # This gets all of the structures targeted by the Allen Brain Observatory project
    brain_observatory_structures = rsp.structure_tree.get_structures_by_set_id(
        [514166994])
    brain_observatory_ids = [st['id'] for st in brain_observatory_structures]

    brain_observatory_mask = rsp.make_structure_mask(brain_observatory_ids)

    # view in horizontal section

    # You can also make and store a number of structure_masks at once:

    # In[11]:

    import functools

    # Define a wrapper function that will control the mask generation.
    # This one checks for a nrrd file in the specified base directory
    # and builds/writes the mask only if one does not exist
    mask_writer = functools.partial(ReferenceSpace.check_and_write,
                                    annotation_dir)

    # many_structure_masks is a generator - nothing has actrually been run yet
    mask_generator = rsp.many_structure_masks([385, 1097], mask_writer)

    # consume the resulting iterator to make and write the masks
    for structure_id in mask_generator:
        print('made mask for structure {0}.'.format(structure_id))

    os.listdir(annotation_dir)

    # #### Removing unassigned structures
    # A structure graph may contain structures that are not used in a particular reference space. Having these around can complicate use of the reference space, so we generally want to remove them.
    #
    # We'll try this using "Somatosensory areas, layer 6a" as a test case. In the 2016 ccf space, this structure is unused in favor of finer distinctions (e.g. "Primary somatosensory area, barrel field, layer 6a").

    # In[12]:

    # Double-check the voxel counts
    no_voxel_id = rsp.structure_tree.get_structures_by_name(
        ['Somatosensory areas, layer 6a'])[0]['id']
    print('voxel count for structure {0}: {1}'.format(
        no_voxel_id, rsp.total_voxel_map[no_voxel_id]))

    # remove unassigned structures from the ReferenceSpace's StructureTree
    rsp.remove_unassigned()

    # check the structure tree
    no_voxel_id in rsp.structure_tree.node_ids()

    # #### View a slice from the annotation

    # In[13]:

    import numpy as np

    # #### Downsample the space
    #
    # If you want an annotation at a resolution we don't provide, you can make one with the downsample method.

    # In[14]:

    import warnings

    target_resolution = [75, 75, 75]

    # in some versions of scipy, scipy.ndimage.zoom raises a helpful but distracting
    # warning about the method used to truncate integers.
    warnings.simplefilter('ignore')

    sf_rsp = rsp.downsample(target_resolution)

    # re-enable warnings
    warnings.simplefilter('default')

    print(rsp.annotation.shape)
    print(sf_rsp.annotation.shape)
# Save nifti qform = np.array([[0, 0, allen_resolution*pow(10, -3), 0], [-allen_resolution*pow(10, -3), 0, 0, 0], [0, -allen_resolution*pow(10, -3), 0, 0], [0, 0, 0, 1]]) img_annotation = nib.Nifti1Image(annot, np.eye(4)) img_average_template = nib.Nifti1Image(template, np.eye(4)) img_annotation.set_qform(qform, code=1) img_average_template.set_qform(qform, code=1) img_annotation.set_sform(np.eye(4), code=0) img_average_template.set_sform(np.eye(4), code=0) # img_average_template.set_qform(img_average_template_wrongread.get_qform()) nib.save(img_annotation, allen_annotation_path) nib.save(img_average_template, allen_average_template_path) # Get structure graph oapi = OntologiesApi() allen_structure_graph_dict = oapi.get_structures([1]) # Get structure graph with structure graph id = 1, which is the Mouse Brain Atlas structure graph # This removes some unused fields returned by the query allen_structure_graph_dict = StructureTree.clean_structures(allen_structure_graph_dict) # Get tree allen_structure_graph_tree = StructureTree(allen_structure_graph_dict) # now let's take a look at a structure allen_structure_graph_tree.get_structures_by_name(['Dorsal auditory area']) # Look at children or parent of structure, important for later (volume calculations) # Define path of structure graph table allen_average_template_csv_path=os.path.join(allen_dir, 'structure_graph.csv')
# open up a list of all of the experiments all_experiments = mcc.get_experiments(dataframe=True) print("{} total experiments".format(len(all_experiments))) # take a look at what we know about an experiment with a primary motor injection print(all_experiments.loc[122642490]) # grab the StructureTree instance structure_tree = mcc.get_structure_tree() print(structure_tree) from allensdk.api.queries.ontologies_api import OntologiesApi oapi = OntologiesApi() # get the ids of all the structure sets in the tree structure_set_ids = structure_tree.get_structure_sets() # query the API for information on those structure sets #print(oapi.get_structure_sets(structure_set_ids)) # #structures = structure_tree.get_structures_by_set_id([167587189]) #print(structures) # Projection grid data volume experiment_id = 181599674
def ontologies(): return OntologiesApi()
rbp4_cortical_df = pd.DataFrame(rbp4_cortical_experiments).set_index('id') rbp4_cortical_df.head() # As a convenience, structures are grouped in to named collections called "structure sets". These sets can be used to quickly gather a useful subset of structures from the tree. The criteria used to define structure sets are eclectic; a structure set might list: # # - structures that were used in a particular project. # - structures that coarsely partition the brain. # - structures that bear functional similarity. # # To see only structure sets relevant to the adult mouse brain, use the StructureTree: # In[11]: from allensdk.api.queries.ontologies_api import OntologiesApi oapi = OntologiesApi() # get the ids of all the structure sets in the tree structure_set_ids = structure_tree.get_structure_sets() # query the API for information on those structure sets pd.DataFrame(oapi.get_structure_sets(structure_set_ids)) # As you can see from the table above, there are many different sets that our available brain structures can be grouped in. Below we will look into our Mouse Connectivity Summary data by specifying the set ID using the `get_structure_by_set_id()` method. # In[12]: # From the above table, "Mouse Connectivity - Summary" has id 687527945 summary_connectivity = structure_tree.get_structures_by_set_id([687527945]) summary_connectivity_df = pd.DataFrame(summary_connectivity) summary_connectivity_df.head()
def ontologies(): oa = OntologiesApi() oa.json_msg_query = MagicMock(name='json_msg_query') return oa
class ImageDownload(SvgApi, ImageDownloadApi):
    """Handles queries to the Allen ImageDownloadApi/SvgApi and saves the
    downloaded image data to disk."""

    # useful tutorial: https://allensdk.readthedocs.io/en/latest/_static/examples/nb/image_download.html
    def __init__(self):
        # Initialise both Allen API base classes this class mixes together.
        SvgApi.__init__(
            self
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/svg_api.py
        ImageDownloadApi.__init__(
            self
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/image_download_api.py
        self.annsetsapi = AnnotatedSectionDataSetsApi(
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/annotated_section_data_sets_api.py
        self.oapi = OntologiesApi(
        )  # https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/ontologies_api.py

        # Get metadata about atlases
        self.atlases = pd.DataFrame(self.oapi.get_atlases_table())
        self.atlases_names = sorted(list(self.atlases['name'].values))

        # Cache the ids of the three mouse atlases looked up by their display name.
        self.mouse_coronal_atlas_id = int(self.atlases.loc[
            self.atlases['name'] == "Mouse, P56, Coronal"].id.values[0])
        self.mouse_sagittal_atlas_id = int(self.atlases.loc[
            self.atlases['name'] == "Mouse, P56, Sagittal"].id.values[0])
        self.mouse_3D_atlas_id = int(self.atlases.loc[
            self.atlases['name'] == "Mouse, Adult, 3D Coronal"].id.values[0])

        # Get metadata about products
        if connected_to_internet():
            self.products = pd.DataFrame(
                send_query(
                    "http://api.brain-map.org/api/v2/data/query.json?criteria=model::Product"
                ))
            self.mouse_brain_reference_product_id = 12
            self.mouse_brain_ish_data_product_id = 1
            self.products_names = sorted(list(self.products["name"].values))
            self.mouse_products_names = sorted(
                list(self.products.loc[self.products.species == "Mouse"]
                     ["name"].values))
        else:
            # product metadata can only come from the web API, so fail loudly offline
            raise ConnectionError(
                "It seems that you are not connected to the internet, you won't be able to download stuff."
            )

    # UTILS
    def get_atlas_by_name(self, atlas_name):
        """Return the id of the atlas whose display name is `atlas_name`.

        :param atlas_name: str, one of self.atlases_names
        """
        if not atlas_name in self.atlases_names:
            raise ValueError("Available atlases: {}".format(
                self.atlases_names))
        return self.atlases.loc[self.atlases['name'] ==
                                atlas_name].id.values[0]

    def get_products_by_species(self, species):
        """Return all 'products' in the Allen database for a given species.

        :param species: str
        """
        return self.products.loc[self.products.species == species]

    def get_experimentsid_by_productid(self, productid, **kwargs):
        """Return the ids of experiments belonging to one product (project).

        :param productid: int with product ID number
        :param **kwargs: forwarded to get_section_data_sets_by_product
        """
        # for more details: https://github.com/AllenInstitute/AllenSDK/blob/master/allensdk/api/queries/image_download_api.py
        return pd.DataFrame(
            self.get_section_data_sets_by_product([productid], **kwargs))

    def get_experimentimages_by_expid(self, expid):
        """Return metadata for the images of one experiment.

        :param expid: int, a section dataset id
        """
        # expid should be a section dataset id
        return pd.DataFrame(self.section_image_query(expid))

    def get_atlasimages_by_atlasid(self, atlasid):
        """Return metadata of the images that belong to an atlas.

        :param atlasid: int with atlas number
        """
        if not isinstance(atlasid, int):
            raise ValueError(
                "Atlas id should be an integer not: {}".format(atlasid))
        return pd.DataFrame(self.atlas_image_query(atlasid))

    def download_images_by_imagesid(self,
                                    savedir,
                                    imagesids,
                                    downsample=0,
                                    annotated=True,
                                    snames=None,
                                    atlas_svg=True):
        """Download and save images given a list of image IDs.

        :param savedir: str, folder in which to save the images
        :param imagesids: list of int with image IDs
        :param downsample: downsample factor to reduce image size/resolution (Default value = 0)
        :param annotated: if True the images are overlayed with annotations (Default value = True)
        :param snames: optional per-image section labels used in the saved file name (Default value = None)
        :param atlas_svg: if True fetches the images as SVG, otherwise as JPG (Default value = True)
        """
        if not os.path.isdir(savedir):
            os.mkdir(savedir)

        # downloads happen in savedir, so remember where we were to restore it after
        curdir = os.getcwd()
        os.chdir(savedir)
        for i, imgid in tqdm(enumerate(imagesids)):
            # choose the filename extension/suffix matching the requested format
            if not atlas_svg and not annotated:
                savename = str(imgid) + ".jpg"
            elif not atlas_svg and annotated:
                savename = str(imgid) + "_annotated.jpg"
            else:
                savename = str(imgid) + ".svg"

            if snames is not None:
                sname, ext = savename.split(".")
                savename = sname + "_sect{}_img{}.".format(snames[i],
                                                           i + 1) + ext

            # skip files that were already downloaded on a previous run
            if os.path.isfile(savename):
                continue

            if not atlas_svg and not annotated:
                self.download_section_image(imgid,
                                            file_path=savename,
                                            downsample=downsample)
            elif not atlas_svg and annotated:
                self.download_atlas_image(imgid,
                                          file_path=savename,
                                          annotation=True,
                                          downsample=downsample)
            else:
                self.download_svg(imgid, file_path=savename)

        file_names = os.listdir(savedir)
        print("Downloaded {} images".format(len(file_names)))
        os.chdir(curdir)

    def download_images_by_atlasid(self, savedir, atlasid, **kwargs):
        """Download all the images that belong to an atlas.

        :param savedir: str, folder in which to save the images
        :param atlasid: int, ID of the atlas to use
        :param **kwargs: keyword arguments for self.download_images_by_imagesid
        """
        imgsids = self.get_atlasimages_by_atlasid(atlasid)['id']
        imgs_secs_n = self.get_atlasimages_by_atlasid(
            atlasid)['section_number']
        self.download_images_by_imagesid(savedir,
                                         imgsids,
                                         snames=imgs_secs_n,
                                         **kwargs)
class OntologiesApiTests(unittest.TestCase):
    """Unit tests asserting the exact RMA query URLs that OntologiesApi
    builds for each query method. `json_msg_query` is mocked, so no
    network traffic occurs."""

    def __init__(self, *args, **kwargs):
        super(OntologiesApiTests, self).__init__(*args, **kwargs)

    def setUp(self):
        # fresh API object per test
        self.oa = OntologiesApi()

    def tearDown(self):
        self.oa = None

    def test_get_structure_graph(self):
        """Structure query by a single graph id."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,[graph_id$in1],rma::options[num_rows$eq'all'][order$eqstructures.graph_order][count$eqfalse]"

        structure_graph_id = 1

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(structure_graph_id)
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_list_structure_graphs(self):
        """Listing all structure graphs."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::StructureGraph,rma::options[num_rows$eq'all'][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structure_graphs()
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_list_structure_sets(self):
        """Listing all structure sets."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::StructureSet,rma::options[num_rows$eq'all'][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structure_sets()
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_list_atlases(self):
        """Listing all atlases."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Atlas,rma::options[num_rows$eq'all'][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_atlases()
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_structure_graph_by_name(self):
        """Structure query by a single graph name."""
        expected = u"http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,graph[structure_graphs.name$in'Mouse Brain Atlas'],rma::options[num_rows$eq'all'][order$eqstructures.graph_order][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(structure_graph_names="'Mouse Brain Atlas'")
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_structure_graphs_by_names(self):
        """Structure query by multiple graph names."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,graph[structure_graphs.name$in'Mouse Brain Atlas','Human Brain Atlas'],rma::options[num_rows$eq'all'][order$eqstructures.graph_order][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(structure_graph_names=["'Mouse Brain Atlas'",
                                                      "'Human Brain Atlas'"])
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_structure_set_by_id(self):
        """Structure query by a single structure-set id."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,[structure_set_id$in8],rma::options[num_rows$eq'all'][order$eqstructures.graph_order][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(structure_set_ids=8)
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_structure_sets_by_ids(self):
        """Structure query by multiple structure-set ids."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,[structure_set_id$in7,8],rma::options[num_rows$eq'all'][order$eqstructures.graph_order][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(structure_set_ids=[7,8])
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_structure_set_by_name(self):
        """Structure query by a single structure-set name."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,structure_sets[name$in'Mouse Connectivity - Summary'],rma::options[num_rows$eq'all'][order$eqstructures.graph_order][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(structure_set_names="'Mouse Connectivity - Summary'")
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_structure_set_by_names(self):
        """Structure query by multiple structure-set names."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,structure_sets[name$in'NHP - Coarse','Mouse Connectivity - Summary'],rma::options[num_rows$eq'all'][order$eqstructures.graph_order][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(structure_set_names=["'NHP - Coarse'",
                                                    "'Mouse Connectivity - Summary'"])
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_structure_set_no_order(self):
        """order=None drops the graph_order clause from the options."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Structure,rma::criteria,[graph_id$in1],rma::options[num_rows$eq'all'][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_structures(1, order=None)
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_atlas_1(self):
        """Brief atlas table query for one atlas id."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Atlas,rma::criteria,[id$in1],structure_graph(ontology),graphic_group_labels,rma::include,structure_graph(ontology),graphic_group_labels,rma::options[only$eq'atlases.id,atlases.name,atlases.image_type,ontologies.id,ontologies.name,structure_graphs.id,structure_graphs.name,graphic_group_labels.id,graphic_group_labels.name'][num_rows$eq'all'][count$eqfalse]"

        atlas_id = 1

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_atlases_table(atlas_id)
        self.oa.json_msg_query.assert_called_once_with(expected)

    def test_atlas_verbose(self):
        """Verbose (brief=False) atlas table query, no `only` clause."""
        expected = "http://api.brain-map.org/api/v2/data/query.json?q=model::Atlas,rma::criteria,structure_graph(ontology),graphic_group_labels,rma::include,structure_graph(ontology),graphic_group_labels,rma::options[num_rows$eq'all'][count$eqfalse]"

        self.oa.json_msg_query = \
            MagicMock(name='json_msg_query')

        self.oa.get_atlases_table(brief=False)
        self.oa.json_msg_query.assert_called_once_with(expected)
return if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path)) with open(path + name + ".obj", 'w') as file: for vert in verts: file.write("v " + str(vert[0]) + " " + str(vert[1]) + " " + str(vert[2]) + "\n") for face in faces: file.write("f " + str(face[0] + 1) + " " + str(face[1] + 1) + " " + str(face[2] + 1) + "\n") graph_id = 1 # Graph_id is the id of the structure we want to load. 1 is the id of the adult mouse structure graph oapi = OntologiesApi() structure_graph = oapi.get_structures_with_sets([graph_id]) # This removes some unused fields returned by the query structure_graph = StructureTree.clean_structures(structure_graph) tree = StructureTree(structure_graph) # the annotation download writes a file, so we will need somwhere to put it annotation_dir = 'E:\\Histology\\allen_rsp' annotation_path = os.path.join(annotation_dir, 'annotation_10.nrrd') # this is a string which contains the name of the latest ccf version annotation_version = MouseConnectivityApi.CCF_VERSION_DEFAULT mcapi = MouseConnectivityApi() #Next line commented because the annotation volume is already downloaded
basedir = '/Users/Eli/Dropbox/Neurodegeneration/TauSpread/tau-spread/' structIDSource = 'data/aba/atlas/structIDs.csv' outputFilename = 'data/aba/atlas/mask_aba.mat' basedir = str(sys.argv[1]) structIDSource = str(sys.argv[2]) outputFilename = str(sys.argv[3]) os.chdir(basedir) print("Making a mask for Oh structures as %s" % outputFilename) # Set max number of voxels: maxVoxels = 0 # (0: no max) #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- oapi = OntologiesApi() structure_graph = oapi.get_structures_with_sets([adultMouseStructureGraphID]) # Removes some unused fields returned by the query: structure_graph = StructureTree.clean_structures(structure_graph) tree = StructureTree(structure_graph) # Example: # tree.get_structures_by_name(['Dorsal auditory area']) # The annotation download writes a file, so we will need somwhere to put it annotation_dir = os.path.dirname(structIDSource) Manifest.safe_mkdir(annotation_dir) annotation_path = os.path.join(annotation_dir, 'annotation.nrrd') #------------------------------------------------------------------------------- # Use the connectivity API: mcapi = MouseConnectivityApi()
def test_notebook(fn_temp_dir):
    """Run the 'Mouse Connectivity' notebook code path end to end.

    NOTE(review): `fn_temp_dir` looks like a pytest fixture supplying a
    working directory; it is not referenced in the visible body — presumably
    the fixture chdirs into the temp dir before the test runs. Confirm.
    Requires network access to the Allen Institute API.
    """
    # coding: utf-8

    # ## Mouse Connectivity
    #
    # This notebook demonstrates how to access and manipulate data in the Allen
    # Mouse Brain Connectivity Atlas. The `MouseConnectivityCache` AllenSDK class
    # provides methods for downloading metadata about experiments, including their
    # viral injection site and the mouse's transgenic line. You can request
    # information either as a Pandas DataFrame or a simple list of dictionaries.
    #
    # An important feature of the `MouseConnectivityCache` is how it stores and
    # retrieves data for you. By default, it will create (or read) a manifest file
    # that keeps track of where various connectivity atlas data are stored. If you
    # request something that has not already been downloaded, it will download it
    # and store it in a well known location.
    #
    # Download this notebook in .ipynb format <a href='mouse_connectivity.ipynb'>here</a>.

    # In[1]:

    from allensdk.core.mouse_connectivity_cache import MouseConnectivityCache

    # The manifest file is a simple JSON file that keeps track of all of
    # the data that has already been downloaded onto the hard drives.
    # If you supply a relative path, it is assumed to be relative to your
    # current working directory.
    mcc = MouseConnectivityCache(manifest_file='connectivity/mouse_connectivity_manifest.json')

    # open up a list of all of the experiments
    all_experiments = mcc.get_experiments(dataframe=True)
    print("%d total experiments" % len(all_experiments))

    # take a look at what we know about an experiment with a primary motor injection
    # (notebook-style bare expression; result is discarded when run as a test)
    all_experiments.loc[122642490]

    # `MouseConnectivityCache` has a method for retrieving the adult mouse
    # structure tree as an `StructureTree` class instance. This is a wrapper
    # around a list of dictionaries, where each dictionary describes a structure.
    # It is principally useful for looking up structures by their properties.

    # In[2]:

    # pandas for nice tables
    import pandas as pd

    # grab the StructureTree instance
    structure_tree = mcc.get_structure_tree()

    # get info on some structures
    structures = structure_tree.get_structures_by_name(['Primary visual area', 'Hypothalamus'])
    pd.DataFrame(structures)

    # As a convenience, structures are grouped in to named collections called
    # "structure sets". These sets can be used to quickly gather a useful subset
    # of structures from the tree. The criteria used to define structure sets are
    # eclectic; a structure set might list:
    #
    # * structures that were used in a particular project.
    # * structures that coarsely partition the brain.
    # * structures that bear functional similarity.
    #
    # or something else entirely. To view all of the available structure sets
    # along with their descriptions, follow this
    # [link](http://api.brain-map.org/api/v2/data/StructureSet/query.json).
    # To see only structure sets relevant to the adult mouse brain, use the
    # StructureTree:

    # In[3]:

    from allensdk.api.queries.ontologies_api import OntologiesApi

    oapi = OntologiesApi()

    # get the ids of all the structure sets in the tree
    structure_set_ids = structure_tree.get_structure_sets()

    # query the API for information on those structure sets
    pd.DataFrame(oapi.get_structure_sets(structure_set_ids))

    # On the connectivity atlas web site, you'll see that we show most of our data
    # at a fairly coarse structure level. We did this by creating a structure set
    # of ~300 structures, which we call the "summary structures".
    # We can use the structure tree to get all of the structures in this set:

    # In[4]:

    # From the above table, "Mouse Connectivity - Summary" has id 167587189
    summary_structures = structure_tree.get_structures_by_set_id([167587189])
    pd.DataFrame(summary_structures)

    # This is how you can filter experiments by transgenic line:

    # In[5]:

    # fetch the experiments that have injections in the isocortex of cre-positive mice
    isocortex = structure_tree.get_structures_by_name(['Isocortex'])[0]
    cre_cortical_experiments = mcc.get_experiments(cre=True, injection_structure_ids=[isocortex['id']])
    print("%d cre cortical experiments" % len(cre_cortical_experiments))

    # same as before, but restrict the cre line
    rbp4_cortical_experiments = mcc.get_experiments(cre=[ 'Rbp4-Cre_KL100' ], injection_structure_ids=[isocortex['id']])
    print("%d Rbp4 cortical experiments" % len(rbp4_cortical_experiments))

    # ## Structure Signal Unionization
    #
    # The ProjectionStructureUnionizes API data tells you how much signal there
    # was in a given structure and experiment. It contains the density of
    # projecting signal, volume of projecting signal, and other information.
    # `MouseConnectivityCache` provides methods for querying and storing this data.

    # In[6]:

    # find wild-type injections into primary visual area
    visp = structure_tree.get_structures_by_acronym(['VISp'])[0]
    visp_experiments = mcc.get_experiments(cre=False, injection_structure_ids=[visp['id']])
    print("%d VISp experiments" % len(visp_experiments))

    structure_unionizes = mcc.get_structure_unionizes([ e['id'] for e in visp_experiments ], is_injection=False, structure_ids=[isocortex['id']], include_descendants=True)
    print("%d VISp non-injection, cortical structure unionizes" % len(structure_unionizes))

    # In[7]:

    structure_unionizes.head()

    # This is a rather large table, even for a relatively small number of
    # experiments. You can filter it down to a smaller list of structures like this.

    # In[8]:

    dense_unionizes = structure_unionizes[ structure_unionizes.projection_density > .5 ]
    large_unionizes = dense_unionizes[ dense_unionizes.volume > .5 ]
    large_structures = pd.DataFrame(structure_tree.nodes(large_unionizes.structure_id))
    print("%d large, dense, cortical, non-injection unionizes, %d structures" % ( len(large_unionizes), len(large_structures) ))
    print(large_structures.name)
    large_unionizes

    # ## Generating a Projection Matrix
    # The `MouseConnectivityCache` class provides a helper method for converting
    # ProjectionStructureUnionize records for a set of experiments and structures
    # into a matrix. This code snippet demonstrates how to make a matrix of
    # projection density values in auditory sub-structures for cre-negative VISp
    # experiments.

    # In[9]:

    import numpy as np
    import matplotlib.pyplot as plt
    import warnings
    warnings.filterwarnings('ignore')

    visp_experiment_ids = [ e['id'] for e in visp_experiments ]
    ctx_children = structure_tree.child_ids( [isocortex['id']] )[0]

    pm = mcc.get_projection_matrix(experiment_ids = visp_experiment_ids,
                                   projection_structure_ids = ctx_children,
                                   hemisphere_ids= [2], # right hemisphere, ipsilateral
                                   parameter = 'projection_density')

    row_labels = pm['rows'] # these are just experiment ids
    column_labels = [ c['label'] for c in pm['columns'] ]
    matrix = pm['matrix']

    fig, ax = plt.subplots(figsize=(15,15))
    heatmap = ax.pcolor(matrix, cmap=plt.cm.afmhot)

    # put the major ticks at the middle of each cell
    ax.set_xticks(np.arange(matrix.shape[1])+0.5, minor=False)
    ax.set_yticks(np.arange(matrix.shape[0])+0.5, minor=False)

    ax.set_xlim([0, matrix.shape[1]])
    ax.set_ylim([0, matrix.shape[0]])

    # want a more natural, table-like display
    ax.invert_yaxis()
    ax.xaxis.tick_top()

    ax.set_xticklabels(column_labels, minor=False)
    ax.set_yticklabels(row_labels, minor=False)

    # ## Manipulating Grid Data
    #
    # The `MouseConnectivityCache` class also helps you download and open every
    # experiment's projection grid data volume.
    # By default it will download 25um volumes, but you could also download data
    # at other resolutions if you prefer (10um, 50um, 100um).
    #
    # This demonstrates how you can load the projection density for a particular
    # experiment. It also shows how to download the template volume to which all
    # grid data is registered. Voxels in that template have been structurally
    # annotated by neuroanatomists and the resulting labels stored in a separate
    # annotation volume image.

    # In[10]:

    # we'll take this experiment - an injection into the primary somatosensory - as an example
    experiment_id = 181599674

    # In[11]:

    # projection density: number of projecting pixels / voxel volume
    # NOTE(review): this rebinds `pd`, shadowing the pandas import above —
    # pandas is unusable as `pd` from this point on (the notebook no longer
    # needs it, but keep this in mind when extending the test).
    pd, pd_info = mcc.get_projection_density(experiment_id)

    # injection density: number of projecting pixels in injection site / voxel volume
    ind, ind_info = mcc.get_injection_density(experiment_id)

    # injection fraction: number of pixels in injection site / voxel volume
    inf, inf_info = mcc.get_injection_fraction(experiment_id)

    # data mask:
    # binary mask indicating which voxels contain valid data
    dm, dm_info = mcc.get_data_mask(experiment_id)

    template, template_info = mcc.get_template_volume()
    annot, annot_info = mcc.get_annotation_volume()

    # in addition to the annotation volume, you can get binary masks for individual structures
    # in this case, we'll get one for the isocortex
    cortex_mask, cm_info = mcc.get_structure_mask(315)

    print(pd_info)
    print(pd.shape, template.shape, annot.shape)

    # Once you have these loaded, you can use matplotlib see what they look like.

    # In[12]:

    # compute the maximum intensity projection (along the anterior-posterior axis) of the projection data
    pd_mip = pd.max(axis=0)
    ind_mip = ind.max(axis=0)
    inf_mip = inf.max(axis=0)

    # show that slice of all volumes side-by-side
    f, pr_axes = plt.subplots(1, 3, figsize=(15, 6))

    pr_axes[0].imshow(pd_mip, cmap='hot', aspect='equal')
    pr_axes[0].set_title("projection density MaxIP")

    pr_axes[1].imshow(ind_mip, cmap='hot', aspect='equal')
    pr_axes[1].set_title("injection density MaxIP")

    pr_axes[2].imshow(inf_mip, cmap='hot', aspect='equal')
    pr_axes[2].set_title("injection fraction MaxIP")

    # In[13]:

    # Look at a slice from the average template and annotation volumes

    # pick a slice to show
    slice_idx = 264

    f, ccf_axes = plt.subplots(1, 3, figsize=(15, 6))

    ccf_axes[0].imshow(template[slice_idx,:,:], cmap='gray', aspect='equal', vmin=template.min(), vmax=template.max())
    ccf_axes[0].set_title("registration template")

    ccf_axes[1].imshow(annot[slice_idx,:,:], cmap='gray', aspect='equal', vmin=0, vmax=2000)
    ccf_axes[1].set_title("annotation volume")

    ccf_axes[2].imshow(cortex_mask[slice_idx,:,:], cmap='gray', aspect='equal', vmin=0, vmax=1)
    ccf_axes[2].set_title("isocortex mask")

    # On occasion the TissueCyte microscope fails to acquire a tile. In this case
    # the data from that tile should not be used for analysis. The data mask
    # associated with each experiment can be used to determine which portions of
    # the grid data came from correctly acquired tiles.
    #
    # In this experiment, a missed tile can be seen in the data mask as a dark
    # warped square. The values in the mask exist within [0, 1], describing the
    # fraction of each voxel that was correctly acquired

    # In[14]:

    f, data_mask_axis = plt.subplots(figsize=(5, 6))

    data_mask_axis.imshow(dm[81, :, :], cmap='hot', aspect='equal', vmin=0, vmax=1)
    data_mask_axis.set_title('data mask')
def setUp(self):
    """Create a fresh OntologiesApi client for each test case."""
    self.oa = OntologiesApi()
class ABA(Atlas): """ This class handles interaction with the Allen Brain Atlas datasets and APIs to get structure trees, experimental metadata and results, tractography data etc. """ ignore_regions = ['retina', 'brain', 'fiber tracts', 'grey'] # ignored when rendering # useful vars for analysis excluded_regions = ["fiber tracts"] resolution = 25 _root_bounds = [[-17, 13193], [ 134, 7564], [486, 10891]] _root_midpoint = [np.mean([-17, 13193]), np.mean([134, 7564]), np.mean([486, 10891])] atlas_name = "ABA" mesh_format = 'obj' base_url = "https://neuroinformatics.nl/HBP/allen-connectivity-viewer/json/streamlines_NNN.json.gz" # Used for streamlines def __init__(self, base_dir=None, **kwargs): """ Set up file paths and Allen SDKs :param base_dir: path to directory to use for saving data (default value None) :param kwargs: can be used to pass path to individual data folders. See brainrender/Utils/paths_manager.py """ Atlas.__init__(self, base_dir=base_dir, **kwargs) self.meshes_folder = self.mouse_meshes # where the .obj mesh for each region is saved # get mouse connectivity cache and structure tree self.mcc = MouseConnectivityCache(manifest_file=os.path.join(self.mouse_connectivity_cache, "manifest.json")) self.structure_tree = self.mcc.get_structure_tree() # get ontologies API and brain structures sets self.oapi = OntologiesApi() self.get_structures_sets() # get reference space self.space = ReferenceSpaceApi() self.spacecache = ReferenceSpaceCache( manifest=os.path.join(self.annotated_volume_fld, "manifest.json"), # downloaded files are stored relative to here resolution=self.resolution, reference_space_key="annotation/ccf_2017" # use the latest version of the CCF ) self.annotated_volume, _ = self.spacecache.get_annotation_volume() # mouse connectivity API [used for tractography] self.mca = MouseConnectivityApi() # Get tree search api self.tree_search = TreeSearchApi() # Store all regions metadata [If there's internet connection] if self.other_sets is not None: 
self.regions = self.other_sets["Structures whose surfaces are represented by a precomputed mesh"].sort_values('acronym') self.region_acronyms = list(self.other_sets["Structures whose surfaces are represented by a precomputed mesh"].sort_values( 'acronym').acronym.values) # ---------------------------------------------------------------------------- # # Methods to support Scene creation # # ---------------------------------------------------------------------------- # """ These methods are used by brainrender.scene to populate a scene using the Allen brain atlas meshes. They overwrite methods of the base atlas class """ # ------------------------- Getting elements for scene ------------------------- # def get_brain_regions(self, brain_regions, VIP_regions=None, VIP_color=None, add_labels=False, colors=None, use_original_color=True, alpha=None, hemisphere=None, verbose=False, **kwargs): """ Gets brain regions meshes for rendering Many parameters can be passed to specify how the regions should be rendered. To treat a subset of the rendered regions, specify which regions are VIP. Use the kwargs to specify more detailes on how the regins should be rendered (e.g. wireframe look) :param brain_regions: str list of acronyms of brain regions :param VIP_regions: if a list of brain regions are passed, these are rendered differently compared to those in brain_regions (Default value = None) :param VIP_color: if passed, this color is used for the VIP regions (Default value = None) :param colors: str, color of rendered brian regions (Default value = None) :param use_original_color: bool, if True, the allen's default color for the region is used. (Default value = False) :param alpha: float, transparency of the rendered brain regions (Default value = None) :param hemisphere: str (Default value = None) :param add_labels: bool (default False). If true a label is added to each regions' actor. 
The label is visible when hovering the mouse over the actor :param **kwargs: used to determine a bunch of thigs, including the look and location of lables from scene.add_labels """ # Check that the atlas has brain regions data if self.region_acronyms is None: print(f"The atlas {self.atlas_name} has no brain regions data") return # Parse arguments if VIP_regions is None: VIP_regions = brainrender.DEFAULT_VIP_REGIONS if VIP_color is None: VIP_color = brainrender.DEFAULT_VIP_COLOR if alpha is None: _alpha = brainrender.DEFAULT_STRUCTURE_ALPHA else: _alpha = alpha # check that we have a list if not isinstance(brain_regions, list): brain_regions = [brain_regions] # check the colors input is correct if colors is not None: if isinstance(colors[0], (list, tuple)): if not len(colors) == len(brain_regions): raise ValueError("when passing colors as a list, the number of colors must match the number of brain regions") for col in colors: if not check_colors(col): raise ValueError("Invalide colors in input: {}".format(col)) else: if not check_colors(colors): raise ValueError("Invalide colors in input: {}".format(colors)) colors = [colors for i in range(len(brain_regions))] # loop over all brain regions actors = {} for i, region in enumerate(brain_regions): self._check_valid_region_arg(region) if region in self.ignore_regions: continue if verbose: print("Rendering: ({})".format(region)) # get the structure and check if we need to download the object file if region not in self.region_acronyms: print(f"The region {region} doesn't seem to belong to the atlas being used: {self.atlas_name}. 
Skipping") continue obj_file = os.path.join(self.meshes_folder, "{}.{}".format(region, self.mesh_format)) if not self._check_obj_file(region, obj_file): print("Could not render {}, maybe we couldn't get the mesh?".format(region)) continue # check which color to assign to the brain region if use_original_color: color = [x/255 for x in self.get_region_color(region)] else: if region in VIP_regions: color = VIP_color else: if colors is None: color = brainrender.DEFAULT_STRUCTURE_COLOR elif isinstance(colors, list): color = colors[i] else: color = colors if region in VIP_regions: alpha = 1 else: alpha = _alpha # Load the object file as a mesh and store the actor if hemisphere is not None: if hemisphere.lower() == "left" or hemisphere.lower() == "right": obj = self.get_region_unilateral(region, hemisphere=hemisphere, color=color, alpha=alpha) else: raise ValueError(f'Invalid hemisphere argument: {hemisphere}') else: obj = load(obj_file, c=color, alpha=alpha) if obj is not None: actors_funcs.edit_actor(obj, **kwargs) actors[region] = obj else: print(f"Something went wrong while loading mesh data for {region}") return actors def get_neurons(self, neurons, color=None, display_axon=True, display_dendrites=True, alpha=1, neurite_radius=None): """ Gets rendered morphological data of neurons reconstructions downloaded from the Mouse Light project at Janelia (or other sources). Accepts neurons argument as: - file(s) with morphological data - vtkplotter mesh actor(s) of entire neurons reconstructions - dictionary or list of dictionary with actors for different neuron parts :param neurons: str, list, dict. File(s) with neurons data or list of rendered neurons. :param display_axon, display_dendrites: if set to False the corresponding neurite is not rendered :param color: default None. Can be: - None: each neuron is given a random color - color: rbg, hex etc. 
If a single color is passed all neurons will have that color - cmap: str with name of a colormap: neurons are colored based on their sequential order and cmap - dict: a dictionary specifying a color for soma, dendrites and axon actors, will be the same for all neurons - list: a list of length = number of neurons with either a single color for each neuron or a dictionary of colors for each neuron :param alpha: float in range 0,1. Neurons transparency :param neurite_radius: float > 0 , radius of tube actor representing neurites """ if not isinstance(neurons, (list, tuple)): neurons = [neurons] # ------------------------------ Prepare colors ------------------------------ # N = len(neurons) colors = dict( soma = None, axon = None, dendrites = None, ) # If no color is passed, get random colors if color is None: cols = get_random_colors(N) colors = dict( soma = cols.copy(), axon = cols.copy(), dendrites = cols.copy(),) else: if isinstance(color, str): # Deal with a a cmap being passed if color in _mapscales_cmaps: cols = [colorMap(n, name=color, vmin=-2, vmax=N+2) for n in np.arange(N)] colors = dict( soma = cols.copy(), axon = cols.copy(), dendrites = cols.copy(),) else: # Deal with a single color being passed cols = [getColor(color) for n in np.arange(N)] colors = dict( soma = cols.copy(), axon = cols.copy(), dendrites = cols.copy(),) elif isinstance(color, dict): # Deal with a dictionary with color for each component if not 'soma' in color.keys(): raise ValueError(f"When passing a dictionary as color argument, \ soma should be one fo the keys: {color}") dendrites_color = color.pop('dendrites', color['soma']) axon_color = color.pop('axon', color['soma']) colors = dict( soma = [color['soma'] for n in np.arange(N)], axon = [axon_color for n in np.arange(N)], dendrites = [dendrites_color for n in np.arange(N)],) elif isinstance(color, (list, tuple)): # Check that the list content makes sense if len(color) != N: raise ValueError(f"When passing a list of color arguments, 
the list length"+ f" ({len(color)}) should match the number of neurons ({N}).") if len(set([type(c) for c in color])) > 1: raise ValueError(f"When passing a list of color arguments, all list elements"+ " should have the same type (e.g. str or dict)") if isinstance(color[0], dict): # Deal with a list of dictionaries soma_colors, dendrites_colors, axon_colors = [], [], [] for col in colors: if not 'soma' in col.keys(): raise ValueError(f"When passing a dictionary as col argument, \ soma should be one fo the keys: {col}") dendrites_colors.append(col.pop('dendrites', col['soma'])) axon_colors.append(col.pop('axon', col['soma'])) soma_colors.append(col['soma']) colors = dict( soma = soma_colors, axon = axon_colors, dendrites = dendrites_colors,) else: # Deal with a list of colors colors = dict( soma = color.copy(), axon = color.copy(), dendrites = color.copy(),) else: raise ValueError(f"Color argument passed is not valid. Should be a \ str, dict, list or None, not {type(color)}:{color}") # Check colors, if everything went well we should have N colors per entry for k,v in colors.items(): if len(v) != N: raise ValueError(f"Something went wrong while preparing colors. Not all \ entries have right length. We got: {colors}") # ---------------------------------- Render ---------------------------------- # _neurons_actors = [] for neuron in neurons: neuron_actors = {'soma':None, 'dendrites':None, 'axon': None} # Deal with neuron as filepath if isinstance(neuron, str): if os.path.isfile(neuron): if neuron.endswith('.swc'): neuron_actors, _ = get_neuron_actors_with_morphapi(swcfile=neuron, neurite_radius=neurite_radius) else: raise NotImplementedError('Currently we can only parse morphological reconstructions from swc files') else: raise ValueError(f"Passed neruon {neuron} is not a valid input. 
Maybe the file doesn't exist?") # Deal with neuron as single actor elif isinstance(neuron, Actor): # A single actor was passed, maybe it's the entire neuron neuron_actors['soma'] = neuron # store it as soma anyway pass # Deal with neuron as dictionary of actor elif isinstance(neuron, dict): neuron_actors['soma'] = neuron.pop('soma', None) neuron_actors['axon'] = neuron.pop('axon', None) # Get dendrites actors if 'apical_dendrites' in neuron.keys() or 'basal_dendrites' in neuron.keys(): if 'apical_dendrites' not in neuron.keys(): neuron_actors['dendrites'] = neuron['basal_dendrites'] elif 'basal_dendrites' not in neuron.keys(): neuron_actors['dendrites'] = neuron['apical_dendrites'] else: neuron_ctors['dendrites'] = merge(neuron['apical_dendrites'], neuron['basal_dendrites']) else: neuron_actors['dendrites'] = neuron.pop('dendrites', None) # Deal with neuron as instance of Neuron from morphapi elif isinstance(neuron, Neuron): neuron_actors, _ = get_neuron_actors_with_morphapi(neuron=neuron) # Deal with other inputs else: raise ValueError(f"Passed neuron {neuron} is not a valid input") # Check that we don't have anything weird in neuron_actors for key, act in neuron_actors.items(): if act is not None: if not isinstance(act, Actor): raise ValueError(f"Neuron actor {key} is {act.__type__} but should be a vtkplotter Mesh. 
Not: {act}") if not display_axon: neuron_actors['axon'] = None if not display_dendrites: neuron_actors['dendrites'] = None _neurons_actors.append(neuron_actors) # Color actors for n, neuron in enumerate(_neurons_actors): if neuron['axon'] is not None: neuron['axon'].c(colors['axon'][n]) neuron['soma'].c(colors['soma'][n]) if neuron['dendrites'] is not None: neuron['dendrites'].c(colors['dendrites'][n]) # Return if len(_neurons_actors) == 1: return _neurons_actors[0], None elif not _neurons_actors: return None, None else: return _neurons_actors, None def get_tractography(self, tractography, color=None, color_by="manual", others_alpha=1, verbose=True, VIP_regions=[], VIP_color=None, others_color="white", include_all_inj_regions=False, extract_region_from_inj_coords=False, display_injection_volume=True): """ Renders tractography data and adds it to the scene. A subset of tractography data can receive special treatment using the with VIP regions argument: if the injection site for the tractography data is in a VIP regions, this is colored differently. 
:param tractography: list of dictionaries with tractography data :param color: color of rendered tractography data :param color_by: str, specifies which criteria to use to color the tractography (Default value = "manual") :param others_alpha: float (Default value = 1) :param verbose: bool (Default value = True) :param VIP_regions: list of brain regions with VIP treatement (Default value = []) :param VIP_color: str, color to use for VIP data (Default value = None) :param others_color: str, color for not VIP data (Default value = "white") :param include_all_inj_regions: bool (Default value = False) :param extract_region_from_inj_coords: bool (Default value = False) :param display_injection_volume: float, if True a spehere is added to display the injection coordinates and volume (Default value = True) """ # check argument if not isinstance(tractography, list): if isinstance(tractography, dict): tractography = [tractography] else: raise ValueError("the 'tractography' variable passed must be a list of dictionaries") else: if not isinstance(tractography[0], dict): raise ValueError("the 'tractography' variable passed must be a list of dictionaries") if not isinstance(VIP_regions, list): raise ValueError("VIP_regions should be a list of acronyms") # check coloring mode used and prepare a list COLORS to use for coloring stuff if color_by == "manual": # check color argument if color is None: color = TRACT_DEFAULT_COLOR COLORS = [color for i in range(len(tractography))] elif isinstance(color, list): if not len(color) == len(tractography): raise ValueError("If a list of colors is passed, it must have the same number of items as the number of tractography traces") else: for col in color: if not check_colors(col): raise ValueError("Color variable passed to tractography is invalid: {}".format(col)) COLORS = color else: if not check_colors(color): raise ValueError("Color variable passed to tractography is invalid: {}".format(color)) else: COLORS = [color for i in 
range(len(tractography))] elif color_by == "region": COLORS = [self.get_region_color(t['structure-abbrev']) for t in tractography] elif color_by == "target_region": if VIP_color is not None: if not check_colors(VIP_color) or not check_colors(others_color): raise ValueError("Invalid VIP or other color passed") try: if include_all_inj_regions: COLORS = [VIP_color if is_any_item_in_list( [x['abbreviation'] for x in t['injection-structures']], VIP_regions)\ else others_color for t in tractography] else: COLORS = [VIP_color if t['structure-abbrev'] in VIP_regions else others_color for t in tractography] except: raise ValueError("Something went wrong while getting colors for tractography") else: COLORS = [self.get_region_color(t['structure-abbrev']) if t['structure-abbrev'] in VIP_regions else others_color for t in tractography] else: raise ValueError("Unrecognised 'color_by' argument {}".format(color_by)) # add actors to represent tractography data actors, structures_acronyms = [], [] if VERBOSE and verbose: print("Structures found to be projecting to target: ") # Loop over injection experiments for i, (t, color) in enumerate(zip(tractography, COLORS)): # Use allen metadata if include_all_inj_regions: inj_structures = [x['abbreviation'] for x in t['injection-structures']] else: inj_structures = [self.get_structure_parent(t['structure-abbrev'])['acronym']] if VERBOSE and verbose and not is_any_item_in_list(inj_structures, structures_acronyms): print(" -- ({})".format(t['structure-abbrev'])) structures_acronyms.append(t['structure-abbrev']) # get tractography points and represent as list if color_by == "target_region" and not is_any_item_in_list(inj_structures, VIP_regions): alpha = others_alpha else: alpha = TRACTO_ALPHA if alpha == 0: continue # skip transparent ones # check if we need to manually check injection coords if extract_region_from_inj_coords: try: region = self.get_structure_from_coordinates(t['injection-coordinates'], just_acronym=False) if region is None: 
continue inj_structures = [self.get_structure_parent(region['acronym'])['acronym']] except: raise ValueError(self.get_structure_from_coordinates(t['injection-coordinates'], just_acronym=False)) if inj_structures is None: continue elif isinstance(extract_region_from_inj_coords, list): # check if injection coord are in one of the brain regions in list, otherwise skip if not is_any_item_in_list(inj_structures, extract_region_from_inj_coords): continue # represent injection site as sphere if display_injection_volume: actors.append(shapes.Sphere(pos=t['injection-coordinates'], c=color, r=INJECTION_VOLUME_SIZE*t['injection-volume'], alpha=TRACTO_ALPHA)) points = [p['coord'] for p in t['path']] actors.append(shapes.Tube(points, r=TRACTO_RADIUS, c=color, alpha=alpha, res=TRACTO_RES)) return actors def get_streamlines(self, sl_file, *args, colorby=None, color_each=False, **kwargs): """ Render streamline data downloaded from https://neuroinformatics.nl/HBP/allen-connectivity-viewer/streamline-downloader.html :param sl_file: path to JSON file with streamliens data [or list of files] :param colorby: str, criteria for how to color the streamline data (Default value = None) :param color_each: bool, if True, the streamlines for each injection is colored differently (Default value = False) :param *args: :param **kwargs: """ color = None if not color_each: if colorby is not None: try: color = self.structure_tree.get_structures_by_acronym([colorby])[0]['rgb_triplet'] if "color" in kwargs.keys(): del kwargs["color"] except: raise ValueError("Could not extract color for region: {}".format(colorby)) else: if colorby is not None: color = kwargs.pop("color", None) try: get_n_shades_of(color, 1) except: raise ValueError("Invalide color argument: {}".format(color)) if not isinstance(sl_file, (list, tuple)): sl_file = [sl_file] actors = [] if isinstance(sl_file[0], (str, pd.DataFrame)): # we have a list of files to add for slf in tqdm(sl_file): if not color_each: if color is not None: if 
isinstance(slf, str): streamlines = parse_streamline(filepath=slf, *args, color=color, **kwargs) else: streamlines = parse_streamline(data=slf, *args, color=color, **kwargs) else: if isinstance(slf, str): streamlines = parse_streamline(filepath=slf, *args, **kwargs) else: streamlines = parse_streamline(data=slf, *args, **kwargs) else: if color is not None: col = get_n_shades_of(color, 1)[0] else: col = get_random_colors(n_colors=1) if isinstance(slf, str): streamlines = parse_streamline(filepath=slf, color=col, *args, **kwargs) else: streamlines = parse_streamline(data= slf, color=col, *args, **kwargs) actors.extend(streamlines) else: raise ValueError("unrecognized argument sl_file: {}".format(sl_file)) return actors def get_injection_sites(self, experiments, color=None): """ Creates Spherse at the location of injections with a volume proportional to the injected volume :param experiments: list of dictionaries with tractography data :param color: (Default value = None) """ # check arguments if not isinstance(experiments, list): raise ValueError("experiments must be a list") if not isinstance(experiments[0], dict): raise ValueError("experiments should be a list of dictionaries") #c= cgeck color if color is None: color = INJECTION_DEFAULT_COLOR injection_sites = [] for exp in experiments: injection_sites.append(shapes.Sphere(pos=(exp["injection_x"], exp["injection_y"], exp["injection_z"]), r = INJECTION_VOLUME_SIZE*exp["injection_volume"]*3, c=color )) return injection_sites # ---------------------------------------------------------------------------- # # STRUCTURE TREE INTERACTION # # ---------------------------------------------------------------------------- # # ------------------------- Get/Print structures sets ------------------------ # def get_structures_sets(self): """ Get the Allen's structure sets. 
""" summary_structures = self.structure_tree.get_structures_by_set_id([167587189]) # main summary structures summary_structures = [s for s in summary_structures if s["acronym"] not in self.excluded_regions] self.structures = pd.DataFrame(summary_structures) # Other structures sets try: all_sets = pd.DataFrame(self.oapi.get_structure_sets()) except: print("Could not retrieve data, possibly because there is no internet connection. Limited functionality available.") else: sets = ["Summary structures of the pons", "Summary structures of the thalamus", "Summary structures of the hypothalamus", "List of structures for ABA Fine Structure Search", "Structures representing the major divisions of the mouse brain", "Summary structures of the midbrain", "Structures whose surfaces are represented by a precomputed mesh"] self.other_sets = {} for set_name in sets: set_id = all_sets.loc[all_sets.description == set_name].id.values[0] self.other_sets[set_name] = pd.DataFrame(self.structure_tree.get_structures_by_set_id([set_id])) self.all_avaliable_meshes = sorted(self.other_sets["Structures whose surfaces are represented by a precomputed mesh"].acronym.values) def print_structures_list_to_text(self): """ Saves the name of every brain structure for which a 3d mesh (.obj file) is available in a text file. """ s = self.other_sets["Structures whose surfaces are represented by a precomputed mesh"].sort_values('acronym') with open('all_regions.txt', 'w') as o: for acr, name in zip(s.acronym.values, s['name'].values): o.write("({}) -- {}\n".format(acr, name)) def print_structures(self): """ Prints the name of every structure in the structure tree to the console. 
""" acronyms, names = self.structures.acronym.values, self.structures['name'].values sort_idx = np.argsort(acronyms) acronyms, names = acronyms[sort_idx], names[sort_idx] [print("({}) - {}".format(a, n)) for a,n in zip(acronyms, names)] # -------------------------- Parents and descendants ------------------------- # def get_structure_ancestors(self, regions, ancestors=True, descendants=False): """ Get's the ancestors of the region(s) passed as arguments :param regions: str, list of str with acronums of regions of interest :param ancestors: if True, returns the ancestors of the region (Default value = True) :param descendants: if True, returns the descendants of the region (Default value = False) """ if not isinstance(regions, list): struct_id = self.structure_tree.get_structures_by_acronym([regions])[0]['id'] return pd.DataFrame(self.tree_search.get_tree('Structure', struct_id, ancestors=ancestors, descendants=descendants)) else: ancestors = [] for region in regions: struct_id = self.structure_tree.get_structures_by_acronym([region])[0]['id'] ancestors.append(pd.DataFrame(self.tree_search.get_tree('Structure', struct_id, ancestors=ancestors, descendants=descendants))) return ancestors def get_structure_descendants(self, regions): return self.get_structure_ancestors(regions, ancestors=False, descendants=True) def get_structure_parent(self, acronyms): """ Gets the parent of a brain region (or list of regions) from the hierarchical structure of the Allen Brain Atals. :param acronyms: list of acronyms of brain regions. 
""" if not isinstance(acronyms, list): self._check_valid_region_arg(acronyms) s = self.structure_tree.get_structures_by_acronym([acronyms])[0] if s['id'] in self.structures.id.values: return s else: return self.get_structure_ancestors(s['acronym']).iloc[-1] else: parents = [] for region in acronyms: self._check_valid_region_arg(region) s = self.structure_tree.get_structures_by_acronym(acronyms)[0] if s['id'] in self.structures.id.values: parents.append(s) parents.append(self.get_structure_ancestors(s['acronym']).iloc[-1]) return parents # ---------------------------------------------------------------------------- # # UTILS # # ---------------------------------------------------------------------------- # def get_hemisphere_from_point(self, point): if point[2] < self._root_midpoint[2]: return 'left' else: return 'right' def mirror_point_across_hemispheres(self, point): delta = point[2] - self._root_midpoint[2] point[2] = self._root_midpoint[2] - delta return point def get_region_color(self, regions): """ Gets the RGB color of a brain region from the Allen Brain Atlas. :param regions: list of regions acronyms. """ if not isinstance(regions, list): return self.structure_tree.get_structures_by_acronym([regions])[0]['rgb_triplet'] else: return [self.structure_tree.get_structures_by_acronym([r])[0]['rgb_triplet'] for r in regions] def _check_obj_file(self, region, obj_file): """ If the .obj file for a brain region hasn't been downloaded already, this function downloads it and saves it. :param region: string, acronym of brain region :param obj_file: path to .obj file to save downloaded data. 
""" # checks if the obj file has been downloaded already, if not it takes care of downloading it if not os.path.isfile(obj_file): try: if isinstance(region, dict): region = region['acronym'] structure = self.structure_tree.get_structures_by_acronym([region])[0] except Exception as e: raise ValueError(f'Could not find region with name {region}, got error: {e}') try: self.space.download_structure_mesh(structure_id = structure["id"], ccf_version ="annotation/ccf_2017", file_name=obj_file) return True except: print("Could not get mesh for: {}".format(obj_file)) return False else: return True def _get_structure_mesh(self, acronym, **kwargs): """ Fetches the mesh for a brain region from the Allen Brain Atlas SDK. :param acronym: string, acronym of brain region :param **kwargs: """ structure = self.structure_tree.get_structures_by_acronym([acronym])[0] obj_path = os.path.join(self.mouse_meshes, "{}.obj".format(acronym)) if self._check_obj_file(structure, obj_path): mesh = load_mesh_from_file(obj_path, **kwargs) return mesh else: return None def get_region_unilateral(self, region, hemisphere="both", color=None, alpha=None): """ Regions meshes are loaded with both hemispheres' meshes by default. This function splits them in two. :param region: str, actors of brain region :param hemisphere: str, which hemisphere to return ['left', 'right' or 'both'] (Default value = "both") :param color: color of each side's mesh. (Default value = None) :param alpha: transparency of each side's mesh. 
(Default value = None) """ if color is None: color = ROOT_COLOR if alpha is None: alpha = ROOT_ALPHA bilateralmesh = self._get_structure_mesh(region, c=color, alpha=alpha) if bilateralmesh is None: print(f'Failed to get mesh for {region}, returning None') return None com = bilateralmesh.centerOfMass() # this will always give a point that is on the midline cut = bilateralmesh.cutWithPlane(origin=com, normal=(0, 0, 1)) right = bilateralmesh.cutWithPlane( origin=com, normal=(0, 0, 1)) # left is the mirror right # WIP com = self.get_region_CenterOfMass('root', unilateral=False)[2] left = actors_funcs.mirror_actor_at_point(right.clone(), com, axis='x') if hemisphere == "both": return left, right elif hemisphere == "left": return left else: return right @staticmethod def _check_valid_region_arg(region): """ Check that the string passed is a valid brain region name. :param region: string, acronym of a brain region according to the Allen Brain Atlas. """ if not isinstance(region, int) and not isinstance(region, str): raise ValueError("region must be a list, integer or string, not: {}".format(type(region))) else: return True def get_hemispere_from_point(self, p0): if p0[2] > self._root_midpoint[2]: return 'right' else: return 'left' def get_structure_from_coordinates(self, p0, just_acronym=True): """ Given a point in the Allen Mouse Brain reference space, returns the brain region that the point is in. :param p0: list of floats with XYZ coordinates. 
""" voxel = np.round(np.array(p0) / self.resolution).astype(int) try: structure_id = self.annotated_volume[voxel[0], voxel[1], voxel[2]] except: return None # Each voxel in the annotation volume is annotated as specifically as possible structure = self.structure_tree.get_structures_by_id([structure_id])[0] if structure is not None: if just_acronym: return structure['acronym'] return structure def get_colors_from_coordinates(self, p0): """ Given a point or a list of points returns a list of colors where each item is the color of the brain region each point is in """ if isinstance(p0[0], (float, int)): struct = self.get_structure_from_coordinates(p0, just_acronym=False) if struct is not None: return struct['rgb_triplet'] else: return None else: structures = [self.get_structure_from_coordinates(p, just_acronym=False) for p in p0] colors = [struct['rgb_triplet'] if struct is not None else None for struct in structures] return colors # ---------------------------------------------------------------------------- # # TRACTOGRAPHY FETCHING # # ---------------------------------------------------------------------------- # def get_projection_tracts_to_target(self, p0=None, **kwargs): """ Gets tractography data for all experiments whose projections reach the brain region or location of iterest. 
:param p0: list of 3 floats with XYZ coordinates of point to be used as seed (Default value = None) :param **kwargs: """ # check args if p0 is None: raise ValueError("Please pass coordinates") elif isinstance(p0, np.ndarray): p0 = list(p0) elif not isinstance(p0, (list, tuple)): raise ValueError("Invalid argument passed (p0): {}".format(p0)) p0 = [np.int(p) for p in p0] tract = self.mca.experiment_spatial_search(seed_point=p0, **kwargs) if isinstance(tract, str): raise ValueError('Something went wrong with query, query error message:\n{}'.format(tract)) else: return tract # ---------------------------------------------------------------------------- # # STREAMLINES FETCHING # # ---------------------------------------------------------------------------- # def download_streamlines_for_region(self, region, *args, **kwargs): """ Using the Allen Mouse Connectivity data and corresponding API, this function finds expeirments whose injections were targeted to the region of interest and downloads the corresponding streamlines data. By default, experiements are selected for only WT mice and onl when the region was the primary injection target. Look at "ABA.experiments_source_search" to see how to change this behaviour. 
:param region: str with region to use for research :param *args: arguments for ABA.experiments_source_search :param **kwargs: arguments for ABA.experiments_source_search """ # Get experiments whose injections were targeted to the region region_experiments = experiments_source_search(self.mca, region, *args, **kwargs) try: return download_streamlines(region_experiments.id.values, streamlines_folder=self.streamlines_cache) except: print(f"Could not download streamlines for region {region}") return [], [] # <- there were no experiments in the target region def download_streamlines_to_region(self, p0, *args, mouse_line = "wt", **kwargs): """ Using the Allen Mouse Connectivity data and corresponding API, this function finds injection experiments which resulted in fluorescence being found in the target point, then downloads the streamlines data. :param p0: list of floats with XYZ coordinates :param mouse_line: str with name of the mouse line to use(Default value = "wt") :param *args: :param **kwargs: """ experiments = pd.DataFrame(self.get_projection_tracts_to_target(p0=p0)) if mouse_line == "wt": experiments = experiments.loc[experiments["transgenic-line"] == ""] else: if not isinstance(mouse_line, list): experiments = experiments.loc[experiments["transgenic-line"] == mouse_line] else: raise NotImplementedError("ops, you've found a bug!. For now you can only pass one mouse line at the time, sorry.") return download_streamlines(experiments.id.values, streamlines_folder=self.streamlines_cache)