def main(output_dir, resolution, retain_transform_data, zoom,
         parallel_downloads, parallel_processors):
    mcc = MouseConnectivityCache(
        manifest_file=
        f'{output_dir}/connectivity/mouse_connectivity_manifest.json',
        resolution=resolution)
    mcc.get_annotation_volume()
    mcc.get_reference_space()
    experiments = [e['id'] for e in mcc.get_experiments(dataframe=False)]
    experiments = sorted(list(set(experiments)))

    try:
        os.makedirs(f'{output_dir}/data/input')
        for e in experiments:
            os.makedirs(f'{output_dir}/data/input/{e}')
    except FileExistsError:
        for e in experiments:
            if os.path.isdir(f'{output_dir}/data/input/{e}'):
                shutil.rmtree(f'{output_dir}/data/input/{e}')
                os.makedirs(f'{output_dir}/data/input/{e}')

    downloads = [
        subprocess.Popen([
            "python", "./displacement_data_downloader.py", f"-o{output_dir}",
            f"-r{resolution}", f"-n{i}"
        ]) for i in range(parallel_downloads)
    ]
    processes = [
        subprocess.Popen([
            "python", "./segmentation_data_builder.py", f"-o{output_dir}",
            f"-r{resolution}", f"-c{len(experiments)}", f"-z{zoom}"
        ] + (["-t"] if retain_transform_data else []) + [f"-n{i}"])
        for i in range(parallel_processors)
    ]
    exit_codes = [p.wait() for p in processes + downloads]
Example #2
def get_anatomical_info(self):
    manifest_path = './connectivity/mouse_connectivity_manifest.json'
    mcc = MouseConnectivityCache(resolution=self.resolution, manifest_file=manifest_path)
    tree = mcc.get_structure_tree()
    annotation, _ = mcc.get_annotation_volume()

    # acronyms of all annotated structures that overlap the mask
    self.areas_in_blob = list(set(
        tree.get_structures_by_id([i])[0]['acronym']
        for i in annotation[np.where(self.mask > 0)] if i > 0))

    # acronyms of the structures at the cuboid centre points
    self.cuboid_center_areas = [
        tree.get_structures_by_id([i])[0]['acronym']
        for i in annotation[tuple(self.cuboid_points.T)] if i > 0]
Example #3
class SegmentationDataDownloader(DirWatcher):
    def __init__(self, output_dir, number, resolution):
        super().__init__(
            *[
                os.path.join(output_dir, d)
                for d in ['data/input', 'data/dl', 'data/ready']
            ], f'downloader-{number}')
        self.mcc = MouseConnectivityCache(
            manifest_file=
            f'{output_dir}/connectivity/mouse_connectivity_manifest.json',
            resolution=resolution)
        self.mcc.get_annotation_volume()
        self.mcc.get_reference_space()

    def process_item(self, item, directory):
        self.logger.info(f"Download displacement data for {item}")
        self.mcc.get_deformation_field(item,
                                       header_path=f'{directory}/dfmfld.mhd',
                                       voxel_path=f'{directory}/dfmfld.raw')
        self.mcc.get_affine_parameters(item,
                                       direction='trv',
                                       file_name=f'{directory}/aff_param.txt')
Example #4
class SegmentationDataBuilder(DirWatcher):
    def __init__(self, output_dir, resolution, retain_transform_data, zoom,
                 number, count):
        super().__init__(
            *[
                os.path.join(output_dir, d)
                for d in ['data/ready', 'data/proc', 'data/result']
            ], f'segmentation-data-builder-{number}')
        self.count = count
        self.zoom = zoom
        self.retain_transform_data = retain_transform_data
        self.resolution = resolution
        self.output_dir = output_dir
        self.mcc = MouseConnectivityCache(
            manifest_file=
            f'{output_dir}/connectivity/mouse_connectivity_manifest.json',
            resolution=resolution)
        self.anno, self.meta = self.mcc.get_annotation_volume()
        self.rsp = self.mcc.get_reference_space()
        self.rsp.remove_unassigned()  # This removes ids that are not in this particular reference space

    def process_item(self, item, directory):
        item = int(item)
        experiment = ExperimentSectionData(
            self.mcc,
            item,
            directory,
            self.anno,
            self.meta,
            self.rsp,
            self.logger,
            zoom=self.zoom,
            remove_transform_data=not self.retain_transform_data)
        experiment.create_section_data()
        experiment.cleanup()
Example #5
File: test1.py Project: busybus/as
# query the API for information on those structure sets
#print(oapi.get_structure_sets(structure_set_ids))

#
#structures = structure_tree.get_structures_by_set_id([167587189])
#print(structures)

# Projection grid data volume

experiment_id = 181599674

# projection density: number of projecting pixels / voxel volume
pd, pd_info = mcc.get_projection_density(experiment_id)

# injection density: number of projecting pixels in injection site / voxel volume
ind, ind_info = mcc.get_injection_density(experiment_id)

# injection fraction: number of pixels in injection site / voxel volume
inf, inf_info = mcc.get_injection_fraction(experiment_id)

# data mask:
# binary mask indicating which voxels contain valid data
dm, dm_info = mcc.get_data_mask(experiment_id)

template, template_info = mcc.get_template_volume()
annot, annot_info = mcc.get_annotation_volume()

print(pd_info)
print(pd.shape, template.shape, annot.shape)
Example #6
    def launch(self, view_model):
        resolution = view_model.resolution
        weighting = view_model.weighting
        inj_f_thresh = view_model.inj_f_thresh / 100.
        vol_thresh = view_model.vol_thresh

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(
            project.name)
        manifest_file = os.path.join(manifest_file,
                                     'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution,
                                       manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = dictionary_builder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = download_an_construct_matrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pms_cleaner(projmaps)

        # download from the AllenSDK the annotation volume, the template volume
        vol, annot_info = cache.get_annotation_volume()
        template, template_info = cache.get_template_volume()

        # rotate template in the TVB 3D reference:
        template = rotate_reference(template)

        # grab the StructureTree instance
        structure_tree = cache.get_structure_tree()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = areas_volume_threshold(cache, projmaps, vol_thresh,
                                          resolution)

        # the method excludes from the experimental dataset
        # those experiments where the injected fraction of pixels in the injection site is lower than inj_f_thresh
        projmaps = infected_threshold(cache, projmaps, inj_f_thresh)

        # the method creates file order and keyword that will be the link between the SC order and the
        # id key in the Allen database
        [order, key_ord] = create_file_order(projmaps, structure_tree)

        # the method builds the Structural Connectivity (SC) matrix
        structural_conn = construct_structural_conn(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = construct_centres(cache, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = construct_tract_lengths(centres)

        # the method associates the parent and the grandparent with the biggest volume
        # to each child area in the selected parcellation
        [unique_parents, unique_grandparents
         ] = parents_and_grandparents_finder(cache, order, key_ord,
                                             structure_tree)

        # the method returns a volume indexed between 0 and N-1, with N=tot brain areas in the parcellation.
        # -1=background and areas that are not in the parcellation
        vol_parcel = mouse_brain_visualizer(vol, order, key_ord,
                                            unique_parents,
                                            unique_grandparents,
                                            structure_tree, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity()
        result_connectivity.centres = centres
        result_connectivity.region_labels = numpy.array(names)
        result_connectivity.weights = structural_conn
        result_connectivity.tract_lengths = tract_lengths
        result_connectivity.configure()
        # Volume
        result_volume = Volume()
        result_volume.origin = numpy.array([[0.0, 0.0, 0.0]])
        result_volume.voxel_size = numpy.array(
            [resolution, resolution, resolution])
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping()
        result_rvm.volume = result_volume
        result_rvm.array_data = vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI()
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume

        connectivity_index = h5.store_complete(result_connectivity,
                                               self.storage_path)
        volume_index = h5.store_complete(result_volume, self.storage_path)
        rvm_index = h5.store_complete(result_rvm, self.storage_path)
        template_index = h5.store_complete(result_template, self.storage_path)

        return [connectivity_index, volume_index, rvm_index, template_index]
Example #7
    def launch(self, resolution, weighting, inf_vox_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inf_vox_thresh = float(inf_vox_thresh)
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(
            project.name)
        manifest_file = os.path.join(manifest_file,
                                     'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution,
                                       manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = DictionaireBuilder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = DownloadAndConstructMatrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pmsCleaner(projmaps)

        #download from the AllenSDK the annotation volume, the ontology, the template volume
        Vol, annot_info = cache.get_annotation_volume()
        ontology = cache.get_ontology()
        template, template_info = cache.get_template_volume()

        #rotate template in the TVB 3D reference:
        template = RotateReference(template)

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = AreasVolumeTreshold(cache, projmaps, vol_thresh, resolution,
                                       Vol, ontology)

        # the method includes in the parcellation only brain regions where at least one injection experiment had infected more than N voxels (where N is inf_vox_thresh)
        projmaps = AreasVoxelTreshold(cache, projmaps, inf_vox_thresh, Vol,
                                      ontology)

        # the method creates file order and key_ord that will be the link between the SC order and the id key in the Allen database
        [order, key_ord] = CreateFileOrder(projmaps, ontology)

        # the method builds the Structural Connectivity (SC) matrix
        SC = ConstructingSC(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = Construct_centres(cache, ontology, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = ConstructTractLengths(centres)

        # the method associates the parent and the grandparent with the biggest volume to each child area in the selected parcellation
        [unique_parents, unique_grandparents
         ] = ParentsAndGrandParentsFinder(cache, order, key_ord, ontology)

        # the method returns a volume indexed between 0 and N-1, with N=tot brain areas in the parcellation. -1=background and areas that are not in the parcellation
        Vol_parcel = MouseBrainVisualizer(Vol, order, key_ord, unique_parents,
                                          unique_grandparents, ontology,
                                          projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = SC
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = Vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI(storage_path=self.storage_path)
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume
        return [
            result_connectivity, result_volume, result_rvm, result_template
        ]
Example #8
def test_notebook(fn_temp_dir):

    # coding: utf-8

    # ## Mouse Connectivity
    # 
    # This notebook demonstrates how to access and manipulate data in the Allen Mouse Brain Connectivity Atlas. The `MouseConnectivityCache` AllenSDK class provides methods for downloading metadata about experiments, including their viral injection site and the mouse's transgenic line. You can request information either as a Pandas DataFrame or a simple list of dictionaries.
    # 
    # An important feature of the `MouseConnectivityCache` is how it stores and retrieves data for you. By default, it will create (or read) a manifest file that keeps track of where various connectivity atlas data are stored. If you request something that has not already been downloaded, it will download it and store it in a well known location.
    # 
    # Download this notebook in .ipynb format <a href='mouse_connectivity.ipynb'>here</a>.

    # In[1]:

    from allensdk.core.mouse_connectivity_cache import MouseConnectivityCache

    # The manifest file is a simple JSON file that keeps track of all of
    # the data that has already been downloaded onto the hard drives.
    # If you supply a relative path, it is assumed to be relative to your
    # current working directory.
    mcc = MouseConnectivityCache(manifest_file='connectivity/mouse_connectivity_manifest.json')

    # open up a list of all of the experiments
    all_experiments = mcc.get_experiments(dataframe=True)
    print("%d total experiments" % len(all_experiments))

    # take a look at what we know about an experiment with a primary motor injection
    all_experiments.loc[122642490]


    # `MouseConnectivityCache` has a method for retrieving the adult mouse structure tree as a `StructureTree` class instance. This is a wrapper around a list of dictionaries, where each dictionary describes a structure. It is principally useful for looking up structures by their properties.

    # In[2]:

    # pandas for nice tables
    import pandas as pd

    # grab the StructureTree instance
    structure_tree = mcc.get_structure_tree()

    # get info on some structures
    structures = structure_tree.get_structures_by_name(['Primary visual area', 'Hypothalamus'])
    pd.DataFrame(structures)


    # As a convenience, structures are grouped into named collections called "structure sets". These sets can be used to quickly gather a useful subset of structures from the tree. The criteria used to define structure sets are eclectic; a structure set might list:
    # 
    # * structures that were used in a particular project.
    # * structures that coarsely partition the brain.
    # * structures that bear functional similarity.
    # 
    # or something else entirely. To view all of the available structure sets along with their descriptions, follow this [link](http://api.brain-map.org/api/v2/data/StructureSet/query.json). To see only structure sets relevant to the adult mouse brain, use the StructureTree:

    # In[3]:

    from allensdk.api.queries.ontologies_api import OntologiesApi

    oapi = OntologiesApi()

    # get the ids of all the structure sets in the tree
    structure_set_ids = structure_tree.get_structure_sets()

    # query the API for information on those structure sets
    pd.DataFrame(oapi.get_structure_sets(structure_set_ids))


    # On the connectivity atlas web site, you'll see that we show most of our data at a fairly coarse structure level. We did this by creating a structure set of ~300 structures, which we call the "summary structures". We can use the structure tree to get all of the structures in this set:

    # In[4]:

    # From the above table, "Mouse Connectivity - Summary" has id 167587189
    summary_structures = structure_tree.get_structures_by_set_id([167587189])
    pd.DataFrame(summary_structures)


    # This is how you can filter experiments by transgenic line:

    # In[5]:

    # fetch the experiments that have injections in the isocortex of cre-positive mice
    isocortex = structure_tree.get_structures_by_name(['Isocortex'])[0]
    cre_cortical_experiments = mcc.get_experiments(cre=True, 
                                                    injection_structure_ids=[isocortex['id']])

    print("%d cre cortical experiments" % len(cre_cortical_experiments))

    # same as before, but restrict the cre line
    rbp4_cortical_experiments = mcc.get_experiments(cre=[ 'Rbp4-Cre_KL100' ], 
                                                    injection_structure_ids=[isocortex['id']])


    print("%d Rbp4 cortical experiments" % len(rbp4_cortical_experiments))


    # ## Structure Signal Unionization
    # 
    # The ProjectionStructureUnionizes API data tells you how much signal there was in a given structure and experiment. It contains the density of projecting signal, volume of projecting signal, and other information. `MouseConnectivityCache` provides methods for querying and storing this data.

    # In[6]:

    # find wild-type injections into primary visual area
    visp = structure_tree.get_structures_by_acronym(['VISp'])[0]
    visp_experiments = mcc.get_experiments(cre=False, 
                                           injection_structure_ids=[visp['id']])

    print("%d VISp experiments" % len(visp_experiments))

    structure_unionizes = mcc.get_structure_unionizes([ e['id'] for e in visp_experiments ], 
                                                      is_injection=False,
                                                      structure_ids=[isocortex['id']],
                                                      include_descendants=True)

    print("%d VISp non-injection, cortical structure unionizes" % len(structure_unionizes))


    # In[7]:

    structure_unionizes.head()


    # This is a rather large table, even for a relatively small number of experiments.  You can filter it down to a smaller list of structures like this.

    # In[8]:

    dense_unionizes = structure_unionizes[ structure_unionizes.projection_density > .5 ]
    large_unionizes = dense_unionizes[ dense_unionizes.volume > .5 ]
    large_structures = pd.DataFrame(structure_tree.nodes(large_unionizes.structure_id))

    print("%d large, dense, cortical, non-injection unionizes, %d structures" % ( len(large_unionizes), len(large_structures) ))

    print(large_structures.name)

    large_unionizes


    # ## Generating a Projection Matrix
    # The `MouseConnectivityCache` class provides a helper method for converting ProjectionStructureUnionize records for a set of experiments and structures into a matrix.  This code snippet demonstrates how to make a matrix of projection density values in auditory sub-structures for cre-negative VISp experiments. 

    # In[9]:

    import numpy as np
    import matplotlib.pyplot as plt
    import warnings
    warnings.filterwarnings('ignore')

    visp_experiment_ids = [ e['id'] for e in visp_experiments ]
    ctx_children = structure_tree.child_ids( [isocortex['id']] )[0]

    pm = mcc.get_projection_matrix(experiment_ids = visp_experiment_ids, 
                                   projection_structure_ids = ctx_children,
                                   hemisphere_ids= [2], # right hemisphere, ipsilateral
                                   parameter = 'projection_density')

    row_labels = pm['rows'] # these are just experiment ids
    column_labels = [ c['label'] for c in pm['columns'] ] 
    matrix = pm['matrix']

    fig, ax = plt.subplots(figsize=(15,15))
    heatmap = ax.pcolor(matrix, cmap=plt.cm.afmhot)

    # put the major ticks at the middle of each cell
    ax.set_xticks(np.arange(matrix.shape[1])+0.5, minor=False)
    ax.set_yticks(np.arange(matrix.shape[0])+0.5, minor=False)

    ax.set_xlim([0, matrix.shape[1]])
    ax.set_ylim([0, matrix.shape[0]])          

    # want a more natural, table-like display
    ax.invert_yaxis()
    ax.xaxis.tick_top()

    ax.set_xticklabels(column_labels, minor=False)
    ax.set_yticklabels(row_labels, minor=False)

    # ## Manipulating Grid Data
    # 
    # The `MouseConnectivityCache` class also helps you download and open every experiment's projection grid data volume. By default it will download 25um volumes, but you could also download data at other resolutions if you prefer (10um, 50um, 100um).
    # 
    # This demonstrates how you can load the projection density for a particular experiment. It also shows how to download the template volume to which all grid data is registered. Voxels in that template have been structurally annotated by neuroanatomists and the resulting labels stored in a separate annotation volume image.
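
    # A minimal sketch, not part of the original notebook: to work with grid data at a
    # different voxel size, you could construct a cache with an explicit `resolution`.
    # The manifest filename below is an illustrative choice; volumes are only downloaded
    # by later get_* calls.
    mcc_10um = MouseConnectivityCache(
        resolution=10,
        manifest_file='connectivity/mouse_connectivity_manifest_10um.json')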

    # In[10]:

    # we'll take this experiment - an injection into the primary somatosensory - as an example
    experiment_id = 181599674


    # In[11]:

    # projection density: number of projecting pixels / voxel volume
    pd, pd_info = mcc.get_projection_density(experiment_id)

    # injection density: number of projecting pixels in injection site / voxel volume
    ind, ind_info = mcc.get_injection_density(experiment_id)

    # injection fraction: number of pixels in injection site / voxel volume
    inf, inf_info = mcc.get_injection_fraction(experiment_id)

    # data mask:
    # binary mask indicating which voxels contain valid data
    dm, dm_info = mcc.get_data_mask(experiment_id)

    template, template_info = mcc.get_template_volume()
    annot, annot_info = mcc.get_annotation_volume()

    # in addition to the annotation volume, you can get binary masks for individual structures
    # in this case, we'll get one for the isocortex
    cortex_mask, cm_info = mcc.get_structure_mask(315)

    print(pd_info)
    print(pd.shape, template.shape, annot.shape)


    # Once you have these loaded, you can use matplotlib to see what they look like.

    # In[12]:

    # compute the maximum intensity projection (along the anterior-posterior axis) of the projection data
    pd_mip = pd.max(axis=0)
    ind_mip = ind.max(axis=0)
    inf_mip = inf.max(axis=0)

    # show that slice of all volumes side-by-side
    f, pr_axes = plt.subplots(1, 3, figsize=(15, 6))

    pr_axes[0].imshow(pd_mip, cmap='hot', aspect='equal')
    pr_axes[0].set_title("projection density MaxIP")

    pr_axes[1].imshow(ind_mip, cmap='hot', aspect='equal')
    pr_axes[1].set_title("injection density MaxIP")

    pr_axes[2].imshow(inf_mip, cmap='hot', aspect='equal')
    pr_axes[2].set_title("injection fraction MaxIP")


    # In[13]:

    # Look at a slice from the average template and annotation volumes

    # pick a slice to show
    slice_idx = 264

    f, ccf_axes = plt.subplots(1, 3, figsize=(15, 6))

    ccf_axes[0].imshow(template[slice_idx,:,:], cmap='gray', aspect='equal', vmin=template.min(), vmax=template.max())
    ccf_axes[0].set_title("registration template")

    ccf_axes[1].imshow(annot[slice_idx,:,:], cmap='gray', aspect='equal', vmin=0, vmax=2000)
    ccf_axes[1].set_title("annotation volume")

    ccf_axes[2].imshow(cortex_mask[slice_idx,:,:], cmap='gray', aspect='equal', vmin=0, vmax=1)
    ccf_axes[2].set_title("isocortex mask")


    # On occasion the TissueCyte microscope fails to acquire a tile. In this case the data from that tile should not be used for analysis. The data mask associated with each experiment can be used to determine which portions of the grid data came from correctly acquired tiles.
    # 
    # In this experiment, a missed tile can be seen in the data mask as a dark warped square. The values in the mask exist within [0, 1], describing the fraction of each voxel that was correctly acquired.

    # In[14]:

    f, data_mask_axis = plt.subplots(figsize=(5, 6))

    data_mask_axis.imshow(dm[81, :, :], cmap='hot', aspect='equal', vmin=0, vmax=1)
    data_mask_axis.set_title('data mask')
Example #9
reference_template_path = os.path.join(reference_path, 'average_template_50_reoriented.nii.gz')
reference_annotation_path = os.path.join(reference_path, 'annotation_50_reoriented.nii.gz')

allen_dir = '/home/enzo/Desktop/allen'
allen_fsl_dir = '/usr/local/fsl/data/standard/allen_new'
Manifest.safe_mkdir(allen_dir)

# this is a string which contains the name of the latest ccf version
allen_version = ReferenceSpaceApi.CCF_VERSION_DEFAULT
allen_resolution = 50 # Set resolution in micrometers



# Download data
mcc = MouseConnectivityCache(resolution=allen_resolution)
annot, annot_header = mcc.get_annotation_volume()
template, template_header = mcc.get_template_volume()

# Define paths
allen_annotation_path = os.path.join(allen_dir, 'annotation_'+str(allen_resolution)+'.nii.gz')
allen_annotation_remapped_path = os.path.join(allen_dir, 'annotation_'+str(allen_resolution)+'_remapped.nii.gz')
allen_average_template_path=os.path.join(allen_dir, 'average_template_'+str(allen_resolution)+'.nii.gz')

# Save nifti
qform = np.array([[0, 0, allen_resolution * pow(10, -3), 0],
                  [-allen_resolution * pow(10, -3), 0, 0, 0],
                  [0, -allen_resolution * pow(10, -3), 0, 0],
                  [0, 0, 0, 1]])
img_annotation = nib.Nifti1Image(annot, np.eye(4))
img_average_template = nib.Nifti1Image(template, np.eye(4))
img_annotation.set_qform(qform, code=1)
img_average_template.set_qform(qform, code=1)
img_annotation.set_sform(np.eye(4), code=0)
img_average_template.set_sform(np.eye(4), code=0)
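
# A minimal completion sketch (assuming the "Save nifti" step above is meant to write the
# volumes to the paths defined earlier); nibabel's nib.save writes the images to disk.
nib.save(img_annotation, allen_annotation_path)
nib.save(img_average_template, allen_average_template_path)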
Example #10
    def launch(self, resolution, weighting, inj_f_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inj_f_thresh = float(inj_f_thresh)/100.
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, 'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = dictionary_builder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = download_an_construct_matrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pms_cleaner(projmaps)

        # download from the AllenSDK the annotation volume, the template volume
        vol, annot_info = cache.get_annotation_volume()
        template, template_info = cache.get_template_volume()

        # rotate template in the TVB 3D reference:
        template = rotate_reference(template)

        # grab the StructureTree instance
        structure_tree = cache.get_structure_tree()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = areas_volume_threshold(cache, projmaps, vol_thresh, resolution)
        
        # the method excludes from the experimental dataset
        # those experiments where the injected fraction of pixels in the injection site is lower than inj_f_thresh
        projmaps = infected_threshold(cache, projmaps, inj_f_thresh)

        # the method creates file order and keyword that will be the link between the SC order and the
        # id key in the Allen database
        [order, key_ord] = create_file_order(projmaps, structure_tree)

        # the method builds the Structural Connectivity (SC) matrix
        structural_conn = construct_structural_conn(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = construct_centres(cache, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = construct_tract_lengths(centres)

        # the method associates the parent and the grandparent with the biggest volume
        # to each child area in the selected parcellation
        [unique_parents, unique_grandparents] = parents_and_grandparents_finder(cache, order, key_ord, structure_tree)

        # the method returns a volume indexed between 0 and N-1, with N=tot brain areas in the parcellation.
        # -1=background and areas that are not in the parcellation
        vol_parcel = mouse_brain_visualizer(vol, order, key_ord, unique_parents, unique_grandparents,
                                            structure_tree, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = structural_conn
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI(storage_path=self.storage_path)
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume
        return [result_connectivity, result_volume, result_rvm, result_template]
Example #11
    def launch(self, resolution, weighting, inf_vox_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inf_vox_thresh = float(inf_vox_thresh)
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, "mouse_connectivity_manifest.json")
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = DictionaireBuilder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = DownloadAndConstructMatrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pmsCleaner(projmaps)

        Vol, annot_info = cache.get_annotation_volume()
        ontology = cache.get_ontology()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = AreasVolumeTreshold(projmaps, vol_thresh, resolution, Vol, ontology)

        # the method includes in the parcellation only brain regions where at least one injection experiment had infected more than N voxels (where N is inf_vox_thresh)
        projmaps = AreasVoxelTreshold(cache, projmaps, inf_vox_thresh, Vol, ontology)

        # the method creates file order and key_ord that will be the link between the SC order and the id key in the Allen database
        [order, key_ord] = CreateFileOrder(projmaps, ontology)

        # the method builds the Structural Connectivity (SC) matrix
        SC = ConstructingSC(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = Construct_centres(ontology, order, key_ord, Vol)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = ConstructTractLengths(centres)

        # the method associates the parent and the grandparent with the biggest volume to each child area in the selected parcellation
        [unique_parents, unique_grandparents] = ParentsAndGrandParentsFinder(order, key_ord, Vol, ontology)

        # the method returns a volume indexed between 0 and N-1, with N=tot brain areas in the parcellation. -1=background and areas that are not in the parcellation
        Vol_parcel = MouseBrainVisualizer(Vol, order, key_ord, unique_parents, unique_grandparents, ontology, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = SC
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = Vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        return [result_connectivity, result_rvm, result_volume]
Example #12
'''
@author wronk

Script to calculate the volume of the mouse brain from the common coordinate
frame developed by ABI. This requires the connectivity dataset and the
allensdk.

Output from connectivity dataset (as of Sept 2015) is below.
'''

import numpy as np
from allensdk.core.mouse_connectivity_cache import (MouseConnectivityCache as
                                                    MCC)

# load in nrrd data
mcc = MCC(manifest_file='/Volumes/Brain2015/connectivity/manifest.json')
annot, annot_info = mcc.get_annotation_volume()

# get all voxels that are non-zeros
brain_voxels = annot.flatten() > 0
sum_vox = np.sum(brain_voxels)

print('Found %d voxels' % sum_vox)
print('Density is %0.3f percent' % (100. * sum_vox / len(brain_voxels)))

# Get size of each box, make sure they're in cubes
width_dims = []
for di in range(3):
    width_dims.append(annot_info['space directions'][di][di])
assert len(set(width_dims)) == 1

# width_dims in um, convert to cm, get cm^3
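
# A minimal completion sketch (the script is truncated here): convert the voxel edge
# length from micrometers to centimeters and estimate the total brain volume as
# (number of brain voxels) * (voxel edge length in cm)^3.
width_cm = width_dims[0] * 1e-4  # 1 um = 1e-4 cm
brain_volume_cm3 = sum_vox * width_cm ** 3
print('Estimated brain volume: %0.3f cm^3' % brain_volume_cm3)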