Example #1
    def launch(self, data_file, apply_corrections=False, connectivity=None):
        """
        Execute import operations.
        """

        try:
            parser = NIFTIParser(data_file)

            # Create volume DT
            volume = Volume(storage_path=self.storage_path)
            volume.set_operation_id(self.operation_id)
            volume.origin = [[0.0, 0.0, 0.0]]
            volume.voxel_size = [
                parser.zooms[0], parser.zooms[1], parser.zooms[2]
            ]
            if parser.units is not None and len(parser.units) > 0:
                volume.voxel_unit = parser.units[0]

            if parser.has_time_dimension or not connectivity:
                # Now create TimeSeries and fill it with data from NIFTI image
                time_series = TimeSeriesVolume(storage_path=self.storage_path)
                time_series.set_operation_id(self.operation_id)
                time_series.volume = volume
                time_series.title = "NIFTI Import - " + os.path.split(
                    data_file)[1]
                time_series.labels_ordering = ["Time", "X", "Y", "Z"]
                time_series.start_time = 0.0

                if len(parser.zooms) > 3:
                    time_series.sample_period = float(parser.zooms[3])
                else:
                    # If no time dim, set sampling to 1 sec
                    time_series.sample_period = 1

                if parser.units is not None and len(parser.units) > 1:
                    time_series.sample_period_unit = parser.units[1]

                parser.parse(time_series, True)
                return [volume, time_series]

            else:
                region2volume_mapping = RegionVolumeMapping(
                    storage_path=self.storage_path)
                region2volume_mapping.set_operation_id(self.operation_id)
                region2volume_mapping.volume = volume
                region2volume_mapping.connectivity = connectivity
                region2volume_mapping.title = "NIFTI Import - " + os.path.split(
                    data_file)[1]
                region2volume_mapping.dimensions_labels = ["X", "Y", "Z"]
                region2volume_mapping.apply_corrections = apply_corrections

                parser.parse(region2volume_mapping, False)
                return [volume, region2volume_mapping]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
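The branch on parser.has_time_dimension or not connectivity decides what comes back: a time-resolved NIFTI (or a call without a connectivity) produces a TimeSeriesVolume, while a static parcellation plus a connectivity produces a RegionVolumeMapping. A minimal calling sketch, assuming a hypothetical importer instance of this adapter and an already-loaded connectivity datatype:

    # hedged usage sketch; "importer" and "connectivity" are assumed objects
    volume, time_series = importer.launch("subject_4d.nii")       # time dimension -> TimeSeriesVolume
    volume, mapping = importer.launch("parcellation.nii",
                                      apply_corrections=True,
                                      connectivity=connectivity)  # static volume -> RegionVolumeMapping
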
Example #2
    def _base_before_launch(self, data_file, region_volume_gid):
        if data_file is None:
            raise LaunchException("Please select a file to import!")

        if region_volume_gid is not None:
            rvm_h5 = h5.h5_file_for_gid(region_volume_gid)
            self._attempt_to_cache_regionmap(rvm_h5)
            self.region_volume_shape = rvm_h5.read_data_shape()
            self.region_volume_h5 = rvm_h5

        datatype = Tracts()
        dummy_rvm = RegionVolumeMapping()
        dummy_rvm.gid = region_volume_gid
        datatype.region_volume_map = dummy_rvm
        return datatype
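The dummy mapping at the end is a lazy-reference pattern: only the GID is stored on the Tracts datatype, so the mapping's array data is never read here. A minimal sketch of the same pattern, with a made-up GID:

    # sketch of the GID-only reference used above; the GID value is hypothetical
    dummy_rvm = RegionVolumeMapping()
    dummy_rvm.gid = "00000000-0000-0000-0000-000000000000"  # assumed GID string
    tracts = Tracts()
    tracts.region_volume_map = dummy_rvm  # stored by reference; no array data loaded
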
Example #3
class TractData(MappedType):
    vertices = arrays.PositionArray(
        label="Vertex positions",
        file_storage=core.FILE_STORAGE_EXPAND,
        order=-1,
        doc="""An array specifying coordinates for the tracts vertices.""")

    tract_start_idx = arrays.IntegerArray(
        label="Tract starting indices",
        order=-1,
        doc="""Where is the first vertex of a tract in the vertex array""")

    tract_region = arrays.IntegerArray(
        label="Tract region index",
        required=False,
        order=-1,
        doc="""
        An index used to quickly find all tracts emerging from a region.
        tract_region[i] is the region of the i'th tract; -1 represents the background.
        """
    )

    region_volume_map = RegionVolumeMapping(
        label="Region volume Mapping used to create the tract_region index",
        required=False,
        order=-1
    )

    __generate_table__ = True

    @property
    def tracts_count(self):
        return len(self.tract_start_idx) - 1
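tract_start_idx follows the usual CSR-style convention: N tracts are described by N + 1 offsets into the vertex array, which is why tracts_count is len(tract_start_idx) - 1. A small sketch of the layout, with hypothetical data:

    import numpy

    # hypothetical layout: two tracts with 3 and 2 vertices
    vertices = numpy.zeros((5, 3))            # five (x, y, z) rows
    tract_start_idx = numpy.array([0, 3, 5])  # N + 1 offsets for N tracts
    # tract i spans vertices[tract_start_idx[i]:tract_start_idx[i + 1]]
    assert len(tract_start_idx) - 1 == 2      # tracts_count
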
Example #4
    def _create_region_map(self, volume, connectivity, apply_corrections):
        region2volume_mapping = RegionVolumeMapping(
            storage_path=self.storage_path)
        region2volume_mapping.set_operation_id(self.operation_id)
        region2volume_mapping.volume = volume
        region2volume_mapping.connectivity = connectivity
        region2volume_mapping.title = "NIFTI Import - " + os.path.split(
            self.data_file)[1]
        region2volume_mapping.dimensions_labels = ["X", "Y", "Z"]
        region2volume_mapping.apply_corrections = apply_corrections

        self.parser.parse(region2volume_mapping, False)
        return region2volume_mapping
Example #5
    def _create_region_map(self, volume, connectivity, apply_corrections):
        region2volume_mapping = RegionVolumeMapping(storage_path=self.storage_path)
        region2volume_mapping.set_operation_id(self.operation_id)
        region2volume_mapping.volume = volume
        region2volume_mapping.connectivity = connectivity
        region2volume_mapping.title = "NIFTI Import - " + os.path.split(self.data_file)[1]
        region2volume_mapping.dimensions_labels = ["X", "Y", "Z"]
        region2volume_mapping.apply_corrections = apply_corrections

        self.parser.parse(region2volume_mapping, False)
        return region2volume_mapping
Example #6
    def launch(self, data_file, apply_corrections=False, connectivity=None):
        """
        Execute import operations.
        """

        try:
            parser = NIFTIParser(data_file)

            # Create volume DT
            volume = Volume(storage_path=self.storage_path)
            volume.set_operation_id(self.operation_id)
            volume.origin = [[0.0, 0.0, 0.0]]
            volume.voxel_size = [parser.zooms[0], parser.zooms[1], parser.zooms[2]]
            if parser.units is not None and len(parser.units) > 0:
                volume.voxel_unit = parser.units[0]

            if parser.has_time_dimension or not connectivity:
                # Now create TimeSeries and fill it with data from NIFTI image
                time_series = TimeSeriesVolume(storage_path=self.storage_path)
                time_series.set_operation_id(self.operation_id)
                time_series.volume = volume
                time_series.title = "NIFTI Import - " + os.path.split(data_file)[1]
                time_series.labels_ordering = ["Time", "X", "Y", "Z"]
                time_series.start_time = 0.0

                if len(parser.zooms) > 3:
                    time_series.sample_period = float(parser.zooms[3])
                else:
                    # If no time dim, set sampling to 1 sec
                    time_series.sample_period = 1

                if parser.units is not None and len(parser.units) > 1:
                    time_series.sample_period_unit = parser.units[1]

                parser.parse(time_series, True)
                return [volume, time_series]

            else:
                region2volume_mapping = RegionVolumeMapping(storage_path=self.storage_path)
                region2volume_mapping.set_operation_id(self.operation_id)
                region2volume_mapping.volume = volume
                region2volume_mapping.connectivity = connectivity
                region2volume_mapping.title = "NIFTI Import - " + os.path.split(data_file)[1]
                region2volume_mapping.dimensions_labels = ["X", "Y", "Z"]
                region2volume_mapping.apply_corrections = apply_corrections

                parser.parse(region2volume_mapping, False)
                return [volume, region2volume_mapping]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example #7
    def _create_region_map(self, volume, connectivity, apply_corrections, mappings_file, title):

        nifti_data = self.parser.parse()
        nifti_data = self._apply_corrections_and_mapping(nifti_data, apply_corrections,
                                                         mappings_file, connectivity.number_of_regions)
        rvm = RegionVolumeMapping()
        rvm.title = title
        rvm.dimensions_labels = ["X", "Y", "Z"]
        rvm.volume = volume
        rvm.connectivity = h5.load_from_index(connectivity)
        rvm.array_data = nifti_data

        return h5.store_complete(rvm, self.storage_path)
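Unlike Examples #4 and #5, this variant fills array_data in memory and persists the result with h5.store_complete, which returns the stored index rather than the datatype itself. A hedged sketch of a call site, with hypothetical arguments:

    # hypothetical call; "adapter", "volume" and "conn_index" are assumed objects
    rvm_index = adapter._create_region_map(volume, conn_index,
                                           apply_corrections=True,
                                           mappings_file=None,
                                           title="NIFTI Import - parcellation.nii")
    # rvm_index is whatever h5.store_complete returned for the RegionVolumeMapping
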
Example #8
    def launch(self, view_model):
        resolution = view_model.resolution
        weighting = view_model.weighting
        inj_f_thresh = view_model.inj_f_thresh / 100.
        vol_thresh = view_model.vol_thresh

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(
            project.name)
        manifest_file = os.path.join(manifest_file,
                                     'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution,
                                       manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = dictionary_builder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = download_an_construct_matrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pms_cleaner(projmaps)

        # download from the AllenSDK the annotation volume and the template volume
        vol, annot_info = cache.get_annotation_volume()
        template, template_info = cache.get_template_volume()

        # rotate template in the TVB 3D reference:
        template = rotate_reference(template)

        # grab the StructureTree instance
        structure_tree = cache.get_structure_tree()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = areas_volume_threshold(cache, projmaps, vol_thresh,
                                          resolution)

        # the method excludes from the experimental dataset
        # those experiments where the injected fraction of pixels in the injection site is lower than inj_f_thresh
        projmaps = infected_threshold(cache, projmaps, inj_f_thresh)

        # the method creates order and key_ord, which link the SC ordering to the
        # id keys in the Allen database
        [order, key_ord] = create_file_order(projmaps, structure_tree)

        # the method builds the Structural Connectivity (SC) matrix
        structural_conn = construct_structural_conn(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = construct_centres(cache, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = construct_tract_lengths(centres)

        # the method associates the parents and the grandparents to the child with
        # the biggest volume in the selected parcellation
        [unique_parents, unique_grandparents
         ] = parents_and_grandparents_finder(cache, order, key_ord,
                                             structure_tree)

        # the method returns a volume indexed between 0 and N-1, with N = total brain areas in the parcellation.
        # -1 = background and areas that are not in the parcellation
        vol_parcel = mouse_brain_visualizer(vol, order, key_ord,
                                            unique_parents,
                                            unique_grandparents,
                                            structure_tree, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity()
        result_connectivity.centres = centres
        result_connectivity.region_labels = numpy.array(names)
        result_connectivity.weights = structural_conn
        result_connectivity.tract_lengths = tract_lengths
        result_connectivity.configure()
        # Volume
        result_volume = Volume()
        result_volume.origin = numpy.array([[0.0, 0.0, 0.0]])
        result_volume.voxel_size = numpy.array(
            [resolution, resolution, resolution])
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping()
        result_rvm.volume = result_volume
        result_rvm.array_data = vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI()
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume

        connectivity_index = h5.store_complete(result_connectivity,
                                               self.storage_path)
        volume_index = h5.store_complete(result_volume, self.storage_path)
        rvm_index = h5.store_complete(result_rvm, self.storage_path)
        template_index = h5.store_complete(result_template, self.storage_path)

        return [connectivity_index, volume_index, rvm_index, template_index]
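The four results are linked: the RegionVolumeMapping ties the parcellation array to both the Volume geometry and the Connectivity, and the StructuralMRI template shares the same Volume. A condensed sketch of just that linkage, reusing result_volume and result_connectivity from the code above plus a hypothetical parcellation array:

    import numpy

    # hypothetical parcellation: one region on a 4x4x4 grid, -1 = background
    vol_parcel = numpy.full((4, 4, 4), -1)
    vol_parcel[1:3, 1:3, 1:3] = 0            # pretend region 0 fills a small cube

    rvm = RegionVolumeMapping()              # class imported as in the adapter
    rvm.array_data = vol_parcel              # voxel -> region index
    rvm.volume = result_volume               # geometry: origin + voxel_size
    rvm.connectivity = result_connectivity   # region order must match the SC rows
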
Example #9
class Tracts(MappedType):
    """Datatype for results of diffusion imaging tractography."""

    MAX_N_VERTICES = 2 ** 16

    vertices = arrays.PositionArray(
        label="Vertex positions",
        file_storage=core.FILE_STORAGE_EXPAND,
        order=-1,
        doc="""An array specifying coordinates for the tracts vertices.""")

    tract_start_idx = arrays.IntegerArray(
        label="Tract starting indices",
        order=-1,
        doc="""Where is the first vertex of a tract in the vertex array""")

    tract_region = arrays.IntegerArray(
        label="Tract region index",
        required=False,
        order=-1,
        doc="""
        An index used to quickly find all tracts emerging from a region.
        tract_region[i] is the region of the i'th tract; -1 represents the background.
        """
    )

    region_volume_map = RegionVolumeMapping(
        label="Region volume Mapping used to create the tract_region index",
        required=False,
        order=-1
    )


    @property
    def tracts_count(self):
        return len(self.tract_start_idx) - 1

    def get_tract(self, i):
        """
        get a tract by index
        """
        start, end = self.tract_start_idx[i:i + 2]
        return self.get_data('vertices', slice(start, end), close_file=False)

    def _get_tract_ids(self, region_id):
        tract_ids = numpy.where(self.tract_region == region_id)[0]
        return tract_ids

    def _get_track_ids_webgl_chunks(self, region_id):
        """
        WebGL can draw up to MAX_N_VERTICES vertices in a draw call.
        Assuming that no single track exceeds this limit, we partition
        the tracts such that each track bundle has fewer than the max vertices.
        :return: the ids of the tracts in a region, chunked by the above criteria.
        """
        # We have to split the int64 range into many uint16 ranges
        tract_ids = self._get_tract_ids(region_id)

        tract_id_chunks = []
        chunk = []

        count = 0
        tidx = 0
        tract_start_idx = self.tract_start_idx  # traits make . expensive

        while tidx < len(tract_ids):  # tidx always grows
            tid = tract_ids[tidx]
            start, end = tract_start_idx[tid:tid + 2]
            track_len = end - start

            if track_len >= self.MAX_N_VERTICES:
                raise ValueError('cannot yet handle very long tracts')

            count += track_len

            if count < self.MAX_N_VERTICES:
                # add this track to the current chunk and advance to next track
                chunk.append(tid)
                tidx += 1
            else:
                # stay with the same track and start a new chunk
                tract_id_chunks.append(chunk)
                chunk = []
                count = 0

        if chunk:
            tract_id_chunks.append(chunk)

        # q = []
        # for a in tract_id_chunks:
        #     q.extend(a)
        # assert (numpy.array(q) == tract_ids).all()

        return tract_id_chunks

    def get_vertices(self, region_id, slice_number=0):
        """
        Concatenates the vertices for all tracts starting in region_id.
        Returns a completely flat array as required by gl.bindBuffer apis
        """
        region_id = int(region_id)
        slice_number = int(slice_number)

        chunks = self._get_track_ids_webgl_chunks(region_id)
        tract_ids = chunks[slice_number]

        tracts_vertices = []
        for tid in tract_ids:
            tracts_vertices.append(self.get_tract(tid))

        self.close_file()

        if tracts_vertices:
            tracts_vertices = numpy.concatenate(tracts_vertices)
            return tracts_vertices.ravel()
        else:
            return numpy.array([])

    def get_line_starts(self, region_id):
        """
        Returns a compact representation of the element buffers required to draw the streams via gl.drawElements.
        A list of indices that describe where the first vertex of each tract is in the vertex array returned by
        get_tract_vertices_starting_in_region
        """
        region_id = int(region_id)
        chunks = self._get_track_ids_webgl_chunks(region_id)
        chunk_line_starts = []
        tract_start_idx = self.tract_start_idx  # traits make the . expensive

        for tract_ids in chunks:
            offset = 0
            tract_offsets = [0]

            for tid in tract_ids:
                start, end = tract_start_idx[tid:tid + 2]
                track_len = end - start
                offset += track_len
                tract_offsets.append(offset)

            chunk_line_starts.append(tract_offsets)

        return chunk_line_starts

    def get_urls_for_rendering(self):
        return ('/flow/read_datatype_attribute/' + self.gid + '/get_line_starts/False',
                '/flow/read_binary_datatype_attribute/' + self.gid + '/get_vertices')
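The chunking in _get_track_ids_webgl_chunks exists because WebGL element indices are uint16, so one draw call can address at most 2 ** 16 vertices; tracts are greedily packed into bundles that stay under that limit. A standalone sketch of the same greedy rule, shrunk to a hypothetical limit of 10 vertices:

    # greedy packing as in _get_track_ids_webgl_chunks, with a tiny limit
    def chunk_tracts(lengths, max_vertices=10):
        chunks, chunk, count = [], [], 0
        for i, n in enumerate(lengths):
            if n >= max_vertices:
                raise ValueError('cannot yet handle very long tracts')
            if count + n < max_vertices:
                chunk.append(i)       # tract i still fits in the current bundle
                count += n
            else:
                chunks.append(chunk)  # flush; tract i starts the next bundle
                chunk, count = [i], n
        if chunk:
            chunks.append(chunk)
        return chunks

    print(chunk_tracts([4, 5, 3]))  # -> [[0, 1], [2]]
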
Example #10
    def launch(self, resolution, weighting, inf_vox_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inf_vox_thresh = float(inf_vox_thresh)
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(
            project.name)
        manifest_file = os.path.join(manifest_file,
                                     'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution,
                                       manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = DictionaireBuilder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = DownloadAndConstructMatrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pmsCleaner(projmaps)

        # download from the AllenSDK the annotation volume, the ontology and the template volume
        Vol, annot_info = cache.get_annotation_volume()
        ontology = cache.get_ontology()
        template, template_info = cache.get_template_volume()

        # rotate template in the TVB 3D reference:
        template = RotateReference(template)

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = AreasVolumeTreshold(cache, projmaps, vol_thresh, resolution,
                                       Vol, ontology)

        # the method includes in the parcellation only brain regions where at least one injection experiment had infected more than N voxels (where N is inf_vox_thresh)
        projmaps = AreasVoxelTreshold(cache, projmaps, inf_vox_thresh, Vol,
                                      ontology)

        # the method creates order and key_ord, which link the SC ordering to the id keys in the Allen database
        [order, key_ord] = CreateFileOrder(projmaps, ontology)

        # the method builds the Structural Connectivity (SC) matrix
        SC = ConstructingSC(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = Construct_centres(cache, ontology, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = ConstructTractLengths(centres)

        # the method associates the parents and the grandparents to the child with the biggest volume in the selected parcellation
        [unique_parents, unique_grandparents
         ] = ParentsAndGrandParentsFinder(cache, order, key_ord, ontology)

        # the method returns a volume indexed between 0 and N-1, with N = total brain areas in the parcellation; -1 = background and areas that are not in the parcellation
        Vol_parcel = MouseBrainVisualizer(Vol, order, key_ord, unique_parents,
                                          unique_grandparents, ontology,
                                          projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = SC
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = Vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI(storage_path=self.storage_path)
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume
        return [
            result_connectivity, result_volume, result_rvm, result_template
        ]
Example #11
    def launch(self, resolution, weighting, inj_f_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inj_f_thresh = float(inj_f_thresh)/100.
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, 'mouse_connectivity_manifest.json')
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = dictionary_builder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = download_an_construct_matrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pms_cleaner(projmaps)

        # download from the AllenSDK the annotation volume and the template volume
        vol, annot_info = cache.get_annotation_volume()
        template, template_info = cache.get_template_volume()

        # rotate template in the TVB 3D reference:
        template = rotate_reference(template)

        # grab the StructureTree instance
        structure_tree = cache.get_structure_tree()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = areas_volume_threshold(cache, projmaps, vol_thresh, resolution)
        
        # the method excludes from the experimental dataset
        # those experiments where the injected fraction of pixels in the injection site is lower than inj_f_thresh
        projmaps = infected_threshold(cache, projmaps, inj_f_thresh)

        # the method creates order and key_ord, which link the SC ordering to the
        # id keys in the Allen database
        [order, key_ord] = create_file_order(projmaps, structure_tree)

        # the method builds the Structural Connectivity (SC) matrix
        structural_conn = construct_structural_conn(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = construct_centres(cache, order, key_ord)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = construct_tract_lengths(centres)

        # the method associates the parents and the grandparents to the child with
        # the biggest volume in the selected parcellation
        [unique_parents, unique_grandparents] = parents_and_grandparents_finder(cache, order, key_ord, structure_tree)

        # the method returns a volume indexed between 0 and N-1, with N = total brain areas in the parcellation.
        # -1 = background and areas that are not in the parcellation
        vol_parcel = mouse_brain_visualizer(vol, order, key_ord, unique_parents, unique_grandparents,
                                            structure_tree, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = structural_conn
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        # Volume template
        result_template = StructuralMRI(storage_path=self.storage_path)
        result_template.array_data = template
        result_template.weighting = 'T1'
        result_template.volume = result_volume
        return [result_connectivity, result_volume, result_rvm, result_template]
Example #12
    def launch(self, resolution, weighting, inf_vox_thresh, vol_thresh):
        resolution = int(resolution)
        weighting = int(weighting)
        inf_vox_thresh = float(inf_vox_thresh)
        vol_thresh = float(vol_thresh)

        project = dao.get_project_by_id(self.current_project_id)
        manifest_file = self.file_handler.get_allen_mouse_cache_folder(project.name)
        manifest_file = os.path.join(manifest_file, "mouse_connectivity_manifest.json")
        cache = MouseConnectivityCache(resolution=resolution, manifest_file=manifest_file)

        # the method creates a dictionary with information about which experiments need to be downloaded
        ist2e = DictionaireBuilder(cache, False)

        # the method downloads experiments necessary to build the connectivity
        projmaps = DownloadAndConstructMatrix(cache, weighting, ist2e, False)

        # the method cleans the file projmaps in 4 steps
        projmaps = pmsCleaner(projmaps)

        Vol, annot_info = cache.get_annotation_volume()
        ontology = cache.get_ontology()

        # the method includes in the parcellation only brain regions whose volume is greater than vol_thresh
        projmaps = AreasVolumeTreshold(projmaps, vol_thresh, resolution, Vol, ontology)

        # the method includes in the parcellation only brain regions where at least one injection experiment had infected more than N voxels (where N is inf_vox_thresh)
        projmaps = AreasVoxelTreshold(cache, projmaps, inf_vox_thresh, Vol, ontology)

        # the method creates order and key_ord, which link the SC ordering to the id keys in the Allen database
        [order, key_ord] = CreateFileOrder(projmaps, ontology)

        # the method builds the Structural Connectivity (SC) matrix
        SC = ConstructingSC(projmaps, order, key_ord)

        # the method returns the coordinates of the centres and the names of the brain areas in the selected parcellation
        [centres, names] = Construct_centres(ontology, order, key_ord, Vol)

        # the method returns the tract lengths between the brain areas in the selected parcellation
        tract_lengths = ConstructTractLengths(centres)

        # the method associates the parents and the grandparents to the child with the biggest volume in the selected parcellation
        [unique_parents, unique_grandparents] = ParentsAndGrandParentsFinder(order, key_ord, Vol, ontology)

        # the method returns a volume indexed between 0 and N-1, with N = total brain areas in the parcellation; -1 = background and areas that are not in the parcellation
        Vol_parcel = MouseBrainVisualizer(Vol, order, key_ord, unique_parents, unique_grandparents, ontology, projmaps)

        # results: Connectivity, Volume & RegionVolumeMapping
        # Connectivity
        result_connectivity = Connectivity(storage_path=self.storage_path)
        result_connectivity.centres = centres
        result_connectivity.region_labels = names
        result_connectivity.weights = SC
        result_connectivity.tract_lengths = tract_lengths
        # Volume
        result_volume = Volume(storage_path=self.storage_path)
        result_volume.origin = [[0.0, 0.0, 0.0]]
        result_volume.voxel_size = [resolution, resolution, resolution]
        # result_volume.voxel_unit= micron
        # Region Volume Mapping
        result_rvm = RegionVolumeMapping(storage_path=self.storage_path)
        result_rvm.volume = result_volume
        result_rvm.array_data = Vol_parcel
        result_rvm.connectivity = result_connectivity
        result_rvm.title = "Volume mouse brain "
        result_rvm.dimensions_labels = ["X", "Y", "Z"]
        return [result_connectivity, result_rvm, result_volume]