示例#1
0
    def test_happy_flow_import(self):
        """
        Test that importing a CFF generates at least one DataType in DB.
        """
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, subject=TEST_SUBJECT_A)

        field = FilterChain.datatype + '.subject'
        subject_a_filter = FilterChain('', [field], [TEST_SUBJECT_A], ['=='])
        reference_connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex, subject_a_filter)

        count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
        self._import_csv_test_connectivity(reference_connectivity_index.gid, TEST_SUBJECT_B)
        count_after = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
        assert count_after == count_before + 1

        subject_b_filter = FilterChain('', [field], [TEST_SUBJECT_B], ['like'])
        imported_connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex, subject_b_filter)

        # Region count is preserved by the CSV import, connection count is not.
        assert imported_connectivity_index.number_of_regions == reference_connectivity_index.number_of_regions
        assert imported_connectivity_index.number_of_connections != reference_connectivity_index.number_of_connections

        reference_connectivity = h5.load_from_index(reference_connectivity_index)
        imported_connectivity = h5.load_from_index(imported_connectivity_index)

        # Weights and tract lengths come from the CSV files, so they differ.
        assert not (imported_connectivity.weights == reference_connectivity.weights).all()
        assert not (imported_connectivity.tract_lengths == reference_connectivity.tract_lengths).all()

        # The remaining attributes are taken over from the reference connectivity.
        assert (imported_connectivity.centres == reference_connectivity.centres).all()
        assert (imported_connectivity.orientations == reference_connectivity.orientations).all()
        assert (imported_connectivity.region_labels == reference_connectivity.region_labels).all()
示例#2
0
    def launch(self, view_model):
        # type: (TopographicViewerModel) -> dict
        """
        Build the topographic view for up to three connectivity measures.

        :param view_model: carries GIDs for up to three measure datatypes (data_0..data_2)
        :raises Exception: when a measure's connectivity region count differs from the sensor count
        :raises LaunchException: when the measure point locations are incompatible with this viewer
        """
        connectivities_idx = []
        measures_ht = []
        for measure in [view_model.data_0, view_model.data_1, view_model.data_2]:
            if measure is not None:
                measure_index = self.load_entity_by_gid(measure)
                measures_ht.append(h5.load_from_index(measure_index))
                conn_index = self.load_entity_by_gid(measure_index.fk_connectivity_gid)
                connectivities_idx.append(conn_index)

        with h5.h5_file_for_index(connectivities_idx[0]) as conn_h5:
            centres = conn_h5.centres.load()
        sensor_locations = TopographyCalculations.normalize_sensors(centres)
        sensor_number = len(sensor_locations)

        arrays = []
        titles = []
        min_vals = []
        max_vals = []
        data_array = []
        data_arrays = []
        for i, measure in enumerate(measures_ht):
            if connectivities_idx[i].number_of_regions != sensor_number:
                raise Exception("Use the same connectivity!!!")
            arrays.append(measure.array_data.tolist())
            titles.append(measure.title)
            min_vals.append(measure.array_data.min())
            max_vals.append(measure.array_data.max())

        # One shared color scale across all displayed measures.
        color_bar_min = min(min_vals)
        color_bar_max = max(max_vals)

        # Only one connectivity can be used at one time, so the hemispheres flag
        # is loop-invariant; the original reloaded the whole connectivity from H5
        # on every iteration — load it once here instead.
        first_label = h5.load_from_index(connectivities_idx[0]).hemispheres[0]

        for i, array_data in enumerate(arrays):
            try:
                data_array = TopographyCalculations.compute_topography_data(array_data, sensor_locations)

                # Rotate the computed topography to match hemisphere orientation.
                if first_label:
                    data_array = numpy.rot90(data_array, k=1, axes=(0, 1))
                else:
                    data_array = numpy.rot90(data_array, k=-1, axes=(0, 1))
                if numpy.any(numpy.isnan(array_data)):
                    titles[i] = titles[i] + " - Topography contains nan"
                if not numpy.any(array_data):
                    titles[i] = titles[i] + " - Topography data is all zeros"
                data_arrays.append(ABCDisplayer.dump_with_precision(data_array.flat))
            except KeyError as err:
                self.log.exception(err)
                raise LaunchException("The measure points location is not compatible with this viewer ", err)

        # NOTE(review): matrix_shape uses the data_array from the last loop
        # iteration; assumes at least one measure is always provided — confirm.
        params = dict(matrix_datas=data_arrays,
                      matrix_shape=json.dumps(data_array.squeeze().shape),
                      titles=titles,
                      vmin=color_bar_min,
                      vmax=color_bar_max)
        return self.build_display_result("topographic/view", params,
                                         pages={"controlPage": "topographic/controls"})
示例#3
0
    def test_import_region_mapping(self):
        """
        This method tests import of a NIFTI file compressed in GZ format.
        """
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")
        to_link_conn = TestFactory.get_entity(self.test_project, ConnectivityIndex)

        mapping_index = self._import(self.GZ_NII_FILE, RegionVolumeMappingIndex, to_link_conn.gid)
        mapping = h5.load_from_index(mapping_index)

        # All mapped values must lie within the connectivity's region range.
        assert mapping.array_data.min() >= -1
        assert mapping.array_data.max() < to_link_conn.number_of_regions
        assert mapping_index.connectivity_gid == to_link_conn.gid

        volume_index = ABCAdapter.load_entity_by_gid(mapping_index.volume_gid)
        assert volume_index is not None

        volume = h5.load_from_index(volume_index)
        assert numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all()
        assert numpy.equal([3.0, 3.0, 3.0], volume.voxel_size).all()
        assert volume.voxel_unit == self.UNKNOWN_STR
    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity, a cortical surface and a face surface;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user('Brain_Viewer_User')
        self.test_project = TestFactory.create_project(self.test_user, 'Brain_Viewer_Project')

        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")
        connectivity_idx = TestFactory.get_entity(self.test_project, ConnectivityIndex)
        assert connectivity_idx is not None

        self.face_surface = TestFactory.import_surface_zip(self.test_user, self.test_project,
                                                           self.face, CORTICAL)
        region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project,
                                                           self.region_mapping_path,
                                                           self.face_surface.gid, connectivity_idx.gid)

        # Keep the loaded datatypes around for the individual test methods.
        self.connectivity = h5.load_from_index(connectivity_idx)
        self.region_mapping = h5.load_from_index(region_mapping)
示例#5
0
    def generate_preview(self, input_data, figure_size=None, surface_data=None,
                         colors=None, rays=None, step=None, **kwargs):
        """
        Generate the preview for the BURST cockpit.

        see `launch_`
        """
        connectivity = h5.load_from_index(input_data)
        assert isinstance(connectivity, Connectivity)

        # NOTE(review): figure_size defaults to None but is indexed below,
        # so callers are apparently expected to always pass (width, height) — confirm.
        colors_dt = h5.load_from_index(colors) if colors else None
        rays_dt = h5.load_from_index(rays) if rays else None

        parameters, _ = Connectivity2DViewer().compute_preview_parameters(
            connectivity, figure_size[0], figure_size[1], colors_dt, rays_dt, step)
        return self.build_display_result("connectivity/portlet_preview", parameters)
示例#6
0
    def launch(self, view_model):
        # type: (ProjectionMatrixImporterModel) -> [ProjectionMatrixIndex]
        """
        Creates ProjectionMatrix entity from uploaded data.

        :raises LaunchException: when
                    * no projection_file or sensors are specified
                    * the dataset is invalid
                    * number of sensors is different from the one in dataset
        """
        # Guard clauses: all three inputs are mandatory.
        if view_model.projection_file is None:
            raise LaunchException("Please select MATLAB file which contains data to import")
        if view_model.sensors is None:
            raise LaunchException("No sensors selected. Please initiate upload again and select one.")
        if view_model.surface is None:
            raise LaunchException("No source selected. Please initiate upload again and select a source.")

        surface_index = self.load_entity_by_gid(view_model.surface)
        sensors_index = self.load_entity_by_gid(view_model.sensors)
        expected_surface_shape = surface_index.number_of_vertices
        expected_sensors_shape = sensors_index.number_of_sensors

        self.logger.debug("Reading projection matrix from uploaded file...")
        # MATLAB files need a dataset name; any other extension is read as list data.
        if view_model.projection_file.endswith(".mat"):
            projection_data = self.read_matlab_data(view_model.projection_file, view_model.dataset_name)
        else:
            projection_data = self.read_list_data(view_model.projection_file)

        if projection_data is None or len(projection_data) == 0:
            raise LaunchException("Invalid (empty) dataset...")

        # Rows must match the sensor count, columns the surface vertex count.
        if projection_data.shape[0] != expected_sensors_shape:
            raise LaunchException("Invalid Projection Matrix shape[0]: %d Expected: %d" %
                                  (projection_data.shape[0], expected_sensors_shape))
        if projection_data.shape[1] != expected_surface_shape:
            raise LaunchException("Invalid Projection Matrix shape[1]: %d Expected: %d" %
                                  (projection_data.shape[1], expected_surface_shape))

        projection_matrix = ProjectionMatrix(sources=h5.load_from_index(surface_index),
                                             sensors=h5.load_from_index(sensors_index),
                                             projection_type=determine_projection_type(sensors_index),
                                             projection_data=projection_data)
        return h5.store_complete(projection_matrix, self.storage_path)
示例#7
0
    def launch(self,
               input_data,
               surface_data=None,
               colors=None,
               rays=None,
               step=None):
        """
        Given the input connectivity data and the surface data, 
        build the HTML response to be displayed.

        :param input_data: index towards the `Connectivity` object which will be displayed
        :type input_data: `ConnectivityIndex`
        :param surface_data: if provided, it is displayed as a shadow to give an idea of the connectivity \
                             position relative to the full brain cortical surface
        :type surface_data: `SurfaceIndex`
        :param colors: used to establish a colormap for the nodes displayed in 2D Connectivity viewers
        :type colors:  `ConnectivityMeasureIndex`
        :param rays: used to establish the size of the spheres representing each node in 3D Nodes viewer
        :type rays:  `ConnectivityMeasureIndex`
        :param step: a threshold applied to the 2D Connectivity Viewers to differentiate 2 types of nodes \
                     the ones with a value greater that this will be displayed as red discs, instead of yellow
        :type step:  float
        """
        connectivity = h5.load_from_index(input_data)
        assert isinstance(connectivity, Connectivity)
        if colors:
            colors_dt = h5.load_from_index(colors)
        else:
            colors_dt = None
        if rays:
            rays_dt = h5.load_from_index(rays)
        else:
            rays_dt = None

        global_params, global_pages = self._compute_connectivity_global_params(
            connectivity)
        if surface_data is not None:
            # BUG FIX: the H5 file was opened but never closed; use a context
            # manager (as done elsewhere in this module) so it is released
            # even when URL generation raises.
            with h5.h5_file_for_index(surface_data) as surface_h5:
                url_vertices, url_normals, _, url_triangles, _ = \
                    SurfaceURLGenerator.get_urls_for_rendering(surface_h5)
        else:
            url_vertices, url_normals, url_triangles = [], [], []

        global_params["urlVertices"] = json.dumps(url_vertices)
        global_params["urlTriangles"] = json.dumps(url_triangles)
        global_params["urlNormals"] = json.dumps(url_normals)
        global_params['isSingleMode'] = False

        # Merge the 2D and 3D viewer parameters on top of the global ones.
        result_params, result_pages = Connectivity2DViewer(
        ).compute_parameters(connectivity, colors_dt, rays_dt, step)
        result_params.update(global_params)
        result_pages.update(global_pages)
        _params, _pages = Connectivity3DViewer().compute_parameters(
            connectivity, colors_dt, rays_dt)
        result_params.update(_params)
        result_pages.update(_pages)

        return self.build_display_result("connectivity/main_connectivity",
                                         result_params, result_pages)
示例#8
0
    def deserialize_simulator(simulator_gid, storage_path):
        """
        Rebuild a SimulatorAdapterModel from its H5 file on disk, resolving
        monitor, surface and region-mapping references.

        :param simulator_gid: GID identifying the stored simulator H5
        :param storage_path: folder where the simulator H5 files live
        :return: a populated SimulatorAdapterModel instance
        """
        simulator_in_path = h5.path_for(storage_path, SimulatorH5,
                                        simulator_gid)
        simulator_in = SimulatorAdapterModel()

        with SimulatorH5(simulator_in_path) as simulator_in_h5:
            simulator_in_h5.load_into(simulator_in)
            # Referenced entities are stored as separate H5 attributes and
            # must be loaded explicitly after load_into.
            simulator_in.connectivity = simulator_in_h5.connectivity.load()
            simulator_in.stimulus = simulator_in_h5.stimulus.load()
            simulator_in.history_gid = simulator_in_h5.simulation_state.load()

        for monitor in simulator_in.monitors:
            if isinstance(monitor, Projection):
                # Re-open the simulator H5 only to resolve this monitor's own H5 path.
                with SimulatorH5(simulator_in_path) as simulator_in_h5:
                    monitor_h5_path = simulator_in_h5.get_reference_path(
                        monitor.gid)

                monitor_h5_class = h5_factory.monitor_h5_factory(type(monitor))

                with monitor_h5_class(monitor_h5_path) as monitor_h5:
                    sensors_gid = monitor_h5.sensors.load()
                    projection_gid = monitor_h5.projection.load()
                    region_mapping_gid = monitor_h5.region_mapping.load()

                sensors_index = ABCAdapter.load_entity_by_gid(sensors_gid.hex)
                projection_index = ABCAdapter.load_entity_by_gid(
                    projection_gid.hex)

                # Load with explicit datatype classes derived from the concrete
                # monitor's projection class.
                sensors_class = monitor.projection_class().sensors.field_type
                sensors = h5.load_from_index(sensors_index,
                                             dt_class=sensors_class)

                projection_class = monitor.projection_class()
                projection = h5.load_from_index(projection_index,
                                                dt_class=projection_class)

                region_mapping = ABCAdapter.load_traited_by_gid(
                    region_mapping_gid)

                monitor.sensors = sensors
                monitor.projection = projection
                monitor.region_mapping = region_mapping

        if simulator_in.surface:
            cortex_path = h5.path_for(storage_path, CortexH5,
                                      simulator_in.surface.gid)
            with CortexH5(cortex_path) as cortex_h5:
                simulator_in.surface.local_connectivity = cortex_h5.local_connectivity.load(
                )
                simulator_in.surface.region_mapping_data = cortex_h5.region_mapping_data.load(
                )
                # The surface GID is resolved indirectly through the region
                # mapping's DB index.
                rm_index = dao.get_datatype_by_gid(
                    simulator_in.surface.region_mapping_data.hex)
                simulator_in.surface.surface_gid = uuid.UUID(
                    rm_index.fk_surface_gid)

        return simulator_in
    def _prepare_simulator_from_view_model(self, view_model):
        """
        Build a library Simulator instance from the persisted view model.

        :param view_model: view model holding GIDs for all simulator inputs
        :return: a configured Simulator (preconfigure is NOT called here)
        """
        simulator = Simulator()
        simulator.gid = view_model.gid

        conn = self.load_traited_by_gid(view_model.connectivity)
        simulator.connectivity = conn

        simulator.conduction_speed = view_model.conduction_speed
        simulator.coupling = view_model.coupling

        rm_surface = None

        if view_model.surface:
            simulator.surface = Cortex()
            rm_index = self.load_entity_by_gid(
                view_model.surface.region_mapping_data.hex)
            rm = h5.load_from_index(rm_index)

            rm_surface_index = self.load_entity_by_gid(rm_index.fk_surface_gid)
            rm_surface = h5.load_from_index(rm_surface_index, CorticalSurface)
            rm.surface = rm_surface
            rm.connectivity = conn

            simulator.surface.region_mapping_data = rm
            # BUG FIX: the original tested `simulator.surface.local_connectivity`,
            # which refers to the freshly created Cortex above and is never
            # populated at this point, so the branch was dead. The GID to load
            # lives on the view model.
            if view_model.surface.local_connectivity:
                lc = self.load_traited_by_gid(
                    view_model.surface.local_connectivity)
                # The local connectivity must be defined on the same surface.
                assert lc.surface.gid == rm_index.fk_surface_gid
                lc.surface = rm_surface
                simulator.surface.local_connectivity = lc

        if view_model.stimulus:
            stimulus_index = self.load_entity_by_gid(view_model.stimulus.hex)
            stimulus = h5.load_from_index(stimulus_index)
            simulator.stimulus = stimulus

            # Surface stimuli attach to the surface; region stimuli attach to
            # the connectivity.
            if isinstance(stimulus, StimuliSurface):
                simulator.stimulus.surface = rm_surface
            else:
                simulator.stimulus.connectivity = simulator.connectivity

        simulator.model = view_model.model
        simulator.integrator = view_model.integrator
        simulator.initial_conditions = view_model.initial_conditions
        simulator.monitors = view_model.monitors
        simulator.simulation_length = view_model.simulation_length

        # TODO: why not load history here?
        # if view_model.history:
        #     history_index = dao.get_datatype_by_gid(view_model.history.hex)
        #     history = h5.load_from_index(history_index)
        #     assert isinstance(history, SimulationHistory)
        #     history.fill_into(self.algorithm)
        return simulator
示例#10
0
    def deserialize_simulator(simulator_gid, storage_path):
        """
        Rebuild a library Simulator from its H5 file and resolve all references.

        :param simulator_gid: GID identifying the stored simulator H5
        :param storage_path: folder where the H5 files live
        :return: tuple (Simulator instance, GID of the stored simulation state)
        """
        simulator_in_path = h5.path_for(storage_path, SimulatorH5, simulator_gid)
        simulator_in = Simulator()

        with SimulatorH5(simulator_in_path) as simulator_in_h5:
            simulator_in_h5.load_into(simulator_in)
            # Referenced entities are stored as GIDs, resolved below.
            connectivity_gid = simulator_in_h5.connectivity.load()
            stimulus_gid = simulator_in_h5.stimulus.load()
            simulation_state_gid = simulator_in_h5.simulation_state.load()

        conn_index = dao.get_datatype_by_gid(connectivity_gid.hex)
        simulator_in.connectivity = h5.load_from_index(conn_index)

        if simulator_in.surface:
            cortex_path = h5.path_for(storage_path, CortexH5, simulator_in.surface.gid)
            with CortexH5(cortex_path) as cortex_h5:
                local_conn_gid = cortex_h5.local_connectivity.load()
                region_mapping_gid = cortex_h5.region_mapping_data.load()

            region_mapping_index = dao.get_datatype_by_gid(region_mapping_gid.hex)
            region_mapping_path = h5.path_for_stored_index(region_mapping_index)
            region_mapping = RegionMapping()
            with RegionMappingH5(region_mapping_path) as region_mapping_h5:
                region_mapping_h5.load_into(region_mapping)
                region_mapping.gid = region_mapping_h5.gid.load()
                surf_gid = region_mapping_h5.surface.load()

            surf_index = dao.get_datatype_by_gid(surf_gid.hex)
            surf = CorticalSurface()
            # BUG FIX: the surface H5 was closed manually, leaking the file
            # handle if load_into raised; a context manager (as used for every
            # other H5 in this function) guarantees the close.
            with h5.h5_file_for_index(surf_index) as surf_h5:
                surf_h5.load_into(surf)
            region_mapping.surface = surf
            simulator_in.surface.region_mapping_data = region_mapping

            if local_conn_gid:
                local_conn_index = dao.get_datatype_by_gid(local_conn_gid.hex)
                simulator_in.surface.local_connectivity = h5.load_from_index(local_conn_index)

        if stimulus_gid:
            stimulus_index = dao.get_datatype_by_gid(stimulus_gid.hex)
            simulator_in.stimulus = h5.load_from_index(stimulus_index)

        return simulator_in, simulation_state_gid
示例#11
0
    def _try_load_region_mapping(self):
        """
        Try to load a RegionMapping and a RegionVolumeMapping linked to the
        current connectivity. Either element of the returned pair may be None
        when no such mapping exists.
        """
        conn_gid = self.algorithm.connectivity.gid.hex
        rm_index = self._try_find_mapping(RegionMappingIndex, conn_gid)
        rvm_index = self._try_find_mapping(RegionVolumeMappingIndex, conn_gid)

        region_map = h5.load_from_index(rm_index) if rm_index else None
        region_volume_map = h5.load_from_index(rvm_index) if rvm_index else None

        return region_map, region_volume_map
示例#12
0
    def configure(self, simulator_gid):
        """
        Make preparations for the adapter launch.
        """
        self.log.debug("%s: Instantiating requested simulator..." % str(self))

        simulator_service = SimulatorService()
        self.algorithm, connectivity_gid, simulation_state_gid = \
            simulator_service.deserialize_simulator(simulator_gid, self.storage_path)
        self.branch_simulation_state_gid = simulation_state_gid

        # for monitor in self.algorithm.monitors:
        #     if issubclass(monitor, Projection):
        #         # TODO: add a service that loads a RM with Surface and Connectivity
        #         pass

        # Resolve and attach the full connectivity datatype.
        connectivity_index = dao.get_datatype_by_gid(connectivity_gid.hex)
        connectivity = h5.load_from_index(connectivity_index)
        connectivity.gid = connectivity_gid
        self.algorithm.connectivity = connectivity

        self.simulation_length = self.algorithm.simulation_length
        self.log.debug("%s: Initializing storage..." % str(self))
        try:
            self.algorithm.preconfigure()
        except ValueError as err:
            raise LaunchException(
                "Failed to configure simulator due to invalid Input Values. It could be because "
                "of an incompatibility between different version of TVB code.",
                err)
示例#13
0
    def test_import_demo_ts(self):
        """
        This method tests import of a NIFTI file.
        """
        time_series_index = self._import(self.TIMESERIES_NII_FILE, TimeSeriesVolumeIndex)

        # assertAlmostEqual is not available on all machines, so compare
        # floats with an explicit tolerance instead.
        assert abs(time_series_index.sample_period - 1.0) <= 0.001
        assert str(time_series_index.sample_period_unit) == "sec"
        assert time_series_index.title.startswith("NIFTI")

        dimension_labels = time_series_index.labels_ordering
        assert dimension_labels is not None
        assert len(json.loads(dimension_labels)) == 4

        volume_index = ABCAdapter.load_entity_by_gid(time_series_index.volume_gid)
        assert volume_index is not None

        volume = h5.load_from_index(volume_index)
        assert numpy.equal(self.DEFAULT_ORIGIN, volume.origin).all()
        assert volume.voxel_unit == "mm"
示例#14
0
    def launch(self, weights, weights_delimiter, tracts, tracts_delimiter, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param tracts:  csv file containing the tracts measures
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(weights, weights_delimiter)
        tract_matrix = self._read_csv_file(tracts, tracts_delimiter)
        FilesHelper.remove_files([weights, tracts])

        if weights_matrix.shape[0] != input_data.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.number_of_regions))

        reference = h5.load_from_index(input_data)

        # New connectivity: weights/tracts come from the CSVs, every other
        # attribute is copied from the reference connectivity.
        result = Connectivity()
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.centres = reference.centres
        result.region_labels = reference.region_labels
        result.orientations = reference.orientations
        result.areas = reference.areas
        result.cortical = reference.cortical
        result.hemispheres = reference.hemispheres
        result.configure()

        return h5.store_complete(result, self.storage_path)
示例#15
0
    def launch(self, view_model):
        # type: (PearsonCorrelationCoefficientVisualizerModel) -> dict
        """Construct data for visualization and launch it."""

        with h5.h5_file_for_gid(view_model.datatype) as datatype_h5:
            matrix_shape = datatype_h5.array_data.shape[0:2]
            ts_gid = datatype_h5.source.load()

        ts_index = self.load_entity_by_gid(ts_gid)
        state_list = ts_index.get_labels_for_dimension(1)
        mode_list = list(range(ts_index.data_length_4d))

        # BUG FIX: h5.load_from_index returns a datatype instance, not an H5
        # file, so it cannot serve as a context manager; open the H5 file for
        # the index instead (same pattern as the datatype above).
        with h5.h5_file_for_index(ts_index) as ts_h5:
            labels = self.get_space_labels(ts_h5)

        if not labels:
            labels = None
        pars = dict(matrix_labels=json.dumps(labels),
                    matrix_shape=json.dumps(matrix_shape),
                    viewer_title='Pearson Edge Bundle',
                    url_base=URLGenerator.build_h5_url(view_model.datatype.hex,
                                                       'get_correlation_data',
                                                       flatten="True",
                                                       parameter=''),
                    state_variable=0,
                    mode=mode_list[0],
                    state_list=state_list,
                    mode_list=mode_list,
                    pearson_min=CorrelationCoefficients.PEARSON_MIN,
                    pearson_max=CorrelationCoefficients.PEARSON_MAX,
                    thresh=0.5)

        return self.build_display_result("pearson_edge_bundle/view", pars)
示例#16
0
 def load_traited_by_gid(self, data_gid):
     # type: (typing.Union[uuid.UUID, str]) -> HasTraits
     """
     Load a generic HasTraits instance, specified by GID.
     """
     return h5.load_from_index(self.load_entity_by_gid(data_gid))
示例#17
0
    def create_interlinked_projects(self, region_mapping_factory, time_series_region_index_factory):
        """
        Extend the two projects created in setup.
        Project src will have 3 datatypes, one a connectivity, and a link to the time series from the dest project.
        Project dest will have 3 links to the datatypes in src and a time series derived from the linked connectivity
        """
        # add a connectivity to src project and link it to dest project
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.dst_user, self.dest_project, zip_path, "John")
        conn = TestFactory.get_entity(self.dest_project, ConnectivityIndex)
        self.flow_service.create_link([conn.id], self.dest_project.id)

        # in dest derive a time series from the linked conn
        surface_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.dst_user, self.dest_project, surface_path, CORTICAL)

        txt_file = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt')
        region_mapping = TestFactory.import_region_mapping(self.dst_user, self.dest_project, txt_file,
                                                           surface.gid, conn.gid)

        ts = time_series_region_index_factory(connectivity=h5.load_from_index(conn),
                                              region_mapping=h5.load_from_index(region_mapping))
        # then link the time series in the src project
        self.flow_service.create_link([ts.id], self.src_project.id)

        assert len(dao.get_datatypes_in_project(self.src_project.id)) == 3
        assert len(dao.get_linked_datatypes_in_project(self.src_project.id)) == 1
        assert len(dao.get_datatypes_in_project(self.dest_project.id)) == 1
        assert len(dao.get_linked_datatypes_in_project(self.dest_project.id)) == 3
示例#18
0
    def launch(self, view_model):
        # type: (NodeComplexCoherenceModel) -> [ComplexCoherenceSpectrumIndex]
        """
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the complex coherence for the specified time series
        """
        # TODO ---------- Iterate over slices and compose final result ------------##
        time_series = h5.load_from_index(self.input_time_series_index)
        ht_result = calculate_complex_cross_coherence(time_series, view_model.epoch_length,
                                                      view_model.segment_length,
                                                      view_model.segment_shift,
                                                      view_model.window_function,
                                                      view_model.average_segments,
                                                      view_model.subtract_epoch_average,
                                                      view_model.zeropad, view_model.detrend_ts,
                                                      view_model.max_freq, view_model.npat)
        self.log.debug("got ComplexCoherenceSpectrum result")
        self.log.debug("ComplexCoherenceSpectrum segment_length is %s" % (str(ht_result.segment_length)))
        self.log.debug("ComplexCoherenceSpectrum epoch_length is %s" % (str(ht_result.epoch_length)))
        self.log.debug("ComplexCoherenceSpectrum windowing_function is %s" % (str(ht_result.windowing_function)))

        complex_coherence_index = h5.store_complete(ht_result, self.storage_path)

        # Fill the DB index from the stored H5 file. The context manager
        # guarantees the file is closed even if filling raises (the original
        # closed it manually, and also misnamed the handle "ica_h5").
        result_path = h5.path_for(self.storage_path, ComplexCoherenceSpectrumH5, complex_coherence_index.gid)
        with ComplexCoherenceSpectrumH5(path=result_path) as spectrum_h5:
            self.fill_index_from_h5(complex_coherence_index, spectrum_h5)

        return complex_coherence_index
示例#19
0
    def launch(self, view_model):
        # type: (CSVConnectivityImporterModel) -> ConnectivityIndex
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(view_model.weights, view_model.weights_delimiter)
        tract_matrix = self._read_csv_file(view_model.tracts, view_model.tracts_delimiter)
        self.storage_interface.remove_files([view_model.weights, view_model.tracts])

        conn_index = self.load_entity_by_gid(view_model.input_data)
        if weights_matrix.shape[0] != conn_index.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], conn_index.number_of_regions))

        reference = h5.load_from_index(conn_index)

        # New connectivity: weights/tracts come from the CSVs, every other
        # attribute is copied from the reference connectivity.
        result = Connectivity()
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.centres = reference.centres
        result.region_labels = reference.region_labels
        result.orientations = reference.orientations
        result.areas = reference.areas
        result.cortical = reference.cortical
        result.hemispheres = reference.hemispheres
        result.configure()

        return h5.store_complete(result, self.storage_path)
示例#20
0
    def build():
        """Create, persist and return an IndependentComponentsIndex backed by random data."""
        raw_data = numpy.random.random((10, 10, 10, 10))
        ts_index = time_series_index_factory(data=raw_data)
        ts = h5.load_from_index(ts_index)

        n_comp = 5
        ica_ht = mode_decompositions.IndependentComponents(
            source=ts,
            component_time_series=numpy.random.random((10, n_comp, 10, 10)),
            prewhitening_matrix=numpy.random.random((n_comp, 10, 10, 10)),
            unmixing_matrix=numpy.random.random((n_comp, n_comp, 10, 10)),
            n_components=n_comp)
        ica_ht.compute_norm_source()
        ica_ht.compute_normalised_component_time_series()

        operation = operation_factory()

        index = IndependentComponentsIndex()
        index.fk_from_operation = operation.id
        index.fill_from_has_traits(ica_ht)

        # Write the HasTraits payload to its H5 file before persisting the index.
        with IndependentComponentsH5(h5.path_for_stored_index(index)) as ica_h5:
            ica_h5.store(ica_ht)

        return dao.store_entity(index)
示例#21
0
    def launch(self,
               original_connectivity,
               new_weights,
               new_tracts,
               interest_area_indexes,
               is_branch=False,
               **kwargs):
        """
        Persist the edits the user submitted on the Connectivity matrix in the
        Visualizer, either as a cut of the original or as a new branch.
        """
        # note: is_branch defaults to False (rather than arriving explicitly) because
        # browsers only send checked boxes in forms.
        source_conn = h5.load_from_index(original_connectivity)
        assert isinstance(source_conn, Connectivity)

        weights = numpy.array(new_weights)
        tracts = numpy.array(new_tracts)
        interest_areas = numpy.array(interest_area_indexes)

        if is_branch:
            branched = self._branch_connectivity(source_conn, weights, interest_areas, tracts)
            results = [h5.store_complete(branched, self.storage_path)]
            # Region mappings tied to the original must be re-linked to the branch.
            results.extend(self._store_related_region_mappings(original_connectivity.gid, branched))
            return results

        cut = self._cut_connectivity(source_conn, weights, interest_areas, tracts)
        return [h5.store_complete(cut, self.storage_path)]
示例#22
0
 def load_traited_by_gid(data_gid):
     # type: (uuid.UUID) -> HasTraits
     """
     Load a generic HasTraits instance, specified by GID.
     """
     # Resolve the index record first, then rebuild the HasTraits datatype from its H5.
     return h5.load_from_index(load_entity_by_gid(data_gid.hex))
示例#23
0
    def launch(self, view_model):
        # type: (TimeseriesMetricsAdapterModel) -> [DatatypeMeasureIndex]
        """
        Run every selected metric algorithm over the input time series and
        persist all resulting measures in a single DatatypeMeasureIndex.

        :param view_model: the ViewModel keeping the selected algorithm names and
                           their optional segment/start_point parameters
        :rtype: `DatatypeMeasureIndex`
        """
        selected_algorithms = view_model.algorithms
        if not selected_algorithms:
            # Nothing chosen explicitly -> run the full set of known algorithms.
            selected_algorithms = list(ALGORITHMS)

        self.log.debug("time_series shape is %s" % str(self.input_shape))
        time_series = h5.load_from_index(self.input_time_series_index)

        computed_metrics = {}
        for algorithm_name in selected_algorithms:
            algorithm = ALGORITHMS[algorithm_name](time_series=time_series)
            if view_model.segment is not None:
                algorithm.segment = view_model.segment
            if view_model.start_point is not None:
                algorithm.start_point = view_model.start_point

            # Skip algorithms whose extra input filters reject this time series.
            algorithm_filter = TimeseriesMetricsAdapterForm.get_extra_algorithm_filters().get(algorithm_name)
            if algorithm_filter is not None \
                    and not algorithm_filter.get_python_filter_equivalent(self.input_time_series_index):
                self.log.warning(
                    'Measure algorithm will not be computed because of incompatibility on input. '
                    'Filters failed on algo: ' + str(algorithm_name))
                continue
            self.log.debug("Applying measure: " + str(algorithm_name))

            partial_result = algorithm.evaluate()
            # An algorithm may yield either a dict of named metrics or a single value.
            if isinstance(partial_result, dict):
                computed_metrics.update(partial_result)
            else:
                computed_metrics[algorithm_name] = partial_result

        result = DatatypeMeasureIndex()
        result.fk_source_gid = self.input_time_series_index.gid
        result.metrics = json.dumps(computed_metrics)

        result_path = h5.path_for(self.storage_path, DatatypeMeasureH5, result.gid)
        with DatatypeMeasureH5(result_path) as measure_h5:
            measure_h5.metrics.store(computed_metrics)
            measure_h5.analyzed_datatype.store(time_series)
            measure_h5.gid.store(uuid.UUID(result.gid))

        return result
示例#24
0
    def _store_related_region_mappings(self, original_conn_gid, new_connectivity_ht):
        """
        Re-create every RegionMapping linked to the original connectivity so that
        it points at the new connectivity, and return the stored indexes.
        """
        stored_indexes = []

        for rm_index in dao.get_generic_entity(RegionMappingIndex, original_conn_gid, 'fk_connectivity_gid'):
            source_rm = h5.load_from_index(rm_index)
            surface_index = dao.get_generic_entity(SurfaceIndex, rm_index.fk_surface_gid, 'gid')[0]

            rebased_rm = RegionMapping()
            rebased_rm.connectivity = new_connectivity_ht
            rebased_rm.surface = h5.load_from_index(surface_index)
            # The per-vertex mapping array itself is unchanged; only the links differ.
            rebased_rm.array_data = source_rm.array_data

            stored_indexes.append(h5.store_complete(rebased_rm, self.storage_path))

        return stored_indexes
示例#25
0
    def test_remove_time_series(self, time_series_region_index_factory):
        """
        Happy flow: a stored TimeSeriesRegion datatype can be removed from the project.
        """
        initial_count = self.count_all_entities(TimeSeriesRegionIndex)
        assert 0 == initial_count, "There should be no time series"

        connectivity_index = try_get_last_datatype(self.test_project.id, ConnectivityIndex)
        connectivity = h5.load_from_index(connectivity_index)
        region_mapping_index = try_get_last_datatype(self.test_project.id, RegionMappingIndex)
        region_mapping = h5.load_from_index(region_mapping_index)
        time_series_region_index_factory(connectivity, region_mapping)

        all_series = self.get_all_entities(TimeSeriesRegionIndex)
        assert 1 == len(all_series), "There should be only one time series"

        self.project_service.remove_datatype(self.test_project.id, all_series[0].gid)

        assert dao.get_datatype_by_gid(all_series[0].gid) is None, "The time series was not deleted."
示例#26
0
def function_sensors_to_surface(sensors_gid, surface_to_map_gid):
    """
    Map EEG sensors onto the head surface (skin-air).

    EEG sensor locations are typically only given on a unit sphere, i.e. they
    are effectively only identified by their orientation with respect to a
    coordinate system. This method maps those unit-vector "locations" to
    specific positions on the skin surface.

    Assumes coordinate systems are aligned, i.e. common x,y,z and origin.
    """
    sensors_dt = h5.load_from_index(ABCAdapter.load_entity_by_gid(sensors_gid))
    surface_dt = h5.load_from_index(ABCAdapter.load_entity_by_gid(surface_to_map_gid))

    return sensors_dt.sensors_to_surface(surface_dt).tolist()
示例#27
0
    def test_import_quads_no_normals(self):
        """
        Import should succeed for an OBJ file defining quads and omitting normals.
        """
        surface_index = TestFactory.import_surface_obj(self.test_user, self.test_project, self.face,
                                                       SurfaceTypesEnum.FACE_SURFACE, False)
        surface = h5.load_from_index(surface_index)

        expected_vertices = 8614
        assert expected_vertices == len(surface.vertex_normals)
        assert expected_vertices == len(surface.vertices)
        assert 17224 == len(surface.triangles)
示例#28
0
    def test_import_simple_with_normals(self):
        """
        Import should succeed for an OBJ file that already includes normals.
        """
        expected_vertices = 441
        surface_index = TestFactory.import_surface_obj(self.test_user, self.test_project, self.torus,
                                                       SurfaceTypesEnum.FACE_SURFACE, False)
        assert expected_vertices == surface_index.number_of_vertices
        assert 800 == surface_index.number_of_triangles

        surface = h5.load_from_index(surface_index)
        assert expected_vertices == len(surface.vertex_normals)
        def _create_measure(conn, op, op_dir, project_id):
            """Store a ConnectivityMeasure for `conn` and return the project's DataTypeMatrix count."""
            measure = ConnectivityMeasure()
            measure.connectivity = h5.load_from_index(conn)
            measure.array_data = numpy.array(conn.number_of_regions)

            measure_index = h5.store_complete(measure, op_dir)
            measure_index.fk_from_operation = op.id
            dao.store_entity(measure_index)

            return dao.count_datatypes(project_id, DataTypeMatrix)
示例#30
0
    def _get_grouped_space_labels(self, ts_h5):
        """
        Group the space labels of the given time series H5 by hemisphere.

        :param ts_h5: the time series H5 whose labels are requested
        :return: A structure of this form [('left', [(idx, lh_label)...]), ('right': [(idx, rh_label) ...])]
        """
        if isinstance(ts_h5, TimeSeriesRegionH5):
            # Region time series: group by the connectivity's hemisphere information.
            connectivity_gid = ts_h5.connectivity.load()
            conn_idx = self.load_entity_by_gid(connectivity_gid.hex)
            conn = h5.load_from_index(conn_idx)
            return self._connectivity_grouped_space_labels(conn)

        # BUG FIX: the result was previously computed and discarded (missing
        # `return`), so this method returned None for non-region time series.
        return ts_h5.get_grouped_space_labels()