Example #1
    def launch(self,
               original_connectivity,
               new_weights,
               new_tracts,
               interest_area_indexes,
               is_branch=False,
               **kwargs):
        """
        Method to be called when user submits changes on the
        Connectivity matrix in the Visualizer.
        """
        # note: is_branch arrives missing (rather than False) because browsers only submit checked checkboxes in forms.
        original_conn_ht = h5.load_from_index(original_connectivity)
        assert isinstance(original_conn_ht, Connectivity)

        if not is_branch:
            new_conn_ht = self._cut_connectivity(
                original_conn_ht, numpy.array(new_weights),
                numpy.array(interest_area_indexes), numpy.array(new_tracts))
            return [h5.store_complete(new_conn_ht, self.storage_path)]

        else:
            result = []
            new_conn_ht = self._branch_connectivity(
                original_conn_ht, numpy.array(new_weights),
                numpy.array(interest_area_indexes), numpy.array(new_tracts))
            new_conn_index = h5.store_complete(new_conn_ht, self.storage_path)
            result.append(new_conn_index)
            result.extend(
                self._store_related_region_mappings(original_connectivity.gid,
                                                    new_conn_ht))
            return result
Example #2
    def launch(self, view_model):
        """
        Method to be called when user submits changes on the
        Connectivity matrix in the Visualizer.
        """
        original_conn_ht = self.load_traited_by_gid(
            view_model.original_connectivity)
        assert isinstance(original_conn_ht, Connectivity)

        if not view_model.is_branch:
            new_conn_ht = self._cut_connectivity(
                original_conn_ht, view_model.new_weights,
                view_model.interest_area_indexes, view_model.new_tracts)
            return [h5.store_complete(new_conn_ht, self.storage_path)]

        else:
            result = []
            new_conn_ht = self._branch_connectivity(
                original_conn_ht, view_model.new_weights,
                view_model.interest_area_indexes, view_model.new_tracts)
            new_conn_index = h5.store_complete(new_conn_ht, self.storage_path)
            result.append(new_conn_index)
            result.extend(
                self._store_related_region_mappings(
                    view_model.original_connectivity.gid, new_conn_ht))
            return result
Example #3
    def launch(self, view_model):
        # type: (SurfaceStimulusCreatorModel) -> [StimuliSurfaceIndex]
        """
        Used for creating a `StimuliSurface` instance
        """
        stimuli_surface = self.prepare_stimuli_surface_from_view_model(view_model, view_model.surface)
        stimuli_surface_index = StimuliSurfaceIndex()
        stimuli_surface_index.fill_from_has_traits(stimuli_surface)

        h5.store_complete(stimuli_surface, self.storage_path)
        return stimuli_surface_index
Example #4
    def build(op=None):
        surface = surface_factory(cortical=True)
        lconn = LocalConnectivity()
        lconn.surface = surface
        if op is None:
            op = operation_factory()

        surface_db = h5.store_complete(surface, op.id, op.project.name)
        surface_db.fk_from_operation = op.id
        dao.store_entity(surface_db)

        lconn_db = h5.store_complete(lconn, op.id, op.project.name)
        lconn_db.fk_from_operation = op.id
        return dao.store_entity(lconn_db), lconn
Example #5
    def build(op=None):
        surface = surface_factory(cortical=True)
        lconn = LocalConnectivity()
        lconn.surface = surface
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        surface_db = h5.store_complete(surface, storage_path)
        surface_db.fk_from_operation = op.id
        dao.store_entity(surface_db)

        lconn_db = h5.store_complete(lconn, storage_path)
        lconn_db.fk_from_operation = op.id
        return dao.store_entity(lconn_db), lconn
Example #6
    def build(op=None):
        region_mapping = region_mapping_factory()
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        surface_db = h5.store_complete(region_mapping.surface, storage_path)
        surface_db.fk_from_operation = op.id
        dao.store_entity(surface_db)
        conn_db = h5.store_complete(region_mapping.connectivity, storage_path)
        conn_db.fk_from_operation = op.id
        dao.store_entity(conn_db)
        rm_db = h5.store_complete(region_mapping, storage_path)
        rm_db.fk_from_operation = op.id
        return dao.store_entity(rm_db)
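Examples #4 through #6 repeat the same three-step storage pattern: persist the datatype with h5.store_complete, attach the resulting index to the operation that produced it, and register the index with the DAO. A hypothetical helper capturing that pattern (the import paths and the fk_from_operation field are assumptions inferred from the snippets above, not a guaranteed TVB API) might look like:

from tvb.core.entities.storage import dao
from tvb.core.neocom import h5


def store_and_link(datatype, storage_path, op):
    # Sketch only: write the HasTraits datatype to HDF5, link the returned
    # index to the generating operation, then persist the index row.
    datatype_index = h5.store_complete(datatype, storage_path)
    datatype_index.fk_from_operation = op.id
    return dao.store_entity(datatype_index)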
Example #7
    def launch(self, weights, weights_delimiter, tracts, tracts_delimiter, input_data):
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :param weights: csv file containing the weights measures
        :param tracts:  csv file containing the tracts measures
        :param input_data: a reference connectivity with the additional attributes

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(weights, weights_delimiter)
        tract_matrix = self._read_csv_file(tracts, tracts_delimiter)
        FilesHelper.remove_files([weights, tracts])
        if weights_matrix.shape[0] != input_data.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], input_data.number_of_regions))

        input_connectivity = h5.load_from_index(input_data)

        result = Connectivity()
        result.centres = input_connectivity.centres
        result.region_labels = input_connectivity.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_connectivity.orientations
        result.areas = input_connectivity.areas
        result.cortical = input_connectivity.cortical
        result.hemispheres = input_connectivity.hemispheres
        result.configure()

        return h5.store_complete(result, self.storage_path)
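The _read_csv_file helper called above is not part of the excerpt; a minimal sketch of what it could do, assuming it only needs to load a square numeric matrix from a delimited text file, is:

import numpy


def _read_csv_file(path, delimiter):
    # Illustrative sketch: parse the delimited file into a 2-D numpy array.
    return numpy.loadtxt(path, delimiter=delimiter)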
Example #8
def test_store_simulator_view_model_eeg(connectivity_index_factory, surface_index_factory, region_mapping_factory,
                                        sensors_index_factory, operation_factory):
    conn = connectivity_index_factory()
    surface_idx, surface = surface_index_factory(cortical=True)
    region_mapping = region_mapping_factory()
    sensors_idx, sensors = sensors_index_factory()
    proj = ProjectionSurfaceEEG(sensors=sensors, sources=surface, projection_data=numpy.ones(3))

    op = operation_factory()
    storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
    prj_db_db = h5.store_complete(proj, storage_path)
    prj_db_db.fk_from_operation = op.id
    dao.store_entity(prj_db_db)

    seeg_monitor = EEGViewModel(projection=proj.gid, sensors=sensors.gid)
    seeg_monitor.region_mapping = region_mapping.gid.hex
    sim_view_model = SimulatorAdapterModel()
    sim_view_model.connectivity = conn.gid
    sim_view_model.monitors = [seeg_monitor]

    op = operation_factory()
    storage_path = FilesHelper().get_project_folder(op.project, str(op.id))

    h5.store_view_model(sim_view_model, storage_path)

    loaded_sim_view_model = h5.load_view_model(sim_view_model.gid, storage_path)

    assert isinstance(sim_view_model, SimulatorAdapterModel)
    assert isinstance(loaded_sim_view_model, SimulatorAdapterModel)
    assert sim_view_model.monitors[0].projection == loaded_sim_view_model.monitors[0].projection
Example #9
    def launch(self, view_model):
        # type: (RegionStimulusCreatorModel) -> [StimuliRegionIndex]
        """
        Used for creating a `StimuliRegion` instance
        """
        stimuli_region = StimuliRegion()
        stimuli_region.connectivity = Connectivity()
        stimuli_region.connectivity.gid = view_model.connectivity
        stimuli_region.weight = view_model.weight
        stimuli_region.temporal = view_model.temporal

        stimuli_region_idx = StimuliRegionIndex()
        stimuli_region_idx.fill_from_has_traits(stimuli_region)

        h5.store_complete(stimuli_region, self.storage_path)
        return stimuli_region_idx
Example #10
    def launch(self, view_model):
        # type: (CSVConnectivityImporterModel) -> ConnectivityIndex
        """
        Execute import operations: process the weights and tracts csv files, then use
        the reference connectivity passed as input_data for the rest of the attributes.

        :raises LaunchException: when the number of nodes in CSV files doesn't match the one in the connectivity
        """
        weights_matrix = self._read_csv_file(view_model.weights, view_model.weights_delimiter)
        tract_matrix = self._read_csv_file(view_model.tracts, view_model.tracts_delimiter)
        self.storage_interface.remove_files([view_model.weights, view_model.tracts])
        conn_index = self.load_entity_by_gid(view_model.input_data)
        if weights_matrix.shape[0] != conn_index.number_of_regions:
            raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                                  "has only %s nodes." % (weights_matrix.shape[0], conn_index.number_of_regions))

        input_connectivity = h5.load_from_index(conn_index)

        result = Connectivity()
        result.centres = input_connectivity.centres
        result.region_labels = input_connectivity.region_labels
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        result.orientations = input_connectivity.orientations
        result.areas = input_connectivity.areas
        result.cortical = input_connectivity.cortical
        result.hemispheres = input_connectivity.hemispheres
        result.configure()

        return h5.store_complete(result, self.storage_path)
Example #11
    def launch(self, view_model):
        # type: (ConnectivityMeasureImporterModel) -> [ConnectivityMeasureIndex]
        """
        Execute import operations:
        """
        try:
            data = self.read_matlab_data(view_model.data_file, view_model.dataset_name)
            measurement_count, node_count = data.shape
            connectivity = self.load_traited_by_gid(view_model.connectivity)

            if node_count != connectivity.number_of_regions:
                raise LaunchException('The measurements are for %s nodes but the selected connectivity'
                                      ' contains %s nodes' % (node_count, connectivity.number_of_regions))

            measures = []
            self.generic_attributes.user_tag_2 = "conn_%d" % node_count

            for i in range(measurement_count):
                cm_data = data[i, :]

                measure = ConnectivityMeasure()
                measure.array_data = cm_data
                measure.connectivity = connectivity
                measure.title = "Measure %d for Connectivity with %d nodes." % ((i + 1), node_count)

                cm_idx = h5.store_complete(measure, self.storage_path)
                measures.append(cm_idx)
            return measures

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example #12
    def launch(self,
               file_type,
               data_file,
               data_file_part2,
               should_center=False):
        """
        Execute import operations:
        """
        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            surface = parser.parse(data_file,
                                   data_file_part2,
                                   file_type,
                                   should_center=should_center)
            surface.compute_triangle_normals()
            surface.compute_vertex_normals()
            validation_result = surface.validate()

            if validation_result.warnings:
                self.add_operation_additional_info(validation_result.summary())
            self.generic_attributes.user_tag_1 = surface.surface_type
            surface_idx = h5.store_complete(surface, self.storage_path)
            return [surface_idx]
        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
Example #13
    def launch(self, view_model):
        # type: (NodeComplexCoherenceModel) -> [ComplexCoherenceSpectrumIndex]
        """
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the complex coherence for the specified time series
        """
        # TODO ---------- Iterate over slices and compose final result ------------##
        time_series = h5.load_from_index(self.input_time_series_index)
        ht_result = calculate_complex_cross_coherence(time_series, view_model.epoch_length,
                                                      view_model.segment_length,
                                                      view_model.segment_shift,
                                                      view_model.window_function,
                                                      view_model.average_segments,
                                                      view_model.subtract_epoch_average,
                                                      view_model.zeropad, view_model.detrend_ts,
                                                      view_model.max_freq, view_model.npat)
        self.log.debug("got ComplexCoherenceSpectrum result")
        self.log.debug("ComplexCoherenceSpectrum segment_length is %s" % (str(ht_result.segment_length)))
        self.log.debug("ComplexCoherenceSpectrum epoch_length is %s" % (str(ht_result.epoch_length)))
        self.log.debug("ComplexCoherenceSpectrum windowing_function is %s" % (str(ht_result.windowing_function)))

        complex_coherence_index = h5.store_complete(ht_result, self.storage_path)

        result_path = h5.path_for(self.storage_path, ComplexCoherenceSpectrumH5, complex_coherence_index.gid)
        ica_h5 = ComplexCoherenceSpectrumH5(path=result_path)

        self.fill_index_from_h5(complex_coherence_index, ica_h5)
        ica_h5.close()

        return complex_coherence_index
Example #14
    def launch(self, view_model):
        # type: (PearsonCorrelationCoefficientAdapterModel) -> [CorrelationCoefficientsIndex]
        """
        Launch algorithm and build results.
        Compute the node-pairwise pearson correlation coefficient of the given input 4D TimeSeries  datatype.

        The result will contain values between -1 and 1, inclusive.

        :param time_series: the input time-series for which correlation coefficient should be computed
        :param t_start: the physical time interval start for the analysis
        :param t_end: physical time, interval end
        :returns: the correlation coefficient for the given time series
        :rtype: `CorrelationCoefficients`
        """
        with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
            ts_labels_ordering = ts_h5.labels_ordering.load()
            result = self._compute_correlation_coefficients(ts_h5, view_model.t_start, view_model.t_end)

        if isinstance(self.input_time_series_index, TimeSeriesEEGIndex) \
                or isinstance(self.input_time_series_index, TimeSeriesMEGIndex) \
                or isinstance(self.input_time_series_index, TimeSeriesSEEGIndex):
            labels_ordering = ["Sensor", "Sensor", "1", "1"]
        else:
            labels_ordering = list(CorrelationCoefficients.labels_ordering.default)
            labels_ordering[0] = ts_labels_ordering[2]
            labels_ordering[1] = ts_labels_ordering[2]

        corr_coef = CorrelationCoefficients()
        corr_coef.array_data = result
        corr_coef.source = TimeSeries(gid=view_model.time_series)
        corr_coef.labels_ordering = labels_ordering

        return h5.store_complete(corr_coef, self.storage_path)
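The _compute_correlation_coefficients helper is likewise outside the excerpt; conceptually it reduces to a node-pairwise Pearson correlation over the selected time window. A self-contained sketch of that computation (the function name, argument names and the (time, nodes) layout are illustrative assumptions):

import numpy


def pairwise_pearson(data, t_start, t_end, sample_period=1.0):
    # data is assumed to have shape (time_points, nodes); keep only the window of interest.
    start = int(t_start / sample_period)
    end = int(t_end / sample_period)
    windowed = data[start:end, :]
    # numpy.corrcoef expects one variable per row, so transpose to (nodes, time_points).
    return numpy.corrcoef(windowed.T)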
Example #15
    def test_prepare_inputs_with_eeg_monitor(self, operation_factory,
                                             simulator_factory,
                                             surface_index_factory,
                                             sensors_index_factory,
                                             region_mapping_index_factory,
                                             connectivity_index_factory):
        surface_idx, surface = surface_index_factory(cortical=True)
        sensors_idx, sensors = sensors_index_factory()
        proj = ProjectionSurfaceEEG(sensors=sensors,
                                    sources=surface,
                                    projection_data=numpy.ones(3))

        op = operation_factory()
        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        prj_db_db = h5.store_complete(proj, storage_path)
        prj_db_db.fk_from_operation = op.id
        dao.store_entity(prj_db_db)

        connectivity = connectivity_index_factory(76, op)
        rm_index = region_mapping_index_factory(conn_gid=connectivity.gid,
                                                surface_gid=surface_idx.gid)

        eeg_monitor = EEGViewModel(projection=proj.gid, sensors=sensors.gid)
        eeg_monitor.region_mapping = rm_index.gid

        sim_folder, sim_gid = simulator_factory(op=op,
                                                monitor=eeg_monitor,
                                                conn_gid=connectivity.gid)
        hpc_client = HPCSchedulerClient()
        input_files = hpc_client._prepare_input(op, sim_gid)
        assert len(input_files) == 11
Example #16
    def launch(self, view_model):
        result = DummyDataType()
        if view_model.param_5 is not None:
            result.row1 = str(view_model.param_5)
        if view_model.param_6 is not None:
            result.row2 = str(view_model.param_6)
        return h5.store_complete(result, self.storage_path)
Example #17
    def launch(self, view_model):
        # type: (ProjectionMatrixImporterModel) -> [ProjectionMatrixIndex]
        """
        Creates ProjectionMatrix entity from uploaded data.

        :raises LaunchException: when
                    * no projection_file or sensors are specified
                    * the dataset is invalid
                    * number of sensors is different from the one in dataset
        """
        if view_model.projection_file is None:
            raise LaunchException(
                "Please select MATLAB file which contains data to import")

        if view_model.sensors is None:
            raise LaunchException(
                "No sensors selected. Please initiate upload again and select one."
            )

        if view_model.surface is None:
            raise LaunchException(
                "No source selected. Please initiate upload again and select a source."
            )

        surface_index = self.load_entity_by_gid(view_model.surface)
        expected_surface_shape = surface_index.number_of_vertices

        sensors_index = self.load_entity_by_gid(view_model.sensors)
        expected_sensors_shape = sensors_index.number_of_sensors

        self.logger.debug("Reading projection matrix from uploaded file...")
        if view_model.projection_file.endswith(".mat"):
            projection_data = self.read_matlab_data(view_model.projection_file,
                                                    view_model.dataset_name)
        else:
            projection_data = self.read_list_data(view_model.projection_file)

        if projection_data is None or len(projection_data) == 0:
            raise LaunchException("Invalid (empty) dataset...")

        if projection_data.shape[0] != expected_sensors_shape:
            raise LaunchException(
                "Invalid Projection Matrix shape[0]: %d Expected: %d" %
                (projection_data.shape[0], expected_sensors_shape))

        if projection_data.shape[1] != expected_surface_shape:
            raise LaunchException(
                "Invalid Projection Matrix shape[1]: %d Expected: %d" %
                (projection_data.shape[1], expected_surface_shape))

        projection_matrix_type = determine_projection_type(sensors_index)
        surface_ht = h5.load_from_index(surface_index)
        sensors_ht = h5.load_from_index(sensors_index)
        projection_matrix = ProjectionMatrix(
            sources=surface_ht,
            sensors=sensors_ht,
            projection_type=projection_matrix_type,
            projection_data=projection_data)
        return h5.store_complete(projection_matrix, self.storage_path)
Example #18
    def _create_volume(self):
        volume = Volume()
        volume.origin = numpy.array([[0.0, 0.0, 0.0]])
        volume.voxel_size = numpy.array([self.parser.zooms[0], self.parser.zooms[1], self.parser.zooms[2]])
        if self.parser.units is not None and len(self.parser.units) > 0:
            volume.voxel_unit = self.parser.units[0]

        return h5.store_complete(volume, self.storage_path), volume
Example #19
    def build(data=4, op=None, cortical=False):
        surface = surface_factory(data, cortical=cortical)
        if op is None:
            op = operation_factory()

        surface_db = h5.store_complete(surface, op.id, op.project.name)
        surface_db.fk_from_operation = op.id
        return dao.store_entity(surface_db), surface
Example #20
    def build_connectivity_measure(self, result, key, connectivity, title="", label_x="", label_y=""):
        measure = ConnectivityMeasure()
        measure.array_data = result[key]
        measure.connectivity = connectivity
        measure.title = title
        measure.label_x = label_x
        measure.label_y = label_y
        return h5.store_complete(measure, self.storage_path)
Example #21
    def build(type="EEG", nr_sensors=3, op=None):
        sensors = sensors_factory(type, nr_sensors)
        if op is None:
            op = operation_factory()

        sensors_db = h5.store_complete(sensors, op.id, op.project.name)
        sensors_db.fk_from_operation = op.id
        return dao.store_entity(sensors_db), sensors
Example #22
    def launch(self, data_file, **kwargs):
        try:
            parser = NetworkxParser(**kwargs)
            net = networkx.read_gpickle(data_file)
            connectivity = parser.parse(net)
            return h5.store_complete(connectivity, self.storage_path)
        except ParseException as excep:
            self.log.exception("Could not process Connectivity")
            raise LaunchException(excep)
Example #23
    def build(data=4, op=None, conn=None):
        if conn is None:
            conn = connectivity_factory(data)
        if op is None:
            op = operation_factory()

        conn_db = h5.store_complete(conn, op.id, op.project.name)
        conn_db.fk_from_operation = op.id
        return dao.store_entity(conn_db)
Example #24
    def _create_mri(self, volume, title):
        mri = StructuralMRI()
        mri.title = title
        mri.dimensions_labels = ["X", "Y", "Z"]
        mri.weighting = "T1"
        mri.array_data = self.parser.parse()
        mri.volume = volume

        return h5.store_complete(mri, self.storage_path)
Example #25
    def build(data=4, op=None):
        conn = connectivity_factory(data)
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        conn_db = h5.store_complete(conn, storage_path)
        conn_db.fk_from_operation = op.id
        return dao.store_entity(conn_db)
Example #26
    def build(data=4, op=None, cortical=False):
        surface = surface_factory(data, cortical=cortical)
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        surface_db = h5.store_complete(surface, storage_path)
        surface_db.fk_from_operation = op.id
        return dao.store_entity(surface_db), surface
Example #27
    def build(type="EEG", nr_sensors=3, op=None):
        sensors = sensors_factory(type, nr_sensors)
        if op is None:
            op = operation_factory()

        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        sensors_db = h5.store_complete(sensors, storage_path)
        sensors_db.fk_from_operation = op.id
        return dao.store_entity(sensors_db), sensors
Example #28
    def launch(self, view_model):
        # type: (FCDAdapterModel) -> [FcdIndex, ConnectivityMeasureIndex]
        """
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the fcd index for the computed fcd matrix on the given time-series, with that sw and that sp
        """
        with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
            [fcd, fcd_segmented, eigvect_dict,
             eigval_dict] = self._compute_fcd_matrix(ts_h5)
            connectivity_gid = ts_h5.connectivity.load()
            connectivity = self.load_traited_by_gid(connectivity_gid)

        # list to store: fcd index, fcd_segmented index (eventually), and connectivity measure indexes
        result = []

        # Create an index for the computed fcd.
        fcd_index = FcdIndex()
        fcd_h5_path = h5.path_for(self.storage_path, FcdH5, fcd_index.gid)
        with FcdH5(fcd_h5_path) as fcd_h5:
            self._populate_fcd_h5(fcd_h5, fcd, fcd_index.gid,
                                  self.input_time_series_index.gid,
                                  view_model.sw, view_model.sp)
            self._populate_fcd_index(fcd_index,
                                     self.input_time_series_index.gid, fcd_h5)
        result.append(fcd_index)

        if np.amax(fcd_segmented) == 1.1:
            result_fcd_segmented_index = FcdIndex()
            result_fcd_segmented_h5_path = h5.path_for(
                self.storage_path, FcdH5, result_fcd_segmented_index.gid)
            with FcdH5(
                    result_fcd_segmented_h5_path) as result_fcd_segmented_h5:
                self._populate_fcd_h5(result_fcd_segmented_h5, fcd_segmented,
                                      result_fcd_segmented_index.gid,
                                      self.input_time_series_index.gid,
                                      view_model.sw, view_model.sp)
                self._populate_fcd_index(result_fcd_segmented_index,
                                         self.input_time_series_index.gid,
                                         result_fcd_segmented_h5)
            result.append(result_fcd_segmented_index)

        for mode in eigvect_dict.keys():
            for var in eigvect_dict[mode].keys():
                for ep in eigvect_dict[mode][var].keys():
                    for eig in range(3):
                        cm_data = eigvect_dict[mode][var][ep][eig]
                        measure = ConnectivityMeasure()
                        measure.connectivity = connectivity
                        measure.array_data = cm_data
                        measure.title = "Epoch # %d, eigenvalue = %s, variable = %s, " \
                                        "mode = %s." % (ep, eigval_dict[mode][var][ep][eig], var, mode)
                        cm_index = h5.store_complete(measure,
                                                     self.storage_path)
                        result.append(cm_index)
        return result
Example #29
    def launch(self, sensors_file, sensors_type):
        """
        Creates required sensors from the uploaded file.

        :param sensors_file: the file containing sensor data
        :param sensors_type: a string from "EEG Sensors", "MEG sensors", "Internal Sensors"

        :returns: a list of sensors instances of the specified type

        :raises LaunchException: when
                    * no sensors_file specified
                    * sensors_type is invalid (not one of the mentioned options)
                    * sensors_type is "MEG sensors" and no orientation is specified
        """
        if sensors_file is None:
            raise LaunchException(
                "Please select sensors file which contains data to import")

        self.logger.debug("Create sensors instance")
        if sensors_type == SensorsEEG.sensors_type.default:
            sensors_inst = SensorsEEG()
        elif sensors_type == SensorsMEG.sensors_type.default:
            sensors_inst = SensorsMEG()
        elif sensors_type == SensorsInternal.sensors_type.default:
            sensors_inst = SensorsInternal()
        else:
            exception_str = "Could not determine sensors type (selected option %s)" % sensors_type
            raise LaunchException(exception_str)

        locations = self.read_list_data(sensors_file, usecols=[1, 2, 3])

        # NOTE: TVB has the nose pointing -y and left ear pointing +x
        # If the sensors are in CTF coordinates : nose pointing +x left ear +y
        # to rotate the sensors by -90 along z uncomment below
        # locations = numpy.array([[0, 1, 0], [-1, 0, 0], [0, 0, 1]]).dot(locations.T).T
        sensors_inst.locations = locations
        sensors_inst.labels = self.read_list_data(sensors_file,
                                                  dtype=MEMORY_STRING,
                                                  usecols=[0])
        sensors_inst.number_of_sensors = sensors_inst.labels.size

        if isinstance(sensors_inst, SensorsMEG):
            try:
                sensors_inst.orientations = self.read_list_data(
                    sensors_file, usecols=[4, 5, 6])
                sensors_inst.has_orientation = True
            except IndexError:
                raise LaunchException(
                    "Uploaded file does not contains sensors orientation.")

        sensors_inst.configure()
        self.logger.debug("Sensors instance ready to be stored")

        sensors_idx = h5.store_complete(sensors_inst, self.storage_path)
        self.generic_attributes.user_tag_1 = sensors_inst.sensors_type
        return sensors_idx
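For reference, the read_list_data calls above select columns in the same way numpy.loadtxt does with usecols; a rough equivalent for a whitespace-separated sensors file (a sketch under that assumption, not the importer's actual reader) would be:

import numpy


def read_sensor_columns(sensors_file):
    # Column 0 holds the sensor labels, columns 1-3 the x, y, z locations.
    labels = numpy.loadtxt(sensors_file, dtype=str, usecols=[0])
    locations = numpy.loadtxt(sensors_file, usecols=[1, 2, 3])
    return labels, locations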
Example #30
    def launch(self, view_model):
        # type: (NetworkxImporterModel) -> [ConnectivityIndex]
        try:
            parser = NetworkxParser(view_model)
            net = pandas.read_pickle(view_model.data_file)
            connectivity = parser.parse(net)
            return h5.store_complete(connectivity, self.storage_path)
        except ParseException as excep:
            self.log.exception("Could not process Connectivity")
            raise LaunchException(excep)