コード例 #1
0
    def _fill_result_index(self, result_index, result_signal_shape):
        """Populate *result_index* with metadata for a region time series result.

        Sets the type and shape information derived from *result_signal_shape*,
        then mirrors the linked-entity GIDs and the sampling/labeling metadata
        of ``self.input_time_series_index`` onto the result index.
        """
        source_index = self.input_time_series_index

        result_index.time_series_type = TimeSeriesRegion.__name__
        result_index.data_ndim = len(result_signal_shape)
        shape_meta = prepare_array_shape_meta(result_signal_shape)
        (result_index.data_length_1d, result_index.data_length_2d,
         result_index.data_length_3d, result_index.data_length_4d) = shape_meta

        # Mirror the foreign keys of the linked entities from the input index.
        for attr_name in ('fk_connectivity_gid', 'fk_region_mapping_gid',
                          'fk_region_mapping_volume_gid'):
            setattr(result_index, attr_name, getattr(source_index, attr_name))

        # Mirror sampling, labeling and display metadata from the input index.
        for attr_name in ('sample_period', 'sample_period_unit', 'sample_rate',
                          'labels_ordering', 'labels_dimensions',
                          'has_volume_mapping', 'title'):
            setattr(result_index, attr_name, getattr(source_index, attr_name))
コード例 #2
0
    def launch(self, view_model):
        # type: (GIFTITimeSeriesImporterModel) -> [TimeSeriesSurfaceIndex]
        """
        Execute import operations: parse the GIFTI data file, persist the
        resulting surface time series to H5 storage and build its DB index.

        :raises LaunchException: when no surface was selected, when the number
            of time-series values does not match the surface vertex count, or
            when the GIFTI file cannot be parsed.
        """
        if view_model.surface is None:
            raise LaunchException(
                "No surface selected. Please initiate upload again and select a brain surface."
            )

        parser = GIFTIParser(self.storage_path, self.operation_id)
        try:
            partial_time_series, gifti_data_arrays = parser.parse(
                view_model.data_file)

            ts_idx = TimeSeriesSurfaceIndex()
            ts_h5_path = h5.path_for(self.storage_path, TimeSeriesSurfaceH5,
                                     ts_idx.gid)

            # Use the H5 file as a context manager (as the NIFTI importer does)
            # so the handle is closed on every path; the previous explicit
            # close() was skipped when the vertex-count check raised, leaking
            # the open H5 file.
            with TimeSeriesSurfaceH5(ts_h5_path) as ts_h5:
                # todo : make sure that write_time_slice is not required here
                for data_array in gifti_data_arrays:
                    ts_h5.write_data_slice([data_array.data])

                ts_h5.store(partial_time_series,
                            scalars_only=True,
                            store_references=False)
                ts_h5.gid.store(uuid.UUID(ts_idx.gid))

                ts_data_shape = ts_h5.read_data_shape()
                surface = self.load_entity_by_gid(view_model.surface)
                if surface.number_of_vertices != ts_data_shape[1]:
                    msg = "Imported time series doesn't have values for all surface vertices. Surface has %d vertices " \
                          "while time series has %d values." % (surface.number_of_vertices, ts_data_shape[1])
                    raise LaunchException(msg)
                ts_h5.surface.store(uuid.UUID(surface.gid))
                ts_idx.fk_surface_gid = surface.gid

            ts_idx.sample_period_unit = partial_time_series.sample_period_unit
            ts_idx.sample_period = partial_time_series.sample_period
            ts_idx.sample_rate = partial_time_series.sample_rate
            ts_idx.labels_ordering = json.dumps(
                partial_time_series.labels_ordering)
            ts_idx.labels_dimensions = json.dumps(
                partial_time_series.labels_dimensions)
            ts_idx.data_ndim = len(ts_data_shape)
            ts_idx.data_length_1d, ts_idx.data_length_2d, ts_idx.data_length_3d, ts_idx.data_length_4d = prepare_array_shape_meta(
                ts_data_shape)

            return [ts_idx]

        except ParseException as excep:
            logger = get_logger(__name__)
            logger.exception(excep)
            raise LaunchException(excep)
コード例 #3
0
    def _create_time_series(self, volume, title):
        """Build a TimeSeriesVolume from the parsed NIFTI image, persist it
        to H5 storage and return the corresponding index entity."""
        ts = TimeSeriesVolume()
        ts.title = title
        ts.labels_ordering = ["Time", "X", "Y", "Z"]
        ts.start_time = 0.0
        ts.volume = volume

        # The fourth zoom entry (when present) carries the temporal
        # resolution; fall back to 1 sec when there is no time dimension.
        zooms = self.parser.zooms
        ts.sample_period = float(zooms[3]) if len(zooms) > 3 else 1

        units = self.parser.units
        if units is not None and len(units) > 1:
            ts.sample_period_unit = units[1]

        storage_path = h5.path_for(self.storage_path, TimeSeriesVolumeH5,
                                   ts.gid)
        nifti_data = self.parser.parse()
        # Write the volume one time-slice at a time, then read back the
        # resulting on-disk shape for the index metadata.
        with TimeSeriesVolumeH5(storage_path) as ts_h5:
            ts_h5.store(ts, scalars_only=True, store_references=True)
            for time_idx in range(self.parser.time_dim_size):
                ts_h5.write_data_slice([nifti_data[:, :, :, time_idx, ...]])
            stored_shape = ts_h5.read_data_shape()

        ts_idx = TimeSeriesVolumeIndex()
        ts_idx.fill_from_has_traits(ts)
        ts_idx.data_ndim = len(stored_shape)
        (ts_idx.data_length_1d, ts_idx.data_length_2d,
         ts_idx.data_length_3d,
         ts_idx.data_length_4d) = prepare_array_shape_meta(stored_shape)
        return ts_idx
コード例 #4
0
    def launch(self,
               data_file,
               dataset_name,
               structure_path='',
               transpose=False,
               slice=None,
               sampling_rate=1000,
               start_time=0,
               tstype_parameters=None):
        """Import a time series from a MATLAB file and return its index.

        Reads *dataset_name* (optionally at *structure_path*) from *data_file*,
        optionally transposes and slices it, writes it to H5 storage via the
        builder registered for ``self.tstype`` and fills the index entity.

        NOTE: the parameter name ``slice`` shadows the builtin; it is kept for
        backward compatibility with existing callers.

        :raises LaunchException: when the MAT file cannot be parsed.
        """
        try:
            data = read_nested_mat_file(data_file, dataset_name,
                                        structure_path)

            if transpose:
                data = data.T
            if slice:
                data = data[parse_slice(slice)]

            ts, ts_idx, ts_h5 = self.ts_builder[self.tstype](self, data.shape,
                                                             tstype_parameters)
            # Close the H5 handle even when a write/store below fails;
            # previously any error here leaked the open file.
            try:
                ts.start_time = start_time
                ts.sample_period_unit = 's'

                ts_h5.write_time_slice(numpy.r_[:data.shape[0]] * ts.sample_period)
                # we expect empirical data shape to be time, channel.
                # But tvb expects time, state, channel, mode. Introduce those dimensions
                ts_h5.write_data_slice(data[:, numpy.newaxis, :, numpy.newaxis])

                data_shape = ts_h5.read_data_shape()
                ts_h5.nr_dimensions.store(len(data_shape))
                ts_h5.gid.store(uuid.UUID(ts_idx.gid))
                ts_h5.sample_period.store(ts.sample_period)
                ts_h5.sample_period_unit.store(ts.sample_period_unit)
                ts_h5.sample_rate.store(ts.sample_rate)
                ts_h5.start_time.store(ts.start_time)
                ts_h5.labels_ordering.store(ts.labels_ordering)
                ts_h5.labels_dimensions.store(ts.labels_dimensions)
                ts_h5.title.store(ts.title)
            finally:
                ts_h5.close()

            ts_idx.title = ts.title
            ts_idx.time_series_type = type(ts).__name__
            ts_idx.sample_period_unit = ts.sample_period_unit
            ts_idx.sample_period = ts.sample_period
            ts_idx.sample_rate = ts.sample_rate
            ts_idx.labels_dimensions = json.dumps(ts.labels_dimensions)
            ts_idx.labels_ordering = json.dumps(ts.labels_ordering)
            ts_idx.data_ndim = len(data_shape)
            ts_idx.data_length_1d, ts_idx.data_length_2d, ts_idx.data_length_3d, ts_idx.data_length_4d = prepare_array_shape_meta(
                data_shape)

            return ts_idx
        except ParseException as ex:
            self.log.exception(ex)
            raise LaunchException(ex)
コード例 #5
0
    def launch(self, view_model):
        # type: (RegionMatTimeSeriesImporterModel) -> [TimeSeriesRegionIndex, TimeSeriesEEGIndex]
        """Import a region/EEG time series from a MATLAB file.

        Reads the configured dataset from ``view_model.data_file``, optionally
        transposes and slices it, writes it to H5 storage via the builder
        registered for ``self.tstype`` and returns the filled index entity.

        :raises LaunchException: when the MAT file cannot be parsed.
        """
        try:
            data = read_nested_mat_file(view_model.data_file,
                                        view_model.dataset_name,
                                        view_model.structure_path)

            if view_model.transpose:
                data = data.T
            if view_model.slice:
                data = data[parse_slice(view_model.slice)]

            datatype_index = self.load_entity_by_gid(view_model.datatype)
            ts, ts_idx, ts_h5 = self.ts_builder[self.tstype](self, data.shape,
                                                             datatype_index)
            # Close the H5 handle even when a write/store below fails;
            # previously any error here leaked the open file.
            try:
                ts.start_time = view_model.start_time
                ts.sample_period_unit = 's'

                ts_h5.write_time_slice(numpy.r_[:data.shape[0]] * ts.sample_period)
                # we expect empirical data shape to be time, channel.
                # But tvb expects time, state, channel, mode. Introduce those dimensions
                ts_h5.write_data_slice(data[:, numpy.newaxis, :, numpy.newaxis])

                data_shape = ts_h5.read_data_shape()
                ts_h5.nr_dimensions.store(len(data_shape))
                ts_h5.gid.store(uuid.UUID(ts_idx.gid))
                ts_h5.sample_period.store(ts.sample_period)
                ts_h5.sample_period_unit.store(ts.sample_period_unit)
                ts_h5.sample_rate.store(ts.sample_rate)
                ts_h5.start_time.store(ts.start_time)
                ts_h5.labels_ordering.store(ts.labels_ordering)
                ts_h5.labels_dimensions.store(ts.labels_dimensions)
                ts_h5.title.store(ts.title)
            finally:
                ts_h5.close()

            ts_idx.title = ts.title
            ts_idx.time_series_type = type(ts).__name__
            ts_idx.sample_period_unit = ts.sample_period_unit
            ts_idx.sample_period = ts.sample_period
            ts_idx.sample_rate = ts.sample_rate
            ts_idx.labels_dimensions = json.dumps(ts.labels_dimensions)
            ts_idx.labels_ordering = json.dumps(ts.labels_ordering)
            ts_idx.data_ndim = len(data_shape)
            ts_idx.data_length_1d, ts_idx.data_length_2d, ts_idx.data_length_3d, ts_idx.data_length_4d = prepare_array_shape_meta(
                data_shape)

            return ts_idx
        except ParseException as ex:
            self.log.exception(ex)
            raise LaunchException(ex)