Example #1
    def build(connectivity, region_mapping, test_user=None, test_project=None):
        time = numpy.linspace(0, 1000, 4000)
        data = numpy.zeros((time.size, 1, 3, 1))
        data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 40)
        data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 200)
        data[:, 0, 2, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 100) + \
                           numpy.sin(2 * numpy.pi * time / 1000.0 * 300)

        ts = TimeSeriesRegion(time=time,
                              data=data,
                              sample_period=1.0 / 4000,
                              connectivity=connectivity,
                              region_mapping=region_mapping)

        op = operation_factory(test_user=test_user, test_project=test_project)

        ts_db = TimeSeriesRegionIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesRegionH5(ts_h5_path) as f:
            f.store(ts)
            f.sample_rate.store(ts.sample_rate)
            f.nr_dimensions.store(ts.data.ndim)

        ts_db = dao.store_entity(ts_db)
        return ts_db
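The factory above builds a small synthetic TimeSeriesRegion before indexing and storing it. The only non-obvious part is the 4D array layout, so here is a numpy-only sketch of the same data, shown just to make the axis order explicit; reading the axes as (time, state variable, region, mode) is an interpretation based on the shapes used in the example, not something stated in it.

    import numpy

    # Same synthetic signal as in the factory, rebuilt outside TVB.
    # Axes are read here as (time, state variable, region, mode).
    time = numpy.linspace(0, 1000, 4000)        # 1000 ms sampled at 4000 points
    data = numpy.zeros((time.size, 1, 3, 1))    # 1 state variable, 3 regions, 1 mode
    for i, freq in enumerate((40, 200, 100)):
        data[:, 0, i, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * freq)
    data[:, 0, 2, 0] += numpy.sin(2 * numpy.pi * time / 1000.0 * 300)

    assert data.shape == (4000, 1, 3, 1)
    print(data.ndim)                            # 4, the value stored in nr_dimensions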
Example #2
    def create_region_ts(self, data_shape, connectivity):
        if connectivity.number_of_regions != data_shape[1]:
            raise LaunchException("Data has %d channels but the connectivity has %d nodes"
                                  % (data_shape[1], connectivity.number_of_regions))
        ts_idx = TimeSeriesRegionIndex()
        ts_idx.fk_connectivity_gid = connectivity.gid
        ts_idx.has_surface_mapping = True

        ts_h5_path = h5.path_for(self.storage_path, TimeSeriesRegionH5, ts_idx.gid)
        ts_h5 = TimeSeriesRegionH5(ts_h5_path)
        ts_h5.connectivity.store(uuid.UUID(connectivity.gid))

        return TimeSeriesRegion(), ts_idx, ts_h5
Example #3
    def build(connectivity, region_mapping, ts=None, test_user=None, test_project=None, op=None):
        if ts is None:
            ts = time_series_region_factory(connectivity, region_mapping)

        if not op:
            op = operation_factory(test_user=test_user, test_project=test_project)

        ts_db = TimeSeriesRegionIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesRegionH5(ts_h5_path) as f:
            f.store(ts)
            f.sample_rate.store(ts.sample_rate)
            f.nr_dimensions.store(ts.data.ndim)

        ts_db = dao.store_entity(ts_db)
        return ts_db
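Example #3 is the same builder as Example #1, except that the caller may supply a pre-built TimeSeriesRegion and Operation. A hypothetical pytest-style usage sketch follows; apart from time_series_region_factory and operation_factory, which appear in the builder itself, every fixture name (time_series_region_index_factory, connectivity_factory, region_mapping_factory) is an assumption about how these builders are exposed, not something taken from the listing.

    # Hypothetical pytest usage; the fixture names are assumptions.
    def test_time_series_region_index(time_series_region_index_factory, time_series_region_factory,
                                      connectivity_factory, region_mapping_factory, operation_factory):
        connectivity = connectivity_factory()
        region_mapping = region_mapping_factory()

        # Reuse one traited TimeSeriesRegion and one Operation instead of
        # letting the factory create them internally.
        ts = time_series_region_factory(connectivity, region_mapping)
        op = operation_factory()

        ts_index = time_series_region_index_factory(connectivity, region_mapping, ts=ts, op=op)
        assert ts_index.fk_from_operation == op.id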
Example #4
    def launch(self, view_model):
        # type: (BalloonModelAdapterModel) -> [TimeSeriesRegionIndex]
        """
        Launch algorithm and build results.

        :param view_model: the adapter view model, holding the input time series used as neural activation in the Balloon Model
        :returns: the index of the simulated BOLD signal
        :rtype: `TimeSeriesRegionIndex`
        """
        input_time_series_h5 = h5.h5_file_for_index(
            self.input_time_series_index)
        time_line = input_time_series_h5.read_time_page(0, self.input_shape[0])

        bold_signal_index = TimeSeriesRegionIndex()
        bold_signal_h5_path = h5.path_for(self.storage_path,
                                          TimeSeriesRegionH5,
                                          bold_signal_index.gid)
        bold_signal_h5 = TimeSeriesRegionH5(bold_signal_h5_path)
        bold_signal_h5.gid.store(uuid.UUID(bold_signal_index.gid))
        self._fill_result_h5(bold_signal_h5, input_time_series_h5)

        # ---------- Iterate over slices and compose the final result ----------

        node_slice = [
            slice(self.input_shape[0]),
            slice(self.input_shape[1]), None,
            slice(self.input_shape[3])
        ]
        small_ts = TimeSeries()
        small_ts.sample_period = self.input_time_series_index.sample_period
        small_ts.sample_period_unit = self.input_time_series_index.sample_period_unit
        small_ts.time = time_line

        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = input_time_series_h5.read_data_slice(
                tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_bold = self.algorithm.evaluate()
            bold_signal_h5.write_data_slice_on_grow_dimension(
                partial_bold.data, grow_dimension=2)

        bold_signal_h5.write_time_slice(time_line)
        bold_signal_shape = bold_signal_h5.data.shape
        bold_signal_h5.nr_dimensions.store(len(bold_signal_shape))
        bold_signal_h5.close()
        input_time_series_h5.close()

        self._fill_result_index(bold_signal_index, bold_signal_shape)
        return bold_signal_index
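The loop in launch keeps memory bounded by processing one region at a time: it reads a single-node slice from the input H5 file, runs the Balloon model on it, and appends the partial BOLD signal to the result file along dimension 2. The numpy-only sketch below reproduces just that slicing and accumulation pattern; fake_balloon stands in for self.algorithm.evaluate(), and concatenating on axis 2 is an assumption about what write_data_slice_on_grow_dimension(..., grow_dimension=2) amounts to for the stored dataset.

    import numpy

    # Stand-in input with the layout used above: (time, state variables, nodes, modes).
    input_shape = (100, 1, 5, 1)
    full_input = numpy.random.random(input_shape)

    def fake_balloon(ts_data):
        # Placeholder for self.algorithm.evaluate(); any per-node transform works here.
        return ts_data * 2.0

    node_slice = [slice(input_shape[0]), slice(input_shape[1]), None, slice(input_shape[3])]
    partial_results = []
    for node in range(input_shape[2]):
        node_slice[2] = slice(node, node + 1)       # one node at a time
        small_data = full_input[tuple(node_slice)]  # shape (100, 1, 1, 1)
        partial_results.append(fake_balloon(small_data))

    # Growing the stored dataset on dimension 2 corresponds to concatenating on axis 2.
    bold = numpy.concatenate(partial_results, axis=2)
    assert bold.shape == input_shape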
Example #5
    def create_region_ts(self, data_shape, connectivity):
        if connectivity.number_of_regions != data_shape[1]:
            raise LaunchException(
                "Data has %d channels but the connectivity has %d nodes" %
                (data_shape[1], connectivity.number_of_regions))
        ts_idx = TimeSeriesRegionIndex()
        ts_idx.fk_connectivity_gid = connectivity.gid

        region_map_indexes = dao.get_generic_entity(RegionMappingIndex,
                                                    connectivity.gid,
                                                    'fk_connectivity_gid')
        ts_idx.has_surface_mapping = False
        if len(region_map_indexes) > 0:
            ts_idx.fk_region_mapping_gid = region_map_indexes[0].gid
            ts_idx.has_surface_mapping = True

        ts_h5_path = self.path_for(TimeSeriesRegionH5, ts_idx.gid)
        ts_h5 = TimeSeriesRegionH5(ts_h5_path)
        ts_h5.connectivity.store(uuid.UUID(connectivity.gid))

        return TimeSeriesRegion(), ts_idx, ts_h5
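create_region_ts returns an empty traited TimeSeriesRegion, an unfilled index and an H5 file that is still open, so the caller is expected to write the data, fill the index and persist it. The method sketch below shows one plausible caller-side flow on the same class, using only calls that appear elsewhere in this listing; the chunked write with grow_dimension=0 is an assumption (only grow_dimension=2 appears above), and the real TVB importers may order these steps differently.

    # Caller-side sketch; the time-chunked write and grow_dimension=0 are assumptions.
    def store_region_ts(self, data, sample_period, connectivity):
        ts, ts_idx, ts_h5 = self.create_region_ts(data.shape, connectivity)

        # Write the data in time chunks, growing the stored array along the time axis.
        for chunk in numpy.array_split(data, 4, axis=0):
            ts_h5.write_data_slice_on_grow_dimension(chunk, grow_dimension=0)
        ts_h5.nr_dimensions.store(data.ndim)
        ts_h5.close()

        ts.sample_period = sample_period        # fill the traited metadata first
        ts_idx.fill_from_has_traits(ts)         # then copy it onto the index
        return dao.store_entity(ts_idx)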