def build(connectivity, region_mapping, test_user=None, test_project=None):
    time = numpy.linspace(0, 1000, 4000)
    data = numpy.zeros((time.size, 1, 3, 1))
    data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 40)
    data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 200)
    data[:, 0, 2, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 100) + \
                       numpy.sin(2 * numpy.pi * time / 1000.0 * 300)

    ts = TimeSeriesRegion(time=time, data=data, sample_period=1.0 / 4000,
                          connectivity=connectivity, region_mapping=region_mapping)

    op = operation_factory(test_user=test_user, test_project=test_project)

    ts_db = TimeSeriesRegionIndex()
    ts_db.fk_from_operation = op.id
    ts_db.fill_from_has_traits(ts)

    ts_h5_path = h5.path_for_stored_index(ts_db)
    with TimeSeriesRegionH5(ts_h5_path) as f:
        f.store(ts)
        f.sample_rate.store(ts.sample_rate)
        f.nr_dimensions.store(ts.data.ndim)

    ts_db = dao.store_entity(ts_db)
    return ts_db
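# ---------------------------------------------------------------------------
# Illustrative sketch only (plain numpy, no TVB imports): it shows the 4D
# (time, state_variable, region, mode) layout that build() above fills with
# synthetic sines - 4000 samples over a 1000 ms window for 3 regions, 1 state
# variable and 1 mode. All names below are local to this sketch.
import numpy

time = numpy.linspace(0, 1000, 4000)                 # ms
data = numpy.zeros((time.size, 1, 3, 1))             # (time, svar, region, mode)
for region, cycles in enumerate((40, 200, 100)):     # cycles per 1000 ms window
    data[:, 0, region, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * cycles)

assert data.shape == (4000, 1, 3, 1)
assert data[:, 0, :, 0].shape == (4000, 3)           # time x region view of the same array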
def __import_time_series_csv_datatype(self, hrf_folder, connectivity_gid, patient, user_tag):
    path = os.path.join(hrf_folder, self.TIME_SERIES_CSV_FILE)
    with open(path) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=CSVDelimiterOptionsEnum.COMMA.value)
        ts = list(csv_reader)

    ts_data = np.array(ts, dtype=np.float64).reshape((len(ts), 1, len(ts[0]), 1))
    ts_time = np.random.rand(ts_data.shape[0], )

    project = dao.get_project_by_id(self.current_project_id)

    ts_gid = uuid.uuid4()
    h5_path = "TimeSeries_{}.h5".format(ts_gid.hex)
    operation_folder = self.storage_interface.get_project_folder(project.name, str(self.operation_id))
    h5_path = os.path.join(operation_folder, h5_path)

    conn = h5.load_from_gid(connectivity_gid)
    ts = TimeSeriesRegion()
    ts.data = ts_data
    ts.time = ts_time
    ts.gid = ts_gid
    ts.connectivity = conn
    generic_attributes = GenericAttributes()
    generic_attributes.user_tag_1 = user_tag
    generic_attributes.state = DEFAULTDATASTATE_RAW_DATA

    with TimeSeriesRegionH5(h5_path) as ts_h5:
        ts_h5.store(ts)
        ts_h5.nr_dimensions.store(4)
        ts_h5.subject.store(patient)
        ts_h5.store_generic_attributes(generic_attributes)

    ts_index = TimeSeriesIndex()
    ts_index.gid = ts_gid.hex
    ts_index.fk_from_operation = self.operation_id
    ts_index.time_series_type = "TimeSeriesRegion"
    ts_index.data_length_1d = ts_data.shape[0]
    ts_index.data_length_2d = ts_data.shape[1]
    ts_index.data_length_3d = ts_data.shape[2]
    ts_index.data_length_4d = ts_data.shape[3]
    ts_index.data_ndim = len(ts_data.shape)
    ts_index.sample_period_unit = 'ms'
    ts_index.sample_period = TimeSeries.sample_period.default
    ts_index.sample_rate = 1024.0
    ts_index.subject = patient
    ts_index.state = DEFAULTDATASTATE_RAW_DATA
    ts_index.labels_ordering = json.dumps(list(TimeSeries.labels_ordering.default))
    ts_index.labels_dimensions = json.dumps(TimeSeries.labels_dimensions.default)
    ts_index.visible = False  # we don't want to show these TimeSeries because they are dummy
    dao.store_entity(ts_index)

    return ts_gid
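# ---------------------------------------------------------------------------
# Standalone sketch (csv + numpy only) of the reshape done in the importer
# above: every CSV row is one time point, every column one region, and the
# matrix is packed into TVB's 4D (time, state_variable, region, mode) layout.
# The inline CSV text is made-up example data.
import csv
import io
import numpy as np

csv_text = "0.1,0.2,0.3\n0.4,0.5,0.6\n"              # 2 time points x 3 regions
rows = list(csv.reader(io.StringIO(csv_text), delimiter=','))
ts_data = np.array(rows, dtype=np.float64).reshape((len(rows), 1, len(rows[0]), 1))

assert ts_data.shape == (2, 1, 3, 1)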
def create_region_ts(self, data_shape, connectivity):
    if connectivity.number_of_regions != data_shape[1]:
        raise LaunchException("Data has %d channels but the connectivity has %d nodes"
                              % (data_shape[1], connectivity.number_of_regions))

    ts_idx = TimeSeriesRegionIndex()
    ts_idx.fk_connectivity_gid = connectivity.gid
    ts_idx.has_surface_mapping = True

    ts_h5_path = h5.path_for(self.storage_path, TimeSeriesRegionH5, ts_idx.gid)
    ts_h5 = TimeSeriesRegionH5(ts_h5_path)
    ts_h5.connectivity.store(uuid.UUID(connectivity.gid))

    return TimeSeriesRegion(), ts_idx, ts_h5
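# ---------------------------------------------------------------------------
# Standalone sketch of the consistency check performed above, with plain
# arguments instead of the connectivity object: the channel dimension of the
# incoming data (data_shape[1]) must equal the number of connectivity regions.
def check_channels_match_regions(data_shape, number_of_regions):
    if number_of_regions != data_shape[1]:
        raise ValueError("Data has %d channels but the connectivity has %d nodes"
                         % (data_shape[1], number_of_regions))

check_channels_match_regions((4000, 76, 1), 76)       # ok: 76 channels, 76 regions
try:
    check_channels_match_regions((4000, 64, 1), 76)   # mismatch raises
except ValueError as exc:
    print(exc)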
def launch(self, view_model):
    # type: (BalloonModelAdapterModel) -> [TimeSeriesRegionIndex]
    """
    Launch algorithm and build results.

    :param view_model: adapter model holding the input time-series used as
                       neural activation in the Balloon Model
    :returns: the simulated BOLD signal
    :rtype: `TimeSeriesRegionIndex`
    """
    input_time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    time_line = input_time_series_h5.read_time_page(0, self.input_shape[0])

    bold_signal_index = TimeSeriesRegionIndex()
    bold_signal_h5_path = h5.path_for(self.storage_path, TimeSeriesRegionH5, bold_signal_index.gid)
    bold_signal_h5 = TimeSeriesRegionH5(bold_signal_h5_path)
    bold_signal_h5.gid.store(uuid.UUID(bold_signal_index.gid))
    self._fill_result_h5(bold_signal_h5, input_time_series_h5)

    # ---------- Iterate over slices and compose final result ------------##
    node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]
    small_ts = TimeSeries()
    small_ts.sample_period = self.input_time_series_index.sample_period
    small_ts.sample_period_unit = self.input_time_series_index.sample_period_unit
    small_ts.time = time_line

    for node in range(self.input_shape[2]):
        node_slice[2] = slice(node, node + 1)
        small_ts.data = input_time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_bold = self.algorithm.evaluate()
        bold_signal_h5.write_data_slice_on_grow_dimension(partial_bold.data, grow_dimension=2)

    bold_signal_h5.write_time_slice(time_line)
    bold_signal_shape = bold_signal_h5.data.shape
    bold_signal_h5.nr_dimensions.store(len(bold_signal_shape))
    bold_signal_h5.close()
    input_time_series_h5.close()

    self._fill_result_index(bold_signal_index, bold_signal_shape)
    return bold_signal_index
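# ---------------------------------------------------------------------------
# Numpy-only sketch of the slicing loop in launch() above: a 4D
# (time, state_variable, node, mode) array is consumed one node at a time by
# swapping slice(node, node + 1) into position 2 of the slice list, so every
# chunk keeps all four dimensions. Sizes here are arbitrary toy values.
import numpy

shape = (16, 1, 5, 1)                                 # (time, svar, node, mode)
data = numpy.random.rand(*shape)

node_slice = [slice(shape[0]), slice(shape[1]), None, slice(shape[3])]
for node in range(shape[2]):
    node_slice[2] = slice(node, node + 1)
    chunk = data[tuple(node_slice)]
    assert chunk.shape == (16, 1, 1, 1)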
def build(connectivity, region_mapping, ts=None, test_user=None, test_project=None, op=None):
    if ts is None:
        ts = time_series_region_factory(connectivity, region_mapping)

    if not op:
        op = operation_factory(test_user=test_user, test_project=test_project)

    ts_db = TimeSeriesRegionIndex()
    ts_db.fk_from_operation = op.id
    ts_db.fill_from_has_traits(ts)

    ts_h5_path = h5.path_for_stored_index(ts_db)
    with TimeSeriesRegionH5(ts_h5_path) as f:
        f.store(ts)
        f.sample_rate.store(ts.sample_rate)
        f.nr_dimensions.store(ts.data.ndim)

    ts_db = dao.store_entity(ts_db)
    return ts_db
def create_region_ts(self, data_shape, connectivity):
    if connectivity.number_of_regions != data_shape[1]:
        raise LaunchException("Data has %d channels but the connectivity has %d nodes"
                              % (data_shape[1], connectivity.number_of_regions))

    ts_idx = TimeSeriesRegionIndex()
    ts_idx.fk_connectivity_gid = connectivity.gid

    region_map_indexes = dao.get_generic_entity(RegionMappingIndex, connectivity.gid, 'fk_connectivity_gid')
    ts_idx.has_surface_mapping = False
    if len(region_map_indexes) > 0:
        ts_idx.fk_region_mapping_gid = region_map_indexes[0].gid
        ts_idx.has_surface_mapping = True

    ts_h5_path = self.path_for(TimeSeriesRegionH5, ts_idx.gid)
    ts_h5 = TimeSeriesRegionH5(ts_h5_path)
    ts_h5.connectivity.store(uuid.UUID(connectivity.gid))

    return TimeSeriesRegion(), ts_idx, ts_h5
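# ---------------------------------------------------------------------------
# Standalone sketch of the optional region-mapping lookup above, with the
# dao.get_generic_entity() result replaced by a plain list so it runs on its
# own: a mapping gid is only attached (and has_surface_mapping set to True)
# when at least one RegionMappingIndex was found for the connectivity.
def pick_region_mapping(region_map_indexes):
    if len(region_map_indexes) > 0:
        return region_map_indexes[0], True            # (fk_region_mapping_gid, has_surface_mapping)
    return None, False

assert pick_region_mapping([]) == (None, False)
assert pick_region_mapping(["c9b74e4c-gid"]) == ("c9b74e4c-gid", True)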