def test_store_load_region_mapping(session, connectivity_factory, surface_factory,
                                   region_mapping_factory, sensors_factory):
    connectivity = connectivity_factory(2)
    conn_idx = ConnectivityIndex()
    conn_idx.fill_from_has_traits(connectivity)
    session.add(conn_idx)

    surface = surface_factory(5)
    surf_idx = SurfaceIndex()
    surf_idx.fill_from_has_traits(surface)
    session.add(surf_idx)

    region_mapping = region_mapping_factory(surface, connectivity)
    rm_idx = RegionMappingIndex()
    rm_idx.fill_from_has_traits(region_mapping)
    rm_idx.connectivity = conn_idx
    rm_idx.surface = surf_idx
    session.add(rm_idx)

    sensors = sensors_factory("SEEG", 3)
    sensors_seeg_idx = SensorsIndex()
    sensors_seeg_idx.fill_from_has_traits(sensors)
    session.add(sensors_seeg_idx)

    sensors_eeg = sensors_factory("EEG", 3)
    sensors_eeg_idx = SensorsIndex()
    sensors_eeg_idx.fill_from_has_traits(sensors_eeg)
    session.add(sensors_eeg_idx)

    time_series = TimeSeries(data=numpy.arange(5))
    fcd = Fcd(array_data=numpy.arange(5), source=time_series)

    ts_index = TimeSeriesIndex()
    ts_index.fill_from_has_traits(time_series)
    session.add(ts_index)

    fcd_index = FcdIndex()
    fcd_index.fill_from_has_traits(fcd)
    fcd_index.source = ts_index
    session.add(fcd_index)

    session.commit()

    res = session.query(ConnectivityIndex)
    assert res.count() == 1
    assert res[0].number_of_regions == 2
    assert res[0].number_of_connections == 4
    assert res[0].undirected is True
    assert res[0].weights_min == 0

    res = session.query(SurfaceIndex)
    assert res.count() == 1

    res = session.query(RegionMappingIndex)
    assert res.count() == 1
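# A hedged continuation of the test above: it stores two SensorsIndex rows,
# one TimeSeriesIndex and one FcdIndex but only asserts on the first three
# index types. A sketch of the remaining round-trip checks, assuming the
# same committed session, could look like this (the helper name is
# illustrative):
def _assert_remaining_indexes(session):
    res = session.query(SensorsIndex)
    assert res.count() == 2  # one SEEG and one EEG index were added

    res = session.query(TimeSeriesIndex)
    assert res.count() == 1

    res = session.query(FcdIndex)
    assert res.count() == 1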
def __import_time_series_csv_datatype(self, hrf_folder, connectivity_gid, patient, user_tag):
    # Read the CSV: rows are time points, columns are regions.
    path = os.path.join(hrf_folder, self.TIME_SERIES_CSV_FILE)
    with open(path) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=CSVDelimiterOptionsEnum.COMMA.value)
        ts = list(csv_reader)

    # Reshape into the canonical 4D TimeSeries layout: (time, state_var, space, mode).
    ts_data = np.array(ts, dtype=np.float64).reshape((len(ts), 1, len(ts[0]), 1))
    # Random placeholder time vector; the CSV carries no timestamps.
    ts_time = np.random.rand(ts_data.shape[0], )

    project = dao.get_project_by_id(self.current_project_id)

    ts_gid = uuid.uuid4()
    h5_path = "TimeSeries_{}.h5".format(ts_gid.hex)
    operation_folder = self.storage_interface.get_project_folder(project.name, str(self.operation_id))
    h5_path = os.path.join(operation_folder, h5_path)

    conn = h5.load_from_gid(connectivity_gid)
    ts = TimeSeriesRegion()
    ts.data = ts_data
    ts.time = ts_time
    ts.gid = ts_gid
    ts.connectivity = conn

    generic_attributes = GenericAttributes()
    generic_attributes.user_tag_1 = user_tag
    generic_attributes.state = DEFAULTDATASTATE_RAW_DATA

    # Persist the datatype to H5 storage.
    with TimeSeriesRegionH5(h5_path) as ts_h5:
        ts_h5.store(ts)
        ts_h5.nr_dimensions.store(4)
        ts_h5.subject.store(patient)
        ts_h5.store_generic_attributes(generic_attributes)

    # Mirror the datatype into the DB index.
    ts_index = TimeSeriesIndex()
    ts_index.gid = ts_gid.hex
    ts_index.fk_from_operation = self.operation_id
    ts_index.time_series_type = "TimeSeriesRegion"
    ts_index.data_length_1d = ts_data.shape[0]
    ts_index.data_length_2d = ts_data.shape[1]
    ts_index.data_length_3d = ts_data.shape[2]
    ts_index.data_length_4d = ts_data.shape[3]
    ts_index.data_ndim = len(ts_data.shape)
    ts_index.sample_period_unit = 'ms'
    ts_index.sample_period = TimeSeries.sample_period.default
    ts_index.sample_rate = 1024.0
    ts_index.subject = patient
    ts_index.state = DEFAULTDATASTATE_RAW_DATA
    ts_index.labels_ordering = json.dumps(list(TimeSeries.labels_ordering.default))
    ts_index.labels_dimensions = json.dumps(TimeSeries.labels_dimensions.default)
    ts_index.visible = False  # we don't want to show these TimeSeries because they are dummy
    dao.store_entity(ts_index)

    return ts_gid
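# A minimal sketch of producing a CSV the importer above can read: rows are
# time points, columns are regions, comma-separated floats. The helper name
# and the sample/region counts below are illustrative assumptions, not part
# of the importer's API:
import numpy as np

def write_dummy_hrf_csv(path, n_samples=100, n_regions=76):
    # Each row becomes one time point of the resulting TimeSeriesRegion.
    data = np.random.rand(n_samples, n_regions)
    np.savetxt(path, data, delimiter=",")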
def make_ts_from_op(session, operation_factory):
    # Build a time series that is both file-stored (H5) and DB-indexed.
    two_node_simple_sin_ts = make_ts()
    op = operation_factory()

    ts_db = TimeSeriesIndex()
    ts_db.fk_from_operation = op.id
    ts_db.fill_from_has_traits(two_node_simple_sin_ts)

    ts_h5_path = h5.path_for_stored_index(ts_db)
    with TimeSeriesH5(ts_h5_path) as f:
        f.store(two_node_simple_sin_ts)
        f.sample_rate.store(two_node_simple_sin_ts.sample_rate)

    session.add(ts_db)
    session.commit()
    return ts_db
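# A hedged usage sketch for the helper above, assuming pytest fixtures named
# session and operation_factory as in the surrounding tests, and assuming
# TVB's h5.load_from_index for reading an indexed datatype back:
def test_ts_roundtrip(session, operation_factory):
    ts_db = make_ts_from_op(session, operation_factory)
    assert ts_db.gid is not None

    loaded = h5.load_from_index(ts_db)
    assert loaded.sample_rate == ts_db.sample_rate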
def build(data=None, op=None):
    ts = time_series_factory(data)
    if op is None:
        op = operation_factory()

    ts_db = TimeSeriesIndex()
    ts_db.fk_from_operation = op.id
    ts_db.fill_from_has_traits(ts)

    ts_h5_path = h5.path_for_stored_index(ts_db)
    with TimeSeriesH5(ts_h5_path) as f:
        f.store(ts)
        f.sample_rate.store(ts.sample_rate)
        f.nr_dimensions.store(ts.data.ndim)

    ts_db = dao.store_entity(ts_db)
    return ts_db
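# A hedged example of how such a factory closure is typically consumed; in
# the TVB test suite closures like this are returned by a fixture, and the
# fixture name time_series_index_factory below is an assumption:
def test_factory_persists_index(time_series_index_factory):
    ts_index = time_series_index_factory()
    assert ts_index.gid is not None
    assert ts_index.fk_from_operation is not None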
def launch(self, view_model):
    # type: (FooDataImporterModel) -> TimeSeriesIndex
    array_data = numpy.loadtxt(view_model.array_data)

    ts = TimeSeries(data=array_data)
    ts.configure()

    ts_index = TimeSeriesIndex()
    ts_index.fill_from_has_traits(ts)

    ts_h5_path = h5.path_for(self.storage_path, TimeSeriesH5, ts_index.gid)
    with TimeSeriesH5(ts_h5_path) as ts_h5:
        ts_h5.store(ts, scalars_only=True)
        ts_h5.store_generic_attributes(GenericAttributes())
        ts_h5.write_data_slice(array_data)

    return ts_index
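# The launch signature above references FooDataImporterModel. A minimal
# sketch of such a view model, assuming TVB's neotraits view-model API (the
# import path and the label text are assumptions):
from tvb.core.neotraits.view_model import ViewModel, Str

class FooDataImporterModel(ViewModel):
    # Path to a text file that numpy.loadtxt can parse.
    array_data = Str(label="Path to the array data file")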
def build():
    # One second of signal, in ms, at 4000 samples.
    time = numpy.linspace(0, 1000, 4000)
    data = numpy.zeros((time.size, 1, 3, 1))
    data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 40)   # 40 Hz
    data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 200)  # 200 Hz
    data[:, 0, 2, 0] = (numpy.sin(2 * numpy.pi * time / 1000.0 * 100)
                        + numpy.sin(2 * numpy.pi * time / 1000.0 * 300))  # 100 Hz + 300 Hz mixture
    ts = TimeSeries(time=time, data=data, sample_period=1.0 / 4000)

    op = operation_factory()
    ts_db = TimeSeriesIndex()
    ts_db.fk_from_operation = op.id
    ts_db.fill_from_has_traits(ts)

    ts_h5_path = h5.path_for_stored_index(ts_db)
    with TimeSeriesH5(ts_h5_path) as f:
        f.store(ts)

    session.add(ts_db)
    session.commit()
    return ts_db
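# A hedged sanity check for the factory above: with time in ms over one
# second, the three channels carry 40 Hz, 200 Hz and 100+300 Hz components.
# A quick FFT inspection (numpy only; the helper name is illustrative) can
# confirm the dominant frequency of a channel:
import numpy

def dominant_frequency_hz(signal, sample_rate_hz=4000.0):
    # Magnitude spectrum of the real-valued signal and matching bin frequencies.
    spectrum = numpy.abs(numpy.fft.rfft(signal))
    freqs = numpy.fft.rfftfreq(signal.size, d=1.0 / sample_rate_hz)
    return freqs[numpy.argmax(spectrum)]

# For the data built above, dominant_frequency_hz(data[:, 0, 0, 0]) should
# be close to 40.0.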