Example #1
    def test_prepare_indexes_for_simulation_results(self, time_series_factory,
                                                    operation_factory,
                                                    simulator_factory):
        ts_1 = time_series_factory()
        ts_2 = time_series_factory()
        ts_3 = time_series_factory()

        operation = operation_factory(test_user=self.test_user,
                                      test_project=self.test_project)
        sim_folder, sim_gid = simulator_factory(op=operation)

        paths = [
            os.path.join(sim_folder,
                         "Time_Series_{}.h5".format(ts.gid.hex))
            for ts in (ts_1, ts_2, ts_3)
        ]

        # store each series, its sample rate and generic attributes in its own file
        for ts, path in zip((ts_1, ts_2, ts_3), paths):
            with TimeSeriesH5(path) as f:
                f.store(ts)
                f.sample_rate.store(ts.sample_rate)
                f.store_generic_attributes(GenericAttributes())

        burst_configuration = BurstConfiguration(self.test_project.id)
        burst_configuration.fk_simulation = operation.id
        burst_configuration.simulator_gid = operation.view_model_gid
        burst_configuration = dao.store_entity(burst_configuration)

        file_names = paths
        ts_datatypes = [ts_1, ts_2, ts_3]
        indexes = self.burst_service.prepare_indexes_for_simulation_results(
            operation, file_names, burst_configuration)

        assert len(indexes) == len(ts_datatypes)
        for index, ts in zip(indexes, ts_datatypes):
            assert index.gid == ts.gid.hex, "Gid was not set correctly on index."
            assert index.sample_period == ts.sample_period
            assert index.sample_period_unit == ts.sample_period_unit
            assert index.sample_rate == ts.sample_rate
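
Examples #1 and #5 call a time_series_factory pytest fixture that the snippets do not show (and in Example #3 a fixture of the same name returns an index instead of a TimeSeries object). A minimal sketch of the object-returning variant, with the data shape and sample period chosen arbitrarily for illustration:

import numpy
import pytest

from tvb.datatypes.time_series import TimeSeries


@pytest.fixture()
def time_series_factory():
    def build(data=None):
        # 4-d data: (time, state variables, space, modes); values are arbitrary
        time = numpy.linspace(0, 1000, 4000)
        if data is None:
            data = numpy.random.random((time.size, 1, 2, 1))
        ts = TimeSeries(time=time, data=data, sample_period=time[1] - time[0])
        ts.configure()
        return ts

    return build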
Example #2
def test_streaming_reads(tmph5factory):
    t = make_harmonic_ts()
    path = tmph5factory()

    with TimeSeriesH5(path) as f:
        time = numpy.linspace(0, 33, ntime)
        data = harmonic_chunk(time)
        f.store(t, scalars_only=True)
        f.write_data_slice(data)

    with TimeSeriesH5(path) as f:
        data = f.read_data_slice((slice(0, 33), slice(None), 0))
        expected = numpy.zeros((33, nsv))
        expected[:, 1] = 1.0  # the cos(0) part
        numpy.testing.assert_array_equal(data, expected)
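
make_harmonic_ts, harmonic_chunk, ntime and nsv are module-level helpers that this snippet (and Example #8) assumes but does not show. One way they could be defined so that the assertion above holds, i.e. node 0 carries sin(0*t) = 0 in state variable 0 and cos(0*t) = 1 in state variable 1:

import numpy

from tvb.datatypes.time_series import TimeSeries

ntime, nsv, nnode = 33, 2, 4  # assumed sizes


def make_harmonic_ts():
    # only scalar metadata; the data itself is streamed in chunks
    return TimeSeries(sample_period=1.0, sample_period_unit="ms")


def harmonic_chunk(time):
    # shape (time, state variable, node): state variable 0 holds a sine,
    # state variable 1 a cosine, with the node index as angular frequency
    data = numpy.zeros((time.size, nsv, nnode))
    for node in range(nnode):
        data[:, 0, node] = numpy.sin(node * time)
        data[:, 1, node] = numpy.cos(node * time)
    return data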
Example #3
    def build():
        ts_index = time_series_factory()

        ts_h5 = h5_file_for_index(ts_index)
        ts = TimeSeries()
        ts_h5.load_into(ts)
        ts_h5.close()

        data_shape = ts.data.shape

        result_shape = (data_shape[2], data_shape[2], data_shape[1],
                        data_shape[3])
        result = numpy.zeros(result_shape)

        for mode in range(data_shape[3]):
            for var in range(data_shape[1]):
                # ts_h5 is already closed, so read from the loaded ts object
                data = ts.data[:, var, :, mode]
                # demean each node's series over time before the covariance
                data = data - data.mean(axis=0)[numpy.newaxis, :]
                result[:, :, var, mode] = numpy.cov(data.T)

        covariance = Covariance(source=ts, array_data=result)

        op = operation_factory()

        covariance_db = CovarianceIndex()
        covariance_db.fk_from_operation = op.id
        covariance_db.fill_from_has_traits(covariance)

        covariance_h5_path = h5.path_for_stored_index(covariance_db)
        # the result is a Covariance, so it belongs in a CovarianceH5 file,
        # not a TimeSeriesH5
        with CovarianceH5(covariance_h5_path) as f:
            f.store(covariance)

        session.add(covariance_db)
        session.commit()
        return covariance_db
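
The loop above builds one node-by-node covariance matrix per (state variable, mode) pair: each time-by-node slice is demeaned along the time axis and handed to numpy.cov, which expects variables in rows, hence the transpose.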
Example #4
def make_ts_from_op(session, operation_factory):
    # make a file-stored and indexed time series
    two_node_simple_sin_ts = make_ts()
    op = operation_factory()

    ts_db = TimeSeriesIndex()
    ts_db.fk_from_operation = op.id
    ts_db.fill_from_has_traits(two_node_simple_sin_ts)

    ts_h5_path = h5.path_for_stored_index(ts_db)
    with TimeSeriesH5(ts_h5_path) as f:
        f.store(two_node_simple_sin_ts)
        f.sample_rate.store(two_node_simple_sin_ts.sample_rate)

    session.add(ts_db)
    session.commit()
    return ts_db
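
make_ts is defined elsewhere in the test module; judging from the variable name two_node_simple_sin_ts, a plausible sketch (sizes and frequencies are invented for illustration):

import numpy

from tvb.datatypes.time_series import TimeSeries


def make_ts():
    # two nodes, each carrying a simple sine wave
    time = numpy.linspace(0, 1000, 4000)
    data = numpy.zeros((time.size, 1, 2, 1))
    data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 10)
    data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 20)
    return TimeSeries(time=time, data=data, sample_period=time[1] - time[0])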
Example #5
    def build(data=None, op=None):
        ts = time_series_factory(data)

        if op is None:
            op = operation_factory()

        ts_db = TimeSeriesIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesH5(ts_h5_path) as f:
            f.store(ts)
            f.sample_rate.store(ts.sample_rate)
            f.nr_dimensions.store(ts.data.ndim)

        ts_db = dao.store_entity(ts_db)
        return ts_db
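
Reading such a stored series back takes the reverse path. A sketch, assuming h5_file_for_index (used in Example #3) is the helper from tvb.core.neocom.h5:

from tvb.core.neocom.h5 import h5_file_for_index
from tvb.datatypes.time_series import TimeSeries


def load_ts(ts_db):
    # resolve the H5 file backing the index and load it into a fresh object
    ts = TimeSeries()
    with h5_file_for_index(ts_db) as f:
        f.load_into(ts)
    return ts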
Example #6
    def launch(self, view_model):
        # type: (FooDataImporterModel) -> TimeSeriesIndex

        array_data = numpy.loadtxt(view_model.array_data)

        ts = TimeSeries(data=array_data)
        ts.configure()

        ts_index = TimeSeriesIndex()
        ts_index.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for(self.storage_path, TimeSeriesH5, ts_index.gid)

        with TimeSeriesH5(ts_h5_path) as ts_h5:
            ts_h5.store(ts, scalars_only=True)
            ts_h5.store_generic_attributes(GenericAttributes())
            ts_h5.write_data_slice(array_data)
        return ts_index
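
Note the same two-phase pattern as in Examples #2 and #8: store(ts, scalars_only=True) writes the scalar metadata first, and write_data_slice then appends the bulk array, so the file stays well-formed at each step.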
Example #7
    def build():
        time = numpy.linspace(0, 1000, 4000)
        data = numpy.zeros((time.size, 1, 3, 1))
        data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 40)
        data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 200)
        data[:, 0, 2, 0] = (numpy.sin(2 * numpy.pi * time / 1000.0 * 100)
                            + numpy.sin(2 * numpy.pi * time / 1000.0 * 300))

        ts = TimeSeries(time=time, data=data, sample_period=1.0 / 4000)
        op = operation_factory()

        ts_db = TimeSeriesIndex()
        ts_db.fk_from_operation = op.id
        ts_db.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for_stored_index(ts_db)
        with TimeSeriesH5(ts_h5_path) as f:
            f.store(ts)

        session.add(ts_db)
        session.commit()
        return ts_db
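
With time in milliseconds, the three channels carry 40 Hz, 200 Hz, and 100 Hz + 300 Hz components respectively, which makes this series a convenient fixture for spectral-analysis tests.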
Example #8
def test_streaming_writes(tmph5factory):
    nchunks = 4
    t = make_harmonic_ts()
    # t.configure() would fail for a new file, since it wants to read the data shape!
    path = tmph5factory()

    with TimeSeriesH5(path) as f:
        # When doing a partial write we should still populate the fields we can,
        # so that the file is in the right format even after a single chunk write.
        # This is similar to super.store, but only for specific scalars.
        f.store(t, scalars_only=True)

        # todo: refuse write_data_slice unless the required metadata has been
        # written, otherwise we create files that do not conform to the
        # TimeSeriesH5 format!

        for chunk_id in range(nchunks):
            time = numpy.linspace(chunk_id,
                                  chunk_id + 33,
                                  ntime,
                                  endpoint=False)
            data = harmonic_chunk(time)
            f.write_data_slice(data)
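
A quick sanity check one could append at the end of the test, assuming write_data_slice appends along the first (time) axis as the loop above implies:

    with TimeSeriesH5(path) as f:
        assert f.read_data_shape()[0] == nchunks * ntime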
Example #9
def fire_simulation_example(tvb_client_instance):
    logger.info("Requesting projects for logged user")
    projects_of_user = tvb_client_instance.get_project_list()
    assert len(projects_of_user) > 0
    logger.info("TVB has {} projects for this user".format(
        len(projects_of_user)))

    project_gid = projects_of_user[0].gid
    logger.info("Requesting datatypes from project {}...".format(project_gid))
    data_in_project = tvb_client_instance.get_data_in_project(project_gid)
    logger.info("We have {} datatypes".format(len(data_in_project)))

    logger.info("Requesting operations from project {}...".format(project_gid))
    ops_in_project, _ = tvb_client_instance.get_operations_in_project(
        project_gid, 1)
    logger.info("Displayname of the first operation is: {}".format(
        ops_in_project[0].displayname))

    connectivity_gid = None
    datatypes_type = []
    for datatype in data_in_project:
        datatypes_type.append(datatype.type)
        if datatype.type == ConnectivityIndex().display_type:
            connectivity_gid = datatype.gid
    logger.info("The datatypes in project are: {}".format(datatypes_type))

    if connectivity_gid:
        logger.info("Preparing the simulator...")
        simulator = SimulatorAdapterModel()
        simulator.connectivity = connectivity_gid
        simulator.simulation_length = 100

        logger.info("Starting the simulation...")
        operation_gid = tvb_client_instance.fire_simulation(
            project_gid, simulator)

        logger.info("Monitoring the simulation operation...")
        monitor_operation(tvb_client_instance, operation_gid)

        logger.info("Requesting the results of the simulation...")
        simulation_results = tvb_client_instance.get_operation_results(
            operation_gid)
        datatype_names = []
        for datatype in simulation_results:
            datatype_names.append(datatype.name)
        logger.info("The resulted datatype are: {}".format(datatype_names))

        time_series_gid = simulation_results[1].gid
        logger.info("Download the time series file...")
        time_series_path = tvb_client_instance.retrieve_datatype(
            time_series_gid, tvb_client_instance.temp_folder)
        logger.info(
            "The time series file location is: {}".format(time_series_path))

        logger.info("Requesting algorithms to run on time series...")
        algos = tvb_client_instance.get_operations_for_datatype(
            time_series_gid)
        algo_names = [algo.displayname for algo in algos]
        logger.info("Possible algorithms are {}".format(algo_names))

        logger.info("Launch Fourier Analyzer...")
        fourier_model = FFTAdapterModel()
        fourier_model.time_series = time_series_gid
        fourier_model.window_function = 'hamming'

        operation_gid = tvb_client_instance.launch_operation(
            project_gid, FourierAdapter, fourier_model)
        logger.info(
            "The Fourier Analyzer operation was launched with gid {}".format(
                operation_gid))

        logger.info("Download the connectivity file...")
        connectivity_path = tvb_client_instance.retrieve_datatype(
            connectivity_gid, tvb_client_instance.temp_folder)
        logger.info(
            "The connectivity file location is: {}".format(connectivity_path))

        logger.info("Loading an entire Connectivity datatype in memory...")
        connectivity = tvb_client_instance.load_datatype_from_file(
            connectivity_path)
        logger.info("Info on current Connectivity: {}".format(
            connectivity.summary_info()))

        logger.info(
            "Loading a chunk from the time series H5 file, as this can be very large..."
        )
        with TimeSeriesH5(time_series_path) as time_series_h5:
            data_shape = time_series_h5.read_data_shape()
            chunk = time_series_h5.read_data_slice(
                (slice(20),
                 slice(data_shape[1]),
                 slice(data_shape[2]),
                 slice(data_shape[3])))

        assert chunk.shape[0] == 20
        assert chunk.shape[1] == data_shape[1]
        assert chunk.shape[2] == data_shape[2]
        assert chunk.shape[3] == data_shape[3]

        return project_gid, time_series_gid
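
monitor_operation is a small polling helper defined alongside this example. A sketch, assuming the REST client exposes get_operation_status, reusing the module logger, and taking the status constants from tvb.core.entities.model.model_operation:

import time

from tvb.core.entities.model.model_operation import STATUS_CANCELED, STATUS_ERROR, STATUS_FINISHED


def monitor_operation(tvb_client_instance, operation_gid):
    # poll until the operation leaves its running state
    while True:
        status = tvb_client_instance.get_operation_status(operation_gid)
        if status in (STATUS_FINISHED, STATUS_CANCELED, STATUS_ERROR):
            break
        logger.info("Operation {} is still running...".format(operation_gid))
        time.sleep(5)
    logger.info("Operation {} finished with status {}".format(operation_gid, status))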