def test_estimate_execution_time(self, connectivity_index_factory):
        """
        Test that get_execution_time_approximation considers the correct params
        """
        model = SimulatorAdapterModel()
        model.connectivity = connectivity_index_factory(self.CONNECTIVITY_NODES).gid

        self.simulator_adapter.configure(model)
        estimation1 = self.simulator_adapter.get_execution_time_approximation(model)

        # Import a cortical surface, its region mapping and a local connectivity
        cortex_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.test_user, self.test_project, cortex_file, CORTICAL)
        rm_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt')
        region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, rm_file, surface.gid,
                                                           model.connectivity.hex)
        local_conn = TestFactory.create_local_connectivity(self.test_user, self.test_project, surface.gid)
        cortex_model = CortexViewModel()
        cortex_model.region_mapping_data = region_mapping.gid
        cortex_model.fk_surface_gid = surface.gid
        cortex_model.local_connectivity = local_conn.gid
        model.surface = cortex_model

        # Estimation when the surface input parameter is set
        self.simulator_adapter.configure(model)
        estimation2 = self.simulator_adapter.get_execution_time_approximation(model)

        assert estimation1 == estimation2 // 500
        model.surface = None

        # Modify integration step and simulation length:
        initial_simulation_length = model.simulation_length
        initial_integration_step = model.integrator.dt

        for factor in (2, 4, 10):
            model.simulation_length = initial_simulation_length * factor
            model.integrator.dt = initial_integration_step / factor
            self.simulator_adapter.configure(model)
            estimation3 = self.simulator_adapter.get_execution_time_approximation(model)

            assert estimation1 == estimation3 // factor // factor

        # Check that no division by zero happens
        model.integrator.dt = 0
        estimation4 = self.simulator_adapter.get_execution_time_approximation(model)
        assert estimation4 > 0

        # Even with simulation length zero, a positive estimation should still be returned
        model.simulation_length = 0
        estimation5 = self.simulator_adapter.get_execution_time_approximation(model)
        assert estimation5 > 0

    def test_happy_flow_launch(self, connectivity_index_factory, operation_factory):
        """
        Test that launching a simulation from UI works.
        """
        model = SimulatorAdapterModel()
        model.connectivity = connectivity_index_factory(self.CONNECTIVITY_NODES).gid
        model.simulation_length = 32

        TestFactory.launch_synchronously(self.test_user.id, self.test_project, self.simulator_adapter, model)
        sim_result = dao.get_generic_entity(TimeSeriesRegionIndex, 'TimeSeriesRegion', 'time_series_type')[0]
        assert (sim_result.data_length_1d, sim_result.data_length_2d, sim_result.data_length_3d,
                sim_result.data_length_4d) == (32, 1, self.CONNECTIVITY_NODES, 1)
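
The assertions in test_estimate_execution_time treat the execution-time estimate as linear in the number of integration steps (simulation_length / dt), with a fixed multiplier when a surface is attached. A minimal standalone sketch of such an estimator is shown below; the constant, the 500x surface factor and the function name are assumptions mirroring the assertions, not TVB's actual implementation.

# Hypothetical estimator illustrating the scaling behaviour the test asserts.
MAGIC_TIME_PER_NODE_STEP = 6.5e-06  # assumed seconds per (node * integration step)

def estimate_execution_time(nodes, simulation_length, dt, has_surface=False):
    # Guard against dt == 0, as the test requires a positive estimate even then.
    steps = simulation_length / dt if dt else simulation_length
    estimation = MAGIC_TIME_PER_NODE_STEP * nodes * steps
    if has_surface:
        estimation *= 500  # surface simulations assumed ~500x more expensive
    # Never return 0, even for simulation_length == 0.
    return max(int(estimation), 1)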
Example #3
def run_simulation():
    log = get_logger(__name__)

    # This project ID needs to exist in the DB; it can be taken from the web interface:
    project = dao.get_project_by_id(1)

    # Find a structural Connectivity and load it in memory
    connectivity_index = dao.get_generic_entity(
        ConnectivityIndex, DataTypeMetaData.DEFAULT_SUBJECT, "subject")[0]
    connectivity = h5.load_from_index(connectivity_index)

    # Load the SimulatorAdapter algorithm from DB
    cached_simulator_algorithm = AlgorithmService().get_algorithm_by_module_and_class(
        IntrospectionRegistry.SIMULATOR_MODULE, IntrospectionRegistry.SIMULATOR_CLASS)

    # Configure the simulation through its view model
    simulator_model = SimulatorAdapterModel()
    simulator_model.connectivity = connectivity.gid
    simulator_model.simulation_length = 100
    simulator_model.coupling = Scaling()

    # Instantiate a SimulatorService and launch the configured simulation
    simulator_service = SimulatorService()
    burst = BurstConfiguration(project.id, name="Simulation")
    dao.store_entity(burst)
    launched_operation = simulator_service.async_launch_and_prepare_simulation(
        burst, project.administrator, project, cached_simulator_algorithm,
        simulator_model)

    # wait for the operation to finish
    while not launched_operation.has_finished:
        sleep(5)
        launched_operation = dao.get_operation_by_id(launched_operation.id)

    if launched_operation.status == STATUS_FINISHED:
        ts = dao.get_generic_entity(TimeSeriesRegionIndex,
                                    launched_operation.id,
                                    "fk_from_operation")[0]
        log.info("TimeSeries result is: %s " % ts)
    else:
        log.warning(
            "Operation ended with problems [%s]: [%s]" %
            (launched_operation.status, launched_operation.additional_info))
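
run_simulation above polls the operation in an unbounded loop; a small variant with a timeout, reusing only dao.get_operation_by_id and the has_finished flag already used in the snippet, could look like the sketch below (wait_for_operation is a hypothetical name):

def wait_for_operation(operation, poll_seconds=5, timeout_seconds=3600):
    # Hypothetical helper: re-read the operation from the DB until it finishes
    # or the timeout expires, mirroring the loop in run_simulation above.
    waited = 0
    while not operation.has_finished and waited < timeout_seconds:
        sleep(poll_seconds)
        waited += poll_seconds
        operation = dao.get_operation_by_id(operation.id)
    return operation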
Example #4
    def test_estimate_hdd(self, connectivity_index_factory):
        """
        Test that occupied HDD estimation for simulation results considers simulation length.
        """
        model = SimulatorAdapterModel()
        model.connectivity = connectivity_index_factory(self.CONNECTIVITY_NODES).gid
        estimate1 = self._estimate_hdd(model)
        assert estimate1 > 1

        # Increase simulation length and decrease monitor period; both changes should increase the estimated HDD (factor ** 2 overall)
        factor = 3
        model.simulation_length = float(model.simulation_length) * factor
        period = float(model.monitors[0].period)
        model.monitors[0].period = period / factor
        estimate2 = self._estimate_hdd(model)
        assert estimate1 == estimate2 // factor // factor

        # Change number of nodes in connectivity. Expect HDD estimation increase.
        model.connectivity = connectivity_index_factory(self.CONNECTIVITY_NODES * factor).gid
        estimate3 = self._estimate_hdd(model)
        assert estimate2 == estimate3 / factor
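
The _estimate_hdd helper called above is not part of this snippet; a plausible sketch, assuming the adapter is first configured and then asked for its disk-size estimate (the method name get_required_disk_size is an assumption), would be:

    def _estimate_hdd(self, model):
        # Hypothetical helper: configure the adapter with the view model, then
        # return its estimate of the result size on disk.
        self.simulator_adapter.configure(model)
        return self.simulator_adapter.get_required_disk_size(model)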
Example #5
def fire_simulation(project_id, simulator):
    project = dao.get_project_by_id(project_id)
    assert isinstance(simulator, Simulator)
    # Load the SimulatorAdapter algorithm from DB
    cached_simulator_algorithm = AlgorithmService().get_algorithm_by_module_and_class(
        IntrospectionRegistry.SIMULATOR_MODULE, IntrospectionRegistry.SIMULATOR_CLASS)

    simulator_model = SimulatorAdapterModel()
    simulator_model.connectivity = simulator.connectivity.gid
    simulator_model.simulation_length = simulator.simulation_length

    # Instantiate a SimulatorService and launch the configured simulation
    simulator_service = SimulatorService()
    burst = BurstConfiguration(project.id)
    burst.name = "Sim " + str(datetime.now())
    burst.start_time = datetime.now()
    dao.store_entity(burst)

    launched_operation = simulator_service.async_launch_and_prepare_simulation(
        burst, project.administrator, project, cached_simulator_algorithm,
        simulator_model)
    LOG.info("Operation launched ...")
    return launched_operation
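
fire_simulation returns right after scheduling the operation; the caller is expected to poll until it finishes. A usage sketch, reusing the dao calls and STATUS_FINISHED flag from run_simulation above (project_id=1 and my_simulator are assumptions):

# Hypothetical caller: launch, then poll until the operation ends.
operation = fire_simulation(project_id=1, simulator=my_simulator)
while not operation.has_finished:
    sleep(5)
    operation = dao.get_operation_by_id(operation.id)
if operation.status != STATUS_FINISHED:
    LOG.warning("Simulation ended with problems: %s" % operation.additional_info)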
Example #6
def fire_simulation_example(tvb_client_instance):
    logger.info("Requesting projects for logged user")
    projects_of_user = tvb_client_instance.get_project_list()
    assert len(projects_of_user) > 0
    logger.info("TVB has {} projects for this user".format(len(projects_of_user)))

    project_gid = projects_of_user[0].gid
    logger.info("Requesting datatypes from project {}...".format(project_gid))
    data_in_project = tvb_client_instance.get_data_in_project(project_gid)
    logger.info("We have {} datatypes".format(len(data_in_project)))

    logger.info("Requesting operations from project {}...".format(project_gid))
    ops_in_project, _ = tvb_client_instance.get_operations_in_project(project_gid, 1)
    logger.info("Displayname of the first operation is: {}".format(ops_in_project[0].displayname))

    connectivity_gid = None
    datatype_types = []
    for datatype in data_in_project:
        datatype_types.append(datatype.type)
        if datatype.type == ConnectivityIndex().display_type:
            connectivity_gid = datatype.gid
    logger.info("The datatype types in the project are: {}".format(datatype_types))

    if connectivity_gid:
        logger.info("Preparing the simulator...")
        simulator = SimulatorAdapterModel()
        simulator.connectivity = connectivity_gid
        simulator.simulation_length = 100

        logger.info("Starting the simulation...")
        operation_gid = tvb_client_instance.fire_simulation(project_gid, simulator)

        logger.info("Monitoring the simulation operation...")
        monitor_operation(tvb_client_instance, operation_gid)

        logger.info("Requesting the results of the simulation...")
        simulation_results = tvb_client_instance.get_operation_results(operation_gid)
        datatype_names = []
        for datatype in simulation_results:
            datatype_names.append(datatype.name)
        logger.info("The resulted datatype are: {}".format(datatype_names))

        time_series_gid = simulation_results[1].gid
        logger.info("Download the time series file...")
        time_series_path = tvb_client_instance.retrieve_datatype(time_series_gid, tvb_client_instance.temp_folder)
        logger.info("The time series file location is: {}".format(time_series_path))

        logger.info("Requesting algorithms to run on time series...")
        algos = tvb_client_instance.get_operations_for_datatype(time_series_gid)
        algo_names = [algo.displayname for algo in algos]
        logger.info("Possible algorithms are {}".format(algo_names))

        logger.info("Launch Fourier Analyzer...")
        fourier_model = FFTAdapterModel()
        fourier_model.time_series = time_series_gid
        fourier_model.window_function = 'hamming'

        operation_gid = tvb_client_instance.launch_operation(project_gid, FourierAdapter, fourier_model)
        logger.info("Fourier Analyzer operation has launched with gid {}".format(operation_gid))

        data_in_project = tvb_client_instance.get_data_in_project(project_gid)
        logger.info("We have {} datatypes".format(len(data_in_project)))

        for datatype in data_in_project:
            if datatype.type == 'FourierSpectrum':
                fourier_gid = datatype.gid

                extra_info = tvb_client_instance.get_extra_info(fourier_gid)
                logger.info("The extra information for the Fourier result is: {}".format(extra_info))

                break

        logger.info("Download the connectivity file...")
        connectivity_path = tvb_client_instance.retrieve_datatype(connectivity_gid, tvb_client_instance.temp_folder)
        logger.info("The connectivity file location is: {}".format(connectivity_path))

        logger.info("Loading an entire Connectivity datatype in memory...")
        connectivity = tvb_client_instance.load_datatype_from_file(connectivity_path)
        logger.info("Info on current Connectivity: {}".format(connectivity.summary_info()))

        logger.info("Loading a chuck from the time series H5 file, as this can be very large...")
        with TimeSeriesH5(time_series_path) as time_series_h5:
            data_shape = time_series_h5.read_data_shape()
            chunk = time_series_h5.read_data_slice(
                tuple([slice(20), slice(data_shape[1]), slice(data_shape[2]), slice(data_shape[3])]))

        assert chunk.shape[0] == 20
        assert chunk.shape[1] == data_shape[1]
        assert chunk.shape[2] == data_shape[2]
        assert chunk.shape[3] == data_shape[3]

        return project_gid, time_series_gid
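
The monitor_operation helper used above is not defined in this snippet; a plausible polling sketch against the REST client is shown below. The get_operation_status method name and the running-status strings are assumptions about the client API, and sleep / logger come from the snippet's surrounding context:

def monitor_operation(tvb_client_instance, operation_gid, poll_seconds=5):
    # Hypothetical helper: poll the operation via the REST client until it
    # leaves the running states, then log the final status.
    status = tvb_client_instance.get_operation_status(operation_gid)
    while status in ("4-PENDING", "3-STARTED"):
        sleep(poll_seconds)
        status = tvb_client_instance.get_operation_status(operation_gid)
    logger.info("Operation {} finished with status {}".format(operation_gid, status))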