Пример #1
0
    def handle_next_fragment_for_monitors(self, context, rendering_rules,
                                          current_monitor, next_monitor,
                                          is_noise_form, form_action_url,
                                          if_bold_url):
        """Render the wizard fragment that follows the current monitor.

        Dispatches, in order: the BOLD-specific fragment, the final fragment
        (when no monitor is left), or the form for the next monitor.
        """
        simulator, _, _, is_branch = context.get_common_params()

        is_bold = isinstance(current_monitor, BoldViewModel)
        if is_bold and is_noise_form is False:
            # BOLD monitors get their dedicated intermediate fragment first
            return self._prepare_next_fragment_if_bold(current_monitor,
                                                       rendering_rules,
                                                       if_bold_url)

        if not next_monitor:
            # No monitors left to configure: show the wizard's final page
            rendering_rules.is_branch = is_branch
            return SimulatorFinalFragment.prepare_final_fragment(
                simulator, context.burst_config, context.project.id,
                rendering_rules, SimulatorWizzardURLs.SETUP_PSE_URL)

        form_class = get_form_for_monitor(type(next_monitor))
        monitor_form = form_class(simulator, is_branch)
        monitor_form = AlgorithmService().prepare_adapter_form(
            form_instance=monitor_form, project_id=context.project.id)
        monitor_form.fill_from_trait(next_monitor)

        rendering_rules.form = monitor_form
        rendering_rules.form_action_url = form_action_url
        rendering_rules.monitor_name = self.prepare_monitor_legend(
            simulator.is_surface_simulation, next_monitor)
        return rendering_rules.to_dict()
Пример #2
0
    def transactional_setup_method(self):
        """Create the entities (service, uploader category, algorithm) used by the tests."""
        self.algorithm_service = AlgorithmService()
        uploader_category = dao.get_uploader_categories()[0]
        algorithm_entity = model_operation.Algorithm(TEST_ADAPTER_VALID_MODULE,
                                                     TEST_ADAPTER_VALID_CLASS,
                                                     uploader_category.id)
        self.algorithm = dao.store_entity(algorithm_entity)
Пример #3
0
    def initialize_two_projects(self, dummy_datatype_index_factory,
                                project_factory, user_factory):
        """
        Create a user, an algorithm and two projects.

        ``src_project`` receives an operation and two datatypes (one
        connectivity, one sensors set); ``dest_project`` stays empty.
        Also initializes the algorithm and project services.
        """
        self.clean_database(delete_folders=True)

        self.algorithm_service = AlgorithmService()
        self.project_service = ProjectService()

        # Source project: import one connectivity and one sensors datatype
        source_user = user_factory(username="******")
        self.src_usr_id = source_user.id
        self.src_project = project_factory(source_user, "Src_Project")

        data_root = os.path.dirname(tvb_data.__file__)
        connectivity_zip = os.path.join(data_root, 'connectivity',
                                        'paupau.zip')
        self.red_datatype = TestFactory.import_zip_connectivity(
            source_user, self.src_project, connectivity_zip, "John")
        sensors_file = os.path.join(data_root, 'sensors',
                                    'eeg_unitvector_62.txt.bz2')
        self.blue_datatype = TestFactory.import_sensors(
            source_user, self.src_project, sensors_file,
            SensorTypes.TYPE_EEG.value)
        assert 1 == self.red_datatypes_in(self.src_project.id)
        assert 1 == self.blue_datatypes_in(self.src_project.id)

        # Destination project: created empty
        self.dst_user = user_factory(username='******')
        self.dst_usr_id = self.dst_user.id
        self.dest_project = project_factory(self.dst_user,
                                            "Destination_Project")
        assert 0 == self.red_datatypes_in(self.dest_project.id)
        assert 0 == self.blue_datatypes_in(self.dest_project.id)
Пример #4
0
    def launch_simulation(self, current_user_id, zip_directory, project_gid):
        """Launch a simulation from an uploaded, already-extracted archive.

        :param current_user_id: id of the user launching the simulation
        :param zip_directory: folder where the uploaded archive was extracted
        :param project_gid: gid of the project to launch the simulation in
        :returns: the gid of the launched simulation
        :raises InvalidIdentifierException: when the project gid is unknown
        :raises InvalidInputException: when no Simulator h5 file is found
        :raises ServiceException: for any other failure while preparing the launch
        """
        try:
            project = self.project_service.find_project_lazy_by_gid(
                project_gid)
        except ProjectServiceException as excep:
            # Chain the original error so the root cause is not lost
            raise InvalidIdentifierException() from excep

        try:
            simulator_h5_name = DirLoader(
                zip_directory, None).find_file_for_has_traits_type(Simulator)
            simulator_file = os.path.join(zip_directory, simulator_h5_name)
        except IOError as excep:
            raise InvalidInputException(
                'No Simulator h5 file found in the archive') from excep

        try:
            simulator_algorithm = AlgorithmService(
            ).get_algorithm_by_module_and_class(SimulatorAdapter.__module__,
                                                SimulatorAdapter.__name__)
            simulation = self.simulator_service.prepare_simulation_on_server(
                user_id=current_user_id,
                project=project,
                algorithm=simulator_algorithm,
                zip_folder_path=zip_directory,
                simulator_file=simulator_file)
            return simulation.gid
        except Exception as excep:
            self.logger.error(excep, exc_info=True)
            raise ServiceException(str(excep)) from excep
    def test_get_project_structure(self, datatype_group_factory,
                                   dummy_datatype_index_factory,
                                   project_factory, user_factory):
        """
        Tests project structure is as expected and contains all datatypes and created links.

        Builds two projects, links a simple DT and a full DT group from
        project 1 into project 2, then checks that the tree of project 2
        shows the links, the group node, and no group members individually.
        """
        user = user_factory()
        project1 = project_factory(user, name="TestPS1")
        project2 = project_factory(user, name="TestPS2")

        # Project 1 gets one DT group plus one simple DT
        dt_group = datatype_group_factory(project=project1)
        dt_simple = dummy_datatype_index_factory(state="RAW_DATA",
                                                 project=project1)
        # Create 3 DTs directly in Project 2
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)

        # Create Links from Project 1 into Project 2
        link_ids, expected_links = [], []
        link_ids.append(dt_simple.id)
        expected_links.append(dt_simple.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        dts = dao.get_datatype_in_group(datatype_group_id=dt_group.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(dt_group.id)
        expected_links.append(dt_group.gid)

        # Actually create the links from Prj1 into Prj2
        AlgorithmService().create_link(link_ids, project2.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(project2.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(
            project2, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

        # Tree should contain the 3 local DTs plus the linked simple DT and DT group
        assert len(expected_links) + 3 == len(
            dts_in_tree), "invalid number of nodes in tree"
        assert dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!"
        assert dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!"

        # Group members must be hidden; simple DTs and groups must be visible
        project_dts = dao.get_datatypes_in_project(project2.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                assert not dt.gid in node_json, "DTs part of a group should not be"
                assert not dt.gid in dts_in_tree, "DTs part of a group should not be"
            else:
                assert dt.gid in node_json, "Simple DTs and DT_Groups should be"
                assert dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be"

        for link_gid in expected_links:
            assert link_gid in node_json, "Expected Link not present"
            assert link_gid in dts_in_tree, "Expected Link not present"
Пример #6
0
    def __init__(self):
        """Instantiate the services and build the navigation submenus."""
        self.logger = get_logger(self.__class__.__module__)

        self.user_service = UserService()
        self.project_service = ProjectService()
        self.algorithm_service = AlgorithmService()
        self.analyze_category_link = '/flow/step_analyzers'
        self.analyze_adapters = None

        self.connectivity_tab_link = '/flow/step_connectivity'
        view_category = self.algorithm_service.get_visualisers_category()
        conn_algo = self.algorithm_service.get_algorithm_by_module_and_class(
            IntrospectionRegistry.CONNECTIVITY_MODULE,
            IntrospectionRegistry.CONNECTIVITY_CLASS)
        connectivity_link = self.get_url_adapter(view_category.id,
                                                 conn_algo.id)

        self.connectivity_submenu = [
            dict(title="Large Scale Connectivity",
                 link=connectivity_link,
                 subsection=WebStructure.SUB_SECTION_CONNECTIVITY,
                 description="View Connectivity Regions. Perform Connectivity lesions"),
            dict(title="Local Connectivity",
                 link='/spatial/localconnectivity/step_1/1',
                 subsection=WebStructure.SUB_SECTION_LOCAL_CONNECTIVITY,
                 description="Create or view existent Local Connectivity entities.")
        ]

        allen_algo = self.algorithm_service.get_algorithm_by_module_and_class(
            IntrospectionRegistry.ALLEN_CREATOR_MODULE,
            IntrospectionRegistry.ALLEN_CREATOR_CLASS)
        if allen_algo and not allen_algo.removed:
            # Only add the Allen Creator if AllenSDK is installed
            allen_link = self.get_url_adapter(allen_algo.fk_category,
                                              allen_algo.id)
            self.connectivity_submenu.append(
                dict(title="Allen Connectome Builder",
                     link=allen_link,
                     subsection=WebStructure.SUB_SECTION_ALLEN,
                     description="Download data from Allen dataset and create a mouse connectome"))

        self.burst_submenu = [
            dict(link='/burst',
                 subsection=WebStructure.SUB_SECTION_BURST,
                 title='Simulation Cockpit',
                 description='Manage simulations'),
            dict(link='/burst/dynamic',
                 subsection='dynamic',
                 title='Phase plane',
                 description='Configure model dynamics')
        ]
Пример #7
0
    def prepare_stimulus_fragment(simulator, rendering_rules,
                                  is_surface_simulation, form_action_url,
                                  project_id):
        """Build, fill and attach the stimulus form, then render the rules."""
        stimulus_form = SimulatorStimulusFragment(is_surface_simulation)
        stimulus_form = AlgorithmService().prepare_adapter_form(
            form_instance=stimulus_form, project_id=project_id)
        stimulus_form.fill_from_trait(simulator)

        rendering_rules.form = stimulus_form
        rendering_rules.form_action_url = form_action_url
        return rendering_rules.to_dict()
Пример #8
0
    def prepare_cortex_fragment(simulator, rendering_rules, form_action_url,
                                project_id):
        """Build and attach the region-mapping (cortex) form for the surface step."""
        surface_index = load_entity_by_gid(simulator.surface.surface_gid)
        cortex_form = SimulatorRMFragment(surface_index,
                                          simulator.connectivity)
        cortex_form = AlgorithmService().prepare_adapter_form(
            form_instance=cortex_form, project_id=project_id)
        cortex_form.fill_from_trait(simulator.surface)

        rendering_rules.form = cortex_form
        rendering_rules.form_action_url = form_action_url
        return rendering_rules.to_dict()
Пример #9
0
    def _compute_connectivity_global_params(self, connectivity):
        """
        Returns a dictionary which contains the data needed for drawing a connectivity.

        :param connectivity: the `Connectivity(HasTraits)` object
        """
        gid = connectivity.gid.hex
        url_for = SurfaceURLGenerator.paths2url

        creator_algo = AlgorithmService().get_algorithm_by_module_and_class(
            CONNECTIVITY_CREATOR_MODULE, CONNECTIVITY_CREATOR_CLASS)
        submit_url = '/{}/{}/{}'.format(SurfaceURLGenerator.FLOW,
                                        creator_algo.fk_category,
                                        creator_algo.id)
        global_pages = dict(controlPage="connectivity/top_right_controls")

        # Weight and tract extrema drive the client-side color scales
        w_min, w_max, w_min_nz = self._compute_matrix_extrema(
            connectivity.ordered_weights)
        t_min, t_max, t_min_nz = self._compute_matrix_extrema(
            connectivity.ordered_tracts)

        global_params = dict(
            urlWeights=url_for(gid, 'ordered_weights'),
            urlPositions=url_for(gid, 'ordered_centres'),
            urlTracts=url_for(gid, 'ordered_tracts'),
            urlLabels=url_for(gid, 'ordered_labels'),
            originalConnectivity=gid,
            title="Connectivity Control",
            submitURL=submit_url,
            positions=connectivity.ordered_centres,
            tractsMin=t_min,
            tractsMax=t_max,
            weightsMin=w_min,
            weightsMax=w_max,
            tractsNonZeroMin=t_min_nz,
            weightsNonZeroMin=w_min_nz,
            pointsLabels=connectivity.ordered_labels,
            conductionSpeed=1,
            connectivity_entity=connectivity,
            base_selection=connectivity.saved_selection_labels,
            hemisphereOrderUrl=url_for(gid, 'hemisphere_order_indices'),
            leftHemisphereCount=(connectivity.hemispheres == 0).sum())
        global_params.update(
            self.build_params_for_selectable_connectivity(connectivity))
        return global_params, global_pages
Пример #10
0
    def get_fragment_after_monitors(simulator, burst_config, project_id,
                                    is_branch, rendering_rules, setup_pse_url):
        """Render the fragment that follows the monitors step of the wizard."""
        monitor = simulator.first_monitor
        if monitor is None:
            # No monitor configured: go straight to the final fragment
            rendering_rules.is_branch = is_branch
            return SimulatorFinalFragment.prepare_final_fragment(
                simulator, burst_config, project_id, rendering_rules,
                setup_pse_url)

        monitor_form = get_form_for_monitor(type(monitor))(simulator,
                                                           is_branch)
        monitor_form = AlgorithmService().prepare_adapter_form(
            form_instance=monitor_form)
        monitor_form.fill_from_trait(monitor)

        rendering_rules.form = monitor_form
        rendering_rules.monitor_name = MonitorsWizardHandler.prepare_monitor_legend(
            simulator.is_surface_simulation, monitor)
        return rendering_rules.to_dict()
Пример #11
0
def run_simulation():
    """Configure and launch a simulation in project 1, then wait for the result."""
    log = get_logger(__name__)

    # This ID of a project needs to exist in DB; it can be taken from the WebInterface
    project = dao.get_project_by_id(1)

    # Find a structural Connectivity and load it in memory
    connectivity_index = dao.get_generic_entity(
        ConnectivityIndex, DataTypeMetaData.DEFAULT_SUBJECT, "subject")[0]
    connectivity = h5.load_from_index(connectivity_index)

    # Load the SimulatorAdapter algorithm from DB
    simulator_algorithm = AlgorithmService().get_algorithm_by_module_and_class(
        IntrospectionRegistry.SIMULATOR_MODULE,
        IntrospectionRegistry.SIMULATOR_CLASS)

    # Configure the simulation view-model
    simulator_model = SimulatorAdapterModel()
    simulator_model.connectivity = connectivity.gid
    simulator_model.simulation_length = 100
    simulator_model.coupling = Scaling()

    # Instantiate a SimulatorService and launch the configured simulation
    simulator_service = SimulatorService()
    burst = BurstConfiguration(project.id, name="Simulation")
    dao.store_entity(burst)
    operation = simulator_service.async_launch_and_prepare_simulation(
        burst, project.administrator, project, simulator_algorithm,
        simulator_model)

    # Poll until the asynchronous operation finishes
    while not operation.has_finished:
        sleep(5)
        operation = dao.get_operation_by_id(operation.id)

    if operation.status == STATUS_FINISHED:
        ts = dao.get_generic_entity(TimeSeriesRegionIndex, operation.id,
                                    "fk_from_operation")[0]
        log.info("TimeSeries result is: %s " % ts)
    else:
        log.warning("Operation ended with problems [%s]: [%s]" %
                    (operation.status, operation.additional_info))
Пример #12
0
def fire_simulation(project_id, simulator):
    """Launch *simulator* asynchronously in the given project and return the operation."""
    project = dao.get_project_by_id(project_id)
    assert isinstance(simulator, Simulator)

    # Load the SimulatorAdapter algorithm from DB
    simulator_algorithm = AlgorithmService().get_algorithm_by_module_and_class(
        IntrospectionRegistry.SIMULATOR_MODULE,
        IntrospectionRegistry.SIMULATOR_CLASS)

    # Instantiate a SimulatorService and launch the configured simulation
    simulator_service = SimulatorService()
    burst = BurstConfiguration(project.id)
    burst.name = "Sim " + str(datetime.now())
    burst.start_time = datetime.now()
    dao.store_entity(burst)

    launched_operation = simulator_service.async_launch_and_prepare_simulation(
        burst, project.administrator, project, simulator_algorithm, simulator)
    LOG.info("Operation launched ....")
    return launched_operation
Пример #13
0
    def test_datatype_select_field(self, connectivity_index_factory):
        """Check that a datatype select field is filled and ordered as expected."""
        trait_attribute = SimulatorAdapterModel.connectivity

        connectivity_1 = connectivity_index_factory(2)
        connectivity_2 = connectivity_index_factory(2)
        connectivity_3 = connectivity_index_factory(2)

        # Attach each connectivity's operation to the test project
        for conn in (connectivity_1, connectivity_2, connectivity_3):
            operation = dao.get_operation_by_id(conn.fk_from_operation)
            operation.fk_launched_in = self.test_project.id
            dao.store_entity(operation)

        datatype_select_field = TraitDataTypeSelectField(trait_attribute,
                                                         self.name,
                                                         None,
                                                         has_all_option=True)
        AlgorithmService().fill_selectfield_with_datatypes(
            datatype_select_field, self.test_project.id)

        post_data = {self.name: connectivity_1.gid}
        datatype_select_field.fill_from_post(post_data)

        options = datatype_select_field.options()
        first_option = next(options)
        second_option = next(options)
        third_option = next(options)

        next(options)  # one extra option remains (has_all_option=True)
        with pytest.raises(StopIteration):
            next(options)

        # Options are listed newest-first
        assert first_option.value == connectivity_3.gid
        assert second_option.value == connectivity_2.gid
        assert third_option.value == connectivity_1.gid
        assert uuid.UUID(post_data[self.name]) == datatype_select_field.data, \
            "UUID data was not set correctly on TraitDataTypeSelectField"
Пример #14
0
 def __init__(self):
     """Instantiate the logger, the services and the storage interface used here."""
     self.logger = get_logger(self.__class__.__module__)
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.algorithm_service = AlgorithmService()
     self.storage_interface = StorageInterface()
Пример #15
0
class TestBurstService(BaseTestCase):
    """
    Test the service layer for BURST PAGE. We can't have this transactional since
    we launch operations in different threads and the transactional operator only rolls back 
    sessions bounded to the current thread transaction.
    """
    # Shared class-level fixtures, resolved once at class-definition time.
    burst_service = BurstService()
    sim_algorithm = AlgorithmService().get_algorithm_by_module_and_class(IntrospectionRegistry.SIMULATOR_MODULE,
                                                                         IntrospectionRegistry.SIMULATOR_CLASS)

    def setup_method(self):
        """
        Sets up the environment for running the tests;
        cleans the database before testing and saves config file;
        creates a test user, a test project;
        creates burst, flow, operation and workflow services

        """
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)

    def teardown_method(self):
        """
        Clean up database.
        """
        self.clean_database()

    def test_clone_burst_configuration(self):
        """
        Test that all the major attributes are the same after a clone burst but the
        id of the cloned one is None.
        """
        first_burst = TestFactory.store_burst(self.test_project.id)
        cloned_burst = first_burst.clone()
        self._compare_bursts(first_burst, cloned_burst)
        assert cloned_burst.name == first_burst.name, 'Cloned burst should have the same name'
        assert cloned_burst.id is None, 'id should be none for cloned entry.'

    def test_store_burst_config(self):
        """
        Test that a burst entity is properly stored in db.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        assert burst_config.id is not None, 'Burst was not stored properly.'
        stored_entity = dao.get_burst_by_id(burst_config.id)
        assert stored_entity is not None, 'Burst was not stored properly.'
        self._compare_bursts(burst_config, stored_entity)

    def _compare_bursts(self, first_burst, second_burst):
        """
        Compare that all important attributes are the same between two bursts. (name, project id and status)
        """
        assert first_burst.name == second_burst.name, "Names not equal for bursts."
        assert first_burst.fk_project == second_burst.fk_project, "Projects not equal for bursts."
        assert first_burst.status == second_burst.status, "Statuses not equal for bursts."
        assert first_burst.range1 == second_burst.range1, "Statuses not equal for bursts."
        assert first_burst.range2 == second_burst.range2, "Statuses not equal for bursts."

    def test_getavailablebursts_none(self):
        """
        Test that an empty list is returned if no data is available in db.
        """
        bursts = self.burst_service.get_available_bursts(self.test_project.id)
        assert bursts == [], "Unexpected result returned : %s" % (bursts,)

    def test_get_available_bursts_happy(self):
        """
        Test that all the correct burst are returned for the given project.
        """
        # Store 4 bursts in the test project and 3 in a second project
        project = Project("second_test_proj", self.test_user.id, "description")
        second_project = dao.store_entity(project)
        test_project_bursts = [TestFactory.store_burst(self.test_project.id).id for _ in range(4)]
        second_project_bursts = [TestFactory.store_burst(second_project.id).id for _ in range(3)]
        returned_test_project_bursts = [burst.id for burst in
                                        self.burst_service.get_available_bursts(self.test_project.id)]
        returned_second_project_bursts = [burst.id for burst in
                                          self.burst_service.get_available_bursts(second_project.id)]
        # Each project must see exactly its own bursts
        assert len(test_project_bursts) == len(returned_test_project_bursts), \
            "Incorrect bursts retrieved for project %s." % self.test_project
        assert len(second_project_bursts) == len(returned_second_project_bursts), \
            "Incorrect bursts retrieved for project %s." % second_project
        assert set(second_project_bursts) == set(returned_second_project_bursts), \
            "Incorrect bursts retrieved for project %s." % second_project
        assert set(test_project_bursts) == set(returned_test_project_bursts), \
            "Incorrect bursts retrieved for project %s." % self.test_project

    def test_rename_burst(self, operation_factory):
        """
        Test that renaming of a burst functions properly.
        """
        operation = operation_factory()
        burst_config = TestFactory.store_burst(self.test_project.id, operation)
        self.burst_service.rename_burst(burst_config.id, "new_burst_name")
        loaded_burst = dao.get_burst_by_id(burst_config.id)
        assert loaded_burst.name == "new_burst_name", "Burst was not renamed properly."

    def test_burst_delete_with_project(self):
        """
        Test that on removal of a project all burst related data is cleared.
        """
        TestFactory.store_burst(self.test_project.id)
        ProjectService().remove_project(self.test_project.id)
        self._check_burst_removed()

    def test_load_burst_configuration(self):
        """
        Test that loads the burst configuration based non the stored config id
        """
        stored_burst = TestFactory.store_burst(self.test_project.id)
        burst_config = self.burst_service.load_burst_configuration(stored_burst.id)
        assert burst_config.id == stored_burst.id, "The loaded burst does not have the same ID"

    def test_update_simulation_fields(self, tmph5factory):
        """
        Test that updates the simulation fields of the burst
        """
        stored_burst = TestFactory.store_burst(self.test_project.id)

        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project)
        op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        simulation = SimulatorAdapterModel()
        simulation.connectivity = UUID(connectivity.gid)

        burst_config = self.burst_service.update_simulation_fields(stored_burst, op.id, simulation.gid)
        assert burst_config.id == stored_burst.id, "The loaded burst does not have the same ID"
        assert burst_config.fk_simulation == op.id, "The loaded burst does not have the fk simulation that it was given"
        assert burst_config.simulator_gid == simulation.gid.hex, "The loaded burst does not have the simulation gid that it was given"

    def test_prepare_name(self):
        """
        Test prepare burst name
        """
        # Without an explicit name, a default "simulation_<count+1>" name is generated
        stored_burst = TestFactory.store_burst(self.test_project.id)
        simulation_tuple = self.burst_service.prepare_simulation_name(stored_burst, self.test_project.id)
        assert simulation_tuple[0] == 'simulation_' + str(dao.get_number_of_bursts(self.test_project.id) + 1), \
            "The default simulation name is not the defined one"

        # An explicitly set name is used as-is
        busrt_test_name = "Burst Test Name"
        stored_burst.name = busrt_test_name
        stored_burst = dao.store_entity(stored_burst)
        simulation_tuple = self.burst_service.prepare_simulation_name(stored_burst, self.test_project.id)
        assert simulation_tuple[0] == busrt_test_name, "The burst name is not the given one"

    def test_prepare_burst_for_pse(self):
        """
        Test prepare burst for pse
        """
        burst = BurstConfiguration(self.test_project.id)
        assert burst.fk_metric_operation_group == None, "The fk for the metric operation group is not None"
        assert burst.fk_operation_group == None, "The fk for the operation group is not None"
        assert burst.operation_group == None, "The operation group is not None"

        pse_burst = self.burst_service.prepare_burst_for_pse(burst)
        assert pse_burst.metric_operation_group != None, "The fk for the operation group is None"
        assert pse_burst.operation_group != None, "The operation group is None"

    def _check_burst_removed(self):
        """
        Test that a burst was properly removed. This means checking that the burst entity,
        any workflow steps and any datatypes resulted from the burst are also removed.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        assert 0 == len(remaining_bursts), "Burst was not deleted"
        ops_number = dao.get_operation_numbers(self.test_project.id)[0]
        assert 0 == ops_number, "Operations were not deleted."
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 0 == len(datatypes)

        datatype1_stored = self.count_all_entities(Datatype1)
        datatype2_stored = self.count_all_entities(Datatype2)
        assert 0 == datatype1_stored, "Specific datatype entries for DataType1 were not deleted."
        assert 0 == datatype2_stored, "Specific datatype entries for DataType2 were not deleted."

    def test_prepare_indexes_for_simulation_results(self, time_series_factory, operation_factory, simulator_factory):
        """
        Test that indexes are correctly prepared from simulation result h5 files.
        """
        ts_1 = time_series_factory()
        ts_2 = time_series_factory()
        ts_3 = time_series_factory()

        operation = operation_factory(test_user=self.test_user, test_project=self.test_project)
        sim_folder, sim_gid = simulator_factory(op=operation)

        # Write each TimeSeries into its own h5 file inside the simulation folder
        path_1 = os.path.join(sim_folder, "Time_Series_{}.h5".format(ts_1.gid.hex))
        path_2 = os.path.join(sim_folder, "Time_Series_{}.h5".format(ts_2.gid.hex))
        path_3 = os.path.join(sim_folder, "Time_Series_{}.h5".format(ts_3.gid.hex))

        with TimeSeriesH5(path_1) as f:
            f.store(ts_1)
            f.sample_rate.store(ts_1.sample_rate)
            f.store_generic_attributes(GenericAttributes())

        with TimeSeriesH5(path_2) as f:
            f.store(ts_2)
            f.sample_rate.store(ts_2.sample_rate)
            f.store_generic_attributes(GenericAttributes())

        with TimeSeriesH5(path_3) as f:
            f.store(ts_3)
            f.sample_rate.store(ts_3.sample_rate)
            f.store_generic_attributes(GenericAttributes())

        burst_configuration = BurstConfiguration(self.test_project.id)
        burst_configuration.fk_simulation = operation.id
        burst_configuration.simulator_gid = operation.view_model_gid
        burst_configuration = dao.store_entity(burst_configuration)

        file_names = [path_1, path_2, path_3]
        ts_datatypes = [ts_1, ts_2, ts_3]
        indexes = self.burst_service.prepare_indexes_for_simulation_results(operation, file_names, burst_configuration)

        # Each prepared index should mirror its TimeSeries' metadata
        for i in range(len(indexes)):
            assert indexes[i].gid == ts_datatypes[i].gid.hex, "Gid was not set correctly on index."
            assert indexes[i].sample_period == ts_datatypes[i].sample_period
            assert indexes[i].sample_period_unit == ts_datatypes[i].sample_period_unit
            assert indexes[i].sample_rate == ts_datatypes[i].sample_rate
Пример #16
0
 def __init__(self):
     """Instantiate the logger, the services and the files helper used here."""
     self.logger = get_logger(self.__class__.__module__)
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.algorithm_service = AlgorithmService()
     self.files_helper = FilesHelper()
Пример #17
0
 def __init__(self):
     """Instantiate the algorithm service this object delegates to."""
     self.algorithm_service = AlgorithmService()
Пример #18
0
        ConnectivityIndex, DataTypeMetaData.DEFAULT_SUBJECT, "subject")[0]
    connectivity = h5.load_from_index(connectivity_index)

    # Prepare a Simulator instance with defaults and configure it to use the previously loaded Connectivity
    simulator = Simulator()
    simulator.connectivity = connectivity
    # Configure the Simulator to use a Scaling type coupling
    simulator.coupling = Scaling()
    # Choose a higher value for the 'tau' parameter of the Generic2dOscillator model
    simulator.model.tau = numpy.array([2.0])
    # Configure the simulation length
    simulator.simulation_length = 100

    # Load the SimulatorAdapter algorithm from DB
    cached_simulator_algorithm = AlgorithmService(
    ).get_algorithm_by_module_and_class(IntrospectionRegistry.SIMULATOR_MODULE,
                                        IntrospectionRegistry.SIMULATOR_CLASS)

    # Instantiate a SimulatorService and launch the configured simulation
    simulator_service = SimulatorService()
    launched_operation = simulator_service.async_launch_and_prepare_simulation(
        BurstConfiguration(project.id), project.administrator, project,
        cached_simulator_algorithm, simulator)

    # wait for the operation to finish
    while not launched_operation.has_finished:
        sleep(5)
        launched_operation = dao.get_operation_by_id(launched_operation.id)

    if launched_operation.status == STATUS_FINISHED:
        ts = dao.get_generic_entity(TimeSeriesRegionIndex,
Пример #19
0
    def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
        """
        Return a tuple (project, total_operations_count, operations, pages_count)
        for the current Project.

        :param project_id: Current Project Identifier
        :param applied_filters: Filters to apply on Operations
        :param current_page: 1-based number of the current page in operations
        """
        selected_project = self.find_project(project_id)
        total_filtered = self.count_filtered_operations(project_id, applied_filters)
        # Ceiling division: an extra page for a partially filled last page.
        pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
        total_ops_nr = self.count_filtered_operations(project_id)

        start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
        current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
        if current_ops is None:
            return selected_project, 0, [], 0

        def _as_date(value):
            # DAO rows may carry timestamps either as strings or as date objects.
            return string2date(str(value)) if isinstance(value, str) else value

        operations = []
        for one_op in current_ops:
            try:
                result = {}
                # When the two row ids differ, the row stands for a range of
                # operations and is displayed as "first-last".
                if one_op[0] != one_op[1]:
                    result["id"] = str(one_op[0]) + "-" + str(one_op[1])
                else:
                    result["id"] = str(one_op[0])
                burst = dao.get_burst_for_operation_id(one_op[0])
                result["burst_name"] = burst.name if burst else '-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3]:  # operation group id, set when this row represents a group
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name.replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None
                        result["gid"] = operation_group.gid
                        # Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = AlgorithmService().get_visualizers_for_group(datatype_group.gid) \
                            if datatype_group is not None else None
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                result["create"] = _as_date(one_op[6])
                result["start"] = _as_date(one_op[7])
                result["complete"] = _as_date(one_op[8])

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = one_op[11] > 0
                result['operation_tag'] = one_op[12]
                # .get() because "group" may be missing when the group lookup above
                # failed before assigning it; previously this raised KeyError and
                # dropped the whole row.
                if not result.get('group'):
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no