def test_launch_group_burst_no_metric(self):
        """
        Launch a group burst through the burst service using a test adapter
        that exposes no metrics: the burst must end in error, while still
        creating one workflow per range value plus the entity groups.
        """
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)

        adapter = self.flow_service.get_algorithm_by_module_and_class('tvb.tests.framework.adapters.testadapter1',
                                                                      'TestAdapter1')
        launch_params = {'test1_val1': '[0, 1, 2]', 'test1_val2': '0', model.RANGE_PARAMETER_1: 'test1_val1'}
        portlet_entity = dao.get_portlet_by_identifier(self.PORTLET_ID)
        self._add_portlets_to_burst(burst_config, {portlet_entity.id: [(0, 0), (0, 1), (1, 0)]})
        burst_config.update_simulator_configuration(launch_params)
        stored_id = self.burst_service.launch_burst(burst_config, 0, adapter.id, self.test_user.id)[0]
        burst_config = dao.get_burst_by_id(stored_id)
        # No metrics are available, so the burst is expected to finish with error.
        self._wait_for_burst(burst_config, error_expected=True)

        workflow_count = dao.get_workflows_for_burst(stored_id, is_count=True)
        self.assertEqual(3, workflow_count, "3 workflows should have been launched due to group parameter.")

        op_groups = self.count_all_entities(model.OperationGroup)
        dt_groups = self.count_all_entities(model.DataTypeGroup)
        self.assertEqual(5, op_groups, "An operation group should have been created for each step.")
        self.assertEqual(5, dt_groups, "An dataType group should have been created for each step.")
# Example #2 (snippet separator from scraped collection)
 def test_create_workflow(self):
     """
     Verify that creating a complex workflow persists the workflow entity
     together with every configured step (plus one implicit extra step).
     """
     step_one = TestFactory.create_workflow_step("tvb_test.adapters.testadapter2", "TestAdapter2",
                                                 static_kwargs={"test2": 2}, step_index=1)
     step_two = TestFactory.create_workflow_step("tvb_test.adapters.testadapter1", "TestAdapter1",
                                                 static_kwargs={"test1_val1": 1, "test1_val2": 1},
                                                 step_index=2)
     configured_steps = [step_one, step_two]
     stored_burst_id = self.__create_complex_workflow(configured_steps)
     stored_workflows = dao.get_workflows_for_burst(stored_burst_id)
     self.assertTrue(len(stored_workflows) == 1,
                     "For some reason workflow was not stored in database.")
     stored_steps = dao.get_workflow_steps(stored_workflows[0].id)
     # The creation helper adds one extra step on top of those we configured.
     self.assertEqual(len(stored_steps), len(configured_steps) + 1,
                      "Wrong number of workflow steps created.")
    def _prepare_and_launch_async_burst(self, length=4, is_range=False, nr_ops=0, wait_to_finish=0):
        """
        Launch an asynchronous burst with a simulation having all the default parameters, only the length received as
        a parameters. This is launched with actual simulator and not with a dummy test adapter as replacement.
        :param length: the length of the simulation in milliseconds. This is also used in case we need
            a group burst, in which case we will have `nr_ops` simulations with lengths starting from 
            `length` to `length + nr_ops` milliseconds
        :param is_range: a boolean which switches between a group burst and a non group burst.
            !! even if `is_range` is `True` you still need a non-zero positive `nr_ops` to have an actual group burst
        :param nr_ops: the number of operations in the group burst
        :param wait_to_finish: when non-zero, additionally wait at most this many seconds for the burst
            itself to finish; when 0 (default) we only wait for the launch to be registered
        :returns: the (re-loaded) BurstConfiguration entity of the launched burst
        """
        launch_params = self._prepare_simulation_params(length, is_range, nr_ops)

        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id = self.burst_service.launch_burst(burst_config, 0, self.sim_algorithm.id, self.test_user.id)[0]
        burst_config = dao.get_burst_by_id(burst_id)

        __timeout = 15
        __waited = 0
        # Wait a maximum of 15 seconds for the burst launch to be performed
        # (i.e. until at least one workflow exists for this burst).
        while dao.get_workflows_for_burst(burst_config.id, is_count=True) == 0 and __waited < __timeout:
            sleep(0.5)
            __waited += 0.5

        if wait_to_finish:
            burst_config = self._wait_for_burst(burst_config, timeout=wait_to_finish)
        return burst_config
# Example #4 (snippet separator from scraped collection)
    def load_burst(self, burst_id):
        """
        Load a burst entity and rebuild its tab/portlet configuration.

        :param burst_id: the id of the burst that should be loaded
        :returns: a tuple (burst entity, gid of the related DataTypeGroup or None)

        The tab configuration is reconstructed from the visualization workflow
        steps, using the tab_index and index_in_tab fields stored on each step.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        workflow_count = len(workflows)
        if workflow_count == 1:
            # Simple burst: no range parameters, a single workflow.
            burst = self.__populate_tabs_from_workflow(burst, workflows[0])
        elif workflow_count > 1:
            # Range burst: multiple workflows were created, so expose the
            # resulting group for parameter space exploration.
            self.__populate_tabs_from_workflow(burst, workflows[0])
            steps = dao.get_workflow_steps(workflows[0].id)

            first_operation = dao.get_operation_by_id(steps[0].fk_operation)
            if first_operation.operation_group:
                dt_group = dao.get_datatypegroup_by_op_group_id(first_operation.operation_group.id)
                group_gid = dt_group.gid
        return burst, group_gid
# Example #5 (snippet separator from scraped collection)
    def load_burst(self, burst_id):
        """
        Load a burst entity and rebuild its tab configuration.

        :param burst_id: the id of the burst that should be loaded
        :returns: tuple (burst entity, gid of its DataTypeGroup or None)

        Having this input the method should:

            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
                configuration of the burst using the tab_index and index_in_tab
                fields saved on each workflow_step
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst,
                                                       burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst workflow with a range of values, created multiple workflows and need
            # to launch parameter space exploration with the resulted group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)

            # The operation group of the first executed step identifies the
            # DataTypeGroup used for parameter space exploration.
            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(
                    operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid
 def test_load_tab_configuration(self):
     """
     Create a burst with some predefined portlets in some known positions. Check that the
     load_tab_configuration method does what it is expected, and we get the portlets in the
     corresponding tab positions.
     """
     burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
     SIMULATOR_MODULE = 'tvb.tests.framework.adapters.testadapter1'
     SIMULATOR_CLASS = 'TestAdapter1'
     algo_id = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS).id
     kwargs_replica = {'test1_val1': '0', 'test1_val2': '0'}
     test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
     # Add test_portlet to positions (0,0), (0,1) and (1,0)
     tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
     self._add_portlets_to_burst(burst_config, tab_config)
     burst_config.update_simulator_configuration(kwargs_replica)
     burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)
     burst_config = dao.get_burst_by_id(burst_id)
     burst_config = self._wait_for_burst(burst_config)
     burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
     wf_step = dao.get_workflow_steps(burst_wf.id)[0]
     burst_config.prepare_after_load()
     # Before loading, every portlet slot must still be empty.
     for tab in burst_config.tabs:
         for portlet in tab.portlets:
             self.assertTrue(portlet is None, "Before loading the tab configuration all portlets should be none.")
     burst_config = self.burst_service.load_tab_configuration(burst_config, wf_step.fk_operation)
     for tab_idx, tab in enumerate(burst_config.tabs):
         for portlet_idx, portlet in enumerate(tab.portlets):
             if (tab_idx == 0 and portlet_idx in [0, 1]) or (tab_idx == 1 and portlet_idx == 0):
                 # Fixed typo in assertion message: "gonfiguration" -> "configuration".
                 self.assertTrue(portlet is not None, "portlet configuration not set")
                 self.assertEqual(test_portlet.id, portlet.portlet_id, "Unexpected portlet entity loaded.")
             else:
                 self.assertTrue(portlet is None, "Before loading the tab configuration all portlets should be none")
    def test_launch_burst_invalid_portlet_analyzer_data(self):
        """
        Feed invalid data into the portlet analyzer step and check that the
        burst is marked as error while the simulation step still finishes.
        """
        adapter = self.flow_service.get_algorithm_by_module_and_class('tvb.tests.framework.adapters.testadapter1',
                                                                      'TestAdapter1')
        # Adapter does int(test1_val1) / int(test1_val2), so these values are valid.
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        burst_config.update_simulator_configuration({'test1_val1': '1', 'test1_val2': '0'})

        portlet_entity = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(portlet_entity.id)
        # The portlet analyzer does int(input), which must fail on this value.
        overwrites = {ADAPTER_PREFIX_ROOT + '0test_non_dt_input': 'asa'}
        self.burst_service.update_portlet_configuration(portlet_configuration, overwrites)
        burst_config.tabs[0].portlets[0] = portlet_configuration

        launched_id, _ = self.burst_service.launch_burst(burst_config, 0, adapter.id, self.test_user.id)
        burst_config = dao.get_burst_by_id(launched_id)
        # Wait (bounded) until the burst reaches its expected error state.
        burst_config = self._wait_for_burst(burst_config, error_expected=True)

        workflow = dao.get_workflows_for_burst(burst_config.id)[0]
        steps = dao.get_workflow_steps(workflow.id)
        self.assertTrue(len(steps) == 2,
                        "Should have exactly 2 wf steps. One for 'simulation' one for portlet analyze operation.")
        simulator_op = dao.get_operation_by_id(steps[0].fk_operation)
        self.assertEqual(model.STATUS_FINISHED, simulator_op.status,
                         "First operation should be simulator which should have 'finished' status.")
        analyzer_op = dao.get_operation_by_id(steps[1].fk_operation)
        self.assertEqual(analyzer_op.status, model.STATUS_ERROR,
                         "Second operation should be portlet analyze step which should have 'error' status.")
 def prepare_next_step(self, last_executed_op_id):
     """
     If the operation with id 'last_executed_op_id' resulted after
     the execution of a workflow step then this method will launch
     the operation corresponding to the next step from the workflow.

     :param last_executed_op_id: id of the operation that just finished
     :returns: id of the next operation to launch, or None when the workflow
         (and possibly the whole burst) has completed
     :raises WorkflowInterStepsException: wraps any unexpected error
     """
     try:
         current_step, next_workflow_step = self._get_data(last_executed_op_id)
         if next_workflow_step is not None:
             operation = dao.get_operation_by_id(next_workflow_step.fk_operation)
             dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
             if len(dynamic_param_names) > 0:
                 # Resolve dynamic parameters: each one points at a result or a
                 # parameter of a previously executed step.
                 op_params = json.loads(operation.parameters)
                 for param_name in dynamic_param_names:
                     dynamic_param = op_params[param_name]
                     former_step = dao.get_workflow_step_by_step_index(next_workflow_step.fk_workflow,
                                                                       dynamic_param[wf_cfg.STEP_INDEX_KEY])
                     if type(dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]) is IntType:
                         # Integer index: take the n-th datatype produced by the former step.
                         datatypes = dao.get_results_for_operation(former_step.fk_operation)
                         op_params[param_name] = datatypes[dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]].gid
                     else:
                         # Otherwise copy the named parameter from the former step's operation.
                         previous_operation = dao.get_operation_by_id(former_step.fk_operation)
                         op_params[param_name] = json.loads(previous_operation.parameters)[
                             dynamic_param[wf_cfg.DATATYPE_INDEX_KEY]]
                 operation.parameters = json.dumps(op_params)
                 operation = dao.store_entity(operation)
             return operation.id
         else:
             if current_step is not None:
                 # Last step of this workflow finished: mark the workflow done and,
                 # when all parallel workflows of the burst are done, the burst too.
                 current_workflow = dao.get_workflow_by_id(current_step.fk_workflow)
                 current_workflow.status = current_workflow.STATUS_FINISHED
                 dao.store_entity(current_workflow)
                 burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
                 parallel_workflows = dao.get_workflows_for_burst(burst_entity.id)
                 all_finished = True
                 for workflow in parallel_workflows:
                     if workflow.status == workflow.STATUS_STARTED:
                         all_finished = False
                 if all_finished:
                     self.mark_burst_finished(burst_entity, success=True)
                     # NOTE(review): previous comment claimed a kB->MB transform, but no
                     # conversion happens here - units are whatever the DAO returns. Confirm.
                     disk_size = dao.get_burst_disk_size(burst_entity.id)
                     if disk_size > 0:
                         user = dao.get_project_by_id(burst_entity.fk_project).administrator
                         user.used_disk_space = user.used_disk_space + disk_size
                         dao.store_entity(user)
             else:
                 # Operation was not part of a workflow: just account its disk usage.
                 operation = dao.get_operation_by_id(last_executed_op_id)
                 # NOTE(review): same here - no kB->MB conversion is actually performed.
                 disk_size = dao.get_disk_size_for_operation(operation.id)
                 if disk_size > 0:
                     user = dao.get_user_by_id(operation.fk_launched_by)
                     user.used_disk_space = user.used_disk_space + disk_size
                     dao.store_entity(user)
         return None
     except Exception, excep:
         self.logger.error(excep)
         self.logger.exception(excep)
         raise WorkflowInterStepsException(excep)
 def prepare_next_step(self, last_executed_op_id):
     """
     If the operation with id 'last_executed_op_id' resulted after
     the execution of a workflow step then this method will launch
     the operation corresponding to the next step from the workflow.

     :param last_executed_op_id: id of the operation that just finished
     :returns: id of the next operation to launch, or None when the workflow
         (and possibly the whole burst) has completed
     :raises WorkflowInterStepsException: wraps any unexpected error
     """
     try:
         current_step, next_workflow_step = self._get_data(
             last_executed_op_id)
         if next_workflow_step is not None:
             operation = dao.get_operation_by_id(
                 next_workflow_step.fk_operation)
             dynamic_param_names = next_workflow_step.dynamic_workflow_param_names
             if len(dynamic_param_names) > 0:
                 # Resolve dynamic parameters against previously executed steps.
                 op_params = json.loads(operation.parameters)
                 for param_name in dynamic_param_names:
                     dynamic_param = op_params[param_name]
                     former_step = dao.get_workflow_step_by_step_index(
                         next_workflow_step.fk_workflow, dynamic_param[
                             WorkflowStepConfiguration.STEP_INDEX_KEY])
                     if type(dynamic_param[WorkflowStepConfiguration.
                                           DATATYPE_INDEX_KEY]) is IntType:
                         # Integer index: pick the n-th result of the former step.
                         datatypes = dao.get_results_for_operation(
                             former_step.fk_operation)
                         op_params[param_name] = datatypes[
                             dynamic_param[WorkflowStepConfiguration.
                                           DATATYPE_INDEX_KEY]].gid
                     else:
                         # Otherwise copy the named parameter from the former operation.
                         previous_operation = dao.get_operation_by_id(
                             former_step.fk_operation)
                         op_params[param_name] = json.loads(
                             previous_operation.parameters)[
                                 dynamic_param[WorkflowStepConfiguration.
                                               DATATYPE_INDEX_KEY]]
                 operation.parameters = json.dumps(op_params)
                 operation = dao.store_entity(operation)
             return operation.id
         elif current_step is not None:
             # Last step finished: close this workflow and, when no parallel
             # workflow is still running, mark the whole burst as finished.
             current_workflow = dao.get_workflow_by_id(
                 current_step.fk_workflow)
             current_workflow.status = current_workflow.STATUS_FINISHED
             dao.store_entity(current_workflow)
             burst_entity = dao.get_burst_by_id(current_workflow.fk_burst)
             parallel_workflows = dao.get_workflows_for_burst(
                 burst_entity.id)
             all_finished = True
             for workflow in parallel_workflows:
                 if workflow.status == workflow.STATUS_STARTED:
                     all_finished = False
             if all_finished:
                 self.mark_burst_finished(burst_entity)
         return None
     except Exception, excep:
         self.logger.error(excep)
         self.logger.exception(excep)
         raise WorkflowInterStepsException(excep)
# Example #10 (snippet separator from scraped collection)
    def test_remove_group_burst(self):
        """
        Remove a burst that owns a group of workflows (as launched from a
        Parameter Space Exploration) and check the workflows go with it.
        """
        burst_config = self._prepare_and_launch_async_burst(length=1, is_range=True, nr_ops=4, wait_to_finish=60)

        workflow_count = dao.get_workflows_for_burst(burst_config.id, is_count=True)
        self.assertEqual(4, workflow_count, "4 workflows should have been launched due to group parameter.")

        # Deleting the burst must cascade over its workflows as well.
        got_deleted = self.burst_service.cancel_or_remove_burst(burst_config.id)
        self.assertTrue(got_deleted, "Burst should be deleted")

        workflow_count = dao.get_workflows_for_burst(burst_config.id, is_count=True)
        self.assertEqual(0, workflow_count, "No workflows should remain after delete.")

        reloaded = dao.get_burst_by_id(burst_config.id)
        self.assertTrue(reloaded is None, "Removing a canceled burst should delete it from db.")
 def _build_burst_export_dict(self, burst):
     """
     Collect export information for the given burst and return it as a dict.

     :param burst: burst entity whose workflows and steps are exported
     :returns: dictionary representation of the gathered BurstInformation
     """
     info = BurstInformation(burst.to_dict()[1])
     for wf in dao.get_workflows_for_burst(burst.id):
         # Attach per-workflow details (regular and view steps alike).
         wf_info = WorkflowInformation(wf.to_dict()[1])
         regular_steps, view_steps = self._build_workflow_step_info(wf)
         wf_info.set_workflow_steps(regular_steps)
         wf_info.set_view_steps(view_steps)
         info.add_workflow(wf_info)
     return info.to_dict()
# Example #12 (snippet separator from scraped collection)
 def _build_burst_export_dict(self, burst):
     """
     Compute needed export info and return dictionary.

     :param burst: burst entity whose workflows and steps are exported
     :returns: dictionary representation of a BurstInformation object
     """
     burst_info = BurstInformation(burst.to_dict()[1])
     workflows = dao.get_workflows_for_burst(burst.id)
     for workflow in workflows:
         # Get information for each workflow for this burst
         workflow_info = WorkflowInformation(workflow.to_dict()[1])
         wf_steps, view_steps = self._build_workflow_step_info(workflow)
         workflow_info.set_workflow_steps(wf_steps)
         workflow_info.set_view_steps(view_steps)
         burst_info.add_workflow(workflow_info)
     return burst_info.to_dict()
# Example #13 (snippet separator from scraped collection)
 def test_create_workflow(self):
     """
     Check that a workflow plus all of its configured steps is persisted.
     """
     configured_steps = [
         TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
                                          step_index=1, static_kwargs={"test2": 2}),
         TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
                                          step_index=2, static_kwargs={"test1_val1": 1, "test1_val2": 1})
     ]
     stored_burst_id = self.__create_complex_workflow(configured_steps)
     stored_workflows = dao.get_workflows_for_burst(stored_burst_id)
     assert len(stored_workflows) == 1, "For some reason workflow was not stored in database."
     stored_steps = dao.get_workflow_steps(stored_workflows[0].id)
     # One extra step is added implicitly by the workflow creation helper.
     assert len(stored_steps) == len(configured_steps) + 1, "Wrong number of workflow steps created."
# Example #14 (snippet separator from scraped collection)
 def _build_burst_export_dict(self, burst, bursts_dict):
     """
     Collect export info for the given burst and register it in *bursts_dict*.

     :param burst: burst entity whose workflows and steps are exported
     :param bursts_dict: mapping updated in place, keyed by burst id
     """
     info = BurstInformation(burst.to_dict()[1])
     for wf in dao.get_workflows_for_burst(burst.id):
         # Attach per-workflow details (regular and view steps alike).
         wf_info = WorkflowInformation(wf.to_dict()[1])
         regular_steps, view_steps = self._build_workflow_step_info(wf)
         wf_info.set_workflow_steps(regular_steps)
         wf_info.set_view_steps(view_steps)
         info.add_workflow(wf_info)
     # Keyed by burst id so the whole mapping can be dumped to JSON later on.
     bursts_dict[burst.id] = info.to_dict()
# Example #15 (snippet separator from scraped collection)
 def _build_burst_export_dict(self, burst, bursts_dict):
     """
     Compute needed info and add them to burst_dict for export.

     :param burst: burst entity whose workflows and steps are exported
     :param bursts_dict: mapping updated in place, keyed by burst id
     """
     burst_info = BurstInformation(burst.to_dict()[1])
     workflows = dao.get_workflows_for_burst(burst.id)
     for workflow in workflows:
         # Get information for each workflow for this burst
         workflow_info = WorkflowInformation(workflow.to_dict()[1])
         wf_steps, view_steps = self._build_workflow_step_info(workflow)
         workflow_info.set_workflow_steps(wf_steps)
         workflow_info.set_view_steps(view_steps)
         burst_info.add_workflow(workflow_info)
     # Save data in dictionary form so we can just save it as a json later on
     bursts_dict[burst.id] = burst_info.to_dict()
 def mark_burst_finished(self, burst_entity, error=False, success=False, cancel=False, error_message=None):
     """
     Mark Burst status field.
     Also compute 'weight' for current burst: no of operations inside, estimate time on disk...

     :param burst_entity: BurstConfiguration to be updated, at finish time.
     :param error: When True, burst will be marked as finished with error.
     :param success: When True, burst will be marked successfully.
     :param cancel: When True, burst will be marked as user-canceled.
     :param error_message: optional message stored on the burst entity.
     """
     try:
         linked_ops_number = dao.get_operations_in_burst(burst_entity.id, is_count=True)
         linked_datatypes = dao.get_generic_entity(model.DataType, burst_entity.id, "fk_parent_burst")

         # Start from one unit per operation (approximation for the operation.xml
         # files - presumably 1KB each; TODO confirm unit).
         disk_size = linked_ops_number
         dt_group_sizes = dict()
         for dtype in linked_datatypes:
             if dtype.disk_size is not None:
                 disk_size = disk_size + dtype.disk_size
                 ### Prepare and compute DataTypeGroup sizes, in case of ranges.
                 if dtype.fk_datatype_group:
                     previous_group_size = dt_group_sizes[dtype.fk_datatype_group] if (dtype.fk_datatype_group
                                                                                       in dt_group_sizes) else 0
                     dt_group_sizes[dtype.fk_datatype_group] = previous_group_size + dtype.disk_size

         ### If there are any DataType Groups in current Burst, update their counter.
         burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst")
         if len(burst_dt_groups) > 0:
             for dt_group in burst_dt_groups:
                 dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
                 dt_group.disk_size = dt_group_sizes[dt_group.id] if (dt_group.id in dt_group_sizes) else 0
                 dao.store_entity(dt_group)

         ### Update actual Burst entity fields
         burst_entity.disk_size = disk_size          # In KB
         burst_entity.datatypes_number = len(linked_datatypes)
         burst_entity.workflows_number = len(dao.get_workflows_for_burst(burst_entity.id))
         burst_entity.mark_status(success=success, error=error, cancel=cancel)
         burst_entity.error_message = error_message

         dao.store_entity(burst_entity)
     except Exception, excep:
         # Best effort: log the failure and still force an error status on the burst.
         self.logger.error(excep)
         self.logger.exception("Could not correctly update Burst status and meta-data!")
         burst_entity.mark_status(error=True)
         burst_entity.error_message = "Error when updating Burst Status"
         dao.store_entity(burst_entity)
# Example #17 (snippet separator from scraped collection)
 def test_load_tab_configuration(self):
     """
     Create a burst with some predefined portlets in some known positions. Check that the
     load_tab_configuration method does what it is expected, and we get the portlets in the
     corresponding tab positions.
     """
     burst_config = self.burst_service.new_burst_configuration(
         self.test_project.id)
     SIMULATOR_MODULE = 'tvb.tests.framework.adapters.testadapter1'
     SIMULATOR_CLASS = 'TestAdapter1'
     algo_id = self.flow_service.get_algorithm_by_module_and_class(
         SIMULATOR_MODULE, SIMULATOR_CLASS).id
     kwargs_replica = {'test1_val1': '0', 'test1_val2': '0'}
     test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
     # Add test_portlet to positions (0,0), (0,1) and (1,0)
     tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
     self._add_portlets_to_burst(burst_config, tab_config)
     burst_config.update_simulator_configuration(kwargs_replica)
     burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                   self.test_user.id)
     burst_config = dao.get_burst_by_id(burst_id)
     burst_config = self._wait_for_burst(burst_config)
     burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
     wf_step = dao.get_workflow_steps(burst_wf.id)[0]
     burst_config.prepare_after_load()
     # Before loading, every portlet slot must still be empty.
     for tab in burst_config.tabs:
         for portlet in tab.portlets:
             self.assertTrue(
                 portlet is None,
                 "Before loading the tab configuration all portlets should be none."
             )
     burst_config = self.burst_service.load_tab_configuration(
         burst_config, wf_step.fk_operation)
     for tab_idx, tab in enumerate(burst_config.tabs):
         for portlet_idx, portlet in enumerate(tab.portlets):
             if (tab_idx == 0
                     and portlet_idx in [0, 1]) or (tab_idx == 1
                                                    and portlet_idx == 0):
                 # Fixed typo in assertion message: "gonfiguration" -> "configuration".
                 self.assertTrue(portlet is not None,
                                 "portlet configuration not set")
                 self.assertEqual(test_portlet.id, portlet.portlet_id,
                                  "Unexpected portlet entity loaded.")
             else:
                 self.assertTrue(
                     portlet is None,
                     "Before loading the tab configuration all portlets should be none"
                 )
    def mark_burst_finished(self, burst_entity, burst_status=None, error_message=None):
        """
        Finalize a burst: set its status and refresh its aggregate metadata
        (datatype counts, workflow counts, DataTypeGroup summaries).

        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: If given, set the status to error and perpetuate the message.
        """
        if burst_status is None:
            burst_status = model.BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            # An error message always forces the error status.
            burst_status = model.BurstConfiguration.BURST_ERROR

        try:
            ### Refresh result counters and sizes of any DataType Groups in this Burst.
            dt_groups = dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst")
            for group in dt_groups:
                group.count_results = dao.count_datatypes_in_group(group.id)
                group.disk_size, group.subject = dao.get_summary_for_group(group.id)
                dao.store_entity(group)

            ### Update actual Burst entity fields
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(burst_entity.id)
            burst_entity.workflows_number = dao.get_workflows_for_burst(burst_entity.id, is_count=True)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
        except Exception:
            # Best effort: log the failure and still persist the requested status.
            self.logger.exception("Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
# Example #19 (snippet separator from scraped collection)
    def stop_burst(self, burst_entity):
        """
        Stop every operation belonging to the given burst and, when anything
        was actually stopped, mark the burst as canceled.

        :returns: True when the burst got marked canceled, False otherwise.
        """
        stopped_any = False
        for workflow in dao.get_workflows_for_burst(burst_entity.id):
            for step in dao.get_workflow_steps(workflow.id):
                if step.fk_operation is None:
                    continue
                self.logger.debug("We will stop operation: %d" % step.fk_operation)
                # Once any stop succeeded, stopped_any stays True.
                stopped_any = self.operation_service.stop_operation(step.fk_operation) or stopped_any

        if stopped_any and burst_entity.status != burst_entity.BURST_CANCELED:
            self.workflow_service.mark_burst_finished(burst_entity, model.BurstConfiguration.BURST_CANCELED)
            return True
        return False
Пример #20
0
    def stop_burst(self, burst_entity):
        """
        Cancel the given burst: attempt to stop every operation launched by any
        of its workflows and, when something was actually stopped, mark the
        burst as canceled.
        """
        stopped_any = False
        for workflow in dao.get_workflows_for_burst(burst_entity.id):
            for wf_step in dao.get_workflow_steps(workflow.id):
                operation_id = wf_step.fk_operation
                if operation_id is None:
                    continue
                self.logger.debug("We will stop operation: %d" % operation_id)
                # stop_operation is called for every step, regardless of previous results
                if self.operation_service.stop_operation(operation_id):
                    stopped_any = True

        if stopped_any and burst_entity.status != burst_entity.BURST_CANCELED:
            self.workflow_service.mark_burst_finished(burst_entity, model.BurstConfiguration.BURST_CANCELED)
            return True
        return False
Пример #21
0
    def test_launch_burst_invalid_portlet_analyzer_data(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        # Adapter does int(test1_val1) and int(test1_val2), so these values are valid
        burst_config.update_simulator_configuration({'test1_val1': '1', 'test1_val2': '0'})

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(test_portlet.id)
        # The portlet analyzer does int(input), which must fail for this value
        declared_overwrites = {ADAPTER_PREFIX_ROOT + '0test_non_dt_input': 'asa'}
        self.burst_service.update_portlet_configuration(portlet_configuration, declared_overwrites)
        burst_config.tabs[0].portlets[0] = portlet_configuration

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)
        # Wait maximum x seconds for burst to finish; an error status is expected
        burst_config = self._wait_for_burst(dao.get_burst_by_id(burst_id), error_expected=True)

        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_steps = dao.get_workflow_steps(burst_wf.id)
        self.assertTrue(
            len(wf_steps) == 2,
            "Should have exactly 2 wf steps. One for 'simulation' one for portlet analyze operation."
        )
        simulator_op = dao.get_operation_by_id(wf_steps[0].fk_operation)
        self.assertEqual(
            model.STATUS_FINISHED, simulator_op.status,
            "First operation should be simulator which should have 'finished' status."
        )
        portlet_analyze_op = dao.get_operation_by_id(wf_steps[1].fk_operation)
        self.assertEqual(
            portlet_analyze_op.status, model.STATUS_ERROR,
            "Second operation should be portlet analyze step which should have 'error' status."
        )
Пример #22
0
    def test_load_group_burst(self):
        """
        Launch a group adapter and load it afterwards and check that a group_id is properly loaded.
        """
        launch_params = self._prepare_simulation_params(1, True, 3)

        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id = self.burst_service.launch_burst(burst_config, 0, self.sim_algorithm.id, self.test_user.id)[0]
        # Wait maximum x seconds for burst to finish
        self._wait_for_burst(dao.get_burst_by_id(burst_id))

        launched_workflows = dao.get_workflows_for_burst(burst_id, is_count=True)
        self.assertEqual(3, launched_workflows, "3 workflows should have been launched due to group parameter.")

        # load_burst returns (burst, group_id); a non-negative group id means the burst is grouped
        group_id = self.burst_service.load_burst(burst_id)[1]
        self.assertTrue(group_id >= 0, "Should be part of group.")
        datatype_measures = self.count_all_entities(DatatypeMeasure)
        self.assertEqual(3, datatype_measures)
Пример #23
0
    def mark_burst_finished(self, burst_entity, burst_status=None, error_message=None):
        """
        Update the status field of a burst at finish time, together with its
        computed 'weight' meta-data: number of operations inside, estimated
        size on disk, number of workflows.

        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param burst_status: BurstConfiguration status. By default BURST_FINISHED
        :param error_message: If given, set the status to error and perpetuate the message.
        """
        if burst_status is None:
            burst_status = model.BurstConfiguration.BURST_FINISHED
        if error_message is not None:
            burst_status = model.BurstConfiguration.BURST_ERROR

        try:
            ### Refresh counters and sizes on every DataType Group produced by this burst.
            for datatype_group in dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst"):
                datatype_group.count_results = dao.count_datatypes_in_group(datatype_group.id)
                datatype_group.disk_size, datatype_group.subject = dao.get_summary_for_group(datatype_group.id)
                dao.store_entity(datatype_group)

            ### Update actual Burst entity fields
            ##  1KB for each dataType, considered for operation.xml files
            operations_count = dao.get_operations_in_burst(burst_entity.id, is_count=True)
            burst_entity.disk_size = operations_count + dao.get_disk_size_for_burst(burst_entity.id)    # In KB
            burst_entity.datatypes_number = dao.count_datatypes_in_burst(burst_entity.id)
            burst_entity.workflows_number = dao.get_workflows_for_burst(burst_entity.id, is_count=True)

            burst_entity.status = burst_status
            burst_entity.error_message = error_message
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
        except Exception:
            # Best effort: still persist the final status even when the meta-data update failed.
            self.logger.exception("Could not correctly update Burst status and meta-data!")
            burst_entity.status = burst_status
            burst_entity.error_message = "Error when updating Burst Status"
            burst_entity.finish_time = datetime.now()
            dao.store_entity(burst_entity)
Пример #24
0
    def test_launch_group_burst_no_metric(self):
        """
        Test the launch burst method from burst service. Try to launch a burst with test adapter which has
        no metrics associated. This should fail.
        """
        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)

        algo_id = self.flow_service.get_algorithm_by_module_and_class('tvb.tests.framework.adapters.testadapter1',
                                                                      'TestAdapter1').id
        # Range over test1_val1 with 3 values => a group burst with 3 simulations
        kwargs_replica = {'test1_val1': '[0, 1, 2]', 'test1_val2': '0', model.RANGE_PARAMETER_1: 'test1_val1'}
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        self._add_portlets_to_burst(burst_config, {test_portlet.id: [(0, 0), (0, 1), (1, 0)]})
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id = self.burst_service.launch_burst(burst_config, 0, algo_id, self.test_user.id)[0]
        # Wait maximum x seconds for burst to finish; an error is the expected outcome
        self._wait_for_burst(dao.get_burst_by_id(burst_id), error_expected=True)

        launched_workflows = dao.get_workflows_for_burst(burst_id, is_count=True)
        self.assertEqual(3, launched_workflows, "3 workflows should have been launched due to group parameter.")

        self.assertEqual(5, self.count_all_entities(model.OperationGroup),
                         "An operation group should have been created for each step.")
        self.assertEqual(5, self.count_all_entities(model.DataTypeGroup),
                         "An dataType group should have been created for each step.")
Пример #25
0
    def _prepare_and_launch_async_burst(self, length=4, is_range=False, nr_ops=0, wait_to_finish=0):
        """
        Launch an asynchronous burst with a simulation having all the default parameters, only the length received as
        a parameters. This is launched with actual simulator and not with a dummy test adapter as replacement.

        :param length: the length of the simulation in milliseconds. This is also used in case we need
            a group burst, in which case we will have `nr_ops` simulations with lengths starting from 
            `length` to `length + nr_ops` milliseconds
        :param is_range: a boolean which switches between a group burst and a non group burst.
            !! even if `is_range` is `True` you still need a non-zero positive `nr_ops` to have an actual group burst
        :param nr_ops: the number of operations in the group burst
        :param wait_to_finish: when non-zero, additionally wait at most this many seconds for the burst to finish
        """
        simulation_params = self._prepare_simulation_params(length, is_range, nr_ops)

        burst_config = self.burst_service.new_burst_configuration(self.test_project.id)
        burst_config.update_simulator_configuration(simulation_params)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, self.sim_algorithm.id, self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)

        # Wait a maximum of 15 seconds for the burst launch to be performed
        # (i.e. until at least one workflow exists for the burst).
        launch_timeout = 15
        waited_so_far = 0
        while dao.get_workflows_for_burst(burst_config.id, is_count=True) == 0 and waited_so_far < launch_timeout:
            sleep(0.5)
            waited_so_far += 0.5

        if wait_to_finish:
            burst_config = self._wait_for_burst(burst_config, timeout=wait_to_finish)
        return burst_config
Пример #26
0
    def mark_burst_finished(self, burst_entity, error=False, success=False, cancel=False, error_message=None):
        """
        Mark Burst status field.
        Also compute 'weight' for current burst: no of operations inside, estimate time on disk...
        
        :param burst_entity: BurstConfiguration to be updated, at finish time.
        :param error: When True, burst will be marked as finished with error.
        :param success: When True, burst will be marked successfully.
        :param cancel: When True, burst will be marked as user-canceled.
        :param error_message: Optional message to persist on the burst entity.
        """
        try:
            linked_ops_number = dao.get_operations_in_burst(burst_entity.id, is_count=True)
            linked_datatypes = dao.get_generic_entity(model.DataType, burst_entity.id, "fk_parent_burst")

            disk_size = linked_ops_number  #### 1KB for each dataType, considered for operation.xml files
            dt_group_sizes = dict()
            for dtype in linked_datatypes:
                if dtype.disk_size is not None:
                    disk_size += dtype.disk_size
                    ### Prepare and compute DataTypeGroup sizes, in case of ranges.
                    if dtype.fk_datatype_group:
                        # dict.get replaces the verbose `x[k] if k in x else 0` pattern
                        dt_group_sizes[dtype.fk_datatype_group] = \
                            dt_group_sizes.get(dtype.fk_datatype_group, 0) + dtype.disk_size

            ### If there are any DataType Groups in current Burst, update their counter.
            ### (The loop simply does nothing on an empty list; no explicit guard needed.)
            burst_dt_groups = dao.get_generic_entity(model.DataTypeGroup, burst_entity.id, "fk_parent_burst")
            for dt_group in burst_dt_groups:
                dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
                dt_group.disk_size = dt_group_sizes.get(dt_group.id, 0)
                dao.store_entity(dt_group)

            ### Update actual Burst entity fields
            burst_entity.disk_size = disk_size  ## In KB
            burst_entity.datatypes_number = len(linked_datatypes)
            burst_entity.workflows_number = len(dao.get_workflows_for_burst(burst_entity.id))
            burst_entity.mark_status(success=success, error=error, cancel=cancel)
            burst_entity.error_message = error_message

            dao.store_entity(burst_entity)
        except Exception:
            # Was `except Exception, excep:` -- Python-2-only syntax; the portable
            # form matches the rest of the file. `logger.exception` already logs
            # both the message and the traceback, so the extra `logger.error(excep)`
            # was redundant.
            self.logger.exception("Could not correctly update Burst status and meta-data!")
            burst_entity.mark_status(error=True)
            burst_entity.error_message = "Error when updating Burst Status"
            dao.store_entity(burst_entity)