Example #1
    def async_launch_and_prepare_simulation(self, burst_config, user, project, simulator_algo,
                                            session_stored_simulator, simulation_state_gid):
        try:
            metadata = {}
            metadata.update({DataTypeMetaData.KEY_BURST: burst_config.id})
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation = self._prepare_operation(project.id, user.id, simulator_id, session_stored_simulator.gid,
                                                algo_category, None, metadata)
            storage_path = self.files_helper.get_project_folder(project, str(operation.id))
            SimulatorSerializer().serialize_simulator(session_stored_simulator, simulation_state_gid, storage_path)
            BurstService.update_simulation_fields(burst_config.id, operation.id, session_stored_simulator.gid)

            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    BurstService().mark_burst_finished(burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflow. The operation was launched successfully, " +
                              str(wf_errs) + " had error on pre-launch steps")

        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                BurstService().mark_burst_finished(burst_config, error_message=str(excep))
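The "async_" prefix suggests this method is meant to run off the calling thread. Below is a minimal, hypothetical sketch of how a caller might push it onto a background thread; the wrapper function is not part of TVB, and the burst/operation arguments are assumed to be prepared as in the other examples on this page.

import threading


def launch_simulation_in_background(simulator_service, burst_config, user, project,
                                    simulator_algo, session_stored_simulator,
                                    simulation_state_gid=None):
    # Hypothetical helper (not TVB API): run the prepare-and-launch step on a
    # background thread, mirroring the signature of
    # async_launch_and_prepare_simulation shown above.
    launch_thread = threading.Thread(
        target=simulator_service.async_launch_and_prepare_simulation,
        args=(burst_config, user, project, simulator_algo,
              session_stored_simulator, simulation_state_gid))
    launch_thread.start()
    return launch_thread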
Example #2
    def async_launch_and_prepare_pse(self, burst_config, user, project, simulator_algo, range_param1, range_param2,
                                     session_stored_simulator):
        try:
            simulator_id = simulator_algo.id
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            operations = []
            # When there is no second range parameter, keep a single None placeholder so the inner loop still runs
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            first_simulator = None
            for param1_value in range_param1.get_range_values():
                for param2_value in range_param2_values:
                    # Copy, but generate a new GUID for every Simulator in PSE
                    simulator = copy.deepcopy(session_stored_simulator)
                    simulator.gid = uuid.uuid4()
                    self._set_simulator_range_parameter(simulator, range_param1.name, param1_value)

                    ranges = {range_param1.name: param1_value[0]}
                    if param2_value is not None:
                        self._set_simulator_range_parameter(simulator, range_param2.name, param2_value)
                        ranges[range_param2.name] = param2_value[0]
                    ranges = json.dumps(ranges)

                    operation = self._prepare_operation(project.id, user.id, simulator_id, simulator.gid,
                                                        algo_category, operation_group,
                                                        {DataTypeMetaData.KEY_BURST: burst_config.id}, ranges)

                    storage_path = self.files_helper.get_project_folder(project, str(operation.id))
                    SimulatorSerializer().serialize_simulator(simulator, None, storage_path)
                    operations.append(operation)
                    if first_simulator is None:
                        first_simulator = simulator

            first_operation = operations[0]
            BurstService.update_simulation_fields(burst_config.id, first_operation.id, first_simulator.gid)
            datatype_group = DataTypeGroup(operation_group, operation_id=first_operation.id,
                                           fk_parent_burst=burst_config.id,
                                           state=json.loads(first_operation.meta_data)[DataTypeMetaData.KEY_STATE])
            dao.store_entity(datatype_group)

            metrics_datatype_group = DataTypeGroup(metric_operation_group, fk_parent_burst=burst_config.id)
            dao.store_entity(metrics_datatype_group)

            wf_errs = 0
            for operation in operations:
                try:
                    OperationService().launch_operation(operation.id, True)
                except Exception as excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    BurstService().mark_burst_finished(burst_config, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " + str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")

        except Exception as excep:
            self.logger.error(excep)
            BurstService().mark_burst_finished(burst_config, error_message=str(excep))
Example #3
    def stop_operation(operation_id):
        """
        Stop a thread for a given operation id
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if not operation or operation.has_finished:
            LOGGER.info("Operation already stopped or not found at ID: %s" % operation_id)
            return True

        LOGGER.debug("Stopping operation: %s" % str(operation_id))

        # Set the thread stop flag to true
        for thread in CURRENT_ACTIVE_THREADS:
            if int(thread.operation_id) == operation_id:
                thread._stop()
                LOGGER.debug("Found running thread for operation: %d" % operation_id)

        # Kill Thread
        stopped = True
        operation_process = dao.get_operation_process_for_operation(operation_id)
        if operation_process is not None:
            # Now try to kill the operation if it exists
            stopped = OperationExecutor.stop_pid(operation_process.pid)
            if not stopped:
                LOGGER.debug("Operation %d was probably killed from it's specific thread." % operation_id)
            else:
                LOGGER.debug("Stopped OperationExecutor process for %d" % operation_id)

        # Mark operation as canceled in DB and on disk
        BurstService().persist_operation_state(operation, STATUS_CANCELED)

        return stopped
    def update_db_with_results(self, operation, sim_h5_filenames,
                               metric_operation, metric_h5_filename):
        # type: (Operation, list, Operation, str) -> str
        """
        Generate corresponding Index entities for the resulting H5 files and insert them in DB.
        """
        burst_service = BurstService()
        index_list = []

        burst_config = burst_service.get_burst_for_operation_id(operation.id)
        all_indexes = burst_service.prepare_indexes_for_simulation_results(
            operation, sim_h5_filenames, burst_config)
        if burst_config.fk_operation_group:
            metric_index = burst_service.prepare_index_for_metric_result(
                metric_operation, metric_h5_filename, burst_config)
            all_indexes.append(metric_index)

        for index in all_indexes:
            index = dao.store_entity(index)
            index_list.append(index)

        sim_adapter = SimulatorAdapter()
        sim_adapter.extract_operation_data(operation)
        sim_adapter.generic_attributes.parent_burst = burst_config.gid
        message, _ = sim_adapter._capture_operation_results(index_list)

        burst_service.update_burst_status(burst_config)
        # self.update_datatype_groups()
        return message
Example #5
def launch_simulation_workflow(json_path, prj_id):
    """

    :param json_path: Path towards a local JSON file exported from the GUI
    :param prj_id: ID of a project that needs to exist in DB; it can be taken from the web interface
    """
    project = dao.get_project_by_id(prj_id)

    with open(json_path, 'rb') as input_file:
        simulation_json = input_file.read()
        simulation_json = json.loads(simulation_json)
        LOG.info("Simulation JSON loaded from file '%s': \n  %s", json_path,
                 simulation_json)

        importer = ImportService()
        simulation_config = importer.load_burst_entity(simulation_json, prj_id)
        LOG.info("Simulation Workflow configuration object loaded: \n  %s",
                 simulation_config)

        flow_service = FlowService()
        stored_adapter = flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        LOG.info("Found Simulation algorithm in local DB: \n   %s",
                 stored_adapter)

        burst_service = BurstService()
        burst_service.launch_burst(simulation_config, 0, stored_adapter.id,
                                   project.administrator.id, LAUNCH_NEW)
        LOG.info(
            "Check in the web GUI for your operation. It should be starting now ..."
        )
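For context, a hypothetical command-line invocation of the helper above; the JSON path and the project id are placeholders that must point to a GUI-exported configuration and to a project that already exists in the local DB.

if __name__ == "__main__":
    # Placeholder arguments: adjust the exported-JSON path and project id for your setup.
    launch_simulation_workflow("/path/to/exported_simulation.json", prj_id=1)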
 def test_launch_burst(self):
     """
     Launch a burst and check that it finishes correctly and within the timeout (100 seconds)
     """
     self.burst_c.index()
     connectivity = self._burst_create_connectivity()
     launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
     launch_params['connectivity'] = connectivity.gid
     launch_params['simulation_length'] = '10'
     launch_params = {"simulator_parameters": json.dumps(launch_params)}
     burst_id = json.loads(
         self.burst_c.launch_burst("new", "test_burst",
                                   **launch_params))['id']
     waited = 1
     timeout = 100
     burst_config = dao.get_burst_by_id(burst_id)
     while burst_config.status == BurstConfiguration.BURST_RUNNING and waited <= timeout:
         sleep(0.5)
         waited += 0.5
         burst_config = dao.get_burst_by_id(burst_config.id)
     if waited > timeout:
         self.fail("Timed out waiting for simulations to finish.")
     if burst_config.status != BurstConfiguration.BURST_FINISHED:
         BurstService().stop_burst(burst_config)
         self.fail("Burst should have finished successfully.")
Example #7
    def update_db_with_results(operation, sim_h5_filenames, metric_operation,
                               metric_h5_filename):
        # type: (Operation, list, Operation, str) -> (str, int)
        """
        Generate corresponding Index entities for the resulting H5 files and insert them in DB.
        """
        burst_service = BurstService()
        index_list = []
        is_group = operation.fk_operation_group is not None
        burst_config = burst_service.get_burst_for_operation_id(operation.id)
        if is_group:
            burst_config = burst_service.get_burst_for_operation_id(
                operation.fk_operation_group, True)
        all_indexes = burst_service.prepare_indexes_for_simulation_results(
            operation, sim_h5_filenames, burst_config)
        if is_group:
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                metric_operation.fk_operation_group)
            operation_group.fill_operationgroup_name("DatatypeMeasureIndex")
            dao.store_entity(operation_group)

            metric_index = burst_service.prepare_index_for_metric_result(
                metric_operation, metric_h5_filename, burst_config)
            all_indexes.append(metric_index)

        for index in all_indexes:
            index = dao.store_entity(index)
            index_list.append(index)

        burst_service.update_burst_status(burst_config)
Example #8
    def stop_burst_operation(self,
                             operation_id,
                             is_group,
                             remove_after_stop=False):
        """
        For a given operation id that is part of a burst, stop the burst itself.
        :returns: True when the operation was stopped successfully.
        """
        operation_id = int(operation_id)
        if int(is_group) == 0:
            operation = self.flow_service.load_operation(operation_id)
        else:
            op_group = ProjectService.get_operation_group_by_id(operation_id)
            first_op = ProjectService.get_operations_in_group(op_group)[0]
            operation = self.flow_service.load_operation(int(first_op.id))

        try:
            burst_service = BurstService()
            result = burst_service.stop_burst(operation.burst)
            if remove_after_stop:
                current_burst = common.get_from_session(
                    common.KEY_BURST_CONFIG)
                if current_burst and current_burst.id == operation.burst.id:
                    common.remove_from_session(common.KEY_BURST_CONFIG)
                result = burst_service.cancel_or_remove_burst(
                    operation.burst.id) or result

            return result
        except Exception as ex:
            self.logger.exception(ex)
            return False
Example #9
    def stop_operation(operation_id):
        # TODO: Review this implementation after DAINT maintenance
        operation = dao.get_operation_by_id(operation_id)
        if not operation or operation.has_finished:
            LOGGER.warning("Operation already stopped: %s" % operation_id)
            return True

        LOGGER.debug("Stopping HPC operation: %s" % str(operation_id))
        op_ident = OperationDAO().get_operation_process_for_operation(
            operation_id)
        if op_ident is not None:
            # TODO: Handle login
            transport = unicore_client.Transport(
                os.environ[HPCSchedulerClient.CSCS_LOGIN_TOKEN_ENV_KEY])
            # Abort HPC job
            job = Job(transport, op_ident.job_id)
            if job.is_running():
                job.abort()

        # Kill thread
        operation_thread = get_op_thread(operation_id)
        if operation_thread is None:
            LOGGER.warning("Thread for operation {} is not available".format(
                operation_id))
        else:
            operation_thread.stop()
            while not operation_thread.stopped():
                LOGGER.info(
                    "Thread for operation {} is stopping".format(operation_id))
        BurstService().persist_operation_state(operation, STATUS_CANCELED)
        return True
Example #10
    def stop_operation(operation_id):
        """
        Stop a thread for a given operation id
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if not operation or operation.has_finished:
            LOGGER.warning(
                "Operation already stopped or not found when asked to stop job: %s"
                % operation_id)
            return True

        operation_process = dao.get_operation_process_for_operation(
            operation_id)
        result = 0
        # Try to kill only if operation job process is not None
        if operation_process is not None:
            stop_command = TvbProfile.current.cluster.STOP_COMMAND % operation_process.job_id
            LOGGER.info("Stopping cluster operation: %s" % stop_command)
            result = os.system(stop_command)
            if result != 0:
                LOGGER.error(
                    "Stopping cluster operation was unsuccessful. Try checking the status with '"
                    + TvbProfile.current.cluster.STATUS_COMMAND +
                    "' for job %s" % operation_process.job_id)

        BurstService().persist_operation_state(operation, STATUS_CANCELED)

        return result == 0
Example #11
    def test_export_simulator_configuration(self, operation_factory,
                                            connectivity_index_factory):
        """
        Test export of a simulator configuration
        """
        conn_gid = uuid.UUID(connectivity_index_factory().gid)
        operation = operation_factory(is_simulation=True,
                                      store_vm=True,
                                      test_project=self.test_project,
                                      conn_gid=conn_gid)

        burst_configuration = BurstConfiguration(self.test_project.id)
        burst_configuration.fk_simulation = operation.id
        burst_configuration.simulator_gid = operation.view_model_gid
        burst_configuration.name = "Test_burst"
        burst_configuration = dao.store_entity(burst_configuration)

        op_folder = StorageInterface().get_project_folder(
            self.test_project.name, str(operation.id))
        BurstService().store_burst_configuration(burst_configuration,
                                                 op_folder)

        export_file = self.export_manager.export_simulator_configuration(
            burst_configuration.id)

        assert export_file is not None, "Export process should return path to export file"
        assert os.path.exists(
            export_file
        ), "Could not find export file: %s on disk." % export_file
        assert zipfile.is_zipfile(
            export_file), "Generated file is not a valid ZIP file"
Example #12
    def run(self):
        """
        Get the required data from the operation queue and launch the operation.
        """
        # Try to get a spot to launch our own operation.
        LOCKS_QUEUE.get(True)
        operation_id = self.operation_id
        run_params = [
            TvbProfile.current.PYTHON_INTERPRETER_PATH, '-m',
            'tvb.core.operation_async_launcher',
            str(operation_id), TvbProfile.CURRENT_PROFILE_NAME
        ]

        # In the exceptional case where the user pressed stop while the Thread startup was in progress,
        # we should no longer launch the operation.
        if self.stopped() is False:

            env = os.environ.copy()
            env['PYTHONPATH'] = os.pathsep.join(sys.path)
            # anything that was already in $PYTHONPATH should have been reproduced in sys.path

            launched_process = Popen(run_params,
                                     stdout=PIPE,
                                     stderr=PIPE,
                                     env=env)

            LOGGER.debug(
                "Storing pid=%s for operation id=%s launched on local machine."
                % (launched_process.pid, operation_id))
            op_ident = OperationProcessIdentifier(operation_id,
                                                  pid=launched_process.pid)
            dao.store_entity(op_ident)

            if self.stopped():
                # In the exceptional case where the user pressed stop while the Thread startup was in progress
                # and stop_operation is concurrently asking about the OperationProcessIdentifier.
                self.stop_pid(launched_process.pid)

            subprocess_result = launched_process.communicate()
            LOGGER.info("Finished with launch of operation %s" % operation_id)
            returned = launched_process.wait()

            if returned != 0 and not self.stopped():
                # Process did not end as expected. (e.g. Segmentation fault)
                burst_service = BurstService()
                operation = dao.get_operation_by_id(self.operation_id)
                LOGGER.error(
                    "Operation suffered fatal failure! Exit code: %s Exit message: %s"
                    % (returned, subprocess_result))
                burst_service.persist_operation_state(
                    operation, STATUS_ERROR,
                    "Operation failed unexpectedly! Please check the log files."
                )

            del launched_process

        # Give back the empty spot now that the operation has finished
        CURRENT_ACTIVE_THREADS.remove(self)
        LOCKS_QUEUE.put(1)
Example #13
 def __init__(self):
     BurstBaseController.__init__(self)
     self.range_parameters = SimulatorRangeParameters()
     self.burst_service = BurstService()
     self.simulator_service = SimulatorService()
     self.cached_simulator_algorithm = self.algorithm_service.get_algorithm_by_module_and_class(
         IntrospectionRegistry.SIMULATOR_MODULE, IntrospectionRegistry.SIMULATOR_CLASS)
     self.context = SimulatorContext()
     self.monitors_handler = MonitorsWizardHandler()
    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree and Algorithm for performance reasons.
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when resources become available.
    """
    log = get_logger('tvb.core.operation_async_launcher')
    burst_service = BurstService()

    try:
        log.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = current_operation.algorithm
        log.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                  " for Operation: " + str(current_operation.id))
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)
        # Uncomment below for profiling an operation:
        # import cherrypy.lib.profiler as profiler
        # p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        # p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(current_operation,
                                              adapter_instance)
        if current_operation.fk_operation_group:
            parent_burst = dao.get_generic_entity(
                BurstConfiguration, current_operation.fk_operation_group,
                'fk_operation_group')[0]
            operations_in_group = dao.get_operations_in_group(
                current_operation.fk_operation_group)
            if parent_burst.fk_metric_operation_group:
                operations_in_group.extend(
                    dao.get_operations_in_group(
                        parent_burst.fk_metric_operation_group))
            burst_finished = True
            for operation in operations_in_group:
                if not has_finished(operation.status):
                    burst_finished = False
                    break

            if burst_finished and parent_burst is not None and parent_burst.status != BurstConfiguration.BURST_ERROR:
                burst_service.mark_burst_finished(parent_burst)
        else:
            parent_burst = burst_service.get_burst_for_operation_id(
                operation_id)
            if parent_burst is not None:
                burst_service.mark_burst_finished(parent_burst)

        log.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        log.error("Could not execute operation " + str(operation_id))
        log.exception(excep)
        parent_burst = burst_service.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            burst_service.mark_burst_finished(parent_burst,
                                              error_message=str(excep))
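Example #12 above spawns this function via "python -m tvb.core.operation_async_launcher <operation_id> <profile>". The following is only a hypothetical sketch of what that module's entry point could look like; the real argument handling in TVB may differ.

if __name__ == "__main__":
    import sys

    # sys.argv[1] carries the operation id passed by OperationExecutor.run() in Example #12;
    # sys.argv[2] would carry the selected TVB profile name.
    do_operation_launch(sys.argv[1])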
 def test_load_burst_removed(self):
     """
     Add burst to session, then remove the burst from the database. Try to load the
     burst and check that it raises an exception and removes it from the session.
     """
     burst = self._store_burst(self.test_project.id, 'started',
                               {'test': 'test'}, 'burst1')
     cherrypy.session[common.KEY_BURST_CONFIG] = burst
     burst_id = burst.id
     BurstService().cancel_or_remove_burst(burst_id)
     self.assertRaises(Exception, self.burst_c.load_burst, burst_id)
     self.assertTrue(common.KEY_BURST_CONFIG not in cherrypy.session)
 def setUp(self):
     """
     Sets up the testing environment;
     saves config file;
     creates a test user, a test project;
     creates burst, operation, flow and workflow services
     """
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #18
 def _handle_exception(self, exception, temp_files, message, operation=None):
     """
     Common way to treat exceptions:
         - remove temporary files, if any
         - set status ERROR on current operation (if any)
         - log exception
     """
     self.logger.exception(message)
     if operation is not None:
         BurstService().persist_operation_state(operation, STATUS_ERROR, str(exception))
     self._remove_files(temp_files)
     exception.message = message
     raise exception.with_traceback(
         sys.exc_info()[2])  # when re-raising in Python, this is required to preserve the stack trace
Example #19
    def stop_operation(operation_id):
        """
        Stop a thread for a given operation id
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if not operation or operation.has_finished:
            LOGGER.info("Operation already stopped or not found at ID: %s" % operation_id)
            return True

        LOGGER.debug("Stopping operation: %s" % str(operation_id))
        stopped = StandAloneClient.stop_operation_process(operation_id, True)
        # Mark operation as canceled in DB and on disk
        BurstService().persist_operation_state(operation, STATUS_CANCELED)
        return stopped
Example #20
 def store_burst(project_id, operation=None):
     """
     Build and persist BurstConfiguration entity.
     """
     burst = BurstConfiguration(project_id)
     if operation is not None:
         burst.name = 'dummy_burst'
         burst.status = BurstConfiguration.BURST_FINISHED
         burst.start_time = datetime.now()
         burst.range1 = '["conduction_speed", {"lo": 50, "step": 1.0, "hi": 100.0}]'
         burst.range2 = '["connectivity", null]'
         burst.fk_simulation = operation.id
         burst.simulator_gid = uuid.uuid4().hex
         BurstService().store_burst_configuration(burst)
     return dao.store_entity(burst)
Example #21
 def build(test_user, test_project, simulation_length=10, is_group=False):
     model = SimulatorAdapterModel()
     model.connectivity = connectivity_index_factory().gid
     model.simulation_length = simulation_length
     burst = BurstConfiguration(test_project.id, name="Sim " + str(datetime.now()))
     burst.start_time = datetime.now()
     algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
     service = SimulatorService()
     if is_group:
         range_param = RangeParameter("conduction_speed", float, Range(lo=50.0, hi=100.0, step=20.0))
         burst.range1 = range_param.to_json()
         burst = BurstService().prepare_burst_for_pse(burst)
         op = service.async_launch_and_prepare_pse(burst, test_user, test_project, algorithm,
                                                   range_param, None, model)
     else:
         dao.store_entity(burst)
         op = service.async_launch_and_prepare_simulation(burst, test_user, test_project, algorithm, model)
     return op
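A hypothetical call site for the factory above, assuming test_user and test_project were created via TestFactory as in the other examples and that the enclosing fixture provides connectivity_index_factory.

def build_example_operations(test_user, test_project):
    # Hypothetical wrapper (illustration only): one plain simulation launch and one
    # PSE launch over the conduction_speed range defined in build() above.
    single_run_op = build(test_user, test_project, simulation_length=10)
    pse_op = build(test_user, test_project, simulation_length=10, is_group=True)
    return single_run_op, pse_op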
Example #22
class TestBurstService(BaseTestCase):
    """
    Test the service layer for BURST PAGE. We can't have this transactional since
    we launch operations in different threads and the transactional operator only rolls back
    sessions bound to the current thread's transaction.
    """
    burst_service = BurstService()
    sim_algorithm = AlgorithmService().get_algorithm_by_module_and_class(IntrospectionRegistry.SIMULATOR_MODULE,
                                                                         IntrospectionRegistry.SIMULATOR_CLASS)

    def setup_method(self):
        """
        Sets up the environment for running the tests:
        cleans the database before testing and saves the config file;
        creates a test user and a test project.
        """
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)

    def teardown_method(self):
        """
        Clean up database.
        """
        self.clean_database()

    def test_clone_burst_configuration(self):
        """
        Test that all the major attributes are the same after cloning a burst, but the
        id of the cloned one is None.
        """
        first_burst = TestFactory.store_burst(self.test_project.id)
        cloned_burst = first_burst.clone()
        self._compare_bursts(first_burst, cloned_burst)
        assert cloned_burst.name == first_burst.name, 'Cloned burst should have the same name'
        assert cloned_burst.id is None, 'id should be none for cloned entry.'

    def test_store_burst_config(self):
        """
        Test that a burst entity is properly stored in db.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        assert burst_config.id is not None, 'Burst was not stored properly.'
        stored_entity = dao.get_burst_by_id(burst_config.id)
        assert stored_entity is not None, 'Burst was not stored properly.'
        self._compare_bursts(burst_config, stored_entity)

    def _compare_bursts(self, first_burst, second_burst):
        """
        Compare that all important attributes are the same between two bursts. (name, project id and status)
        """
        assert first_burst.name == second_burst.name, "Names not equal for bursts."
        assert first_burst.fk_project == second_burst.fk_project, "Projects not equal for bursts."
        assert first_burst.status == second_burst.status, "Statuses not equal for bursts."
        assert first_burst.range1 == second_burst.range1, "Ranges not equal for bursts."
        assert first_burst.range2 == second_burst.range2, "Ranges not equal for bursts."

    def test_getavailablebursts_none(self):
        """
        Test that an empty list is returned if no data is available in db.
        """
        bursts = self.burst_service.get_available_bursts(self.test_project.id)
        assert bursts == [], "Unexpected result returned : %s" % (bursts,)

    def test_get_available_bursts_happy(self):
        """
        Test that all the correct burst are returned for the given project.
        """
        project = Project("second_test_proj", self.test_user.id, "description")
        second_project = dao.store_entity(project)
        test_project_bursts = [TestFactory.store_burst(self.test_project.id).id for _ in range(4)]
        second_project_bursts = [TestFactory.store_burst(second_project.id).id for _ in range(3)]
        returned_test_project_bursts = [burst.id for burst in
                                        self.burst_service.get_available_bursts(self.test_project.id)]
        returned_second_project_bursts = [burst.id for burst in
                                          self.burst_service.get_available_bursts(second_project.id)]
        assert len(test_project_bursts) == len(returned_test_project_bursts), \
            "Incorrect bursts retrieved for project %s." % self.test_project
        assert len(second_project_bursts) == len(returned_second_project_bursts), \
            "Incorrect bursts retrieved for project %s." % second_project
        assert set(second_project_bursts) == set(returned_second_project_bursts), \
            "Incorrect bursts retrieved for project %s." % second_project
        assert set(test_project_bursts) == set(returned_test_project_bursts), \
            "Incorrect bursts retrieved for project %s." % self.test_project

    def test_rename_burst(self, operation_factory):
        """
        Test that renaming of a burst functions properly.
        """
        operation = operation_factory()
        burst_config = TestFactory.store_burst(self.test_project.id, operation)
        self.burst_service.rename_burst(burst_config.id, "new_burst_name")
        loaded_burst = dao.get_burst_by_id(burst_config.id)
        assert loaded_burst.name == "new_burst_name", "Burst was not renamed properly."

    def test_burst_delete_with_project(self):
        """
        Test that on removal of a project all burst related data is cleared.
        """
        TestFactory.store_burst(self.test_project.id)
        ProjectService().remove_project(self.test_project.id)
        self._check_burst_removed()

    def test_load_burst_configuration(self):
        """
        Test that loads the burst configuration based on the stored config id
        """
        stored_burst = TestFactory.store_burst(self.test_project.id)
        burst_config = self.burst_service.load_burst_configuration(stored_burst.id)
        assert burst_config.id == stored_burst.id, "The loaded burst does not have the same ID"

    def test_update_simulation_fields(self, tmph5factory):
        """
        Test that updates the simulation fields of the burst
        """
        stored_burst = TestFactory.store_burst(self.test_project.id)

        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project)
        op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        simulation = SimulatorAdapterModel()
        simulation.connectivity = UUID(connectivity.gid)

        burst_config = self.burst_service.update_simulation_fields(stored_burst, op.id, simulation.gid)
        assert burst_config.id == stored_burst.id, "The loaded burst does not have the same ID"
        assert burst_config.fk_simulation == op.id, "The loaded burst does not have the fk simulation that it was given"
        assert burst_config.simulator_gid == simulation.gid.hex, "The loaded burst does not have the simulation gid that it was given"

    def test_prepare_name(self):
        """
        Test prepare burst name
        """
        stored_burst = TestFactory.store_burst(self.test_project.id)
        simulation_tuple = self.burst_service.prepare_simulation_name(stored_burst, self.test_project.id)
        assert simulation_tuple[0] == 'simulation_' + str(dao.get_number_of_bursts(self.test_project.id) + 1), \
            "The default simulation name is not the defined one"

        burst_test_name = "Burst Test Name"
        stored_burst.name = burst_test_name
        stored_burst = dao.store_entity(stored_burst)
        simulation_tuple = self.burst_service.prepare_simulation_name(stored_burst, self.test_project.id)
        assert simulation_tuple[0] == burst_test_name, "The burst name is not the given one"

    def test_prepare_burst_for_pse(self):
        """
        Test prepare burst for pse
        """
        burst = BurstConfiguration(self.test_project.id)
        assert burst.fk_metric_operation_group is None, "The fk for the metric operation group is not None"
        assert burst.fk_operation_group is None, "The fk for the operation group is not None"
        assert burst.operation_group is None, "The operation group is not None"

        pse_burst = self.burst_service.prepare_burst_for_pse(burst)
        assert pse_burst.metric_operation_group is not None, "The metric operation group is None"
        assert pse_burst.operation_group is not None, "The operation group is None"

    def _check_burst_removed(self):
        """
        Test that a burst was properly removed. This means checking that the burst entity,
        any workflow steps and any datatypes resulted from the burst are also removed.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        assert 0 == len(remaining_bursts), "Burst was not deleted"
        ops_number = dao.get_operation_numbers(self.test_project.id)[0]
        assert 0 == ops_number, "Operations were not deleted."
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 0 == len(datatypes)

        datatype1_stored = self.count_all_entities(Datatype1)
        datatype2_stored = self.count_all_entities(Datatype2)
        assert 0 == datatype1_stored, "Specific datatype entries for DataType1 were not deleted."
        assert 0 == datatype2_stored, "Specific datatype entries for DataType2 were not deleted."

    def test_prepare_indexes_for_simulation_results(self, time_series_factory, operation_factory, simulator_factory):
        ts_1 = time_series_factory()
        ts_2 = time_series_factory()
        ts_3 = time_series_factory()

        operation = operation_factory(test_user=self.test_user, test_project=self.test_project)
        sim_folder, sim_gid = simulator_factory(op=operation)

        path_1 = os.path.join(sim_folder, "Time_Series_{}.h5".format(ts_1.gid.hex))
        path_2 = os.path.join(sim_folder, "Time_Series_{}.h5".format(ts_2.gid.hex))
        path_3 = os.path.join(sim_folder, "Time_Series_{}.h5".format(ts_3.gid.hex))

        with TimeSeriesH5(path_1) as f:
            f.store(ts_1)
            f.sample_rate.store(ts_1.sample_rate)
            f.store_generic_attributes(GenericAttributes())

        with TimeSeriesH5(path_2) as f:
            f.store(ts_2)
            f.sample_rate.store(ts_2.sample_rate)
            f.store_generic_attributes(GenericAttributes())

        with TimeSeriesH5(path_3) as f:
            f.store(ts_3)
            f.sample_rate.store(ts_3.sample_rate)
            f.store_generic_attributes(GenericAttributes())

        burst_configuration = BurstConfiguration(self.test_project.id)
        burst_configuration.fk_simulation = operation.id
        burst_configuration.simulator_gid = operation.view_model_gid
        burst_configuration = dao.store_entity(burst_configuration)

        file_names = [path_1, path_2, path_3]
        ts_datatypes = [ts_1, ts_2, ts_3]
        indexes = self.burst_service.prepare_indexes_for_simulation_results(operation, file_names, burst_configuration)

        for i in range(len(indexes)):
            assert indexes[i].gid == ts_datatypes[i].gid.hex, "Gid was not set correctly on index."
            assert indexes[i].sample_period == ts_datatypes[i].sample_period
            assert indexes[i].sample_period_unit == ts_datatypes[i].sample_period_unit
            assert indexes[i].sample_rate == ts_datatypes[i].sample_rate
Example #23
 def __init__(self):
     self.logger = get_logger(self.__class__.__module__)
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.algorithm_service = AlgorithmService()
     self.storage_interface = StorageInterface()
Example #24
class TestBurstService(BaseTestCase):
    """
    Test the service layer for BURST PAGE. We can't have this transactional since
    we launch operations in different threads and the transactional operator only rolls back
    sessions bound to the current thread's transaction.
    """
    PORTLET_ID = "TA1TA2"
    ## This should not be present in portlets.xml
    INVALID_PORTLET_ID = "this_is_not_a_non_existent_test_portlet_ID"

    burst_service = BurstService()
    flow_service = FlowService()
    operation_service = OperationService()
    sim_algorithm = flow_service.get_algorithm_by_module_and_class(
        IntrospectionRegistry.SIMULATOR_MODULE,
        IntrospectionRegistry.SIMULATOR_CLASS)
    local_simulation_params = copy.deepcopy(SIMULATOR_PARAMETERS)

    def setup_method(self):
        """
        Sets up the environment for running the tests:
        cleans the database before testing and saves the config file;
        creates a test user and a test project.
        """
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)

    def teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database()

    def test_clone_burst_configuration(self):
        """
        Test that all the major attributes are the same after cloning a burst, but the
        id of the cloned one is None.
        """
        first_burst = TestFactory.store_burst(self.test_project.id)
        cloned_burst = first_burst.clone()
        self._compare_bursts(first_burst, cloned_burst)
        assert first_burst.selected_tab == cloned_burst.selected_tab, "Selected tabs not equal for bursts."
        assert len(first_burst.tabs) == len(
            cloned_burst.tabs), "Tabs not equal for bursts."
        assert cloned_burst.id is None, 'id should be none for cloned entry.'

    def test_store_burst_config(self):
        """
        Test that a burst entity is properly stored in db.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        assert burst_config.id is not None, 'Burst was not stored properly.'
        stored_entity = dao.get_burst_by_id(burst_config.id)
        assert stored_entity is not None, 'Burst was not stored properly.'
        self._compare_bursts(burst_config, stored_entity)

    def _compare_bursts(self, first_burst, second_burst):
        """
        Compare that all important attributes are the same between two bursts. (name, project id and status)
        """
        assert first_burst.name == second_burst.name, "Names not equal for bursts."
        assert first_burst.fk_project == second_burst.fk_project, "Projects not equal for bursts."
        assert first_burst.status == second_burst.status, "Statuses not equal for bursts."

    def test_getavailablebursts_none(self):
        """
        Test that an empty list is returned if no data is available in db.
        """
        bursts = self.burst_service.get_available_bursts(self.test_project.id)
        assert bursts == [], "Unexpected result returned : %s" % (bursts, )

    def test_get_available_bursts_happy(self):
        """
        Test that all the correct burst are returned for the given project.
        """
        project = Project("second_test_proj", self.test_user.id, "description")
        second_project = dao.store_entity(project)
        test_project_bursts = [
            TestFactory.store_burst(self.test_project.id).id for _ in range(4)
        ]
        second_project_bursts = [
            TestFactory.store_burst(second_project.id).id for _ in range(3)
        ]
        returned_test_project_bursts = [
            burst.id for burst in self.burst_service.get_available_bursts(
                self.test_project.id)
        ]
        returned_second_project_bursts = [
            burst.id for burst in self.burst_service.get_available_bursts(
                second_project.id)
        ]
        assert len(test_project_bursts) == len(returned_test_project_bursts),\
                         "Incorrect bursts retrieved for project %s." % self.test_project
        assert len(second_project_bursts) == len(returned_second_project_bursts),\
                         "Incorrect bursts retrieved for project %s." % second_project
        assert set(second_project_bursts) == set(returned_second_project_bursts),\
                         "Incorrect bursts retrieved for project %s." % second_project
        assert set(test_project_bursts) == set(returned_test_project_bursts),\
                         "Incorrect bursts retrieved for project %s." % self.test_project

    def test_rename_burst(self):
        """
        Test that renaming of a burst functions properly.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        self.burst_service.rename_burst(burst_config.id, "new_burst_name")
        loaded_burst = dao.get_burst_by_id(burst_config.id)
        assert loaded_burst.name == "new_burst_name", "Burst was not renamed properly."

    def test_load_burst(self):
        """ 
        Test that the load burst works properly. NOTE: this method is also tested
        in the actual burst launch tests. This is just a basic test to verify that the simulator
        interface is loaded properly.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        loaded_burst = self.burst_service.load_burst(burst_config.id)[0]
        assert loaded_burst.simulator_configuration == {}, "No simulator configuration should have been loaded"
        assert burst_config.fk_project == loaded_burst.fk_project, "Loaded burst different from original one."
        burst_config = TestFactory.store_burst(
            self.test_project.id, simulator_config={"test": "test"})
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        assert loaded_burst.simulator_configuration == {
            "test": "test"
        }, "different burst loaded"
        assert burst_config.fk_project == loaded_burst.fk_project, "Loaded burst different from original one."

    def test_remove_burst(self):
        """
        Test the remove burst method added to burst_service.
        """
        loaded_burst, _ = self._prepare_and_launch_sync_burst()
        self.burst_service.cancel_or_remove_burst(loaded_burst.id)
        self._check_burst_removed()

    def test_branch_burst(self):
        """
        Test the branching of an existing burst.
        """
        burst_config = self._prepare_and_launch_async_burst(wait_to_finish=60)
        burst_config.prepare_after_load()

        launch_params = self._prepare_simulation_params(4)
        burst_config.update_simulator_configuration(launch_params)

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      self.sim_algorithm.id,
                                                      self.test_user.id,
                                                      "branch")
        burst_config = dao.get_burst_by_id(burst_id)
        self._wait_for_burst(burst_config)

        ts_regions = self.count_all_entities(TimeSeriesRegion)
        sim_states = self.count_all_entities(SimulationHistoryIndex)
        assert 2 == ts_regions, "A TimeSeriesRegion should exist for each of the two launches."
        assert 2 == sim_states, "A SimulationHistoryIndex should exist for each of the two launches."

    def test_remove_group_burst(self):
        """
        Same remove burst but for a burst that contains group of workflows launched as
        it would be from a Parameter Space Exploration. Check that the workflows are also
        deleted with the burst.
        """
        burst_config = self._prepare_and_launch_async_burst(length=1,
                                                            is_range=True,
                                                            nr_ops=4,
                                                            wait_to_finish=60)

        launched_workflows = dao.get_workflows_for_burst(burst_config.id,
                                                         is_count=True)
        assert 4 == launched_workflows, "4 workflows should have been launched due to group parameter."

        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_config.id)
        assert got_deleted, "Burst should be deleted"

        launched_workflows = dao.get_workflows_for_burst(burst_config.id,
                                                         is_count=True)
        assert 0 == launched_workflows, "No workflows should remain after delete."

        burst_config = dao.get_burst_by_id(burst_config.id)
        assert burst_config is None, "Removing a canceled burst should delete it from db."

    def test_remove_started_burst(self):
        """
        Try removing a started burst, which should result in it getting canceled.
        """
        burst_entity = self._prepare_and_launch_async_burst(length=20000)
        assert BurstConfiguration.BURST_RUNNING == burst_entity.status,\
                         'A simulation of length 20000 should still be running immediately after launch.'
        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_entity.id)
        assert not got_deleted, "Burst should be cancelled before deleted."
        burst_entity = dao.get_burst_by_id(burst_entity.id)
        assert BurstConfiguration.BURST_CANCELED == burst_entity.status,\
                         'Deleting a running burst should just cancel it first.'
        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_entity.id)
        assert got_deleted, "Burst should be deleted if status is cancelled."
        burst_entity = dao.get_burst_by_id(burst_entity.id)
        assert burst_entity is None, "Removing a canceled burst should delete it from db."

    def test_burst_delete_with_project(self):
        """
        Test that on removal of a project all burst related data is cleared.
        """
        self._prepare_and_launch_sync_burst()
        ProjectService().remove_project(self.test_project.id)
        self._check_burst_removed()

    def test_sync_burst_launch(self):
        """
        A full test for launching a burst. 
        First create the workflow steps and launch the burst.
        Then check that the only operation created is for the first adapter from the portlet. The
        second should be viewed as a visualizer.
        After that load the burst and check that the visualizer and analyzer are loaded in the
        corresponding tab and that all the parameters are still the same. Finally check that burst
        status updates corresponding to final operation status.
        """
        loaded_burst, workflow_step_list = self._prepare_and_launch_sync_burst(
        )
        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        assert finished == 1, "One operation should have been generated for this burst."
        assert started == 0, "No operations should remain started since the workflow was launched synchronously."
        assert error == 0, "No operations should return error status."
        assert loaded_burst.tabs[0].portlets[
            0] is not None, "Portlet not loaded from config!"
        portlet_config = loaded_burst.tabs[0].portlets[0]
        analyzers = portlet_config.analyzers
        assert len(
            analyzers
        ) == 0, "Only have 'simulator' and a visualizer. No analyzers should be loaded."
        visualizer = portlet_config.visualizer
        assert visualizer is not None, "Visualizer should not be none."
        assert visualizer.fk_algorithm == workflow_step_list[0].fk_algorithm,\
                         "Different ids after burst load for visualizer."
        assert visualizer.static_param == workflow_step_list[0].static_param,\
                         "Different static params after burst load for visualizer."
        assert visualizer.dynamic_param == workflow_step_list[0].dynamic_param,\
                         "Different dynamic params after burst load for visualizer."

    def test_launch_burst(self):
        """
        Test the launch burst method from burst service.
        """
        first_step_algo = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        adapter_interface = self.flow_service.prepare_adapter(
            self.test_project.id, first_step_algo)
        ui_submitted_simulator_iface_replica = {}
        kwargs_replica = {}
        for entry in adapter_interface:
            ui_submitted_simulator_iface_replica[entry[ABCAdapter.KEY_NAME]] = {
                KEY_PARAMETER_CHECKED: True,
                KEY_SAVED_VALUE: entry[ABCAdapter.KEY_DEFAULT]
            }
            kwargs_replica[entry[ABCAdapter.KEY_NAME]] = entry[
                ABCAdapter.KEY_DEFAULT]
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.simulator_configuration = ui_submitted_simulator_iface_replica
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      first_step_algo.id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        assert burst_config.status in (BurstConfiguration.BURST_FINISHED, BurstConfiguration.BURST_RUNNING),\
                        "Burst not launched successfully!"
        # Wait maximum x seconds for burst to finish
        self._wait_for_burst(burst_config)

    def test_load_group_burst(self):
        """
        Launch a group adapter and load it afterwards and check that a group_id is properly loaded.
        """
        launch_params = self._prepare_simulation_params(1, True, 3)

        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      self.sim_algorithm.id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait maximum x seconds for burst to finish
        self._wait_for_burst(burst_config)

        launched_workflows = dao.get_workflows_for_burst(burst_id,
                                                         is_count=True)
        assert 3 == launched_workflows, "3 workflows should have been launched due to group parameter."

        group_id = self.burst_service.load_burst(burst_id)[1]
        assert group_id >= 0, "Should be part of group."
        datatype_measures = self.count_all_entities(DatatypeMeasureIndex)
        assert 3 == datatype_measures

    def test_launch_burst_invalid_simulator_parameters(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Passing invalid kwargs to the 'simulator' component
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1_invalid': '0', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        #Wait maximum x seconds for burst to finish
        self._wait_for_burst(burst_config, error_expected=True)

    def test_launch_burst_invalid_simulator_data(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Adapter tries to do an int(test1_val1) so this should fail
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1': 'asa', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        #Wait maximum x seconds for burst to finish
        self._wait_for_burst(burst_config, error_expected=True)

    def test_launch_group_burst_happy_flow(self):
        """
        Happy flow of launching a burst with a range parameter. Expect to get both and operation
        group and a DataType group for the results of the simulations and for the metric steps.
        """
        burst_config = self._prepare_and_launch_async_burst(length=1,
                                                            is_range=True,
                                                            nr_ops=4,
                                                            wait_to_finish=120)
        if burst_config.status != BurstConfiguration.BURST_FINISHED:
            self.burst_service.stop_burst(burst_config)
            raise AssertionError("Burst should have finished successfully.")

        op_groups = self.count_all_entities(OperationGroup)
        dt_groups = self.get_all_entities(DataTypeGroup)
        assert 2 == op_groups, "An operation group should have been created for each step."
        assert len(
            dt_groups
        ) == 2, "A DataType group should have been created for each step."
        for datatype in dt_groups:
            assert 4 == datatype.count_results, "Should have 4 datatypes in group"

    def test_launch_group_burst_no_metric(self):
        """
        Test the launch burst method from burst service. Try to launch a burst with a test adapter which has
        no metrics associated. This should fail.
        """
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)

        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        kwargs_replica = {
            'test1_val1': '[0, 1, 2]',
            'test1_val2': '0',
            RANGE_PARAMETER_1: 'test1_val1'
        }
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait for the burst to finish (up to the default timeout)
        self._wait_for_burst(burst_config, error_expected=True)

        launched_workflows = dao.get_workflows_for_burst(burst_id,
                                                         is_count=True)
        assert 3 == launched_workflows, "3 workflows should have been launched due to the group parameter."

        op_groups = self.count_all_entities(OperationGroup)
        dt_groups = self.count_all_entities(DataTypeGroup)
        assert 5 == op_groups, "An operation group should have been created for each step."
        assert 5 == dt_groups, "A DataType group should have been created for each step."

    def test_load_tab_configuration(self):
        """
        Create a burst with some predefined portlets in some known positions. Check that the
        load_tab_configuration method does what is expected, and we get the portlets in the
        corresponding tab positions.
        """
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        SIMULATOR_MODULE = 'tvb.tests.framework.adapters.testadapter1'
        SIMULATOR_CLASS = 'TestAdapter1'
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS).id
        kwargs_replica = {'test1_val1': '0', 'test1_val2': '0'}
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        # Add test_portlet to positions (0,0), (0,1) and (1,0)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        burst_config = self._wait_for_burst(burst_config)
        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_step = dao.get_workflow_steps(burst_wf.id)[0]
        burst_config.prepare_after_load()
        for tab in burst_config.tabs:
            for portlet in tab.portlets:
                assert portlet is None, "Before loading the tab configuration all portlets should be None."
        burst_config = self.burst_service.load_tab_configuration(
            burst_config, wf_step.fk_operation)
        for tab_idx, tab in enumerate(burst_config.tabs):
            for portlet_idx, portlet in enumerate(tab.portlets):
                if (tab_idx == 0
                        and portlet_idx in [0, 1]) or (tab_idx == 1
                                                       and portlet_idx == 0):
                    assert portlet is not None, "Portlet configuration not set."
                    assert test_portlet.id == portlet.portlet_id, "Unexpected portlet entity loaded."
                else:
                    assert portlet is None, "Before loading the tab configuration all portlets should be None."

    def _wait_for_burst(self, burst_config, error_expected=False, timeout=500):
        """
        Method that just waits until a burst configuration is finished or a maximum timeout is reached.

        :param burst_config: the burst configuration that should be waited on
        :param timeout: the maximum number of seconds to wait for the burst to finish
        """
        waited = 0
        while burst_config.status == BurstConfiguration.BURST_RUNNING and waited <= timeout:
            sleep(0.5)
            waited += 0.5
            burst_config = dao.get_burst_by_id(burst_config.id)

        if waited > timeout:
            self.burst_service.stop_burst(burst_config)
            raise AssertionError(
                "Timed out waiting for simulations to finish. We will cancel it"
            )

        if error_expected and burst_config.status != BurstConfiguration.BURST_ERROR:
            self.burst_service.stop_burst(burst_config)
            raise AssertionError(
                "Burst should have failed due to invalid input data.")

        if (not error_expected
            ) and burst_config.status != BurstConfiguration.BURST_FINISHED:
            msg = "Burst status should have been FINISH. Instead got %s %s" % (
                burst_config.status, burst_config.error_message)
            self.burst_service.stop_burst(burst_config)
            raise AssertionError(msg)

        return burst_config

    def _prepare_and_launch_async_burst(self,
                                        length=4,
                                        is_range=False,
                                        nr_ops=0,
                                        wait_to_finish=0):
        """
        Launch an asynchronous burst with a simulation having all the default parameters; only the length
        is received as a parameter. This is launched with the actual simulator and not with a dummy test
        adapter as a replacement. An illustrative usage sketch follows this method.
        :param length: the length of the simulation in milliseconds. This is also used in case we need
            a group burst, in which case we will have `nr_ops` simulations with lengths starting from
            `length` to `length + nr_ops` milliseconds
        :param is_range: a boolean which switches between a group burst and a non group burst.
            !! even if `is_range` is `True` you still need a non-zero positive `nr_ops` to have an actual group burst
        :param nr_ops: the number of operations in the group burst
        """
        launch_params = self._prepare_simulation_params(
            length, is_range, nr_ops)

        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id = self.burst_service.launch_burst(burst_config, 0,
                                                   self.sim_algorithm.id,
                                                   self.test_user.id)[0]
        burst_config = dao.get_burst_by_id(burst_id)

        __timeout = 15
        __waited = 0
        # Wait a maximum of 15 seconds for the burst launch to be performed
        while dao.get_workflows_for_burst(
                burst_config.id, is_count=True) == 0 and __waited < __timeout:
            sleep(0.5)
            __waited += 0.5

        if wait_to_finish:
            burst_config = self._wait_for_burst(burst_config,
                                                timeout=wait_to_finish)
        return burst_config
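
    # Illustrative usage sketch for the helper above (not part of the original test class;
    # the argument values are assumptions that mirror the tests in this file):
    #
    #   single_burst = self._prepare_and_launch_async_burst(length=4, wait_to_finish=60)
    #   group_burst = self._prepare_and_launch_async_burst(length=1, is_range=True,
    #                                                      nr_ops=4, wait_to_finish=120)
    #
    # Note that passing is_range=True with nr_ops=0 yields an empty range, so no group
    # burst is actually created, as the docstring above points out.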

    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)

        ### Now fire the workflow and also update and store the burst configuration ##
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list

    def _check_burst_removed(self):
        """
        Test that a burst was properly removed. This means checking that the burst entity,
        any workflow steps and any datatypes resulted from the burst are also removed.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        assert 0 == len(remaining_bursts), "Burst was not deleted"
        ops_number = dao.get_operation_numbers(self.test_project.id)[0]
        assert 0 == ops_number, "Operations were not deleted."
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 0 == len(datatypes)

        datatype1_stored = self.count_all_entities(Datatype1)
        datatype2_stored = self.count_all_entities(Datatype2)
        assert 0 == datatype1_stored, "Specific datatype entries for DataType1 were not deleted."
        assert 0 == datatype2_stored, "Specific datatype entries for DataType2 were not deleted."

    def _prepare_simulation_params(self, length, is_range=False, no_ops=0):

        connectivity = self._burst_create_connectivity()

        launch_params = self.local_simulation_params
        launch_params['connectivity'] = connectivity.gid
        if is_range:
            launch_params['simulation_length'] = str(
                list(range(length, length + no_ops)))
            launch_params[RANGE_PARAMETER_1] = 'simulation_length'
        else:
            launch_params['simulation_length'] = str(length)
            launch_params[RANGE_PARAMETER_1] = None

        return launch_params
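
    # Illustrative sketch of the dictionary built by the helper above (an assumption, shown
    # only to clarify the range-parameter wiring; the connectivity gid value is hypothetical):
    #
    #   self._prepare_simulation_params(1, is_range=True, no_ops=3) returns roughly
    #   {'connectivity': '<some-connectivity-gid>',
    #    'simulation_length': '[1, 2, 3]',
    #    RANGE_PARAMETER_1: 'simulation_length',
    #    ...}  # plus the remaining default simulator parameters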

    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }

        self.operation = Operation(self.test_user.id,
                                   self.test_project.id,
                                   self.sim_algorithm.id,
                                   json.dumps(''),
                                   meta=json.dumps(meta),
                                   status=STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        self.operation_service.initiate_prelaunch(self.operation,
                                                  adapter_instance, {})
        return connectivity
Beispiel #25
0
class BurstServiceTest(BaseTestCase):
    """
    Test the service layer for the BURST PAGE. We can't make this test case transactional, since
    we launch operations in different threads and the transactional operator only rolls back
    sessions bound to the current thread's transaction.
    """
    PORTLET_ID = "TA1TA2"
    ## This should not be present in portlets.xml
    INVALID_PORTLET_ID = "this_is_not_a_non_existent_test_portlet_ID"

    burst_service = BurstService()
    flow_service = FlowService()
    operation_service = OperationService()
    workflow_service = WorkflowService()
    sim_algorithm = flow_service.get_algorithm_by_module_and_class(
        SIMULATOR_MODULE, SIMULATOR_CLASS)
    local_simulation_params = copy.deepcopy(SIMULATOR_PARAMETERS)

    def setUp(self):
        """
        Sets up the environment for running the tests:
        cleans the database before testing and saves the config file,
        then creates a test user and a test project.
        The burst, flow, operation and workflow services are available as class-level attributes.
        """
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database()

    def test_new_portlet_configuration(self):
        """
        Test that the correct portlet configuration is generated for the test portlet.
        """
        # Passing an invalid portlet ID should fail and raise an InvalidPortletConfiguration exception.
        self.assertRaises(InvalidPortletConfiguration,
                          self.burst_service.new_portlet_configuration, -1)

        # Now the happy flow
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        analyzers = portlet_configuration.analyzers
        self.assertEqual(
            len(analyzers), 1, "Portlet configuration not built properly."
            " Portlet's analyzers list has an unexpected number of elements.")
        self.assertEqual(
            analyzers[0].dynamic_param, {
                u'test_dt_input': {
                    wf_cfg.DATATYPE_INDEX_KEY: 0,
                    wf_cfg.STEP_INDEX_KEY: 0
                }
            }, "Dynamic parameters not loaded properly")
        visualizer = portlet_configuration.visualizer
        self.assertEqual(visualizer.dynamic_param, {},
                         "Dynamic parameters not loaded properly")
        self.assertEqual(visualizer.static_param, {u'test2': u'0'},
                         'Static parameters not loaded properly')

    def test_build_portlet_interface(self):
        """
        Test that the portlet interface is built properly, split by steps and prefixed.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        actual_interface = self.burst_service.build_portlet_interface(
            portlet_configuration, self.test_project.id)
        # The expected portlet steps and interface, in correspondence with the xml declaration
        # from tvb.tests.framework/core/portlets/test_portlet.xml
        expected_steps = [{
            'ui_name': 'TestAdapterDatatypeInput'
        }, {
            'ui_name': 'TestAdapter2'
        }]
        expected_interface = [{
            ABCAdapter.KEY_DEFAULT:
            'step_0[0]',
            ABCAdapter.KEY_DISABLED:
            True,
            KEY_DYNAMIC:
            True,
            ABCAdapter.KEY_NAME:
            ADAPTER_PREFIX_ROOT + '0test_dt_input'
        }, {
            ABCAdapter.KEY_DEFAULT:
            '0',
            ABCAdapter.KEY_DISABLED:
            False,
            KEY_DYNAMIC:
            False,
            ABCAdapter.KEY_NAME:
            ADAPTER_PREFIX_ROOT + '1test2'
        }]
        for idx, entry in enumerate(expected_steps):
            step = actual_interface[idx]
            for key in entry:
                self.assertEqual(entry.get(key), getattr(step, key))
            for key in expected_interface[idx]:
                self.assertEqual(expected_interface[idx].get(key, False),
                                 step.interface[0].get(key, False))

    def test_build_portlet_interface_invalid(self):
        """
        Test that a proper exception is raised in case an invalid portlet configuration is provided.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        portlet_configuration.portlet_id = "this-is-invalid"
        self.assertRaises(InvalidPortletConfiguration,
                          self.burst_service.build_portlet_interface,
                          portlet_configuration, self.test_project.id)

    def test_update_portlet_config(self):
        """
        Test that a portlet configuration's parameters are updated according to a set
        of overwrites that would normally come from the UI. Make sure a restart is needed only if
        analyzer parameters change.
        """
        def __update_params(declared_overwrites, expected_result):
            """
            Do the update and check that we indeed get the expected_result.
            :param declared_overwrites: an input dictionary in the form {'$$name$$' : '$$value$$'}. Make
                sure $$name$$ has the prefix that is added in case of portlet parameters,
                namely ADAPTER_PREFIX_ROOT + step_index + actual_name
            :param expected_result: boolean representing whether a restart is needed (i.e. whether an
                analyzer parameter changed, as opposed to a visualizer one)
            """
            result = self.burst_service.update_portlet_configuration(
                portlet_configuration, declared_overwrites)
            self.assertEqual(
                expected_result, result,
                "After update expected %s as 'need_restart' but got %s." %
                (expected_result, result))

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        previous_entry = portlet_configuration.analyzers[0].static_param[
            'test_non_dt_input']
        declared_overwrites = {
            ADAPTER_PREFIX_ROOT + '0test_non_dt_input': previous_entry
        }
        __update_params(declared_overwrites, False)
        declared_overwrites = {ADAPTER_PREFIX_ROOT + '1test2': 'new_value'}
        __update_params(declared_overwrites, False)
        declared_overwrites = {ADAPTER_PREFIX_ROOT + '0test_non_dt_input': '1'}
        __update_params(declared_overwrites, True)

    def test_update_portlet_config_invalid_data(self):
        """
        Trying an update on a portlet configuration with invalid data
        should not change the configuration instance in any way.
        """
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)

        invalid_overwrites = {
            'this_is_not_a_valid_key': 'for_test_portlet_update'
        }
        before_update = copy.deepcopy(portlet_configuration)
        self.burst_service.update_portlet_configuration(
            portlet_configuration, invalid_overwrites)
        self.assertEqual(set(dir(before_update)),
                         set(dir(portlet_configuration)))
        # An update with invalid input data should have no effect on the configuration; check that no attribute changed
        for key in portlet_configuration.__dict__.keys():
            if not hasattr(getattr(portlet_configuration, key), '__call__'):
                self.assertEqual(
                    getattr(before_update, key),
                    getattr(portlet_configuration, key),
                    "The value of attribute %s changed by a update with invalid data "
                    "when it shouldn't have." % key)

    def test_clone_burst_configuration(self):
        """
        Test that all the major attributes are the same after cloning a burst, but the
        id of the cloned one is None.
        """
        first_burst = TestFactory.store_burst(self.test_project.id)
        cloned_burst = first_burst.clone()
        self._compare_bursts(first_burst, cloned_burst)
        self.assertEqual(first_burst.selected_tab, cloned_burst.selected_tab,
                         "Selected tabs not equal for bursts.")
        self.assertEqual(len(first_burst.tabs), len(cloned_burst.tabs),
                         "Tabs not equal for bursts.")
        self.assertTrue(cloned_burst.id is None,
                        'id should be none for cloned entry.')

    def test_store_burst_config(self):
        """
        Test that a burst entity is properly stored in db.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        self.assertTrue(burst_config.id is not None,
                        'Burst was not stored properly.')
        stored_entity = dao.get_burst_by_id(burst_config.id)
        self.assertTrue(stored_entity is not None,
                        'Burst was not stored properly.')
        self._compare_bursts(burst_config, stored_entity)

    def _compare_bursts(self, first_burst, second_burst):
        """
        Compare that all important attributes are the same between two bursts. (name, project id and status)
        """
        self.assertEqual(first_burst.name, second_burst.name,
                         "Names not equal for bursts.")
        self.assertEqual(first_burst.fk_project, second_burst.fk_project,
                         "Projects not equal for bursts.")
        self.assertEqual(first_burst.status, second_burst.status,
                         "Statuses not equal for bursts.")

    def test_getavailablebursts_none(self):
        """
        Test that an empty list is returned if no data is available in db.
        """
        bursts = self.burst_service.get_available_bursts(self.test_project.id)
        self.assertEqual(bursts, [],
                         "Unexpected result returned : %s" % (bursts, ))

    def test_get_available_bursts_happy(self):
        """
        Test that all the correct bursts are returned for the given project.
        """
        project = model.Project("second_test_proj", self.test_user.id,
                                "description")
        second_project = dao.store_entity(project)
        test_project_bursts = [
            TestFactory.store_burst(self.test_project.id).id for _ in xrange(4)
        ]
        second_project_bursts = [
            TestFactory.store_burst(second_project.id).id for _ in xrange(3)
        ]
        returned_test_project_bursts = [
            burst.id for burst in self.burst_service.get_available_bursts(
                self.test_project.id)
        ]
        returned_second_project_bursts = [
            burst.id for burst in self.burst_service.get_available_bursts(
                second_project.id)
        ]
        self.assertEqual(
            len(test_project_bursts), len(returned_test_project_bursts),
            "Incorrect bursts retrieved for project %s." % self.test_project)
        self.assertEqual(
            len(second_project_bursts), len(returned_second_project_bursts),
            "Incorrect bursts retrieved for project %s." % second_project)
        self.assertEqual(
            set(second_project_bursts), set(returned_second_project_bursts),
            "Incorrect bursts retrieved for project %s." % second_project)
        self.assertEqual(
            set(test_project_bursts), set(returned_test_project_bursts),
            "Incorrect bursts retrieved for project %s." % self.test_project)

    def test_select_simulator_inputs(self):
        """
        Test that given a dictionary of selected inputs as it would arrive from UI, only
        the selected simulator inputs are kept.
        """
        simulator_input_tree = self.flow_service.prepare_adapter(
            self.test_project.id, self.sim_algorithm)
        child_parameter = ''
        checked_parameters = {
            simulator_input_tree[0][ABCAdapter.KEY_NAME]: {
                model.KEY_PARAMETER_CHECKED: True,
                model.KEY_SAVED_VALUE: 'new_value'
            },
            simulator_input_tree[1][ABCAdapter.KEY_NAME]: {
                model.KEY_PARAMETER_CHECKED: True,
                model.KEY_SAVED_VALUE: 'new_value'
            }
        }
        # Look for an entry from a subtree to add to the selected simulator inputs
        for idx, entry in enumerate(simulator_input_tree):
            found_it = False
            if idx not in (0, 1) and entry.get(ABCAdapter.KEY_OPTIONS, False):
                for option in entry[ABCAdapter.KEY_OPTIONS]:
                    if option[ABCAdapter.KEY_VALUE] == entry[
                            ABCAdapter.KEY_DEFAULT]:
                        if option[ABCAdapter.KEY_ATTRIBUTES]:
                            child_parameter = option[
                                ABCAdapter.KEY_ATTRIBUTES][0][
                                    ABCAdapter.KEY_NAME]
                            checked_parameters[entry[ABCAdapter.KEY_NAME]] = {
                                model.KEY_PARAMETER_CHECKED:
                                False,
                                model.KEY_SAVED_VALUE:
                                entry[ABCAdapter.KEY_DEFAULT]
                            }
                            checked_parameters[child_parameter] = {
                                model.KEY_PARAMETER_CHECKED: True,
                                model.KEY_SAVED_VALUE: 'new_value'
                            }
                            found_it = True
                            break
            if found_it:
                break
        self.assertTrue(
            child_parameter != '',
            "Could not find any sub-tree entry in simulator interface.")
        subtree = InputTreeManager.select_simulator_inputs(
            simulator_input_tree, checked_parameters)
        # After the select method we expect only the checked parameter entries to remain, with
        # the new values updated accordingly.
        expected_outputs = [{
            ABCAdapter.KEY_NAME:
            simulator_input_tree[0][ABCAdapter.KEY_NAME],
            ABCAdapter.KEY_DEFAULT:
            'new_value'
        }, {
            ABCAdapter.KEY_NAME:
            simulator_input_tree[1][ABCAdapter.KEY_NAME],
            ABCAdapter.KEY_DEFAULT:
            'new_value'
        }, {
            ABCAdapter.KEY_NAME: child_parameter,
            ABCAdapter.KEY_DEFAULT: 'new_value'
        }]
        self.assertEqual(
            len(expected_outputs), len(subtree),
            "Some entries that should not have been displayed still are.")
        for idx, entry in enumerate(expected_outputs):
            self.assertEqual(expected_outputs[idx][ABCAdapter.KEY_NAME],
                             subtree[idx][ABCAdapter.KEY_NAME])
            self.assertEqual(expected_outputs[idx][ABCAdapter.KEY_DEFAULT],
                             subtree[idx][ABCAdapter.KEY_DEFAULT],
                             'Default value not updated properly.')

    def test_rename_burst(self):
        """
        Test that renaming of a burst functions properly.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        self.burst_service.rename_burst(burst_config.id, "new_burst_name")
        loaded_burst = dao.get_burst_by_id(burst_config.id)
        self.assertEqual(loaded_burst.name, "new_burst_name",
                         "Burst was not renamed properly.")

    def test_load_burst(self):
        """ 
        Test that the load burst works properly. NOTE: this method is also tested
        in the actual burst launch tests. This is just a basic test to verify that the simulator
        interface is loaded properly.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        loaded_burst = self.burst_service.load_burst(burst_config.id)[0]
        self.assertEqual(loaded_burst.simulator_configuration, {},
                         "No simulator configuration should have been loaded")
        self.assertEqual(burst_config.fk_project, loaded_burst.fk_project,
                         "Loaded burst different from original one.")
        burst_config = TestFactory.store_burst(
            self.test_project.id, simulator_config={"test": "test"})
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        self.assertEqual(loaded_burst.simulator_configuration,
                         {"test": "test"}, "different burst loaded")
        self.assertEqual(burst_config.fk_project, loaded_burst.fk_project,
                         "Loaded burst different from original one.")

    def test_remove_burst(self):
        """
        Test the remove burst method added to burst_service.
        """
        loaded_burst, _ = self._prepare_and_launch_sync_burst()
        self.burst_service.cancel_or_remove_burst(loaded_burst.id)
        self._check_burst_removed()

    def test_branch_burst(self):
        """
        Test the branching of an existing burst.
        """
        burst_config = self._prepare_and_launch_async_burst(wait_to_finish=60)
        burst_config.prepare_after_load()

        launch_params = self._prepare_simulation_params(4)
        burst_config.update_simulator_configuration(launch_params)

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      self.sim_algorithm.id,
                                                      self.test_user.id,
                                                      "branch")
        burst_config = dao.get_burst_by_id(burst_id)
        self._wait_for_burst(burst_config)

        ts_regions = self.count_all_entities(TimeSeriesRegion)
        sim_states = self.count_all_entities(SimulationState)
        self.assertEqual(
            2, ts_regions,
            "An operation group should have been created for each step.")
        self.assertEqual(
            2, sim_states,
            "An dataType group should have been created for each step.")

    def test_remove_group_burst(self):
        """
        Same as the remove burst test, but for a burst that contains a group of workflows launched as
        they would be from a Parameter Space Exploration. Check that the workflows are also
        deleted with the burst.
        """
        burst_config = self._prepare_and_launch_async_burst(length=1,
                                                            is_range=True,
                                                            nr_ops=4,
                                                            wait_to_finish=60)

        launched_workflows = dao.get_workflows_for_burst(burst_config.id,
                                                         is_count=True)
        self.assertEqual(
            4, launched_workflows,
            "4 workflows should have been launched due to group parameter.")

        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_config.id)
        self.assertTrue(got_deleted, "Burst should be deleted")

        launched_workflows = dao.get_workflows_for_burst(burst_config.id,
                                                         is_count=True)
        self.assertEqual(0, launched_workflows,
                         "No workflows should remain after delete.")

        burst_config = dao.get_burst_by_id(burst_config.id)
        self.assertTrue(burst_config is None,
                        "Removing a canceled burst should delete it from db.")

    def test_remove_started_burst(self):
        """
        Try removing a started burst, which should result in it getting canceled.
        """
        burst_entity = self._prepare_and_launch_async_burst(length=20000)
        self.assertEqual(
            BurstConfiguration.BURST_RUNNING, burst_entity.status,
            'A simulation of length 20000 ms should still be running immediately after launch.'
        )
        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_entity.id)
        self.assertFalse(got_deleted,
                         "Burst should be cancelled before deleted.")
        burst_entity = dao.get_burst_by_id(burst_entity.id)
        self.assertEqual(
            BurstConfiguration.BURST_CANCELED, burst_entity.status,
            'Deleting a running burst should just cancel it first.')
        got_deleted = self.burst_service.cancel_or_remove_burst(
            burst_entity.id)
        self.assertTrue(got_deleted,
                        "Burst should be deleted if status is cancelled.")
        burst_entity = dao.get_burst_by_id(burst_entity.id)
        self.assertTrue(burst_entity is None,
                        "Removing a canceled burst should delete it from db.")

    def test_burst_delete_with_project(self):
        """
        Test that on removal of a project all burst related data is cleared.
        """
        self._prepare_and_launch_sync_burst()
        ProjectService().remove_project(self.test_project.id)
        self._check_burst_removed()

    def test_sync_burst_launch(self):
        """
        A full test for launching a burst.
        First create the workflow steps and launch the burst.
        Then check that the only operation created is for the first adapter from the portlet; the
        second should be viewed as a visualizer.
        After that, load the burst and check that the visualizer and analyzer are loaded in the
        corresponding tab and that all the parameters are still the same. Finally, check that the burst
        status updates according to the final operation status.
        """
        loaded_burst, workflow_step_list = self._prepare_and_launch_sync_burst(
        )
        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 1,
            "One operations should have been generated for this burst.")
        self.assertEqual(
            started, 0,
            "No operations should remain started since workflow was launched synchronous."
        )
        self.assertEqual(error, 0, "No operations should return error status.")
        self.assertTrue(loaded_burst.tabs[0].portlets[0] is not None,
                        "Portlet not loaded from config!")
        portlet_config = loaded_burst.tabs[0].portlets[0]
        analyzers = portlet_config.analyzers
        self.assertEqual(
            len(analyzers), 0,
            "Only have 'simulator' and a visualizer. No analyzers should be loaded."
        )
        visualizer = portlet_config.visualizer
        self.assertTrue(visualizer is not None,
                        "Visualizer should not be none.")
        self.assertEqual(visualizer.fk_algorithm,
                         workflow_step_list[0].fk_algorithm,
                         "Different ids after burst load for visualizer.")
        self.assertEqual(
            visualizer.static_param, workflow_step_list[0].static_param,
            "Different static params after burst load for visualizer.")
        self.assertEqual(
            visualizer.dynamic_param, workflow_step_list[0].dynamic_param,
            "Different static params after burst load for visualizer.")

    def test_launch_burst(self):
        """
        Test the launch burst method from burst service.
        """
        first_step_algo = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        adapter_interface = self.flow_service.prepare_adapter(
            self.test_project.id, first_step_algo)
        ui_submitted_simulator_iface_replica = {}
        kwargs_replica = {}
        for entry in adapter_interface:
            ui_submitted_simulator_iface_replica[entry[ABCAdapter.KEY_NAME]] = {
                model.KEY_PARAMETER_CHECKED: True,
                model.KEY_SAVED_VALUE: entry[ABCAdapter.KEY_DEFAULT]
            }
            kwargs_replica[entry[ABCAdapter.KEY_NAME]] = entry[
                ABCAdapter.KEY_DEFAULT]
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.simulator_configuration = ui_submitted_simulator_iface_replica
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      first_step_algo.id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        self.assertTrue(
            burst_config.status in (BurstConfiguration.BURST_FINISHED,
                                    BurstConfiguration.BURST_RUNNING),
            "Burst not launched successfully!")
        # Wait for the burst to finish (up to the default timeout)
        self._wait_for_burst(burst_config)

    def test_load_group_burst(self):
        """
        Launch a group adapter and load it afterwards and check that a group_id is properly loaded.
        """
        launch_params = self._prepare_simulation_params(1, True, 3)

        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0,
                                                      self.sim_algorithm.id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait for the burst to finish (up to the default timeout)
        self._wait_for_burst(burst_config)

        launched_workflows = dao.get_workflows_for_burst(burst_id,
                                                         is_count=True)
        self.assertEqual(
            3, launched_workflows,
            "3 workflows should have been launched due to group parameter.")

        group_id = self.burst_service.load_burst(burst_id)[1]
        self.assertTrue(group_id >= 0, "Should be part of group.")
        datatype_measures = self.count_all_entities(DatatypeMeasure)
        self.assertEqual(3, datatype_measures)

    def test_launch_burst_invalid_simulator_parameters(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Passing invalid kwargs to the 'simulator' component
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1_invalid': '0', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait for the burst to finish (up to the default timeout)
        self._wait_for_burst(burst_config, error_expected=True)

    def test_launch_burst_invalid_simulator_data(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Adapter tries to do an int(test1_val1) so this should fail
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1': 'asa', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait for the burst to finish (up to the default timeout)
        self._wait_for_burst(burst_config, error_expected=True)

    def test_launch_burst_invalid_portlet_analyzer_data(self):
        """
        Test that burst is marked as error if invalid data is passed to the first step.
        """
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        #Adapter tries to do an int(test1_val1) and int(test1_val2) so this should be valid
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        kwargs_replica = {'test1_val1': '1', 'test1_val2': '0'}
        burst_config.update_simulator_configuration(kwargs_replica)

        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        portlet_configuration = self.burst_service.new_portlet_configuration(
            test_portlet.id)
        #Portlet analyzer tries to do int(input) which should fail
        declared_overwrites = {
            ADAPTER_PREFIX_ROOT + '0test_non_dt_input': 'asa'
        }
        self.burst_service.update_portlet_configuration(
            portlet_configuration, declared_overwrites)
        burst_config.tabs[0].portlets[0] = portlet_configuration

        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait for the burst to finish (up to the default timeout)
        burst_config = self._wait_for_burst(burst_config, error_expected=True)

        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_steps = dao.get_workflow_steps(burst_wf.id)
        self.assertTrue(
            len(wf_steps) == 2,
            "Should have exactly 2 wf steps. One for 'simulation' one for portlet analyze operation."
        )
        simulator_op = dao.get_operation_by_id(wf_steps[0].fk_operation)
        self.assertEqual(
            model.STATUS_FINISHED, simulator_op.status,
            "First operation should be simulator which should have 'finished' status."
        )
        portlet_analyze_op = dao.get_operation_by_id(wf_steps[1].fk_operation)
        self.assertEqual(
            portlet_analyze_op.status, model.STATUS_ERROR,
            "Second operation should be portlet analyze step which should have 'error' status."
        )

    def test_launch_group_burst_happy_flow(self):
        """
        Happy flow of launching a burst with a range parameter. Expect to get both an operation
        group and a DataType group for the results of the simulations and for the metric steps.
        """
        burst_config = self._prepare_and_launch_async_burst(length=1,
                                                            is_range=True,
                                                            nr_ops=4,
                                                            wait_to_finish=120)
        if burst_config.status != BurstConfiguration.BURST_FINISHED:
            self.burst_service.stop_burst(burst_config)
            self.fail("Burst should have finished successfully.")

        op_groups = self.count_all_entities(model.OperationGroup)
        dt_groups = self.get_all_entities(model.DataTypeGroup)
        self.assertEqual(
            2, op_groups,
            "An operation group should have been created for each step.")
        self.assertEqual(
            len(dt_groups), 2,
            "An dataType group should have been created for each step.")
        for datatype in dt_groups:
            self.assertEqual(4, datatype.count_results,
                             "Should have 4 datatypes in group")

    def test_launch_group_burst_no_metric(self):
        """
        Test the launch burst method from burst service. Try to launch a burst with a test adapter which has
        no metrics associated. This should fail.
        """
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)

        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1').id
        kwargs_replica = {
            'test1_val1': '[0, 1, 2]',
            'test1_val2': '0',
            model.RANGE_PARAMETER_1: 'test1_val1'
        }
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        # Wait for the burst to finish (up to the default timeout)
        self._wait_for_burst(burst_config, error_expected=True)

        launched_workflows = dao.get_workflows_for_burst(burst_id,
                                                         is_count=True)
        self.assertEqual(
            3, launched_workflows,
            "3 workflows should have been launched due to group parameter.")

        op_groups = self.count_all_entities(model.OperationGroup)
        dt_groups = self.count_all_entities(model.DataTypeGroup)
        self.assertEqual(
            5, op_groups,
            "An operation group should have been created for each step.")
        self.assertEqual(
            5, dt_groups,
            "An dataType group should have been created for each step.")

    def test_load_tab_configuration(self):
        """
        Create a burst with some predefined portlets in some known positions. Check that the
        load_tab_configuration method does what is expected, and we get the portlets in the
        corresponding tab positions.
        """
        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        SIMULATOR_MODULE = 'tvb.tests.framework.adapters.testadapter1'
        SIMULATOR_CLASS = 'TestAdapter1'
        algo_id = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS).id
        kwargs_replica = {'test1_val1': '0', 'test1_val2': '0'}
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
        # Add test_portlet to positions (0,0), (0,1) and (1,0)
        tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
        self._add_portlets_to_burst(burst_config, tab_config)
        burst_config.update_simulator_configuration(kwargs_replica)
        burst_id, _ = self.burst_service.launch_burst(burst_config, 0, algo_id,
                                                      self.test_user.id)
        burst_config = dao.get_burst_by_id(burst_id)
        burst_config = self._wait_for_burst(burst_config)
        burst_wf = dao.get_workflows_for_burst(burst_config.id)[0]
        wf_step = dao.get_workflow_steps(burst_wf.id)[0]
        burst_config.prepare_after_load()
        for tab in burst_config.tabs:
            for portlet in tab.portlets:
                self.assertTrue(
                    portlet is None,
                    "Before loading the tab configuration all portlets should be none."
                )
        burst_config = self.burst_service.load_tab_configuration(
            burst_config, wf_step.fk_operation)
        for tab_idx, tab in enumerate(burst_config.tabs):
            for portlet_idx, portlet in enumerate(tab.portlets):
                if (tab_idx == 0
                        and portlet_idx in [0, 1]) or (tab_idx == 1
                                                       and portlet_idx == 0):
                    self.assertTrue(portlet is not None,
                                    "portlet gonfiguration not set")
                    self.assertEqual(test_portlet.id, portlet.portlet_id,
                                     "Unexpected portlet entity loaded.")
                else:
                    self.assertTrue(
                        portlet is None,
                        "Before loading the tab configuration all portlets should be none"
                    )

    def _wait_for_burst(self, burst_config, error_expected=False, timeout=40):
        """
        Method that just waits until a burst configuration is finished or a maximum timeout is reached.

        :param burst_config: the burst configuration that should be waited on
        :param timeout: the maximum number of seconds to wait for the burst to finish
            (illustrative call patterns follow this method)
        """
        waited = 0
        while burst_config.status == BurstConfiguration.BURST_RUNNING and waited <= timeout:
            sleep(0.5)
            waited += 0.5
            burst_config = dao.get_burst_by_id(burst_config.id)

        if waited > timeout:
            self.burst_service.stop_burst(burst_config)
            self.fail(
                "Timed out waiting for simulations to finish. We will cancel it"
            )

        if error_expected and burst_config.status != BurstConfiguration.BURST_ERROR:
            self.burst_service.stop_burst(burst_config)
            self.fail("Burst should have failed due to invalid input data.")

        if (not error_expected
            ) and burst_config.status != BurstConfiguration.BURST_FINISHED:
            msg = "Burst status should have been FINISH. Instead got %s %s" % (
                burst_config.status, burst_config.error_message)
            self.burst_service.stop_burst(burst_config)
            self.fail(msg)

        return burst_config
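
    # Illustrative call patterns for the helper above (taken from how the tests in this
    # class invoke it; nothing here is new API):
    #
    #   burst_config = self._wait_for_burst(burst_config)                         # expect success
    #   burst_config = self._wait_for_burst(burst_config, error_expected=True)    # expect an error status
    #   burst_config = self._wait_for_burst(burst_config, timeout=wait_to_finish)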

    def _prepare_and_launch_async_burst(self,
                                        length=4,
                                        is_range=False,
                                        nr_ops=0,
                                        wait_to_finish=0):
        """
        Launch an asynchronous burst with a simulation having all the default parameters; only the length
        is received as a parameter. This is launched with the actual simulator and not with a dummy test
        adapter as a replacement. An illustrative usage sketch follows this method.
        :param length: the length of the simulation in milliseconds. This is also used in case we need
            a group burst, in which case we will have `nr_ops` simulations with lengths starting from
            `length` to `length + nr_ops` milliseconds
        :param is_range: a boolean which switches between a group burst and a non group burst.
            !! even if `is_range` is `True` you still need a non-zero positive `nr_ops` to have an actual group burst
        :param nr_ops: the number of operations in the group burst
        """
        launch_params = self._prepare_simulation_params(
            length, is_range, nr_ops)

        burst_config = self.burst_service.new_burst_configuration(
            self.test_project.id)
        burst_config.update_simulator_configuration(launch_params)
        burst_id = self.burst_service.launch_burst(burst_config, 0,
                                                   self.sim_algorithm.id,
                                                   self.test_user.id)[0]
        burst_config = dao.get_burst_by_id(burst_id)

        __timeout = 15
        __waited = 0
        # Wait a maximum of 15 seconds for the burst launch to be performed
        while dao.get_workflows_for_burst(
                burst_config.id, is_count=True) == 0 and __waited < __timeout:
            sleep(0.5)
            __waited += 0.5

        if wait_to_finish:
            burst_config = self._wait_for_burst(burst_config,
                                                timeout=wait_to_finish)
        return burst_config
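
    # Illustrative usage sketch for the helper above (not part of the original class; the
    # values mirror how the group-burst tests in this class call it):
    #
    #   burst_config = self._prepare_and_launch_async_burst(length=1, is_range=True,
    #                                                       nr_ops=4, wait_to_finish=60)
    #
    # With is_range=True but nr_ops=0 the range would be empty, so no group burst is created.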

    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)
        view_step = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter2",
            "TestAdapter2", {"test2": 2}, {},
            0,
            0,
            0,
            0,
            is_view_step=True)
        view_step.fk_portlet = test_portlet.id
        workflow_step_list.append(view_step)

        workflows = self.workflow_service.create_and_store_workflow(
            self.test_project.id, burst_config.id, 0, first_step_algorithm.id,
            operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        ### Now fire the workflow and also update and store the burst configuration ##
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list

    def _check_burst_removed(self):
        """
        Test that a burst was properly removed. This means checking that the burst entity,
        any workflow steps and any datatypes resulted from the burst are also removed.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        self.assertEqual(0, len(remaining_bursts), "Burst was not deleted")
        ops_number = dao.get_operation_numbers(self.test_project.id)[0]
        self.assertEqual(0, ops_number, "Operations were not deleted.")
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertEqual(0, len(datatypes))

        wf_steps = self.count_all_entities(model.WorkflowStep)
        datatype1_stored = self.count_all_entities(Datatype1)
        datatype2_stored = self.count_all_entities(Datatype2)
        self.assertEqual(0, wf_steps, "Workflow steps were not deleted.")
        self.assertEqual(
            0, datatype1_stored,
            "Specific datatype entries for DataType1 were not deleted.")
        self.assertEqual(
            0, datatype2_stored,
            "Specific datatype entries for DataType2 were not deleted.")

    def _add_portlets_to_burst(self, burst_config, portlet_dict):
        """
        Adds portlets to a burst config in certain tab positions, as received
        from a properly formatted dictionary (an illustrative example follows this method).
        :param burst_config: the burst configuration to which the portlets will be added
        :param portlet_dict: a dictionary in the form
                { 'portlet_id' : [(tab_idx, idx_in_tab), (tab_idx1, idx_in_tab2), ...] }
        NOTE: This will overwrite any portlets already added to the burst in any of the positions
        received in the `portlet_dict` parameter.
        """
        for prt_id in portlet_dict:
            positions = portlet_dict[prt_id]
            for pos in positions:
                burst_config.tabs[pos[0]].portlets[
                    pos[1]] = self.burst_service.new_portlet_configuration(
                        prt_id, pos[0], pos[1])
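
    # Illustrative example for the helper above (it mirrors the tab_config used by the tests
    # in this class; test_portlet.id is whatever id the test portlet has in the database):
    #
    #   test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)
    #   tab_config = {test_portlet.id: [(0, 0), (0, 1), (1, 0)]}
    #   self._add_portlets_to_burst(burst_config, tab_config)
    #
    # This places the test portlet at tab 0 / positions 0 and 1, and at tab 1 / position 0.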

    def _prepare_simulation_params(self, length, is_range=False, no_ops=0):

        connectivity = self._burst_create_connectivity()

        launch_params = self.local_simulation_params
        launch_params['connectivity'] = connectivity.gid
        if is_range:
            launch_params['simulation_length'] = str(
                range(length, length + no_ops))
            launch_params[model.RANGE_PARAMETER_1] = 'simulation_length'
        else:
            launch_params['simulation_length'] = str(length)
            launch_params[model.RANGE_PARAMETER_1] = None

        return launch_params

    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }

        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         self.sim_algorithm.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        self.operation_service.initiate_prelaunch(self.operation,
                                                  adapter_instance, {})
        return connectivity
Beispiel #26
0
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.files_helper = FilesHelper()
Beispiel #27
0
    def __init__(self):
        super(BurstAPIController, self).__init__()
        self.burst_service = BurstService()