Example #1
def transactional_teardown_method(self):
    """
    Clean up test data.
    """
    # Remove the EXPORT folder
    export_folder = os.path.join(TvbProfile.current.TVB_STORAGE,
                                 StorageInterface.EXPORT_FOLDER_NAME)
    StorageInterface.remove_folder(export_folder, True)
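
The second argument to StorageInterface.remove_folder is presumably an ignore-errors flag, so the teardown succeeds even when the EXPORT folder was never created. A minimal standalone sketch of that assumed behavior (the flag name is an assumption, not taken from the TVB sources):

import os
import shutil

def remove_folder(folder_path, ignore_errors=False):
    # Delete a folder tree; with ignore_errors=True a missing folder
    # is tolerated instead of raising (assumed semantics of the flag).
    if os.path.isdir(folder_path):
        shutil.rmtree(folder_path, ignore_errors=ignore_errors)
    elif not ignore_errors:
        raise FileNotFoundError(folder_path)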
Example #2
    def teardown_method(self):
        """
        Reset the database when the test is done.
        """
        # Delete TEMP folder
        StorageInterface.remove_folder(TvbProfile.current.TVB_TEMP_FOLDER)

        # Delete folder where data was exported
        if self.zip_path:
            StorageInterface.remove_folder(os.path.split(self.zip_path)[0])

        self.delete_project_folders()
Example #3
    @staticmethod
    def delete_project_folders():
        """
        This method deletes the folders of all projects under the TVB storage folder.
        This is done without any database check, so the DB may still list projects whose folders are gone from disk.
        """
        BaseStorageTestCase.delete_projects_folders()

        for folder in [
                os.path.join(TvbProfile.current.TVB_STORAGE,
                             StorageInterface.EXPORT_FOLDER_NAME),
                os.path.join(TvbProfile.current.TVB_STORAGE,
                             StorageInterface.TEMP_FOLDER)
        ]:
            StorageInterface.remove_folder(folder, True)
            os.makedirs(folder)
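
The remove-then-recreate loop above guarantees that the EXPORT and TEMP folders start empty for the next test. The same idiom in a self-contained sketch (the paths are illustrative only):

import os
import shutil

def reset_folders(*folders):
    # Wipe each folder and recreate it empty so no test sees
    # leftovers from a previous run.
    for folder in folders:
        shutil.rmtree(folder, ignore_errors=True)
        os.makedirs(folder)

reset_folders('/tmp/tvb_storage/EXPORT_TMP', '/tmp/tvb_storage/TEMP')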
Example #4
    def fire_simulation(self, project_gid, session_stored_simulator,
                        temp_folder):
        temporary_folder = create_temp_folder()

        h5.store_view_model(session_stored_simulator, temporary_folder)
        zip_folder_path = os.path.join(
            temp_folder, RequestFileKey.SIMULATION_FILE_NAME.value)
        StorageInterface().write_zip_folder(zip_folder_path, temporary_folder)
        StorageInterface.remove_folder(temporary_folder)

        with open(zip_folder_path, 'rb') as file_obj:
            return self.secured_request().post(
                self.build_request_url(
                    RestLink.FIRE_SIMULATION.compute_url(
                        True, {LinkPlaceholder.PROJECT_GID.value: project_gid})),
                files={
                    RequestFileKey.SIMULATION_FILE_KEY.value:
                    (RequestFileKey.SIMULATION_FILE_NAME.value, file_obj)
                })
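
fire_simulation serializes the view model to H5 files, zips them, and uploads the archive as multipart form data. A client-side sketch of just the upload step, assuming a reachable server; the route and the 'simulation_file' field name here are placeholders, the real values come from RestLink and RequestFileKey:

import requests

def post_simulation_zip(base_url, project_gid, zip_path, file_key='simulation_file'):
    # POST a zipped simulation configuration as multipart/form-data.
    # file_key must match what the server expects
    # (RequestFileKey.SIMULATION_FILE_KEY.value in TVB).
    url = '%s/simulation/%s' % (base_url, project_gid)  # illustrative route
    with open(zip_path, 'rb') as file_obj:
        return requests.post(url, files={file_key: file_obj})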
Example #5
class ExportManager(object):
    """
    This class provides basic methods for exporting data types of projects in different formats.
    """
    all_exporters = {}  # Dictionary containing all available exporters
    export_folder = None
    EXPORT_FOLDER_NAME = "EXPORT_TMP"
    EXPORTED_SIMULATION_NAME = "exported_simulation"
    EXPORTED_SIMULATION_DTS_DIR = "datatypes"
    logger = get_logger(__name__)

    def __init__(self):
        # Here we register all available data type exporters
        # If new exporters supported, they should be added here
        self._register_exporter(TVBExporter())
        self._register_exporter(TVBLinkedExporter())
        self.export_folder = os.path.join(TvbProfile.current.TVB_STORAGE, self.EXPORT_FOLDER_NAME)
        self.storage_interface = StorageInterface()

    def _register_exporter(self, exporter):
        """
        This method registers an exporter into the internal registry of available exporters.
        :param exporter: Instance of a data type exporter (extends ABCExporter)
        """
        if exporter is not None:
            self.all_exporters[exporter.__class__.__name__] = exporter

    def get_exporters_for_data(self, data):
        """
        Get available exporters for current data type.
        :returns: a dictionary with the {exporter_id : label}
        """
        if data is None:
            raise InvalidExportDataException("Could not detect exporters for null data")

        self.logger.debug("Trying to determine exporters valid for %s" % data.type)
        results = {}

        for exporter_id, exporter in self.all_exporters.items():
            if exporter.accepts(data):
                results[exporter_id] = exporter.get_label()

        return results

    def export_data(self, data, exporter_id, project):
        """
        Export provided data using given exporter
        :param data: data type to be exported
        :param exporter_id: identifier of the exporter to be used
        :param project: project that contains data to be exported

        :returns: a tuple with the following elements
            1. name of the file to be shown to user
            2. full path of the export file (available for download)
            3. boolean which specify if file can be deleted after download
        """
        if data is None:
            raise InvalidExportDataException("Could not export null data. Please select data to be exported")

        if exporter_id is None:
            raise ExportException("Please select the exporter to be used for this operation")

        if exporter_id not in self.all_exporters:
            raise ExportException("Provided exporter identifier is not a valid one")

        exporter = self.all_exporters[exporter_id]

        if project is None:
            raise ExportException("Please provide the project where data files are stored")

        # Now we start the real export
        if not exporter.accepts(data):
            raise InvalidExportDataException("Current data can not be exported by specified exporter")

        # Compute and create the folder where exported data will be stored;
        # a unique folder is generated for each export
        data_export_folder = None
        try:
            data_export_folder = self.storage_interface.build_data_export_folder(data, self.export_folder)
            self.logger.debug("Start export of data: %s" % data.type)
            export_data = exporter.export(data, data_export_folder, project)
        finally:
            # If the export did not generate any file, delete the folder
            if data_export_folder is not None and len(os.listdir(data_export_folder)) == 0:
                os.rmdir(data_export_folder)

        return export_data

    def _export_linked_datatypes(self, project):
        linked_paths = ProjectService().get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return None, None

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        op = model_operation.Operation(None, None, project.id, algo.id)
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model_operation.STATUS_FINISHED)

        return linked_paths, op

    def export_project(self, project):
        """
        Given a project root and the TVB storage_path, create a ZIP
        ready for export.
        :param project: project object which identifies project to be exported
        """
        if project is None:
            raise ExportException("Please provide project to be exported")

        folders_to_exclude = self._get_op_with_errors(project.id)
        linked_paths, op = self._export_linked_datatypes(project)

        result_path = self.storage_interface.export_project(project, folders_to_exclude,
                                                            self.export_folder, linked_paths, op)

        return result_path

    @staticmethod
    def _get_op_with_errors(project_id):
        """
        Get the operation folders with error base name as list.
        """
        operations = dao.get_operations_with_error_in_project(project_id)
        op_with_errors = []
        for op in operations:
            op_with_errors.append(op.id)
        return op_with_errors

    def export_simulator_configuration(self, burst_id):
        burst = dao.get_burst_by_id(burst_id)
        if burst is None:
            raise InvalidExportDataException("Could not find burst with ID " + str(burst_id))

        op_folder = self.storage_interface.get_project_folder(burst.project.name, str(burst.fk_simulation))
        tmp_export_folder = self.storage_interface.build_data_export_folder(burst, self.export_folder)
        tmp_sim_folder = os.path.join(tmp_export_folder, self.EXPORTED_SIMULATION_NAME)

        if not os.path.exists(tmp_sim_folder):
            os.makedirs(tmp_sim_folder)

        all_view_model_paths, all_datatype_paths = h5.gather_references_of_view_model(burst.simulator_gid, op_folder)

        burst_path = h5.determine_filepath(burst.gid, op_folder)
        all_view_model_paths.append(burst_path)

        for vm_path in all_view_model_paths:
            dest = os.path.join(tmp_sim_folder, os.path.basename(vm_path))
            self.storage_interface.copy_file(vm_path, dest)

        for dt_path in all_datatype_paths:
            dest = os.path.join(tmp_sim_folder, self.EXPORTED_SIMULATION_DTS_DIR, os.path.basename(dt_path))
            self.storage_interface.copy_file(dt_path, dest)

        main_vm_path = h5.determine_filepath(burst.simulator_gid, tmp_sim_folder)
        H5File.remove_metadata_param(main_vm_path, 'history_gid')

        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, str(burst_id), StorageInterface.ZIP_FILE_EXTENSION)

        result_path = os.path.join(tmp_export_folder, zip_file_name)
        self.storage_interface.write_zip_folder(result_path, tmp_sim_folder)

        self.storage_interface.remove_folder(tmp_sim_folder)
        return result_path
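
ExportManager keeps a registry keyed by exporter class name and filters it with accepts(). The same pattern in a self-contained sketch (CsvExporter and Registry are hypothetical stand-ins, not TVB classes):

class CsvExporter:
    # Hypothetical exporter used only for this sketch.
    def accepts(self, data):
        return isinstance(data, list)

    def get_label(self):
        return "CSV"

class Registry:
    def __init__(self):
        self.all_exporters = {}

    def register(self, exporter):
        # Key by class name, exactly as _register_exporter does above.
        self.all_exporters[exporter.__class__.__name__] = exporter

    def exporters_for(self, data):
        # Same output shape as get_exporters_for_data: {exporter_id: label}
        return {name: exp.get_label()
                for name, exp in self.all_exporters.items() if exp.accepts(data)}

registry = Registry()
registry.register(CsvExporter())
assert registry.exporters_for([1, 2, 3]) == {'CsvExporter': 'CSV'}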
Example #6
class SimulatorService(object):
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.algorithm_service = AlgorithmService()
        self.storage_interface = StorageInterface()

    @staticmethod
    def _reset_model(session_stored_simulator):
        session_stored_simulator.model = type(session_stored_simulator.model)()
        vi_indexes = session_stored_simulator.determine_indexes_for_chosen_vars_of_interest()
        vi_indexes = numpy.array(list(vi_indexes.values()))
        for monitor in session_stored_simulator.monitors:
            monitor.variables_of_interest = vi_indexes

    def reset_at_connectivity_change(self, is_simulator_copy, form,
                                     session_stored_simulator):
        """
        In case the user copies a simulation and changes the Connectivity, we want to reset the Model and Noise
        parameters because they might not fit the new Connectivity's number of regions.
        """
        if is_simulator_copy and form.connectivity.value != session_stored_simulator.connectivity:
            self._reset_model(session_stored_simulator)
            if issubclass(type(session_stored_simulator.integrator),
                          IntegratorStochastic):
                session_stored_simulator.integrator.noise = type(
                    session_stored_simulator.integrator.noise)()

    def reset_at_surface_change(self, is_simulator_copy, form,
                                session_stored_simulator):
        """
        In case the user copies a surface simulation and changes the Surface, we want to reset the Model
        parameters because they might not fit the new Surface's number of vertices.
        """
        if is_simulator_copy and (
                (session_stored_simulator.surface is None and form.surface.value)
                or (session_stored_simulator.surface and
                    form.surface.value != session_stored_simulator.surface.surface_gid)):
            self._reset_model(session_stored_simulator)

    @staticmethod
    def _set_simulator_range_parameter(simulator, range_parameter_name,
                                       range_parameter_value):
        range_param_name_list = range_parameter_name.split('.')
        current_attr = simulator
        for param_name in range_param_name_list[:-1]:
            current_attr = getattr(current_attr, param_name)
        setattr(current_attr, range_param_name_list[-1], range_parameter_value)

    def async_launch_and_prepare_simulation(self, burst_config, user, project,
                                            simulator_algo, simulator):
        try:
            operation = self.operation_service.prepare_operation(
                user.id,
                project,
                simulator_algo,
                view_model=simulator,
                burst_gid=burst_config.gid,
                op_group_id=burst_config.fk_operation_group)
            burst_config = self.burst_service.update_simulation_fields(
                burst_config, operation.id, simulator.gid)
            self.burst_service.store_burst_configuration(burst_config)

            wf_errs = 0
            try:
                OperationService().launch_operation(operation.id, True)
                return operation
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                if burst_config:
                    self.burst_service.mark_burst_finished(
                        burst_config, error_message=str(excep))

            self.logger.debug(
                "Finished launching workflow; %s operation(s) had errors on pre-launch steps" % wf_errs)

        except Exception as excep:
            self.logger.error(excep)
            if burst_config:
                self.burst_service.mark_burst_finished(
                    burst_config, error_message=str(excep))

    def prepare_simulation_on_server(self, user_id, project, algorithm,
                                     zip_folder_path, simulator_file):
        simulator_vm = h5.load_view_model_from_file(simulator_file)
        operation = self.operation_service.prepare_operation(
            user_id, project, algorithm, view_model=simulator_vm)
        self.async_launch_simulation_on_server(operation, zip_folder_path)

        return operation

    def async_launch_simulation_on_server(self, operation, zip_folder_path):
        try:
            OperationService().launch_operation(operation.id, True)
            return operation
        except Exception as excep:
            self.logger.error(excep)
        finally:
            self.storage_interface.remove_folder(zip_folder_path)

    @staticmethod
    def _set_range_param_in_dict(param_value):
        if isinstance(param_value, numpy.ndarray):
            return param_value[0]
        elif isinstance(param_value, uuid.UUID):
            return param_value.hex
        else:
            return param_value

    def async_launch_and_prepare_pse(self, burst_config, user, project,
                                     simulator_algo, range_param1,
                                     range_param2, session_stored_simulator):
        try:
            algo_category = simulator_algo.algorithm_category
            operation_group = burst_config.operation_group
            metric_operation_group = burst_config.metric_operation_group
            range_param2_values = [None]
            if range_param2:
                range_param2_values = range_param2.get_range_values()
            GROUP_BURST_PENDING[burst_config.id] = True
            operations, pse_canceled = self._prepare_operations(
                algo_category, burst_config, metric_operation_group,
                operation_group, project, range_param1, range_param2,
                range_param2_values, session_stored_simulator, simulator_algo,
                user)

            GROUP_BURST_PENDING[burst_config.id] = False
            if pse_canceled:
                return

            wf_errs = self._launch_operations(operations, burst_config)
            self.logger.debug("Finished launching workflows. " +
                              str(len(operations) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had error on pre-launch steps")
            return operations[0] if len(operations) > 0 else None

        except Exception as excep:
            self.logger.error(excep)
            self.burst_service.mark_burst_finished(burst_config,
                                                   error_message=str(excep))

    def _launch_operations(self, operations, burst_config):
        wf_errs = 0
        for operation in operations:
            try:
                burst_config = dao.get_burst_by_id(burst_config.id)
                if burst_config is None or burst_config.status in [
                        BurstConfiguration.BURST_CANCELED,
                        BurstConfiguration.BURST_ERROR
                ]:
                    self.logger.debug(
                        "Preparing operations cannot continue. Burst config {}"
                        .format(burst_config))
                    return
                OperationService().launch_operation(operation.id, True)
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                self.burst_service.mark_burst_finished(
                    burst_config, error_message=str(excep))
        return wf_errs

    def _prepare_operations(self, algo_category, burst_config,
                            metric_operation_group, operation_group, project,
                            range_param1, range_param2, range_param2_values,
                            session_stored_simulator, simulator_algo, user):
        first_simulator = None
        pse_canceled = False
        operations = []
        for param1_value in range_param1.get_range_values():
            for param2_value in range_param2_values:
                burst_config = dao.get_burst_by_id(burst_config.id)
                if burst_config is None:
                    self.logger.debug("Burst config was deleted")
                    pse_canceled = True
                    break

                if burst_config.status in [
                        BurstConfiguration.BURST_CANCELED,
                        BurstConfiguration.BURST_ERROR
                ]:
                    self.logger.debug(
                        "Current burst status is {}. Preparing operations cannot continue."
                        .format(burst_config.status))
                    pse_canceled = True
                    break
                # Copy, but generate a new GUID for every Simulator in PSE
                simulator = copy.deepcopy(session_stored_simulator)
                simulator.gid = uuid.uuid4()
                self._set_simulator_range_parameter(simulator,
                                                    range_param1.name,
                                                    param1_value)

                ranges = {
                    range_param1.name:
                    self._set_range_param_in_dict(param1_value)
                }

                if param2_value is not None:
                    self._set_simulator_range_parameter(
                        simulator, range_param2.name, param2_value)
                    ranges[range_param2.name] = self._set_range_param_in_dict(
                        param2_value)

                ranges = json.dumps(ranges)

                simulator.range_values = ranges
                operation = self.operation_service.prepare_operation(
                    user.id,
                    project,
                    simulator_algo,
                    view_model=simulator,
                    ranges=ranges,
                    burst_gid=burst_config.gid,
                    op_group_id=burst_config.fk_operation_group)
                operations.append(operation)
                if first_simulator is None:
                    first_simulator = simulator
                    burst_config = self.burst_service.update_simulation_fields(
                        burst_config, operation.id, first_simulator.gid)
                    self.burst_service.store_burst_configuration(burst_config)
                    datatype_group = DataTypeGroup(
                        operation_group,
                        operation_id=operation.id,
                        fk_parent_burst=burst_config.gid,
                        state=algo_category.defaultdatastate)
                    dao.store_entity(datatype_group)

                    metrics_datatype_group = DataTypeGroup(
                        metric_operation_group,
                        fk_parent_burst=burst_config.gid,
                        state=algo_category.defaultdatastate)
                    dao.store_entity(metrics_datatype_group)
        return operations, pse_canceled

    @staticmethod
    def compute_conn_branch_conditions(is_branch, simulator):
        if not is_branch:
            return None

        conn = load.load_entity_by_gid(simulator.connectivity)
        if conn.number_of_regions:
            return FilterChain(
                fields=[FilterChain.datatype + '.number_of_regions'],
                operations=["=="],
                values=[conn.number_of_regions])

    @staticmethod
    def validate_first_fragment(form, project_id, conn_idx):
        conn_count = dao.count_datatypes(project_id, conn_idx)
        if conn_count == 0:
            form.connectivity.errors.append(
                "No connectivity in the project! Simulation cannot be started without "
                "a connectivity!")

    def get_simulation_state_index(self, burst_config,
                                   simulation_history_class):
        parent_burst = burst_config.parent_burst_object
        simulation_state_index = dao.get_generic_entity(
            simulation_history_class, parent_burst.gid, "fk_parent_burst")

        if simulation_state_index is None or len(simulation_state_index) < 1:
            exc = BurstServiceException(
                "Simulation State not found for %s, thus we are unable to branch from "
                "it!" % burst_config.name)
            self.logger.error(exc)
            raise exc

        return simulation_state_index
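
_set_simulator_range_parameter walks a dotted parameter name (e.g. 'integrator.noise.nsig') down to the parent object and assigns the leaf attribute. The same technique in a self-contained sketch (the toy classes are illustrative, not TVB types):

def set_by_dotted_path(obj, dotted_name, value):
    # Walk every component except the last to reach the parent object,
    # then assign the final attribute on it.
    *parents, leaf = dotted_name.split('.')
    for name in parents:
        obj = getattr(obj, name)
    setattr(obj, leaf, value)

class Noise:
    nsig = 0.0

class Integrator:
    noise = Noise()

class Simulator:
    integrator = Integrator()

sim = Simulator()
set_by_dotted_path(sim, 'integrator.noise.nsig', 0.5)
assert sim.integrator.noise.nsig == 0.5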
Example #7
class TestProjectService(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """

    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.project_service = ProjectService()
        self.storage_interface = StorageInterface()
        self.test_user = TestFactory.create_user()

    def transactional_teardown_method(self):
        """
        Remove project folders.
        """
        self.delete_project_folders()

    def test_create_project_happy_flow(self):

        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"

        TestFactory.create_project(self.test_user, 'test_project', "description", users=[user1.id, user2.id])

        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(resulting_projects) == 1, "Project with valid data not inserted!"
        project = resulting_projects[0]
        assert project.name == "test_project", "Invalid retrieved project name"
        assert project.description == "description", "Description do no match"

        users_for_project = dao.get_members_of_project(project.id)
        for user in users_for_project:
            assert user.id in [user1.id, user2.id, self.test_user.id], "Users not stored properly."
        assert os.path.exists(os.path.join(TvbProfile.current.TVB_STORAGE,
                                           StorageInterface.PROJECTS_FOLDER,
                                           "test_project")), "Folder for project was not created"

    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        with pytest.raises(ProjectServiceException):
            self.project_service.store_project(self.test_user, True, None, **data)

    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        proj_root = self.storage_interface.get_project_folder(selected_project.name)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 1, "Database initialization probably failed!"

        edited_data = dict(name="test_project", description="test_description", users=[])
        edited_project = self.project_service.store_project(self.test_user, False, selected_project.id, **edited_data)
        assert not os.path.exists(proj_root), "Previous folder not deleted"
        proj_root = self.storage_interface.get_project_folder(edited_project.name)
        assert os.path.exists(proj_root), "New folder not created!"
        assert selected_project.name != edited_project.name, "Project was not changed!"

    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        self.storage_interface.get_project_folder(selected_project.name)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 1, "Database initialization probably failed!"
        data = dict(name="test_project", description="test_description", users=[])
        with pytest.raises(ProjectServiceException):
            self.project_service.store_project(self.test_user, False, 99, **data)

    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        inserted_project = TestFactory.create_project(self.test_user, 'test_project')
        assert self.project_service.find_project(inserted_project.id) is not None, "Project not found !"
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(inserted_project.id)
        assert dao_returned_project.id == service_returned_project.id, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.name == service_returned_project.name, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.description == service_returned_project.description, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.members == service_returned_project.members, \
            "Data returned from service is different from data returned by DAO."

    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        project = self.project_service.store_project(self.test_user, True, None, **data)
        # fetch a likely non-existing project. Previous project id plus a 'big' offset
        with pytest.raises(ProjectServiceException):
            self.project_service.find_project(project.id + 1033)

    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        assert len(initial_projects) == 0, "Database was not reset properly!"
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        for project in projects:
            assert project.name != "test_project3", "This project should not have been retrieved"

    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members, getting projects for admin and for any of
        the members should return one.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user, 'Testproject', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"

    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1, 'TestProject1', users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3, 'TestProject3', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(member3.id, 1)[0]
        assert len(projects) == 2, "Projects not retrieved properly!"

    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            projects, pages = self.project_service.retrieve_projects_for_user(current_user.id, expected_pages)
            assert len(projects) == exp_proj_per_page, "Projects not retrieved properly! Expected: " + \
                                                       str(exp_proj_per_page) + " but got: " + str(len(projects))
            assert pages == expected_pages, "Pages not retrieved properly!"

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if folder.startswith('Generated'):
                self.storage_interface.remove_folder(full_path)

    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page of projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        assert len(projects) == (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == 2, 'Wrong number of pages retrieved.'

    def test_retrieve_projects_and_del(self):
        """
        Test retrieving projects for a given user after one project is removed, checking pagination.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(TestFactory.create_project(self.test_user, 'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        assert len(projects) == (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == (PROJECTS_PAGE_SIZE + 1) // PROJECTS_PAGE_SIZE + 1, 'Wrong number of pages'
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        assert len(projects) == 0, "Pagination improper."
        assert pages == 1, 'Wrong number of pages retrieved.'
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)
        assert len(projects) == PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == 1, 'Wrong number of pages retrieved.'

    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id)
        assert 0 == projects[0].disk_size
        assert '0.0 KiB' == projects[0].disk_size_human

    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1, zip_path, 'testSubject')

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip')
        TestFactory.import_zip_connectivity(self.test_user, project2, zip_path, 'testSubject')

        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        assert projects[0].disk_size != projects[1].disk_size, "projects should have different size"

        for project in projects:
            assert 0 != project.disk_size
            assert '0.0 KiB' != project.disk_size_human

            prj_folder = self.storage_interface.get_project_folder(project.name)
            actual_disk_size = self.storage_interface.compute_recursive_h5_disk_usage(prj_folder)

            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (actual_disk_size, project.disk_size)
            assert ratio < 1.1, msg

    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        assert len(initial_projects) == 0, "Database was not reset!"
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=test_proj[0])
        datatype = dao.store_entity(model_datatype.DataType(module="test_data", subject="subj1",
                                                            state="test_state", operation_id=operation.id))

        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]

        assert len(linkable) == 2, "Wrong count of link-able projects!"
        proj_names = [project.name for project in linkable]
        assert test_proj[1].name in proj_names
        assert test_proj[2].name in proj_names
        assert not test_proj[3].name in proj_names

    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user, 'test_proj')
        project_root = self.storage_interface.get_project_folder(inserted_project.name)
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 1, "Initializations failed!"
        assert os.path.exists(project_root), "Something failed at insert time!"
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 0, "Project was not deleted!"
        assert not os.path.exists(project_root), "Root folder not deleted!"

    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project giving an un-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 1, "Initializations failed!"
        with pytest.raises(ProjectServiceException):
            self.project_service.remove_project(99)

    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {DataTypeOverlayDetails.DATA_SUBJECT: "subject", DataTypeOverlayDetails.DATA_STATE: "state"}
        for key, value in expected_meta_data.items():
            if key in mapp_keys:
                assert value == getattr(new_datatype, mapp_keys[key])
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    # We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(model_operation.OperationGroup, op_group)[0]
                    assert value == op_group.name
                else:
                    assert value == new_datatype.parent_operation.user_group

    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, op = TestFactory.create_value_wrapper(self.test_user)
        project_to_link = model_project.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Initialization problem!"
        link = dao.store_entity(model_datatype.Links(exact_data.id, project_to_link.id))

        vw_h5_path = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path)

        if dao.get_system_user() is None:
            dao.store_entity(model_operation.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME,
                                                  TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True,
                                                  None))

        self.project_service._remove_project_node_files(inserted_project.id, gid, [link])

        assert not os.path.exists(vw_h5_path)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Data should still be in DB, because of links"
        vw_h5_path_new = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path_new)
        assert vw_h5_path_new != vw_h5_path

        self.project_service._remove_project_node_files(project_to_link.id, gid, [])
        assert dao.get_datatype_by_gid(gid) is None

    def test_update_meta_data_simple(self):
        """
        Test the new update metaData for a simple data that is not part of a group.
        """
        inserted_project, gid, _ = TestFactory.create_value_wrapper(self.test_user)
        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "second_state",
                         DataTypeOverlayDetails.CODE_GID: gid,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'}
        self.project_service.update_metadata(new_meta_data)

        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)

        new_datatype_h5 = h5.h5_file_for_index(new_datatype)
        assert new_datatype_h5.subject.load() == 'new subject', 'Subject not updated!'

    def test_update_meta_data_group(self, test_adapter_factory, datatype_group_factory):
        """
        Test the new update metaData for a group of dataTypes.
        """
        test_adapter_factory(adapter_class=DummyAdapter3)
        group, _ = datatype_group_factory()
        op_group_id = group.fk_operation_group

        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "updated_state",
                         DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: op_group_id,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'}
        self.project_service.update_metadata(new_meta_data)
        datatypes = dao.get_datatype_in_group(op_group_id)
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            assert op_group_id == new_datatype.parent_operation.fk_operation_group
            new_group = dao.get_generic_entity(model_operation.OperationGroup, op_group_id)[0]
            assert new_group.name == "newGroupName"
            self.__check_meta_data(new_meta_data, new_datatype)

    def test_retrieve_project_full(self, dummy_datatype_index_factory):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """

        project = TestFactory.create_project(self.test_user)
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=project)

        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)

        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(project.id)
        assert ops_nr == 1, "DataType Factory should only use one operation to store all its datatypes."
        assert pages_no == 1, "A single operation should fit on one page."
        resulted_dts = operations[0]['results']
        assert len(resulted_dts) == 3, "3 datatypes should be created."

    def test_get_project_structure(self, datatype_group_factory, dummy_datatype_index_factory,
                                   project_factory, user_factory):
        """
        Tests project structure is as expected and contains all datatypes and created links
        """
        user = user_factory()
        project1 = project_factory(user, name="TestPS1")
        project2 = project_factory(user, name="TestPS2")

        dt_group, _ = datatype_group_factory(project=project1)
        dt_simple = dummy_datatype_index_factory(state="RAW_DATA", project=project1)
        # Create 3 DTs directly in Project 2
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)

        # Create Links from Project 1 into Project 2
        link_ids, expected_links = [], []
        link_ids.append(dt_simple.id)
        expected_links.append(dt_simple.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        dts = dao.get_datatype_in_group(datatype_group_id=dt_group.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(dt_group.id)
        expected_links.append(dt_group.gid)

        # Actually create the links from Prj1 into Prj2
        for link_id in link_ids:
            AlgorithmService().create_link(link_id, project2.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(project2.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(project2, None, DataTypeMetaData.KEY_STATE,
                                                               DataTypeMetaData.KEY_SUBJECT, None)

        assert len(expected_links) + 3 == len(dts_in_tree), "invalid number of nodes in tree"
        assert dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!"
        assert dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!"

        project_dts = dao.get_datatypes_in_project(project2.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                assert dt.gid not in node_json, "DTs part of a group should not be in the tree JSON"
                assert dt.gid not in dts_in_tree, "DTs part of a group should not be in the project tree"
            else:
                assert dt.gid in node_json, "Simple DTs and DT_Groups should be in the tree JSON"
                assert dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be in the project tree"

        for link_gid in expected_links:
            assert link_gid in node_json, "Expected Link not present"
            assert link_gid in dts_in_tree, "Expected Link not present"
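
Several of the pagination tests above recompute the same arithmetic: the page count is the ceiling of total/PAGE_SIZE and the last page holds the remainder. A sketch of that arithmetic in isolation, mirroring the expectations built in test_retrieve_projects_random:

def page_stats(total, page_size):
    # Number of pages, and item count on the last page.
    if total == 0:
        return 0, 0
    pages = (total + page_size - 1) // page_size   # ceiling division
    last = total % page_size or page_size          # full page when it divides evenly
    return pages, last

assert page_stats(0, 20) == (0, 0)
assert page_stats(20, 20) == (1, 20)
assert page_stats(23, 20) == (2, 3)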
Example #8
def transactional_teardown_method(self):
    """ Cleans the testing environment """
    self.cleanup()
    self.clean_database()
    StorageInterface.remove_folder(self.VALID_SETTINGS['TVB_STORAGE'],
                                   True)
Example #9
class OperationService:
    """
    Class responsible for preparing an operation launch.
    It will prepare parameters, and decide if the operation is to be executed
    immediately, or to be sent on the cluster.
    """
    ATT_UID = "uid"

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.storage_interface = StorageInterface()

    ##########################################################################################
    ######## Methods related to launching operations start here ##############################
    ##########################################################################################

    def initiate_operation(self, current_user, project, adapter_instance, visible=True, model_view=None):
        """
        Gets the parameters of the computation from the previous inputs form,
        and launches a computation (on the cluster or locally).

        Invoke custom method on an Adapter Instance. Make sure when the
        operation has finished that the correct results are stored into DB.
        """
        if not isinstance(adapter_instance, ABCAdapter):
            self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
            raise LaunchException("Developer Exception!!")

        algo = adapter_instance.stored_adapter
        operation = self.prepare_operation(current_user.id, project, algo, visible, model_view)
        if adapter_instance.launch_mode == AdapterLaunchModeEnum.SYNC_SAME_MEM:
            return self.initiate_prelaunch(operation, adapter_instance)
        else:
            return self._send_to_cluster(operation, adapter_instance, current_user.username)

    @staticmethod
    def prepare_metadata(algo_category, burst=None, current_ga=GenericAttributes()):
        """
        Gather generic metadata from the submitted fields and the algorithm about to be executed.
        Will populate STATE, GROUP, etc. in the resulting generic_metadata.
        """
        generic_metadata = GenericAttributes()
        generic_metadata.state = algo_category.defaultdatastate
        generic_metadata.parent_burst = burst
        generic_metadata.fill_from(current_ga)
        return generic_metadata

    @staticmethod
    def _read_set(values):
        """ Parse a committed UI possible list of values, into a set converted into string."""
        if isinstance(values, list):
            set_values = []
            values_str = ""
            for val in values:
                if val not in set_values:
                    set_values.append(val)
                    values_str = values_str + " " + str(val)
            values = values_str
        return str(values).strip()

    def group_operation_launch(self, user_id, project, algorithm_id, category_id):
        """
        Create and prepare the launch of a group of operations.
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        ops, _ = self.prepare_operation(user_id, project, algorithm)
        for operation in ops:
            self.launch_operation(operation.id, True)

    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        time_series_index = dao.get_generic_entity(datatype_index, sim_operation.id, 'fk_from_operation')[0]
        ga = self.prepare_metadata(metric_algo.algorithm_category, time_series_index.fk_parent_burst)
        ga.visible = False

        view_model = get_class_by_name("{}.{}".format(MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(ALGORITHMS.keys())
        view_model.generic_attributes = ga

        parent_burst = dao.get_generic_entity(BurstConfiguration, time_series_index.fk_parent_burst, 'gid')[0]
        metric_op_group = dao.get_operationgroup_by_id(parent_burst.fk_metric_operation_group)
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = sim_operation.range_values
        view_model.operation_group_gid = uuid.UUID(metric_op_group.gid)
        view_model.ranges = json.dumps(parent_burst.ranges)
        view_model.range_values = range_values
        view_model.is_metric_operation = True
        metric_operation = Operation(view_model.gid.hex, sim_operation.fk_launched_by, sim_operation.fk_launched_in,
                                     metric_algo.id, user_group=ga.operation_tag, op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(DataTypeGroup, metric_operation_group_id,
                                                        'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id
            dao.store_entity(metrics_datatype_group)

        self.store_view_model(metric_operation, sim_operation.project, view_model)
        return metric_operation

    @transactional
    def prepare_operation(self, user_id, project, algorithm, visible=True, view_model=None, ranges=None,
                          burst_gid=None, op_group_id=None):
        """
        Do all the necessary preparations for storing an operation. If it's the case of a
        range of values create an operation group and multiple operations for each possible
        instance from the range.
        """
        algo_category = dao.get_category_by_id(algorithm.fk_category)
        ga = self.prepare_metadata(algo_category, current_ga=view_model.generic_attributes, burst=burst_gid)
        ga.visible = visible
        view_model.generic_attributes = ga

        self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project.id) +
                          ",algorithmId=" + str(algorithm.id) + ")")

        operation = Operation(view_model.gid.hex, user_id, project.id, algorithm.id, user_group=ga.operation_tag,
                              op_group_id=op_group_id, range_values=ranges)
        operation = dao.store_entity(operation)

        self.store_view_model(operation, project, view_model)

        return operation

    @staticmethod
    def store_view_model(operation, project, view_model):
        storage_path = StorageInterface().get_project_folder(project.name, str(operation.id))
        h5.store_view_model(view_model, storage_path)
        view_model_size_on_disk = StorageInterface.compute_recursive_h5_disk_usage(storage_path)
        operation.view_model_disk_size = view_model_size_on_disk
        dao.store_entity(operation)

    def initiate_prelaunch(self, operation, adapter_instance):
        """
        Public method.
        This should be the common entry point for calling an adapter method.
        """
        result_msg = ""
        temp_files = []
        try:
            operation = dao.get_operation_by_id(operation.id)  # Load Lazy fields

            disk_space_per_user = TvbProfile.current.MAX_DISK_SPACE
            pending_op_disk_space = dao.compute_disk_size_for_started_ops(operation.fk_launched_by)
            user_disk_space = dao.compute_user_generated_disk_size(operation.fk_launched_by)  # From kB to Bytes
            available_space = disk_space_per_user - pending_op_disk_space - user_disk_space

            view_model = adapter_instance.load_view_model(operation)
            try:
                form = adapter_instance.get_form()
                form = form() if isclass(form) else form
                fields = form.get_upload_field_names()
                project = dao.get_project_by_id(operation.fk_launched_in)
                tmp_folder = self.storage_interface.get_temp_folder(project.name)
                for upload_field in fields:
                    if hasattr(view_model, upload_field):
                        file = getattr(view_model, upload_field)
                        if file.startswith(tmp_folder) or file.startswith(TvbProfile.current.TVB_TEMP_FOLDER):
                            temp_files.append(file)
            except AttributeError:
                # Skip if we don't have upload fields on current form
                pass
            result_msg, nr_datatypes = adapter_instance._prelaunch(operation, view_model, available_space)
            operation = dao.get_operation_by_id(operation.id)
            operation.mark_complete(STATUS_FINISHED)
            dao.store_entity(operation)

            self._update_vm_generic_operation_tag(view_model, operation)
            self._remove_files(temp_files)

        except zipfile.BadZipfile as excep:
            msg = "The uploaded file is not a valid ZIP!"
            self._handle_exception(excep, temp_files, msg, operation)
        except TVBException as excep:
            self._handle_exception(excep, temp_files, excep.message, operation)
        except MemoryError:
            msg = ("Could not execute operation because there is not enough free memory." +
                   " Please adjust operation parameters and re-launch it.")
            self._handle_exception(Exception(msg), temp_files, msg, operation)
        except Exception as excep1:
            msg = "Could not launch Operation with the given input data!"
            self._handle_exception(excep1, temp_files, msg, operation)

        if operation.fk_operation_group and 'SimulatorAdapter' in operation.algorithm.classname:
            next_op = self._prepare_metric_operation(operation)
            self.launch_operation(next_op.id)
        return result_msg

    def _send_to_cluster(self, operation, adapter_instance, current_username="******"):
        """ Initiate operation on cluster"""
        try:
            BackendClientFactory.execute(str(operation.id), current_username, adapter_instance)
        except TVBException as ex:
            self._handle_exception(ex, {}, ex.message, operation)
        except Exception as excep:
            self._handle_exception(excep, {}, "Could not start operation!", operation)

        return operation

    @staticmethod
    def _update_vm_generic_operation_tag(view_model, operation):
        project = dao.get_project_by_id(operation.fk_launched_in)
        h5_path = h5.path_for(operation.id, ViewModelH5, view_model.gid, project.name, type(view_model).__name__)
        with ViewModelH5(h5_path, view_model) as vm_h5:
            vm_h5.operation_tag.store(operation.user_group)

    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operation in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                adapter_instance = ABCAdapter.build_adapter(algorithm)

            if send_to_cluster:
                self._send_to_cluster(operation, adapter_instance, operation.user.username)
            else:
                self.initiate_prelaunch(operation, adapter_instance)

    def _handle_exception(self, exception, temp_files, message, operation=None):
        """
        Common way to treat exceptions:
            - remove temporary files, if any
            - set status ERROR on current operation (if any)
            - log exception
        """
        self.logger.exception(message)
        if operation is not None:
            BurstService().persist_operation_state(operation, STATUS_ERROR, str(exception))
        self._remove_files(temp_files)
        exception.message = message
        raise exception.with_traceback(
            sys.exc_info()[2])  # when rethrowing in python this is required to preserve the stack trace

    def _remove_files(self, file_list):
        """
        Remove any files that exist in the given file list.
        Currently used to delete temporary files created during an operation.
        """
        for pth in file_list:
            if pth is not None:
                pth = str(pth)
                try:
                    if os.path.exists(pth) and os.path.isfile(pth):
                        os.remove(pth)
                        if len(os.listdir(os.path.dirname(pth))) == 0:
                            self.storage_interface.remove_folder(os.path.dirname(pth))
                        self.logger.debug("We no longer need file:" + pth + " => deleted")
                    else:
                        self.logger.warning("Trying to remove not existent file:" + pth)
                except OSError:
                    self.logger.exception("Could not cleanup file!")

    @staticmethod
    def _range_name(range_no):
        return PARAM_RANGE_PREFIX + str(range_no)

    def fire_operation(self, adapter_instance, current_user, project_id, visible=True, view_model=None):
        """
        Launch an operation, specified by AdapterInstance, for current_user and project with project_id.
        """
        operation_name = str(adapter_instance.__class__.__name__)
        try:
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)

            result = self.initiate_operation(current_user, project, adapter_instance, visible,
                                             model_view=view_model)
            self.logger.info("Finished operation launch:" + operation_name)
            return result

        except TVBException as excep:
            self.logger.exception("Could not launch operation " + operation_name +
                                  " with the given set of input data, because: " + excep.message)
            raise OperationException(excep.message, excep)
        except Exception as excep:
            self.logger.exception("Could not launch operation " + operation_name + " with the given set of input data!")
            raise OperationException(str(excep))

    @staticmethod
    def load_operation(operation_id):
        """ Retrieve previously stored Operation from DB, and load operation.burst attribute"""
        operation = dao.get_operation_by_id(operation_id)
        operation.burst = dao.get_burst_for_operation_id(operation_id)
        return operation

    @staticmethod
    def stop_operation(operation_id, is_group=False, remove_after_stop=False):
        # type: (int, bool, bool) -> bool
        """
        Stop (also named Cancel) the operation given by operation_id,
        and potentially also remove it after (with all linked data).
        In case the Operation has a linked Burst, remove that too.
        :param operation_id: ID for Operation (or OperationGroup) to be canceled/removed
        :param is_group: When true stop all the operations from that group.
        :param remove_after_stop: if True, also remove the operation(s) after stopping
        :returns: True if the stop step was successful
        """
        result = False
        if is_group:
            op_group = ProjectService.get_operation_group_by_id(operation_id)
            operations_in_group = ProjectService.get_operations_in_group(op_group)
            for operation in operations_in_group:
                result = OperationService.stop_operation(operation.id, False, remove_after_stop) or result
        elif dao.try_get_operation_by_id(operation_id) is not None:
            result = BackendClientFactory.stop_operation(operation_id)
            if remove_after_stop:
                burst_config = dao.get_burst_for_direct_operation_id(operation_id)
                ProjectService().remove_operation(operation_id)
                if burst_config is not None:
                    result = dao.remove_entity(BurstConfiguration, burst_config.id) or result

        return result
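
One detail worth noting in _handle_exception above: re-raising with exception.with_traceback(sys.exc_info()[2]) keeps the original stack trace even after the handler has run its cleanup. A self-contained sketch of that pattern:

import sys

def cleanup_and_reraise(exception, message):
    # e.g. remove temp files here, then re-raise preserving the
    # traceback of the exception currently being handled.
    exception.message = message
    raise exception.with_traceback(sys.exc_info()[2])

try:
    try:
        1 / 0
    except ZeroDivisionError as exc:
        cleanup_and_reraise(exc, "Could not launch operation!")
except ZeroDivisionError as exc:
    print(exc.message)  # the traceback still points at the 1 / 0 line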