Example #1
class ImportService(object):
    """
    Service for importing TVB entities into the system.
    It supports TVB exported H5 files as input, but it should also handle H5 files 
    generated outside of TVB, as long as they respect the same structure.
    """
    def __init__(self):
        self.logger = get_logger(__name__)
        self.user_id = None
        self.files_helper = FilesHelper()
        self.created_projects = []

    def _download_and_unpack_project_zip(self, uploaded, uq_file_name,
                                         temp_folder):

        if isinstance(uploaded, (FieldStorage, Part)):
            if not uploaded.file:
                raise ProjectImportException(
                    "Please select the archive which contains the project structure."
                )
            with open(uq_file_name, 'wb') as file_obj:
                self.files_helper.copy_file(uploaded.file, file_obj)
        else:
            shutil.copy2(uploaded, uq_file_name)

        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
        except FileStructureException as excep:
            self.logger.exception(excep)
            raise ProjectImportException(
                "Bad ZIP archive provided. A TVB exported project is expected!"
            )

    @staticmethod
    def _compute_unpack_path():
        """
        :return: the name of the folder where to expand uploaded zip
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day,
                                             now.hour, now.minute, now.second,
                                             now.microsecond)
        uq_name = "%s-ImportProject" % date_str
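        # e.g. "2014-3-28_14-5-7_123456-ImportProject" (timestamp illustrative; %d fields are not zero-padded)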
        return os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, uq_name)

    @transactional
    def import_project_structure(self, uploaded, user_id):
        """
        Execute import operations:
         
        1. check if ZIP or folder 
        2. find all project nodes
        3. for each project node:
            - create project
            - create all operations
            - import all images
            - create all dataTypes
        """

        self.user_id = user_id
        self.created_projects = []

        # Compute the name of the folder where the uploaded ZIP file will be unpacked
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        try:
            self._download_and_unpack_project_zip(uploaded, uq_file_name,
                                                  temp_folder)
            self._import_projects_from_folder(temp_folder)

        except Exception as excep:
            self.logger.exception(
                "Error encountered during import. Deleting projects created during this operation."
            )
            # Remove project folders created so far.
            # Note that using the project service to remove the projects will not work,
            # because we do not have support for nested transaction.
            # Removing from DB is not necessary because in transactional env a simple exception throw
            # will erase everything to be inserted.
            for project in self.created_projects:
                project_path = os.path.join(TvbProfile.current.TVB_STORAGE,
                                            FilesHelper.PROJECTS_FOLDER,
                                            project.name)
                shutil.rmtree(project_path)
            raise ProjectImportException(str(excep))

        finally:
            # Now delete uploaded file
            if os.path.exists(uq_file_name):
                os.remove(uq_file_name)
            # Now delete the temporary folder where the uploaded ZIP was unpacked.
            if os.path.exists(temp_folder):
                shutil.rmtree(temp_folder)

    @staticmethod
    def _load_burst_info_from_json(project_path):
        bursts_dict = {}
        dt_mappings_dict = {}
        bursts_file = os.path.join(project_path, BURST_INFO_FILE)
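        # Illustrative shape of the file content (ids hypothetical), inferred from how the two
        # dicts are consumed in _import_bursts and _store_imported_datatypes_in_db:
        #   {BURSTS_DICT_KEY: {"12": {...exported burst data...}},
        #    DT_BURST_MAP: {"<datatype_gid>": 12}}  # datatype gid -> old burst id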
        if os.path.isfile(bursts_file):
            with open(bursts_file) as f:
                bursts_info_dict = json.load(f)
            bursts_dict = bursts_info_dict[BURSTS_DICT_KEY]
            dt_mappings_dict = bursts_info_dict[DT_BURST_MAP]
            # Remove the file only when it actually exists; an unconditional remove would raise
            os.remove(bursts_file)
        return bursts_dict, dt_mappings_dict

    @staticmethod
    def _import_bursts(project_entity, bursts_dict):
        """
        Re-create the old bursts, keeping a mapping between the new id of each burst and the old id
        it had in the project from which it was exported, so we can later re-attach datatypes to them.
        """
        burst_ids_mapping = {}
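        # Resulting shape (ids hypothetical): {old_burst_id (int): new_burst_id}, e.g. {12: 3}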

        for old_burst_id in bursts_dict:
            burst_information = BurstInformation.load_from_dict(
                bursts_dict[old_burst_id])
            burst_entity = model.BurstConfiguration(project_entity.id)
            burst_entity.from_dict(burst_information.data)
            burst_entity = dao.store_entity(burst_entity)
            burst_ids_mapping[int(old_burst_id)] = burst_entity.id
            # We don't need the data in dictionary form anymore, so update it with new BurstInformation object
            bursts_dict[old_burst_id] = burst_information
        return burst_ids_mapping

    def _import_projects_from_folder(self, temp_folder):
        """
        Find every project in the unpacked archive and import it.
        """
        project_roots = []
        for root, _, files in os.walk(temp_folder):
            if FilesHelper.TVB_PROJECT_FILE in files:
                project_roots.append(root)

        for project_path in project_roots:
            update_manager = ProjectUpdateManager(project_path)
            update_manager.run_all_updates()
            project_entity = self.__populate_project(project_path)

            # Compute the path where to store files of the imported project
            new_project_path = os.path.join(TvbProfile.current.TVB_STORAGE,
                                            FilesHelper.PROJECTS_FOLDER,
                                            project_entity.name)
            if project_path != new_project_path:
                shutil.move(project_path, new_project_path)

            self.created_projects.append(project_entity)

            # Keep a list with all bursts that were imported, since we will also want to add the
            # workflow steps after we finish importing the operations and datatypes. We need to
            # store the bursts first, to learn which new ids to use for the operations' parent_burst.
            bursts_dict, dt_mappings_dict = self._load_burst_info_from_json(
                new_project_path)
            burst_ids_mapping = self._import_bursts(project_entity,
                                                    bursts_dict)

            # Now import project operations
            self.import_project_operations(project_entity, new_project_path,
                                           dt_mappings_dict, burst_ids_mapping)
            # Import images
            self._store_imported_images(project_entity)
            # Now we can finally import workflow related entities
            self.import_workflows(project_entity, bursts_dict,
                                  burst_ids_mapping)

    def import_workflows(self, project, bursts_dict, burst_ids_mapping):
        """
        Import the workflow entities for all bursts imported in the project.

        :param project: the current project, into which the workflows are imported

        :param bursts_dict: a dictionary that holds all the required information in order to
                            import the bursts from the new project

        :param burst_ids_mapping: a dictionary of the form {old_burst_id : new_burst_id} so we
                                  know what burst to link each workflow to
        """
        for burst_id in bursts_dict:
            workflows_info = bursts_dict[burst_id].get_workflows()
            for one_wf_info in workflows_info:
                # Use the new burst id when creating the workflow
                workflow_entity = model.Workflow(
                    project.id, burst_ids_mapping[int(burst_id)])
                workflow_entity.from_dict(one_wf_info.data)
                workflow_entity = dao.store_entity(workflow_entity)
                wf_steps_info = one_wf_info.get_workflow_steps()
                view_steps_info = one_wf_info.get_view_steps()
                self.import_workflow_steps(workflow_entity, wf_steps_info,
                                           view_steps_info)

    def import_workflow_steps(self, workflow, wf_steps, view_steps):
        """
        Import all workflow steps for the given workflow. We create both wf_steps and view_steps
        in the same method because, if a wf_step has to be omitted for some reason, we also need to
        omit the corresponding view step.
        :param workflow: a model.Workflow entity from which we need to add workflow steps

        :param wf_steps: a list of WorkflowStepInformation entities, from which we will rebuild 
                         the workflow steps

        :param view_steps: a list of WorkflowViewStepInformation entities, from which we will 
                           rebuild the workflow view steps

        """
        for wf_step in wf_steps:
            try:
                algorithm = wf_step.get_algorithm()
                if algorithm is None:
                    # The algorithm is invalid for some reason. Remove the corresponding view step as well.
                    position = wf_step.index()
                    # Iterate over a copy: removing from the list being iterated would skip elements
                    for entry in view_steps[:]:
                        if entry.index() == position:
                            view_steps.remove(entry)
                    continue
                wf_step_entity = model.WorkflowStep(algorithm.id)
                wf_step_entity.from_dict(wf_step.data)
                wf_step_entity.fk_workflow = workflow.id
                wf_step_entity.fk_operation = wf_step.get_operation_id()
                dao.store_entity(wf_step_entity)
            except Exception:
                # only log exception and ignore this as it is not very important:
                self.logger.exception("Could not restore WorkflowStep: %s" %
                                      wf_step.get_algorithm().name)

        for view_step in view_steps:
            try:
                algorithm = view_step.get_algorithm()
                if algorithm is None:
                    continue
                view_step_entity = model.WorkflowStepView(algorithm.id)
                view_step_entity.from_dict(view_step.data)
                view_step_entity.fk_workflow = workflow.id
                view_step_entity.fk_portlet = view_step.get_portlet().id
                dao.store_entity(view_step_entity)
            except Exception:
                # only log exception and ignore this as it is not very important:
                self.logger.exception("Could not restore WorkflowViewStep " +
                                      view_step.get_algorithm().name)

    @staticmethod
    def _append_tmp_to_folders_containing_operations(import_path):
        """
        Find folders containing operations and rename them, return the renamed paths
        """
        paths = []
        for root, _, files in os.walk(import_path):
            if FilesHelper.TVB_OPERARATION_FILE in files:
                # Found an operation folder - append TMP to its name
                tmp_op_folder = root + 'tmp'
                os.rename(root, tmp_op_folder)
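                # e.g. ".../Operation_7" becomes ".../Operation_7tmp" (folder name illustrative)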
                operation_file_path = os.path.join(
                    tmp_op_folder, FilesHelper.TVB_OPERARATION_FILE)
                paths.append(operation_file_path)
        return paths

    def _load_operations_from_paths(self, project, op_paths):
        """
        Load operations from paths containing them.
        :returns: Operations ordered by start/creation date, so data dependencies are resolved correctly
        """
        def by_time(op):
            return op.start_date or op.create_date or datetime.now()

        operations = []

        for operation_file_path in op_paths:
            operation = self.__build_operation_from_file(
                project, operation_file_path)
            operation.import_file = operation_file_path
            operations.append(operation)

        operations.sort(key=by_time)
        return operations

    def _load_datatypes_from_operation_folder(self, op_path, operation_entity,
                                              datatype_group):
        """
        Loads datatypes from operation folder
        :returns: Datatypes ordered by creation date (to solve any dependencies)
        """
        all_datatypes = []
        for file_name in os.listdir(op_path):
            if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                h5_file = os.path.join(op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(
                        op_path, file_name, operation_entity.id,
                        datatype_group)
                    all_datatypes.append(datatype)

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning(
                        "Incompatible H5 file will be ignored: %s" % h5_file)
                    self.logger.exception("Incompatibility details ...")

        all_datatypes.sort(key=lambda dt: dt.create_date)
        for dt in all_datatypes:
            self.logger.debug("Import order %s: %s" % (dt.type, dt.gid))
        return all_datatypes

    def _store_imported_datatypes_in_db(self, project, all_datatypes,
                                        dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_already_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_already_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(
                    datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_already_in_tvb.id],
                                        project.id)

    def _store_imported_images(self, project):
        """
        Import all images from project
        """
        images_root = self.files_helper.get_images_folder(project.name)
        for root, _, files in os.walk(images_root):
            for file_name in files:
                if file_name.endswith(FilesHelper.TVB_FILE_EXTENSION):
                    self._populate_image(os.path.join(root, file_name),
                                         project.id)

    def import_project_operations(self,
                                  project,
                                  import_path,
                                  dt_burst_mappings=None,
                                  burst_ids_mapping=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported
        """
        op_paths = self._append_tmp_to_folders_containing_operations(
            import_path)
        operations = self._load_operations_from_paths(project, op_paths)

        imported_operations = []
        datatypes = []

        # Here we process each operation found
        for operation in operations:
            self.logger.debug("Importing operation " + str(operation))
            old_operation_folder, _ = os.path.split(operation.import_file)
            operation_entity, datatype_group = self.__import_operation(
                operation)

            # Rename operation folder with the ID of the stored operation
            new_operation_path = FilesHelper().get_operation_folder(
                project.name, operation_entity.id)
            if old_operation_folder != new_operation_path:
                # Delete folder of the new operation, otherwise move will fail
                shutil.rmtree(new_operation_path)
                shutil.move(old_operation_folder, new_operation_path)

            operation_datatypes = self._load_datatypes_from_operation_folder(
                new_operation_path, operation_entity, datatype_group)
            imported_operations.append(operation_entity)
            datatypes.extend(operation_datatypes)

        self._store_imported_datatypes_in_db(project, datatypes,
                                             dt_burst_mappings,
                                             burst_ids_mapping)
        return imported_operations

    def _populate_image(self, file_name, project_id):
        """
        Create and store an image entity.
        """
        figure_dict = XMLReader(file_name).read_metadata()
        new_path = os.path.join(
            os.path.split(file_name)[0],
            os.path.split(figure_dict['file_path'])[1])
        if not os.path.exists(new_path):
            self.logger.warning("Expected to find image path %s. Skipping." % new_path)
            return

        op = dao.get_operation_by_gid(figure_dict['fk_from_operation'])
        figure_dict['fk_op_id'] = op.id if op is not None else None
        figure_dict['fk_user_id'] = self.user_id
        figure_dict['fk_project_id'] = project_id
        figure_entity = manager_of_class(model.ResultFigure).new_instance()
        figure_entity = figure_entity.from_dict(figure_dict)
        stored_entity = dao.store_entity(figure_entity)

        # Update image meta-data with the new details after import
        figure = dao.load_figure(stored_entity.id)
        self.logger.debug("Store imported figure")
        self.files_helper.write_image_metadata(figure)

    def load_datatype_from_file(self,
                                storage_folder,
                                file_name,
                                op_id,
                                datatype_group=None,
                                move=True):
        """
        Creates an instance of datatype from storage / H5 file 
        :returns: datatype
        """
        self.logger.debug("Loading datatType from file: %s" % file_name)
        storage_manager = HDF5StorageManager(storage_folder, file_name)
        meta_dictionary = storage_manager.get_metadata()
        meta_structure = DataTypeMetaData(meta_dictionary)

        # Now try to determine class and instantiate it
        class_name = meta_structure[DataTypeMetaData.KEY_CLASS_NAME]
        class_module = meta_structure[DataTypeMetaData.KEY_MODULE]
        datatype = __import__(class_module, globals(), locals(), [class_name])
        datatype = getattr(datatype, class_name)
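        # The two lines above are the dynamic equivalent of: from <class_module> import <class_name>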
        type_instance = manager_of_class(datatype).new_instance()

        # Now we fill data into instance
        type_instance.type = str(type_instance.__class__.__name__)
        type_instance.module = str(type_instance.__module__)

        # Fill instance with meta data
        type_instance.load_from_metadata(meta_dictionary)

        # Add all the required attributes
        if datatype_group is not None:
            type_instance.fk_datatype_group = datatype_group.id
        type_instance.set_operation_id(op_id)

        # Now move storage file into correct folder if necessary
        current_file = os.path.join(storage_folder, file_name)
        new_file = type_instance.get_storage_file_path()
        if new_file != current_file and move:
            shutil.move(current_file, new_file)

        return type_instance

    def store_datatype(self, datatype):
        """This method stores data type into DB"""
        try:
            self.logger.debug("Store datatype: %s with Gid: %s" %
                              (datatype.__class__.__name__, datatype.gid))
            return dao.store_entity(datatype)
        except MissingDataSetException:
            self.logger.error(
                "Datatype %s has missing data and could not be imported properly."
                % (datatype, ))
            os.remove(datatype.get_storage_file_path())
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = "Could not import data with gid: %s. There is already a one with " \
                        "the same name or gid." % datatype.gid
            # Delete file if can't be imported
            os.remove(datatype.get_storage_file_path())
            raise ProjectImportException(error_msg)

    def __populate_project(self, project_path):
        """
        Create and store a Project entity.
        """
        self.logger.debug("Creating project from path: %s" % project_path)
        project_dict = self.files_helper.read_project_metadata(project_path)

        project_entity = manager_of_class(model.Project).new_instance()
        project_entity = project_entity.from_dict(project_dict, self.user_id)

        try:
            self.logger.debug("Storing imported project")
            return dao.store_entity(project_entity)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = (
                "Could not import project: %s with gid: %s. There is already a "
                "project with the same name or gid.") % (project_entity.name,
                                                         project_entity.gid)
            raise ProjectImportException(error_msg)

    def __build_operation_from_file(self, project, operation_file):
        """
        Create Operation entity from metadata file.
        """
        operation_dict = XMLReader(operation_file).read_metadata()
        operation_entity = manager_of_class(model.Operation).new_instance()
        return operation_entity.from_dict(operation_dict, dao, self.user_id,
                                          project.gid)

    @staticmethod
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(
                    operation_group_id)
            except SQLAlchemyError:
                # If no DataType group is present for the current operation group, create it.
                operation_group = dao.get_operationgroup_by_id(
                    operation_group_id)
                datatype_group = model.DataTypeGroup(
                    operation_group, operation_id=operation_entity.id)
                datatype_group.state = ADAPTERS['Upload']['defaultdatastate']
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group

    def load_burst_entity(self, json_burst, project_id):
        """
        Load BurstConfiguration from JSON (possibly exported from a different machine).
        Nothing gets persisted in DB or on disk.

        :param json_burst: Burst JSON export
        :param project_id: Current project ID (it will be used if the user later starts this simulation)
        :return: BurstConfiguration filled from JSON
        """

        burst_information = BurstInformation.load_from_dict(json_burst)
        burst_entity = model.BurstConfiguration(project_id)
        burst_entity.from_dict(burst_information.data)
        burst_entity.prepare_after_load()
        burst_entity.reset_tabs()

        workflow_info = burst_information.get_workflows()[0]
        workflow_entity = model.Workflow(project_id, None)
        workflow_entity.from_dict(workflow_info.data)

        view_steps = workflow_info.get_view_steps()
        analyze_steps = workflow_info.get_workflow_steps()

        for view_step in view_steps:
            try:
                algorithm = view_step.get_algorithm()
                portlet = view_step.get_portlet()
                view_step_entity = model.WorkflowStepView(
                    algorithm.id, portlet_id=portlet.id)
                view_step_entity.from_dict(view_step.data)
                view_step_entity.workflow = workflow_entity

                ## For each visualize step, also load all of the analyze steps.
                analyzers = []
                for an_step in analyze_steps:
                    if (an_step.data["tab_index"] != view_step_entity.tab_index
                            or an_step.data["index_in_tab"] !=
                            view_step_entity.index_in_tab):
                        continue
                    algorithm = an_step.get_algorithm()
                    wf_step_entity = model.WorkflowStep(algorithm.id)
                    wf_step_entity.from_dict(an_step.data)
                    wf_step_entity.workflow = workflow_entity
                    analyzers.append(wf_step_entity)

                portlet = PortletConfiguration(portlet.id)
                portlet.set_visualizer(view_step_entity)
                portlet.set_analyzers(analyzers)
                burst_entity.set_portlet(view_step_entity.tab_index,
                                         view_step_entity.index_in_tab,
                                         portlet)

            except Exception:
                # only log exception and ignore this step from loading
                self.logger.exception("Could not restore Workflow Step " +
                                      view_step.get_algorithm().name)

        return burst_entity
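
A minimal usage sketch for the service above (path and user id are illustrative; `uploaded` may also be a cherrypy FieldStorage/Part, as handled in _download_and_unpack_project_zip):

import_service = ImportService()
try:
    import_service.import_project_structure("/tmp/exported_project.zip", user_id=42)
except ProjectImportException as excep:
    # Partially created projects and temporary files were already cleaned up by the service
    print("Import failed: %s" % excep)

for project in import_service.created_projects:
    print("Imported project: %s" % project.name)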
Example #2
class TestFigureService(TransactionalTestCase):
    """
    Tests for the figure service
    """
    def transactional_setup_method(self):
        self.figure_service = FigureService()
        self.user = TestFactory.create_user()
        self.project = TestFactory.create_project(admin=self.user)
        self.files_helper = FilesHelper()


    def transactional_teardown_method(self):
        self.delete_project_folders()


    def assertCanReadImage(self, image_path):
        try:
            Image.open(image_path).load()
        except (IOError, ValueError):
            raise AssertionError("Could not open %s as a image" % image_path)


    def store_test_png(self):
        self.figure_service.store_result_figure(self.project, self.user, "png", IMG_DATA, image_name="test-figure")


    def retrieve_images(self):
        figures_by_session, _ = self.figure_service.retrieve_result_figures(self.project, self.user)
        # flatten image session grouping
        figures = []
        for fg in figures_by_session.values():
            figures.extend(fg)
        return figures


    def test_store_image(self):
        self.store_test_png()


    def test_store_image_from_operation(self):
        # test that image can be retrieved from operation
        test_operation = TestFactory.create_operation(test_user=self.user, test_project=self.project)

        self.figure_service.store_result_figure(self.project, self.user, "png",
                                                IMG_DATA, operation_id=test_operation.id)
        figures = dao.get_figures_for_operation(test_operation.id)
        assert 1 == len(figures)
        image_path = self.files_helper.get_images_folder(self.project.name)
        image_path = os.path.join(image_path, figures[0].file_path)
        self.assertCanReadImage(image_path)


    def test_store_and_retrieve_image(self):
        self.store_test_png()
        figures = self.retrieve_images()
        assert 1 == len(figures)
        image_path = utils.url2path(figures[0].file_path)
        self.assertCanReadImage(image_path)


    def test_load_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.load_figure(figures[0].id)


    def test_edit_figure(self):
        session_name = 'the altered ones'
        name = 'altered'
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.edit_result_figure(figures[0].id, session_name=session_name, name=name)
        figures_by_session, _ = self.figure_service.retrieve_result_figures(self.project, self.user)
        assert [session_name] == list(figures_by_session.keys())
        assert name == list(figures_by_session.values())[0][0].name


    def test_remove_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        assert 1 == len(figures)
        self.figure_service.remove_result_figure(figures[0].id)
        figures = self.retrieve_images()
        assert 0 == len(figures)
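
IMG_DATA above is assumed to be PNG payload data defined elsewhere in the test module. A sketch of one way to produce such data with Pillow (whether store_result_figure expects raw or base64-encoded bytes is an assumption to verify against FigureService):

import base64
from io import BytesIO
from PIL import Image

buffer = BytesIO()
Image.new("RGB", (8, 8), color="red").save(buffer, format="PNG")
IMG_DATA = base64.b64encode(buffer.getvalue())  # assumed: base64-encoded PNG bytes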
Example #3
class ProjectService:
    """
    Services layer for Project entities.
    """

    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()

    def store_project(self, current_user, is_create, selected_id, **data):
        """
        Create or update a Project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_users_except(prj_admin, int(page), MEMBERS_PAGE_SIZE)[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)

        selected_user_ids = data["users"]
        if is_create and current_user.id not in selected_user_ids:
            # Make the project admin also member of the current project
            selected_user_ids.append(current_user.id)
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:'******'-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no

    def retrieve_projects_for_user(self, user_id, current_page=1):
        """
        Return a list with all Projects visible for current user.
        """
        start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
        total = dao.get_projects_for_user(user_id, is_count=True)
        available_projects = dao.get_projects_for_user(user_id, start_idx, PROJECTS_PAGE_SIZE)
        pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0)
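        # e.g. total == 26 with PROJECTS_PAGE_SIZE == 10 gives pages_no == 3 (numbers illustrative)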
        for prj in available_projects:
            fns, sta, err, canceled, pending = dao.get_operation_numbers(prj.id)
            prj.operations_finished = fns
            prj.operations_started = sta
            prj.operations_error = err
            prj.operations_canceled = canceled
            prj.operations_pending = pending
            prj.disk_size = dao.get_project_disk_size(prj.id)
            prj.disk_size_human = format_bytes_human(prj.disk_size)
        self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
        return available_projects, pages_no

    @staticmethod
    def retrieve_all_user_projects(user_id, page_start=0, page_size=PROJECTS_PAGE_SIZE):
        """
        Return a list with all projects visible for the current user. Unlike retrieve_projects_for_user,
        no per-project statistics are attached; paging is controlled through page_start and page_size.
        """
        return dao.get_projects_for_user(user_id, page_start=page_start, page_size=page_size)

    @staticmethod
    def get_linkable_projects_for_user(user_id, data_id):
        """
        Find projects which are visible to the current user and into which the given datatype has not been linked yet.
        """
        return dao.get_linkable_projects_for_user(user_id, data_id)

    @transactional
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))

    # ----------------- Methods for populating Data-Structure Page ---------------

    @staticmethod
    def get_datatype_in_group(group):
        """
        Return all dataTypes that are the result of the same DTgroup.
        """
        return dao.get_datatype_in_group(datatype_group_id=group)

    @staticmethod
    def get_datatypes_from_datatype_group(datatype_group_id):
        """
        Retrieve all dataType which are part from the given dataType group.
        """
        return dao.get_datatypes_from_datatype_group(datatype_group_id)

    @staticmethod
    def load_operation_by_gid(operation_gid):
        """ Retrieve loaded Operation from DB"""
        return dao.get_operation_by_gid(operation_gid)

    @staticmethod
    def load_operation_lazy_by_gid(operation_gid):
        """ Retrieve lazy Operation from DB"""
        return dao.get_operation_lazy_by_gid(operation_gid)

    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_id(operation_group_id)

    @staticmethod
    def get_operation_group_by_gid(operation_group_gid):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_gid(operation_group_gid)

    @staticmethod
    def get_operations_in_group(operation_group):
        """ Return all the operations from an operation group. """
        return dao.get_operations_in_group(operation_group.id)

    @staticmethod
    def is_upload_operation(operation_gid):
        """ Returns True only if the operation with the given GID is an upload operation. """
        return dao.is_upload_operation(operation_gid)

    @staticmethod
    def get_all_operations_for_uploaders(project_id):
        """ Returns all finished upload operations. """
        return dao.get_all_operations_for_uploaders(project_id)

    def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True, then this method will change the visibility for all
        the operations from the OperationGroup whose GID field equals 'entity_gid'.
        """

        def set_visibility(op):
            # workaround:
            # 'reload' the operation so that it has the project property set.
            # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session
            # write_operation_metadata requires that property
            op = dao.get_operation_by_id(op.id)
            # end hack
            op.visible = is_visible
            self.structure_helper.write_operation_metadata(op)
            dao.store_entity(op)

        def set_group_descendants_visibility(operation_group_id):
            ops_in_group = dao.get_operations_in_group(operation_group_id)
            for group_op in ops_in_group:
                set_visibility(group_op)

        if is_operation_group:
            op_group_id = dao.get_operationgroup_by_gid(entity_gid).id
            set_group_descendants_visibility(op_group_id)
        else:
            operation = dao.get_operation_by_gid(entity_gid)
            # ensure that if the operation belongs to a group, the visibility is changed for the entire group
            if operation.fk_operation_group is not None:
                set_group_descendants_visibility(operation.fk_operation_group)
            else:
                set_visibility(operation)

    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            # Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        user_display_name = dao.get_user_by_id(operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        # Add all parameter which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details

    @staticmethod
    def get_filterable_meta():
        """
        Contains all the attributes by which
        the user can structure the tree of DataTypes
        """
        return DataTypeMetaData.get_filterable_meta()

    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            # Filter by dt.type, otherwise Links to individual DTs inside a group would be mistaken for groups
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)

    @staticmethod
    def get_datatype_details(datatype_gid):
        """
        :returns: a tuple. The first entry is an instance of DataTypeOverlayDetails, the second
            contains all the possible states for the specified dataType, and the third is the
            DataType entity itself (or None when the details could not be retrieved).
        """
        meta_atts = DataTypeOverlayDetails()
        states = DataTypeMetaData.STATES
        try:
            datatype_result = dao.get_datatype_details(datatype_gid)
            meta_atts.fill_from_datatype(datatype_result, datatype_result._parent_burst)
            return meta_atts, states, datatype_result
        except Exception:
            ## We ignore the exception here and simply return no details.
            return meta_atts, states, None

    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = Operation(dao.get_system_user().id,
                                       links[0].fk_to_project,
                                       datatype.parent_operation.fk_from_algo,
                                       datatype.parent_operation.parameters,
                                       datatype.parent_operation.meta_data,
                                       datatype.parent_operation.status,
                                       datatype.parent_operation.start_date,
                                       datatype.parent_operation.completion_date,
                                       datatype.parent_operation.fk_operation_group,
                                       datatype.parent_operation.additional_info,
                                       datatype.parent_operation.user_group,
                                       datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id), full_path)
                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")

    def remove_operation(self, operation_id):
        """
        Remove a given operation
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if operation is not None:
            self.logger.debug("Deleting operation %s " % operation)
            datatypes_for_op = dao.get_results_for_operation(operation_id)
            for dt in reversed(datatypes_for_op):
                self.remove_datatype(operation.project.id, dt.gid, False)
            dao.remove_entity(Operation, operation.id)
            self.structure_helper.remove_operation_data(operation.project.name, operation_id)
            self.logger.debug("Finished deleting operation %s " % operation)
        else:
            self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)

    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DataTypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entities in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove the Operation while DataTypes still exist referring to it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, operation_id)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

    def update_metadata(self, submit_data):
        """
        Update DataType / DataTypeGroup metadata.
        THROWS StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                              new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))
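
    # Illustrative input (the keys are the real constants used below; the values are
    # made up for the example):
    #
    #   submit_data = {CommonDetails.CODE_GID: datatype_gid,
    #                  CommonDetails.CODE_OPERATION_TAG: '',
    #                  DataTypeOverlayDetails.DATA_SUBJECT: 'John Doe',
    #                  DataTypeOverlayDetails.DATA_STATE: 'INTERMEDIATE'}
    #   service.update_metadata(submit_data)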

    def _edit_data(self, datatype, new_data, from_group=False):
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        from tvb.basic.traits.types_mapped import MappedType

        if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
            if not datatype.invalid:
                datatype.invalid = True
                dao.store_entity(datatype)
            return
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update dataType fields:
        datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in H5 as well.
        datatype.persist_full_metadata()
        # 4. Update the group_name/user_group into the operation meta-data file
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                        str(datatype.fk_from_operation), from_group)

    def get_datatype_and_datatypegroup_inputs_for_operation(self, operation_gid, selected_filter):
        """
        Returns the dataTypes that are used as input parameters for the given operation.
        'selected_filter' is expected to be a visibility filter.

        If any dataType is part of a dataType group then the dataType group will
        be returned instead of that dataType.
        """
        all_datatypes = self._review_operation_inputs(operation_gid)[0]
        datatype_inputs = []
        for datatype in all_datatypes:
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW:
                if datatype.visible:
                    datatype_inputs.append(datatype)
            else:
                datatype_inputs.append(datatype)
        datatypes = []
        datatype_groups = dict()
        for data_type in datatype_inputs:
            if data_type.fk_datatype_group is None:
                datatypes.append(data_type)
            elif data_type.fk_datatype_group not in datatype_groups:
                dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
                datatype_groups[data_type.fk_datatype_group] = dt_group

        datatypes.extend(datatype_groups.values())
        return datatypes
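
    # Illustrative call: with the RELEVANT_VIEW filter only visible inputs are kept,
    # and inputs belonging to a group are collapsed into their DataTypeGroup:
    #
    #   inputs = service.get_datatype_and_datatypegroup_inputs_for_operation(
    #       operation_gid, relevant_filter)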

    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except Exception:
            self.logger.exception("Could not load details for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm changed dramatically. We cannot offer more details.")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters

    def get_datatypes_inputs_for_operation_group(self, group_id, selected_filter):
        """
        Returns the dataType inputs for an operation group. If more dataTypes
        are part of the same dataType group then only the dataType group will
        be returned instead of them.
        """
        operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
        op_group_inputs = dict()
        for gid in operations_gids:
            op_inputs = self.get_datatype_and_datatypegroup_inputs_for_operation(gid[0], selected_filter)
            for datatype in op_inputs:
                op_group_inputs[datatype.id] = datatype
        return list(op_group_inputs.values())

    @staticmethod
    def get_results_for_operation(operation_id, selected_filter=None):
        """
        Retrieve the DataType entities resulting from the execution of the given operation.
        """
        return dao.get_results_for_operation(operation_id, selected_filter)

    @staticmethod
    def get_operations_for_datatype_group(datatype_group_id, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter a dataType from the given DataTypeGroup.
        'visibility_filter' is a filter used for retrieving all the operations or only the relevant ones.

        If only_in_groups is True, then this method will return only the operations that are
        part of an operation group; otherwise it will return only the operations that
        are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype_group(datatype_group_id, only_relevant=False,
                                                         only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype_group(datatype_group_id, only_in_groups=only_in_groups)

    @staticmethod
    def get_operations_for_datatype(datatype_gid, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter the dataType with the specified GID.

        If only_in_groups is True, then this method will return only the operations that are part
        of an operation group; otherwise it will return only the operations that are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype(datatype_gid, only_relevant=False, only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype(datatype_gid, only_in_groups=only_in_groups)

    @staticmethod
    def get_datatype_by_id(datatype_id):
        """Retrieve a DataType DB reference by its id."""
        return dao.get_datatype_by_id(datatype_id)

    @staticmethod
    def get_datatypegroup_by_gid(datatypegroup_gid):
        """ Returns the DataTypeGroup with the specified gid. """
        return dao.get_datatype_group_by_gid(datatypegroup_gid)

    @staticmethod
    def count_datatypes_generated_from(datatype_gid):
        """
        :return: the number of datatypes resulting from operations that had as
                 input the datatype given by 'datatype_gid'.
        """
        return dao.count_datatypes_generated_from(datatype_gid)

    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)

    @staticmethod
    def get_datatypes_in_project(project_id, only_visible=False):
        return dao.get_data_in_project(project_id, only_visible)


    @staticmethod
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
        dataType group, then this method will set the visibility for each dataType in that group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)
            dt.persist_full_metadata()

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        # update the datatype or datatype group.
        set_visibility(datatype)

    @staticmethod
    def is_datatype_group(datatype_gid):
        """ Used to check if the dataType with the specified GID is a DataTypeGroup. """
        return dao.is_datatype_group(datatype_gid)
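

# --- Usage sketch (not part of the original listing) ---
# Assumes the class above is TVB's ProjectService (tvb.core.services.project_service);
# PROJECT_ID and DATATYPE_GID are placeholders, not values taken from this listing.
if __name__ == "__main__":
    from tvb.core.services.project_service import ProjectService

    PROJECT_ID = 1                        # placeholder: id of an existing project
    DATATYPE_GID = "replace-with-a-gid"   # placeholder: GID of an existing datatype

    service = ProjectService()
    # Hiding cascades over the whole group when the GID belongs to a DataTypeGroup
    service.set_datatype_visibility(DATATYPE_GID, is_visible=False)
    # Removing a datatype also drops the operations that produced it
    service.remove_datatype(PROJECT_ID, DATATYPE_GID, skip_validation=True)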
Example #4
0
class ImportService(object):
    """
    Service for importing TVB entities into system.
    It supports TVB exported H5 files as input, but it should also handle H5 files 
    generated outside of TVB, as long as they respect the same structure.
    """
    def __init__(self):
        self.logger = get_logger(__name__)
        self.user_id = None
        self.files_helper = FilesHelper()
        self.created_projects = []

    def _download_and_unpack_project_zip(self, uploaded, uq_file_name,
                                         temp_folder):

        if isinstance(uploaded, FieldStorage) or isinstance(uploaded, Part):
            if not uploaded.file:
                raise ProjectImportException(
                    "Please select the archive which contains the project structure."
                )
            with open(uq_file_name, 'wb') as file_obj:
                self.files_helper.copy_file(uploaded.file, file_obj)
        else:
            shutil.copy2(uploaded, uq_file_name)

        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
        except FileStructureException as excep:
            self.logger.exception(excep)
            raise ProjectImportException(
                "Bad ZIP archive provided. A TVB exported project is expected!"
            )

    @staticmethod
    def _compute_unpack_path():
        """
        :return: the name of the folder where to expand uploaded zip
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day,
                                             now.hour, now.minute, now.second,
                                             now.microsecond)
        uq_name = "%s-ImportProject" % date_str
        return os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, uq_name)

    @transactional
    def import_project_structure(self, uploaded, user_id):
        """
        Execute import operations:
         
        1. check if ZIP or folder 
        2. find all project nodes
        3. for each project node:
            - create project
            - create all operations
            - import all images
            - create all dataTypes
        """

        self.user_id = user_id
        self.created_projects = []

        # Now compute the name of the folder where to explode uploaded ZIP file
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        try:
            self._download_and_unpack_project_zip(uploaded, uq_file_name,
                                                  temp_folder)
            self._import_projects_from_folder(temp_folder)

        except Exception as excep:
            self.logger.exception(
                "Error encountered during import. Deleting projects created during this operation."
            )
            # Remove project folders created so far.
            # Note that using the project service to remove the projects will not work,
            # because we do not have support for nested transaction.
            # Removing from DB is not necessary because in transactional env a simple exception throw
            # will erase everything to be inserted.
            for project in self.created_projects:
                project_path = os.path.join(TvbProfile.current.TVB_STORAGE,
                                            FilesHelper.PROJECTS_FOLDER,
                                            project.name)
                shutil.rmtree(project_path)
            raise ProjectImportException(str(excep))

        finally:
            # Now delete uploaded file
            if os.path.exists(uq_file_name):
                os.remove(uq_file_name)
            # Now delete temporary folder where uploaded ZIP was exploded.
            if os.path.exists(temp_folder):
                shutil.rmtree(temp_folder)

    @staticmethod
    def _load_burst_info_from_json(project_path):
        bursts_dict = {}
        dt_mappings_dict = {}
        bursts_file = os.path.join(project_path, BURST_INFO_FILE)
        if os.path.isfile(bursts_file):
            with open(bursts_file) as f:
                bursts_info_dict = json.load(f)
            bursts_dict = bursts_info_dict[BURSTS_DICT_KEY]
            dt_mappings_dict = bursts_info_dict[DT_BURST_MAP]
            # Only remove the file if it was actually there
            os.remove(bursts_file)
        return bursts_dict, dt_mappings_dict
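
    # Illustrative file layout (the actual top-level keys are the BURSTS_DICT_KEY and
    # DT_BURST_MAP constants):
    #
    #   { <BURSTS_DICT_KEY>: {"1": {...burst attributes...}},
    #     <DT_BURST_MAP>:    {"<datatype_gid>": 1} }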

    @staticmethod
    def _import_bursts(project_entity, bursts_dict):
        """
        Re-create old bursts, but keep a mapping between the id each one gets here and the old id
        it had in the project from which it was exported, so we can re-attach datatypes to them.
        """
        burst_ids_mapping = {}

        # for old_burst_id in bursts_dict:
        # burst_information = BurstInformation.load_from_dict(bursts_dict[old_burst_id])
        # burst_entity = BurstConfiguration(project_entity.id)
        # burst_entity.from_dict(burst_information.data)
        # burst_entity = dao.store_entity(burst_entity)
        # burst_ids_mapping[int(old_burst_id)] = burst_entity.id
        # We don't need the data in dictionary form anymore, so update it with new BurstInformation object
        # bursts_dict[old_burst_id] = burst_information
        return burst_ids_mapping

    def _import_projects_from_folder(self, temp_folder):
        """
        Process each project from the uploaded pack, to extract names.
        """
        project_roots = []
        for root, _, files in os.walk(temp_folder):
            if FilesHelper.TVB_PROJECT_FILE in files:
                project_roots.append(root)

        for project_path in project_roots:
            update_manager = ProjectUpdateManager(project_path)
            update_manager.run_all_updates()
            project_entity = self.__populate_project(project_path)

            # Compute the path where to store files of the imported project
            new_project_path = os.path.join(TvbProfile.current.TVB_STORAGE,
                                            FilesHelper.PROJECTS_FOLDER,
                                            project_entity.name)
            if project_path != new_project_path:
                shutil.move(project_path, new_project_path)

            self.created_projects.append(project_entity)

            # Keep a list of all bursts that were imported, since we will also want to add the
            # workflow steps after we have finished importing the operations and datatypes. Bursts
            # need to be stored first, since we must know their new ids for the operations' parent_burst.
            # bursts_dict, dt_mappings_dict = self._load_burst_info_from_json(new_project_path)
            # burst_ids_mapping = self._import_bursts(project_entity, bursts_dict)

            # Now import project operations
            self.import_project_operations(project_entity, new_project_path)
            # Import images
            self._store_imported_images(project_entity)

    @staticmethod
    def _append_tmp_to_folders_containing_operations(import_path):
        """
        Find folders containing operations and rename them, return the renamed paths
        """
        paths = []
        for root, _, files in os.walk(import_path):
            if FilesHelper.TVB_OPERARATION_FILE in files:
                # Found an operation folder - append TMP to its name
                tmp_op_folder = root + 'tmp'
                os.rename(root, tmp_op_folder)
                operation_file_path = os.path.join(
                    tmp_op_folder, FilesHelper.TVB_OPERARATION_FILE)
                paths.append(operation_file_path)
        return paths

    def _load_operations_from_paths(self, project, op_paths):
        """
        Load operations from paths containing them.
        :returns: Operations ordered by start/creation date, to be sure data dependencies are resolved correctly
        """
        def by_time(op):
            return op.start_date or op.create_date or datetime.now()

        operations = []

        for operation_file_path in op_paths:
            operation = self.__build_operation_from_file(
                project, operation_file_path)
            operation.import_file = operation_file_path
            operations.append(operation)

        operations.sort(key=by_time)
        return operations

    def _load_datatypes_from_operation_folder(self, op_path, operation_entity,
                                              datatype_group):
        """
        Loads datatypes from operation folder
        :returns: Datatypes ordered by creation date (to solve any dependencies)
        """
        all_datatypes = []
        for file_name in os.listdir(op_path):
            if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                h5_file = os.path.join(op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(
                        op_path, file_name, operation_entity.id,
                        datatype_group)
                    all_datatypes.append(datatype)

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning(
                        "Incompatible H5 file will be ignored: %s" % h5_file)
                    self.logger.exception("Incompatibility details ...")

        all_datatypes.sort(key=lambda dt_date: dt_date.create_date)
        for dt in all_datatypes:
            self.logger.debug("Import order %s: %s" % (dt.type, dt.gid))
        return all_datatypes

    def _store_imported_datatypes_in_db(self, project, all_datatypes,
                                        dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_already_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_already_in_tvb:
                self.store_datatype(datatype)
            else:
                AlgorithmService.create_link([datatype_already_in_tvb.id],
                                             project.id)

    def _store_imported_images(self, project):
        """
        Import all images from project
        """
        images_root = self.files_helper.get_images_folder(project.name)
        for root, _, files in os.walk(images_root):
            for file_name in files:
                if file_name.endswith(FilesHelper.TVB_FILE_EXTENSION):
                    self._populate_image(os.path.join(root, file_name),
                                         project.id)

    def import_project_operations(self,
                                  project,
                                  import_path,
                                  dt_burst_mappings=None,
                                  burst_ids_mapping=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported
        """
        op_paths = self._append_tmp_to_folders_containing_operations(
            import_path)
        operations = self._load_operations_from_paths(project, op_paths)

        imported_operations = []

        # Here we process each operation found
        for operation in operations:
            self.logger.debug("Importing operation " + str(operation))
            old_operation_folder, _ = os.path.split(operation.import_file)
            operation_entity, datatype_group = self.__import_operation(
                operation)

            # Rename operation folder with the ID of the stored operation
            new_operation_path = FilesHelper().get_operation_folder(
                project.name, operation_entity.id)
            if old_operation_folder != new_operation_path:
                # Delete folder of the new operation, otherwise move will fail
                shutil.rmtree(new_operation_path)
                shutil.move(old_operation_folder, new_operation_path)

            operation_datatypes = self._load_datatypes_from_operation_folder(
                new_operation_path, operation_entity, datatype_group)
            self._store_imported_datatypes_in_db(project, operation_datatypes,
                                                 dt_burst_mappings,
                                                 burst_ids_mapping)
            imported_operations.append(operation_entity)

        return imported_operations

    def _populate_image(self, file_name, project_id):
        """
        Create and store an image entity.
        """
        figure_dict = XMLReader(file_name).read_metadata()
        new_path = os.path.join(
            os.path.split(file_name)[0],
            os.path.split(figure_dict['file_path'])[1])
        if not os.path.exists(new_path):
            self.logger.warning("Expected to find image path %s. Skipping." %
                                new_path)
            return

        op = dao.get_operation_by_gid(figure_dict['fk_from_operation'])
        figure_dict['fk_op_id'] = op.id if op is not None else None
        figure_dict['fk_user_id'] = self.user_id
        figure_dict['fk_project_id'] = project_id
        figure_entity = manager_of_class(ResultFigure).new_instance()
        figure_entity = figure_entity.from_dict(figure_dict)
        stored_entity = dao.store_entity(figure_entity)

        # Update image meta-data with the new details after import
        figure = dao.load_figure(stored_entity.id)
        self.logger.debug("Store imported figure")
        self.files_helper.write_image_metadata(figure)

    def load_datatype_from_file(self,
                                storage_folder,
                                file_name,
                                op_id,
                                datatype_group=None,
                                move=True,
                                final_storage=None):
        """
        Creates an instance of datatype from a storage / H5 file.
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % file_name)
        datatype, generic_attributes = h5.load_with_references(
            os.path.join(storage_folder, file_name))
        index_class = h5.REGISTRY.get_index_for_datatype(datatype.__class__)
        datatype_index = index_class()
        datatype_index.fill_from_has_traits(datatype)
        datatype_index.fill_from_generic_attributes(generic_attributes)

        # Add all the required attributes
        if datatype_group is not None:
            datatype_index.fk_datatype_group = datatype_group.id
        datatype_index.fk_from_operation = op_id

        associated_file = h5.path_for_stored_index(datatype_index)
        if os.path.exists(associated_file):
            datatype_index.disk_size = FilesHelper.compute_size_on_disk(
                associated_file)

        # Now move storage file into correct folder if necessary
        if move and final_storage is not None:
            current_file = os.path.join(storage_folder, file_name)
            h5_type = h5.REGISTRY.get_h5file_for_datatype(datatype.__class__)
            final_path = h5.path_for(final_storage, h5_type, datatype.gid)
            if final_path != current_file:
                shutil.move(current_file, final_path)

        return datatype_index
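
    # Usage sketch (names illustrative): load an H5 result into an index entity and
    # move the file into the operation's final storage folder:
    #
    #   dt_index = service.load_datatype_from_file(tmp_folder, "TimeSeries_xxxx.h5",
    #                                              op.id, move=True, final_storage=op_folder)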

    def store_datatype(self, datatype):
        """This method stores data type into DB"""
        try:
            self.logger.debug("Store datatype: %s with Gid: %s" %
                              (datatype.__class__.__name__, datatype.gid))
            return dao.store_entity(datatype)
        except MissingDataSetException:
            self.logger.error(
                "Datatype %s has missing data and could not be imported properly."
                % (datatype, ))
            os.remove(datatype.get_storage_file_path())
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = "Could not import data with gid: %s. There is already a one with " \
                        "the same name or gid." % datatype.gid
            # Delete file if can't be imported
            os.remove(datatype.get_storage_file_path())
            raise ProjectImportException(error_msg)

    def __populate_project(self, project_path):
        """
        Create and store a Project entity.
        """
        self.logger.debug("Creating project from path: %s" % project_path)
        project_dict = self.files_helper.read_project_metadata(project_path)

        project_entity = manager_of_class(Project).new_instance()
        project_entity = project_entity.from_dict(project_dict, self.user_id)

        try:
            self.logger.debug("Storing imported project")
            return dao.store_entity(project_entity)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = (
                "Could not import project: %s with gid: %s. There is already a "
                "project with the same name or gid.") % (project_entity.name,
                                                         project_entity.gid)
            raise ProjectImportException(error_msg)

    def __build_operation_from_file(self, project, operation_file):
        """
        Create Operation entity from metadata file.
        """
        operation_dict = XMLReader(operation_file).read_metadata()
        operation_entity = manager_of_class(Operation).new_instance()
        return operation_entity.from_dict(operation_dict, dao, self.user_id,
                                          project.gid)

    @staticmethod
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(
                    operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(
                    operation_group_id)
                datatype_group = DataTypeGroup(
                    operation_group, operation_id=operation_entity.id)
                datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group

    def import_simulator_configuration_zip(self, zip_file):
        # Now compute the name of the folder where to explode uploaded ZIP file
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        if isinstance(zip_file, FieldStorage) or isinstance(zip_file, Part):
            if not zip_file.file:
                raise ServicesBaseException(
                    "Could not process the given ZIP file...")

            with open(uq_file_name, 'wb') as file_obj:
                self.files_helper.copy_file(zip_file.file, file_obj)
        else:
            shutil.copy2(zip_file, uq_file_name)

        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
            return temp_folder
        except FileStructureException as excep:
            raise ServicesBaseException(
                "Could not process the given ZIP file..." + str(excep))
Example #5
0
class ImportService(object):
    """
    Service for importing TVB entities into system.
    It supports TVB exported H5 files as input, but it should also handle H5 files
    generated outside of TVB, as long as they respect the same structure.
    """
    def __init__(self):
        self.logger = get_logger(__name__)
        self.user_id = None
        self.files_helper = FilesHelper()
        self.created_projects = []

    def _download_and_unpack_project_zip(self, uploaded, uq_file_name,
                                         temp_folder):

        if isinstance(uploaded, FieldStorage) or isinstance(uploaded, Part):
            if not uploaded.file:
                raise ImportException(
                    "Please select the archive which contains the project structure."
                )
            with open(uq_file_name, 'wb') as file_obj:
                self.files_helper.copy_file(uploaded.file, file_obj)
        else:
            shutil.copy2(uploaded, uq_file_name)

        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
        except FileStructureException as excep:
            self.logger.exception(excep)
            raise ImportException(
                "Bad ZIP archive provided. A TVB exported project is expected!"
            )

    @staticmethod
    def _compute_unpack_path():
        """
        :return: the name of the folder where to expand uploaded zip
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day,
                                             now.hour, now.minute, now.second,
                                             now.microsecond)
        uq_name = "%s-ImportProject" % date_str
        return os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, uq_name)

    @transactional
    def import_project_structure(self, uploaded, user_id):
        """
        Execute import operations:

        1. check if ZIP or folder
        2. find all project nodes
        3. for each project node:
            - create project
            - create all operations and groups
            - import all images
            - create all dataTypes
        """

        self.user_id = user_id
        self.created_projects = []

        # Now compute the name of the folder where to explode uploaded ZIP file
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        try:
            self._download_and_unpack_project_zip(uploaded, uq_file_name,
                                                  temp_folder)
            self._import_projects_from_folder(temp_folder)

        except Exception as excep:
            self.logger.exception(
                "Error encountered during import. Deleting projects created during this operation."
            )
            # Remove project folders created so far.
            # Note that using the project service to remove the projects will not work,
            # because we do not have support for nested transaction.
            # Removing from DB is not necessary because in transactional env a simple exception throw
            # will erase everything to be inserted.
            for project in self.created_projects:
                project_path = os.path.join(TvbProfile.current.TVB_STORAGE,
                                            FilesHelper.PROJECTS_FOLDER,
                                            project.name)
                shutil.rmtree(project_path)
            raise ImportException(str(excep))

        finally:
            # Now delete uploaded file
            if os.path.exists(uq_file_name):
                os.remove(uq_file_name)
            # Now delete temporary folder where uploaded ZIP was exploded.
            if os.path.exists(temp_folder):
                shutil.rmtree(temp_folder)

    def _import_projects_from_folder(self, temp_folder):
        """
        Process each project from the uploaded pack, to extract names.
        """
        project_roots = []
        for root, _, files in os.walk(temp_folder):
            if FilesHelper.TVB_PROJECT_FILE in files:
                project_roots.append(root)

        for temp_project_path in project_roots:
            update_manager = ProjectUpdateManager(temp_project_path)
            update_manager.run_all_updates()
            project = self.__populate_project(temp_project_path)
            # Populate the internal list of projects created so far, for cleaning up their folders in case of failure
            self.created_projects.append(project)
            # Ensure project final folder exists on disk
            project_path = self.files_helper.get_project_folder(project)
            shutil.move(
                os.path.join(temp_project_path, FilesHelper.TVB_PROJECT_FILE),
                project_path)
            # Now import project operations with their results
            self.import_project_operations(project, temp_project_path)
            # Import images and move them from temp into target
            self._store_imported_images(project, temp_project_path,
                                        project.name)

    def _load_datatypes_from_operation_folder(self, src_op_path,
                                              operation_entity,
                                              datatype_group):
        """
        Loads datatypes from operation folder
        :returns: Datatype entities as dict {original_path: Dt instance}
        """
        all_datatypes = {}
        for file_name in os.listdir(src_op_path):
            if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                h5_file = os.path.join(src_op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(
                        h5_file, operation_entity.id, datatype_group,
                        operation_entity.fk_launched_in)
                    all_datatypes[h5_file] = datatype

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning(
                        "Incompatible H5 file will be ignored: %s" % h5_file)
                    self.logger.exception("Incompatibility details ...")
        return all_datatypes

    def _store_imported_datatypes_in_db(self, project, all_datatypes):
        # type: (Project, dict) -> int
        sorted_dts = sorted(
            all_datatypes.items(),
            key=lambda dt_item: dt_item[1].create_date or datetime.now())

        count = 0
        for dt_path, datatype in sorted_dts:
            datatype_already_in_tvb = dao.get_datatype_by_gid(datatype.gid)
            if not datatype_already_in_tvb:
                self.store_datatype(datatype, dt_path)
                count += 1
            else:
                AlgorithmService.create_link([datatype_already_in_tvb.id],
                                             project.id)

            file_path = h5.h5_file_for_index(datatype).path
            h5_class = H5File.h5_class_from_file(file_path)
            reference_list = h5_class(file_path).gather_references()

            for _, reference_gid in reference_list:
                if not reference_gid:
                    continue

                ref_index = dao.get_datatype_by_gid(reference_gid.hex)
                if ref_index is None:
                    os.remove(file_path)
                    dao.remove_entity(datatype.__class__, datatype.id)
                    raise MissingReferenceException(
                        'Imported file depends on datatypes that do not exist. Please upload '
                        'those first!')

        return count
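
    # Note (illustrative): datatypes are stored sorted by create_date, so a referenced
    # datatype is normally persisted before its dependents; a reference GID that
    # resolves to nothing means the dependency was not part of this import or the DB.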

    def _store_imported_images(self, project, temp_project_path, project_name):
        """
        Import all images from project
        """
        images_root = os.path.join(temp_project_path,
                                   FilesHelper.IMAGES_FOLDER)
        target_images_path = self.files_helper.get_images_folder(project_name)
        for root, _, files in os.walk(images_root):
            for metadata_file in files:
                if metadata_file.endswith(FilesHelper.TVB_FILE_EXTENSION):
                    self._import_image(root, metadata_file, project.id,
                                       target_images_path)

    def _retrieve_operations_in_order(self, project, import_path):
        # type: (Project, str) -> list[Operation2ImportData]
        retrieved_operations = []

        for root, _, files in os.walk(import_path):
            if OPERATION_XML in files:
                # Previous Operation format for uploading previous versions of projects
                operation_file_path = os.path.join(root, OPERATION_XML)
                operation, operation_xml_parameters = self.__build_operation_from_file(
                    project, operation_file_path)
                operation.import_file = operation_file_path
                self.logger.debug("Found operation in old XML format: " +
                                  str(operation))
                retrieved_operations.append(
                    Operation2ImportData(
                        operation,
                        root,
                        info_from_xml=operation_xml_parameters))

            else:
                # We strive for the new format with ViewModelH5
                main_view_model = None
                dt_paths = []
                all_view_model_files = []
                for file in files:
                    if file.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                        h5_file = os.path.join(root, file)
                        try:
                            h5_class = H5File.h5_class_from_file(h5_file)
                            if h5_class is ViewModelH5:
                                all_view_model_files.append(h5_file)
                                if not main_view_model:
                                    view_model = h5.load_view_model_from_file(h5_file)
                                    if type(view_model) in VIEW_MODEL2ADAPTER:
                                        main_view_model = view_model
                            else:
                                file_update_manager = FilesUpdateManager()
                                file_update_manager.upgrade_file(h5_file)
                                dt_paths.append(h5_file)
                        except Exception:
                            self.logger.warning(
                                "Unreadable H5 file will be ignored: %s" %
                                h5_file)

                if main_view_model is not None:
                    alg = VIEW_MODEL2ADAPTER[type(main_view_model)]
                    operation = Operation(main_view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          user_group=main_view_model.
                                          generic_attributes.operation_tag,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    operation.create_date = main_view_model.create_date
                    self.logger.debug(
                        "Found main ViewModel to create operation for it: " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, main_view_model,
                                             dt_paths, all_view_model_files))

                elif len(dt_paths) > 0:
                    alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                      TVB_IMPORTER_CLASS)
                    default_adapter = ABCAdapter.build_adapter(alg)
                    view_model = default_adapter.get_view_model_class()()
                    view_model.data_file = dt_paths[0]
                    vm_path = h5.store_view_model(view_model, root)
                    all_view_model_files.append(vm_path)
                    operation = Operation(view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    self.logger.debug(
                        "Found no ViewModel in folder, so we default to " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, view_model,
                                             dt_paths, all_view_model_files,
                                             True))

        return sorted(retrieved_operations,
                      key=lambda op_data: op_data.order_field)

    def import_project_operations(self, project, import_path):
        """
        This method scans the provided folder and identifies all operations that need to be imported
        """
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path)
        success_no = 0

        for operation_data in ordered_operations:

            if operation_data.is_old_form:
                operation_entity, datatype_group = self.__import_operation(
                    operation_data.operation)
                new_op_folder = self.files_helper.get_project_folder(
                    project, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    view_model = self._get_new_form_view_model(
                        operation_entity, operation_data.info_from_xml)
                    h5.store_view_model(view_model, new_op_folder)
                    operation_entity.view_model_gid = view_model.gid.hex
                    dao.store_entity(operation_entity)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                    success_no = success_no + 1
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                operation_entity = dao.store_entity(operation_data.operation)
                dt_group = None  # TODO
                # Store the DataTypes in db
                dts = {}
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        dao.store_entity(dt)
                    else:
                        dts[dt_path] = dt
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)

                    if stored_dts_count > 0 or not operation_data.is_self_generated:
                        imported_operations.append(operation_entity)
                        new_op_folder = self.files_helper.get_project_folder(
                            project, str(operation_entity.id))
                        for h5_file in operation_data.all_view_model_files:
                            shutil.move(h5_file, new_op_folder)
                    else:
                        # In case all Dts under the current operation were Links and the ViewModel is dummy,
                        # don't keep the Operation empty in DB
                        dao.remove_entity(Operation, operation_entity.id)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

        self.logger.warning(
            "Project has been only partially imported because of some missing dependent datatypes. "
            + "%d files were successfully imported from a total of %d!" %
            (success_no, len(ordered_operations)))
        return imported_operations

    @staticmethod
    def _get_new_form_view_model(operation, xml_parameters):
        # type: (Operation, str) -> ViewModel
        ad = ABCAdapter.build_adapter(operation.algorithm)
        view_model = ad.get_view_model_class()()

        if xml_parameters:
            params = json.loads(xml_parameters)
            declarative_attrs = type(view_model).declarative_attrs

            for param in params:
                new_param_name = param
                if param[0] == "_":
                    new_param_name = param[1:]
                new_param_name = new_param_name.lower()
                if new_param_name in declarative_attrs:
                    setattr(view_model, new_param_name, params[param])
        return view_model
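
    # Normalization example (illustrative): an XML key "_Simulation_length" becomes the
    # ViewModel attribute "simulation_length" (leading "_" stripped, then lowercased),
    # and is set only when it matches a declarative attribute of the ViewModel class.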

    def _import_image(self, src_folder, metadata_file, project_id,
                      target_images_path):
        """
        Create and store an image entity.
        """
        figure_dict = XMLReader(os.path.join(src_folder,
                                             metadata_file)).read_metadata()
        actual_figure = os.path.join(
            src_folder,
            os.path.split(figure_dict['file_path'])[1])
        if not os.path.exists(actual_figure):
            self.logger.warning("Expected to find image path %s. Skipping." %
                                actual_figure)
            return
        figure_dict['fk_user_id'] = self.user_id
        figure_dict['fk_project_id'] = project_id
        figure_entity = manager_of_class(ResultFigure).new_instance()
        figure_entity = figure_entity.from_dict(figure_dict)
        stored_entity = dao.store_entity(figure_entity)

        # Update image meta-data with the new details after import
        figure = dao.load_figure(stored_entity.id)
        shutil.move(actual_figure, target_images_path)
        self.logger.debug("Store imported figure")
        self.files_helper.write_image_metadata(figure)

    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of datatype from a storage / H5 file.
        :returns: a DatatypeIndex, or a BurstConfiguration when the file holds one
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                # op_id identifies the launching Operation, so look it up as such
                op_entity = dao.get_operation_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            # burst.fk_operation_group = TODO
            # burst.fk_metric_operation_group = TODO
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            # Add all the required attributes
            if datatype_group is not None:
                datatype_index.fk_datatype_group = datatype_group.id
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result

    def store_datatype(self, datatype, current_file=None):
        """This method stores data type into DB"""
        try:
            self.logger.debug("Store datatype: %s with Gid: %s" %
                              (datatype.__class__.__name__, datatype.gid))
            # Now move storage file into correct folder if necessary
            if current_file is not None:
                final_path = h5.path_for_stored_index(datatype)
                if final_path != current_file:
                    shutil.move(current_file, final_path)

            return dao.store_entity(datatype)
        except MissingDataSetException as e:
            self.logger.exception(e)
            error_msg = "Datatype %s has missing data and could not be imported properly." % (
                datatype, )
            raise ImportException(error_msg)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = "Could not import data with gid: %s. There is already a one with " \
                        "the same name or gid." % datatype.gid
            raise ImportException(error_msg)

    def __populate_project(self, project_path):
        """
        Create and store a Project entity.
        """
        self.logger.debug("Creating project from path: %s" % project_path)
        project_dict = self.files_helper.read_project_metadata(project_path)

        project_entity = manager_of_class(Project).new_instance()
        project_entity = project_entity.from_dict(project_dict, self.user_id)

        try:
            self.logger.debug("Storing imported project")
            return dao.store_entity(project_entity)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = (
                "Could not import project: %s with gid: %s. There is already a "
                "project with the same name or gid.") % (project_entity.name,
                                                         project_entity.gid)
            raise ImportException(error_msg)

    def __build_operation_from_file(self, project, operation_file):
        """
        Create Operation entity from metadata file.
        """
        operation_dict = XMLReader(operation_file).read_metadata()
        operation_entity = manager_of_class(Operation).new_instance()
        return operation_entity.from_dict(operation_dict, dao, self.user_id,
                                          project.gid)

    @staticmethod
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(
                    operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(
                    operation_group_id)
                datatype_group = DataTypeGroup(
                    operation_group, operation_id=operation_entity.id)
                datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group

    def import_simulator_configuration_zip(self, zip_file):
        # Now compute the name of the folder where to explode uploaded ZIP file
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        if isinstance(zip_file, FieldStorage) or isinstance(zip_file, Part):
            if not zip_file.file:
                raise ServicesBaseException(
                    "Could not process the given ZIP file...")

            with open(uq_file_name, 'wb') as file_obj:
                self.files_helper.copy_file(zip_file.file, file_obj)
        else:
            shutil.copy2(zip_file, uq_file_name)

        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
            return temp_folder
        except FileStructureException as excep:
            raise ServicesBaseException(
                "Could not process the given ZIP file..." + str(excep))


#     # Sort all h5 files based on their creation date stored in the files themselves
#     sorted_h5_files = sorted(h5_files, key=lambda h5_path: _get_create_date_for_sorting(h5_path) or datetime.now())
#     return sorted_h5_files
#
#
# def _get_create_date_for_sorting(h5_file):
#     storage_manager = HDF5StorageManager(os.path.dirname(h5_file), os.path.basename(h5_file))
#     root_metadata = storage_manager.get_metadata()
#     create_date_str = str(root_metadata['Create_date'], 'utf-8')
#     create_date = datetime.strptime(create_date_str.replace('datetime:', ''), '%Y-%m-%d %H:%M:%S.%f')
#     return create_date
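
The commented-out helpers above sort exported H5 files by a creation date stored inside each file. A minimal standalone sketch of the same idea follows, assuming h5py is available and each file carries a root-level 'Create_date' attribute such as 'datetime:2015-03-05 11:22:33.000000'; the helper names are illustrative, not TVB API.

from datetime import datetime

import h5py


def read_create_date(h5_path):
    # Return the creation datetime stored in the file, or None when absent.
    with h5py.File(h5_path, 'r') as h5_file:
        raw = h5_file.attrs.get('Create_date')
        if raw is None:
            return None
        if isinstance(raw, bytes):
            raw = raw.decode('utf-8')
        return datetime.strptime(raw.replace('datetime:', ''), '%Y-%m-%d %H:%M:%S.%f')


def sort_h5_files(h5_files):
    # Oldest first; files without a stored date sort last instead of
    # defaulting to datetime.now() as the original snippet does.
    return sorted(h5_files, key=lambda path: read_create_date(path) or datetime.max)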
Example #6
class FigureService:
    """
    Service layer for Figure entities.
    """
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"

    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.svg")

    _DEFAULT_SESSION_NAME = "Default"
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    def _write_png(self, store_path, export_data):
        img_data = base64.b64decode(export_data)  # decode the image
        final_image = Image.open(
            StringIO(img_data))  # place it in a PIL stream

        branding_bar = Image.open(
            FigureService._BRANDING_BAR_PNG)  # place the branding bar over
        final_image.paste(branding_bar,
                          (0, final_image.size[1] - branding_bar.size[1]),
                          branding_bar)

        final_image.save(store_path)  # store to disk as PNG

    def _write_svg(self, store_path, export_data):
        dom = xml.dom.minidom.parseString(export_data)
        figureSvg = dom.getElementsByTagName('svg')[
            0]  # get the original image

        dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)

        try:
            width = float(figureSvg.getAttribute('width').replace('px', ''))
            height = float(figureSvg.getAttribute('height').replace('px', ''))
        except ValueError:  # defaults when dimensions are not given
            width = 1024
            height = 768
            figureSvg.setAttribute("width", str(width))
            figureSvg.setAttribute("height", str(height))

        finalSvg = dom.createElement('svg')  # prepare the final svg
        brandingSvg = dom.getElementsByTagName('svg')[
            0]  # get the branding bar
        brandingSvg.setAttribute("y",
                                 str(height))  # position it below the figure
        height += float(brandingSvg.getAttribute('height').replace(
            'px', ''))  # increase original height with branding bar's height
        finalSvg.setAttribute("width",
                              str(width))  # same width as original figure
        finalSvg.setAttribute("height", str(height))

        finalSvg.appendChild(figureSvg)  # add the image
        finalSvg.appendChild(brandingSvg)  # and the branding bar

        # Write the composed SVG document to disk
        with open(store_path, 'w') as dest:
            finalSvg.writexml(dest)  # store to disk

    def _image_path(self, project_name, img_type):
        "Generate path where to store image"
        images_folder = self.file_helper.get_images_folder(project_name)
        file_name = FigureService._DEFAULT_IMAGE_FILE_NAME + img_type
        return utils.get_unique_file_name(images_folder, file_name)

    @staticmethod
    def _generate_image_name(project, user, operation, image_name):
        if not image_name:
            if operation is not None:
                # create a name based on the operation that created the image
                # e.g. TVB-Algo-Name-354
                image_name = operation.algorithm.displayname.replace(' ', '-')
            else:
                # default to a generic name
                image_name = "figure"
        figure_count = dao.get_figure_count(project.id, user.id) + 1
        return 'TVB-%s-%s' % (image_name, figure_count)

    def store_result_figure(self,
                            project,
                            user,
                            img_type,
                            export_data,
                            image_name=None,
                            operation_id=None):
        """
        Store the result image in a file and add a reference to it in the DB.
        """
        store_path, file_name = self._image_path(project.name, img_type)

        if img_type == FigureService._TYPE_PNG:  # PNG file from canvas
            self._write_png(store_path, export_data)
        elif img_type == FigureService._TYPE_SVG:  # SVG file from svg viewer
            self._write_svg(store_path, export_data)

        if operation_id:
            operation = dao.get_operation_by_id(operation_id)
        else:
            operation = None
            operation_id = None

        image_name = self._generate_image_name(project, user, operation,
                                               image_name)

        # Store entity into DB
        entity = model.ResultFigure(operation_id, user.id, project.id,
                                    FigureService._DEFAULT_SESSION_NAME,
                                    image_name, file_name, img_type)
        entity = dao.store_entity(entity)

        # Load instance from DB to have lazy fields loaded
        figure = dao.load_figure(entity.id)
        # Write image meta data to disk
        self.file_helper.write_image_metadata(figure)

        if operation:
            # Force writing operation meta data on disk.
            # This is important later for operation import
            self.file_helper.write_operation_metadata(operation)

    def retrieve_result_figures(self,
                                project,
                                user,
                                selected_session_name='all_sessions'):
        """
        Retrieve from DB all the stored Displayer previews that belong to the specified session. The
        previews are for the current user and project, grouped by session.
        """
        result, previews_info = dao.get_previews(project.id, user.id,
                                                 selected_session_name)
        for name in result:
            for figure in result[name]:
                figures_folder = self.file_helper.get_images_folder(
                    project.name)
                figure_full_path = os.path.join(figures_folder,
                                                figure.file_path)
                # Compute the path
                figure.file_path = utils.path2url_part(figure_full_path)
        return result, previews_info

    @staticmethod
    def load_figure(figure_id):
        """
        Loads a stored figure by its id.
        """
        return dao.load_figure(figure_id)

    def edit_result_figure(self, figure_id, **data):
        """
        Retrieve and edit a previously stored figure.
        """
        figure = dao.load_figure(figure_id)
        figure.session_name = data['session_name']
        figure.name = data['name']
        dao.store_entity(figure)

        # Load instance from DB to have lazy fields loaded.
        figure = dao.load_figure(figure_id)
        # Store figure meta data in an XML attached to the image.
        self.file_helper.write_image_metadata(figure)

    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(
            figure.project.name)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
class FigureServiceTest(TransactionalTestCase):
    """
    Tests for the figure service
    """
    def setUp(self):
        self.figure_service = FigureService()
        self.user = TestFactory.create_user()
        self.project = TestFactory.create_project(admin=self.user)
        self.files_helper = FilesHelper()

    def tearDown(self):
        self.delete_project_folders()

    def assertCanReadImage(self, image_path):
        try:
            Image.open(image_path).load()
        except (IOError, ValueError):
            self.fail("Could not open %s as a image" % image_path)

    def store_test_png(self):
        self.figure_service.store_result_figure(self.project,
                                                self.user,
                                                "png",
                                                IMG_DATA,
                                                image_name="test-figure")

    def retrieve_images(self):
        figures_by_session, _ = self.figure_service.retrieve_result_figures(
            self.project, self.user)
        # flatten image session grouping
        figures = []
        for fg in figures_by_session.itervalues():
            figures.extend(fg)
        return figures

    def test_store_image(self):
        self.store_test_png()

    def test_store_image_from_operation(self):
        # test that image can be retrieved from operation
        test_operation = TestFactory.create_operation(
            test_user=self.user, test_project=self.project)

        self.figure_service.store_result_figure(self.project,
                                                self.user,
                                                "png",
                                                IMG_DATA,
                                                operation_id=test_operation.id)
        figures = dao.get_figures_for_operation(test_operation.id)
        self.assertEqual(1, len(figures))
        image_path = self.files_helper.get_images_folder(self.project.name)
        image_path = os.path.join(image_path, figures[0].file_path)
        self.assertCanReadImage(image_path)

    def test_store_and_retrieve_image(self):
        self.store_test_png()
        figures = self.retrieve_images()
        self.assertEqual(1, len(figures))
        image_path = utils.url2path(figures[0].file_path)
        self.assertCanReadImage(image_path)

    def test_load_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.load_figure(figures[0].id)

    def test_edit_figure(self):
        session_name = 'the altered ones'
        name = 'altered'
        self.store_test_png()
        figures = self.retrieve_images()
        self.figure_service.edit_result_figure(figures[0].id,
                                               session_name=session_name,
                                               name=name)
        figures_by_session, _ = self.figure_service.retrieve_result_figures(
            self.project, self.user)
        self.assertEqual([session_name], figures_by_session.keys())
        self.assertEqual(name, figures_by_session.values()[0][0].name)

    def test_remove_figure(self):
        self.store_test_png()
        figures = self.retrieve_images()
        self.assertEqual(1, len(figures))
        self.figure_service.remove_result_figure(figures[0].id)
        figures = self.retrieve_images()
        self.assertEqual(0, len(figures))
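
For orientation, a hedged sketch of how FigureService's public API chains together; it assumes a bootstrapped TVB database plus existing project and user entities, and PNG_BASE64 stands in for base64-encoded canvas data (not part of the API).

service = FigureService()
service.store_result_figure(project, user, "png", PNG_BASE64, image_name="demo")

figures_by_session, _ = service.retrieve_result_figures(project, user)
first_figure = figures_by_session.values()[0][0]  # Python 2 dict API, as in the tests above
service.edit_result_figure(first_figure.id, session_name="reviewed", name="demo-v2")
service.remove_result_figure(first_figure.id)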
Example #8
class FigureService:
    """
    Service layer for Figure entities.
    """
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"

    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.svg")

    _DEFAULT_SESSION_NAME = "Default"
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."


    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()


    def _write_png(self, store_path, export_data):
        img_data = base64.b64decode(export_data)                        # decode the image
        final_image = Image.open(StringIO(img_data))                    # place it in a PIL stream

        branding_bar = Image.open(FigureService._BRANDING_BAR_PNG)      # place the branding bar over
        final_image.paste(branding_bar, (0, final_image.size[1] - branding_bar.size[1]), branding_bar)

        final_image.save(store_path)                                    # store to disk as PNG


    def _write_svg(self, store_path, export_data):
        dom = xml.dom.minidom.parseString(export_data)
        figureSvg = dom.getElementsByTagName('svg')[0]                          # get the original image

        dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)

        try:
            width = float(figureSvg.getAttribute('width').replace('px', ''))
            height = float(figureSvg.getAttribute('height').replace('px', ''))
        except ValueError:                                                      # defaults when dimensions are not given
            width = 1024
            height = 768
            figureSvg.setAttribute("width", str(width))
            figureSvg.setAttribute("height", str(height))

        finalSvg = dom.createElement('svg')                                     # prepare the final svg
        brandingSvg = dom.getElementsByTagName('svg')[0]                        # get the branding bar
        brandingSvg.setAttribute("y", str(height))                              # position it below the figure
        height += float(brandingSvg.getAttribute('height').replace('px', ''))   # increase original height with branding bar's height
        finalSvg.setAttribute("width", str(width))                              # same width as original figure
        finalSvg.setAttribute("height", str(height))

        finalSvg.appendChild(figureSvg)                                         # add the image
        finalSvg.appendChild(brandingSvg)                                       # and the branding bar

        # Write the composed SVG document to disk
        with open(store_path, 'w') as dest:
            finalSvg.writexml(dest)                                                 # store to disk


    def _image_path(self, project_name, img_type):
        "Generate path where to store image"
        images_folder = self.file_helper.get_images_folder(project_name)
        file_name = FigureService._DEFAULT_IMAGE_FILE_NAME + img_type
        return utils.get_unique_file_name(images_folder, file_name)


    @staticmethod
    def _generate_image_name(project, user, operation, image_name):
        if not image_name:
            if operation is not None:
                # create a name based on the operation that created the image
                # e.g. TVB-Algo-Name-354
                image_name = operation.algorithm.name.replace(' ', '-')
            else:
                # default to a generic name
                image_name = "figure"
        figure_count = dao.get_figure_count(project.id, user.id) + 1
        return 'TVB-%s-%s' % (image_name, figure_count)


    def store_result_figure(self, project, user, img_type, export_data, image_name=None, operation_id=None):
        """
        Store the result image in a file and add a reference to it in the DB.
        """
        store_path, file_name = self._image_path(project.name, img_type)

        if img_type == FigureService._TYPE_PNG:            # PNG file from canvas
            self._write_png(store_path, export_data)
        elif img_type == FigureService._TYPE_SVG:          # SVG file from svg viewer
            self._write_svg(store_path, export_data)

        if operation_id:
            operation = dao.get_operation_by_id(operation_id)
        else:
            operation = None
            operation_id = None

        image_name = self._generate_image_name(project, user, operation, image_name)

        # Store entity into DB
        entity = model.ResultFigure(operation_id, user.id, project.id, FigureService._DEFAULT_SESSION_NAME,
                                    image_name, file_name, img_type)
        entity = dao.store_entity(entity)

        # Load instance from DB to have lazy fields loaded
        figure = dao.load_figure(entity.id)
        # Write image meta data to disk  
        self.file_helper.write_image_metadata(figure)

        if operation:
            # Force writing operation meta data on disk.
            # This is important later for operation import
            self.file_helper.write_operation_metadata(operation)


    def retrieve_result_figures(self, project, user, selected_session_name='all_sessions'):
        """
        Retrieve from DB all the stored Displayer previews that belong to the specified session. The
        previews are for the current user and project, grouped by session.
        """
        result, previews_info = dao.get_previews(project.id, user.id, selected_session_name)
        for name in result:
            for figure in result[name]:
                figures_folder = self.file_helper.get_images_folder(project.name)
                figure_full_path = os.path.join(figures_folder, figure.file_path)
                # Compute the path 
                figure.file_path = utils.path2url_part(figure_full_path)
        return result, previews_info


    @staticmethod
    def load_figure(figure_id):
        """
        Loads a stored figure by its id.
        """
        return dao.load_figure(figure_id)


    def edit_result_figure(self, figure_id, **data):
        """
        Retrieve and edit a previously stored figure.
        """
        figure = dao.load_figure(figure_id)
        figure.session_name = data['session_name']
        figure.name = data['name']
        dao.store_entity(figure)

        # Load instance from DB to have lazy fields loaded.
        figure = dao.load_figure(figure_id)
        # Store figure meta data in an XML attached to the image.
        self.file_helper.write_image_metadata(figure)


    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(figure.project.name)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
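

# The PNG compositing in _write_png above can be tried in isolation. A minimal
# standalone sketch, assuming Pillow is installed; the file names below are
# illustrative only.
from PIL import Image

figure = Image.open("figure.png")
bar = Image.open("branding_bar.png")
# The third argument to paste() is the transparency mask; reusing the bar's
# own alpha channel means transparent bar pixels leave the figure untouched
# while opaque ones overwrite its bottom strip.
figure.paste(bar, (0, figure.size[1] - bar.size[1]), bar)
figure.save("figure_branded.png")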
        
        
        
class ProjectService:
    """
    Services layer for Project entities.
    """


    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()


    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = model.Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:'******'-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) in (str, unicode):
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) in (str, unicode):
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) in (str, unicode):
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warn("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no


    def retrieve_projects_for_user(self, user_id, current_page=1):
        """
        Return a list with all Projects visible for current user.
        """
        start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
        total = dao.get_projects_for_user(user_id, is_count=True)
        available_projects = dao.get_projects_for_user(user_id, start_idx, PROJECTS_PAGE_SIZE)
        pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0)
        for prj in available_projects:
            fns, sta, err, canceled, pending = dao.get_operation_numbers(prj.id)
            prj.operations_finished = fns
            prj.operations_started = sta
            prj.operations_error = err
            prj.operations_canceled = canceled
            prj.operations_pending = pending
            prj.disk_size = dao.get_project_disk_size(prj.id)
            prj.disk_size_human = format_bytes_human(prj.disk_size)
        self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
        return available_projects, pages_no
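        # Worked example of the pages_no arithmetic above: with
        # PROJECTS_PAGE_SIZE = 20, a total of 41 gives 41 // 20 + 1 == 3
        # pages, while a total of 40 gives exactly 2. An equivalent
        # ceil-division idiom is -(-total // PROJECTS_PAGE_SIZE).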


    @staticmethod
    def get_linkable_projects_for_user(user_id, data_id):
        """
        Find projects which are visible for the current user, and in which the current datatype hasn't been linked yet.
        """
        return dao.get_linkable_projects_for_user(user_id, data_id)


    @transactional
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(model.Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))


    # ----------------- Methods for populating Data-Structure Page ---------------

    @staticmethod
    def get_datatype_in_group(group):
        """
        Return all dataTypes that are the result of the same DTgroup.
        """
        return dao.get_datatype_in_group(datatype_group_id=group)


    @staticmethod
    def get_datatypes_from_datatype_group(datatype_group_id):
        """
        Retrieve all dataTypes which are part of the given dataType group.
        """
        return dao.get_datatypes_from_datatype_group(datatype_group_id)


    @staticmethod
    def load_operation_by_gid(operation_gid):
        """ Retrieve loaded Operation from DB"""
        return dao.get_operation_by_gid(operation_gid)


    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_id(operation_group_id)


    @staticmethod
    def get_operation_group_by_gid(operation_group_gid):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_gid(operation_group_gid)


    @staticmethod
    def get_operations_in_group(operation_group):
        """ Return all the operations from an operation group. """
        return dao.get_operations_in_group(operation_group.id)


    @staticmethod
    def is_upload_operation(operation_gid):
        """ Returns True only if the operation with the given GID is an upload operation. """
        return dao.is_upload_operation(operation_gid)


    @staticmethod
    def get_all_operations_for_uploaders(project_id):
        """ Returns all finished upload operations. """
        return dao.get_all_operations_for_uploaders(project_id)

    def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True then this method will change the visibility for all
        the operations from the OperationGroup with the GID field equal to 'entity_gid'.
        """

        def set_visibility(op):
            # workaround:
            # 'reload' the operation so that it has the project property set.
            # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session
            # write_operation_metadata requires that property
            op = dao.get_operation_by_id(op.id)
            # end hack
            op.visible = is_visible
            self.structure_helper.write_operation_metadata(op)
            dao.store_entity(op)

        def set_group_descendants_visibility(operation_group_id):
            ops_in_group = dao.get_operations_in_group(operation_group_id)
            for group_op in ops_in_group:
                set_visibility(group_op)

        if is_operation_group:
            op_group_id = dao.get_operationgroup_by_gid(entity_gid).id
            set_group_descendants_visibility(op_group_id)
        else:
            operation = dao.get_operation_by_gid(entity_gid)
            # we ensure that if the operation belongs to a group then the visibility will be changed for the entire group
            if operation.fk_operation_group is not None:
                set_group_descendants_visibility(operation.fk_operation_group)
            else:
                set_visibility(operation)


    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            ## Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        username = dao.get_user_by_id(operation.fk_launched_by).username
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, username, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        ## Add all parameters which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details


    @staticmethod
    def get_filterable_meta():
        """
        Contains all the attributes by which
        the user can structure the tree of DataTypes
        """
        return DataTypeMetaData.get_filterable_meta()


    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            ## Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)


    @staticmethod
    def get_datatype_details(datatype_gid):
        """
        :returns: an array. First entry in array is an instance of DataTypeOverlayDetails\
            The second one contains all the possible states for the specified dataType.

        """
        meta_atts = DataTypeOverlayDetails()
        states = DataTypeMetaData.STATES
        try:
            datatype_result = dao.get_datatype_details(datatype_gid)
            meta_atts.fill_from_datatype(datatype_result, datatype_result._parent_burst)
            return meta_atts, states, datatype_result
        except Exception:
            ## We ignore exception here (it was logged above, and we want to return no details).
            return meta_atts, states, None


    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(model.Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = model.Operation(dao.get_system_user().id,
                                             links[0].fk_to_project,
                                             datatype.parent_operation.fk_from_algo,
                                             datatype.parent_operation.parameters,
                                             datatype.parent_operation.meta_data,
                                             datatype.parent_operation.status,
                                             datatype.parent_operation.start_date,
                                             datatype.parent_operation.completion_date,
                                             datatype.parent_operation.fk_operation_group,
                                             datatype.parent_operation.additional_info,
                                             datatype.parent_operation.user_group,
                                             datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id))
                    datatype.set_operation_id(new_op.id)
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(model.Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                self.structure_helper.remove_datatype(datatype)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")


    def remove_operation(self, operation_id):
        """
        Remove a given operation
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if operation is not None:
            self.logger.debug("Deleting operation %s " % operation)
            datatypes_for_op = dao.get_results_for_operation(operation_id)
            for dt in reversed(datatypes_for_op):
                self.remove_datatype(operation.project.id, dt.gid, False)
            dao.remove_entity(model.Operation, operation.id)
            self.logger.debug("Finished deleting operation %s " % operation)
        else:
            self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)


    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))


    def update_metadata(self, submit_data):
        """
        Update DataType/ DataTypeGroup metadata
        THROW StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                              new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(model.DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))


    def _edit_data(self, datatype, new_data, from_group=False):
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
            if not datatype.invalid:
                datatype.invalid = True
                dao.store_entity(datatype)
            return
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(model.OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(model.OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update dataType fields:
        datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in H5 as well.
        datatype.persist_full_metadata()
        # 4. Update the group_name/user_group into the operation meta-data file
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                        str(datatype.fk_from_operation), from_group)


    def get_datatype_and_datatypegroup_inputs_for_operation(self, operation_gid, selected_filter):
        """
        Returns the dataTypes that are used as input parameters for the given operation.
        'selected_filter' - is expected to be a visibility filter.

        If any dataType is part of a dataType group then the dataType group will
        be returned instead of that dataType.
        """
        all_datatypes = self._review_operation_inputs(operation_gid)[0]
        datatype_inputs = []
        for datatype in all_datatypes:
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW:
                if datatype.visible:
                    datatype_inputs.append(datatype)
            else:
                datatype_inputs.append(datatype)
        datatypes = []
        datatype_groups = dict()
        for data_type in datatype_inputs:
            if data_type.fk_datatype_group is None:
                datatypes.append(data_type)
            elif data_type.fk_datatype_group not in datatype_groups:
                dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
                datatype_groups[data_type.fk_datatype_group] = dt_group

        datatypes.extend([v for _, v in six.iteritems(datatype_groups)])
        return datatypes


    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation.
                 And a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except IntrospectionException:
            self.logger.warning("Could not find adapter class for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm was Removed. We can not offer more details")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters


    def get_datatypes_inputs_for_operation_group(self, group_id, selected_filter):
        """
        Returns the dataType inputs for an operation group. If several dataTypes
        are part of the same dataType group, then only the dataType group will
        be returned instead of them.
        """
        operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
        op_group_inputs = dict()
        for gid in operations_gids:
            op_inputs = self.get_datatype_and_datatypegroup_inputs_for_operation(gid[0], selected_filter)
            for datatype in op_inputs:
                op_group_inputs[datatype.id] = datatype
        return op_group_inputs.values()


    @staticmethod
    def get_results_for_operation(operation_id, selected_filter=None):
        """
        Retrieve the DataTypes entities resulted after the execution of the given operation.
        """
        return dao.get_results_for_operation(operation_id, selected_filter)


    @staticmethod
    def get_operations_for_datatype_group(datatype_group_id, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter a dataType from the given DataTypeGroup.
        visibility_filter - is a filter used for retrieving all the operations or only the relevant ones.

        If only_in_groups is True then this method will return only the operations that are
        part from an operation group, otherwise it will return only the operations that
        are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype_group(datatype_group_id, only_relevant=False,
                                                         only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype_group(datatype_group_id, only_in_groups=only_in_groups)


    @staticmethod
    def get_operations_for_datatype(datatype_gid, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter the dataType with the specified GID.

        If only_in_groups is True then this method will return only the operations that are part
        from an operation group, otherwise it will return only the operations that are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype(datatype_gid, only_relevant=False, only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype(datatype_gid, only_in_groups=only_in_groups)


    @staticmethod
    def get_datatype_by_id(datatype_id):
        """Retrieve a DataType DB reference by its id."""
        return dao.get_datatype_by_id(datatype_id)


    @staticmethod
    def get_datatypegroup_by_gid(datatypegroup_gid):
        """ Returns the DataTypeGroup with the specified gid. """
        return dao.get_datatype_group_by_gid(datatypegroup_gid)

    @staticmethod
    def count_datatypes_generated_from(datatype_gid):
        """
        Count the datatypes resulting from operations that had as
        input the datatype given by 'datatype_gid'.
        """
        return dao.count_datatypes_generated_from(datatype_gid)


    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)


    @staticmethod
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or is part of a
        dataType group, then this method will set the visibility for each dataType in that group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)
            dt.persist_full_metadata()

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        # update the datatype or datatype group.
        set_visibility(datatype)


    @staticmethod
    def is_datatype_group(datatype_gid):
        """ Used to check if the dataType with the specified GID is a DataTypeGroup. """
        return dao.is_datatype_group(datatype_gid)
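# Hedged usage sketch (the enclosing service class is not named in this
# excerpt; "ProjectService" is assumed here purely for illustration):
def _example_hide_datatype(datatype_gid):
    """Hide a datatype; for groups and group members the change cascades to the whole group."""
    ProjectService.set_datatype_visibility(datatype_gid, False)
    # is_datatype_group can be checked up-front to warn users about the cascade.
    return ProjectService.is_datatype_group(datatype_gid)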
Example #10
0
class FigureController(ProjectController):
    """
    Resulting Figures are user-saved figures with specific visualizers or TVB pages which are considered important.
    """

    def __init__(self):
        ProjectController.__init__(self)
        self.files_helper = FilesHelper()
        self.figure_service = FigureService()


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    @context_selected
    def storeresultfigure(self, img_type, **kwargs):
        """Create preview for current displayed canvas and 
        store image in current session, for future comparison."""
        project = common.get_current_project()
        user = common.get_logged_user()
        suggested_name = kwargs.get("suggestedName")
        self.figure_service.store_result_figure(project, user, img_type, kwargs['export_data'], suggested_name)


    @expose_page
    @context_selected
    def displayresultfigures(self, selected_session='all_sessions'):
        """ Collect and display saved previews, grouped by session."""
        project = common.get_current_project()
        user = common.get_logged_user()
        data, all_sessions_info = self.figure_service.retrieve_result_figures(project, user, selected_session)
        manage_figure_title = "Figures for " + str(selected_session) + " category"
        if selected_session == 'all_sessions':
            manage_figure_title = "Figures for all categories"
        template_specification = dict(mainContent="project/figures_display", title="Stored Visualizer Previews",
                                      controlPage=None, displayControl=False, selected_sessions_data=data,
                                      all_sessions_info=all_sessions_info, selected_session=selected_session,
                                      manageFigureTitle=manage_figure_title)
        template_specification = self.fill_default_attributes(template_specification, subsection='figures')
        return template_specification


    @cherrypy.expose
    @handle_error(redirect=True)
    @check_user
    @context_selected
    def editresultfigures(self, remove_figure=False, rename_session=False, remove_session=False, **data):
        """
        This method knows how to handle the following actions:
        remove figure, update figure, remove session and update session.
        """
        project = common.get_current_project()
        user = common.get_logged_user()

        redirect_url = '/project/figure/displayresultfigures'
        if "selected_session" in data and data["selected_session"] is not None and len(data["selected_session"]):
            redirect_url += '/' + data["selected_session"]
            del data["selected_session"]
        figure_id = None
        if "figure_id" in data:
            figure_id = data["figure_id"]
            del data["figure_id"]

        if cherrypy.request.method == 'POST' and rename_session:
            successfully_updated = True
            if "old_session_name" in data and "new_session_name" in data:
                figures_dict, _ = self.figure_service.retrieve_result_figures(project, user, data["old_session_name"])
                for _key, value in figures_dict.items():
                    for figure in value:
                        new_data = {"name": figure.name, "session_name": data["new_session_name"]}
                        success = self._update_figure(figure.id, **new_data)
                        if not success:
                            successfully_updated = False
                if successfully_updated:
                    common.set_info_message("The session was successfully updated!")
                else:
                    common.set_error_message("The session was not successfully updated! "
                                             "There could be some figures that still refer to the old session.")
        elif cherrypy.request.method == 'POST' and remove_session:
            successfully_removed = True
            if "old_session_name" in data:
                figures_dict, _ = self.figure_service.retrieve_result_figures(project, user, data["old_session_name"])
                for _key, value in figures_dict.items():
                    for figure in value:
                        success = self.figure_service.remove_result_figure(figure.id)
                        if not success:
                            successfully_removed = False
                if successfully_removed:
                    common.set_info_message("The session was removed successfully!")
                else:
                    common.set_error_message("The session was not entirely removed!")
        elif cherrypy.request.method == 'POST' and remove_figure and figure_id is not None:
            success = self.figure_service.remove_result_figure(figure_id)
            if success:
                common.set_info_message("Figure removed successfully!")
            else:
                common.set_error_message("Figure could not be removed!")
        elif figure_id is not None:
            self._update_figure(figure_id, **data)
        raise cherrypy.HTTPRedirect(redirect_url)


    def _update_figure(self, figure_id, **data):
        """
        Updates the figure details with the given data.
        """
        try:
            data = EditPreview().to_python(data)
            self.figure_service.edit_result_figure(figure_id, **data)
            common.set_info_message('Figure details updated successfully.')
            return True
        except formencode.Invalid as excep:
            self.logger.debug(excep)
            common.set_error_message(excep.message)
            return False


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloadimage(self, figure_id):
        """
        Allow a user to download a figure.
        """
        figure = self.figure_service.load_figure(figure_id)
        image_folder = self.files_helper.get_images_folder(figure.project.name)
        figure_path = os.path.join(image_folder, figure.file_path)
        return serve_file(figure_path, "image/" + figure.file_format, "attachment",
                          "%s.%s" % (figure.name, figure.file_format))


    @cherrypy.expose
    @handle_error(redirect=False)
    @using_template("overlay")
    @check_user
    def displayzoomedimage(self, figure_id):
        """
        Displays the image with the specified id in an overlay dialog.
        """
        figure = self.figure_service.load_figure(figure_id)
        figures_folder = self.files_helper.get_images_folder(figure.project.name)
        figure_full_path = os.path.join(figures_folder, figure.file_path)
        figure_file_path = utils.path2url_part(figure_full_path)
        description = figure.session_name + " - " + figure.name
        template_dictionary = dict(figure_file_path=figure_file_path)
        return self.fill_overlay_attributes(template_dictionary, "Detail", description,
                                            "project/figure_zoom_overlay", "lightbox")
class ImportService(object):
    """
    Service for importing TVB entities into system.
    It supports TVB exported H5 files as input, but it should also handle H5 files 
    generated outside of TVB, as long as they respect the same structure.
    """


    def __init__(self):
        self.logger = get_logger(__name__)
        self.user_id = None
        self.files_helper = FilesHelper()
        self.created_projects = []


    def _download_and_unpack_project_zip(self, uploaded, uq_file_name, temp_folder):

        if isinstance(uploaded, FieldStorage) or isinstance(uploaded, Part):
            if not uploaded.file:
                raise ProjectImportException("Please select the archive which contains the project structure.")
            with open(uq_file_name, 'wb') as file_obj:
                self.files_helper.copy_file(uploaded.file, file_obj)
        else:
            shutil.copy2(uploaded, uq_file_name)

        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
        except FileStructureException as excep:
            self.logger.exception(excep)
            raise ProjectImportException("Bad ZIP archive provided. A TVB exported project is expected!")


    @staticmethod
    def _compute_unpack_path():
        """
        :return: the name of the folder where to expand uploaded zip
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day, now.hour,
                                             now.minute, now.second, now.microsecond)
        uq_name = "%s-ImportProject" % date_str
        return os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, uq_name)
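        # Illustrative result (timestamp hypothetical):
        #   <TVB_TEMP_FOLDER>/2015-3-28_14-5-7_123456-ImportProject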


    @transactional
    def import_project_structure(self, uploaded, user_id):
        """
        Execute import operations:
         
        1. check if ZIP or folder 
        2. find all project nodes
        3. for each project node:
            - create project
            - create all operations
            - import all images
            - create all dataTypes
        """

        self.user_id = user_id
        self.created_projects = []

        # Now compute the name of the folder where to explode uploaded ZIP file
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        try:
            self._download_and_unpack_project_zip(uploaded, uq_file_name, temp_folder)
            self._import_projects_from_folder(temp_folder)

        except Exception as excep:
            self.logger.exception("Error encountered during import. Deleting projects created during this operation.")
            # Remove project folders created so far.
            # Note that using the project service to remove the projects will not work,
            # because we do not have support for nested transaction.
            # Removing from DB is not necessary because in transactional env a simple exception throw
            # will erase everything to be inserted.
            for project in self.created_projects:
                project_path = os.path.join(TvbProfile.current.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER, project.name)
                shutil.rmtree(project_path)
            raise ProjectImportException(str(excep))

        finally:
            # Now delete uploaded file
            if os.path.exists(uq_file_name):
                os.remove(uq_file_name)
            # Now delete temporary folder where uploaded ZIP was exploded.
            if os.path.exists(temp_folder):
                shutil.rmtree(temp_folder)


    @staticmethod
    def _load_burst_info_from_json(project_path):
        bursts_dict = {}
        dt_mappings_dict = {}
        bursts_file = os.path.join(project_path, BURST_INFO_FILE)
        if os.path.isfile(bursts_file):
            with open(bursts_file) as f:
                bursts_info_dict = json.load(f)
            bursts_dict = bursts_info_dict[BURSTS_DICT_KEY]
            dt_mappings_dict = bursts_info_dict[DT_BURST_MAP]
            # Remove only when the file exists; removing unconditionally raised OSError otherwise.
            os.remove(bursts_file)
        return bursts_dict, dt_mappings_dict
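        # Expected layout of the bursts file (key names are the module-level
        # constants; nested shapes are inferred from how the dicts are used below):
        #   { BURSTS_DICT_KEY: {"<old_burst_id>": <BurstInformation dict>},
        #     DT_BURST_MAP:    {"<datatype_gid>": "<old_burst_id>"} }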


    @staticmethod
    def _import_bursts(project_entity, bursts_dict):
        """
        Re-create old bursts, but keep a mapping between the id it has here and the old-id it had
        in the project where they were exported, so we can re-add the datatypes to them.
        """
        burst_ids_mapping = {}

        for old_burst_id in bursts_dict:
            burst_information = BurstInformation.load_from_dict(bursts_dict[old_burst_id])
            burst_entity = model.BurstConfiguration(project_entity.id)
            burst_entity.from_dict(burst_information.data)
            burst_entity = dao.store_entity(burst_entity)
            burst_ids_mapping[int(old_burst_id)] = burst_entity.id
            # We don't need the data in dictionary form anymore, so update it with new BurstInformation object
            bursts_dict[old_burst_id] = burst_information
        return burst_ids_mapping
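        # Illustrative result: if exported bursts 12 and 13 become new DB rows
        # 3 and 4, the returned mapping is {12: 3, 13: 4} (ids hypothetical).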


    def _import_projects_from_folder(self, temp_folder):
        """
        Process each project found in the uploaded pack: run update scripts, store the
        project entity, then import its bursts, operations, images and workflows.
        """
        project_roots = []
        for root, _, files in os.walk(temp_folder):
            if FilesHelper.TVB_PROJECT_FILE in files:
                project_roots.append(root)

        for project_path in project_roots:
            update_manager = ProjectUpdateManager(project_path)
            update_manager.run_all_updates()
            project_entity = self.__populate_project(project_path)

            # Compute the path where to store files of the imported project
            new_project_path = os.path.join(TvbProfile.current.TVB_STORAGE,
                                            FilesHelper.PROJECTS_FOLDER, project_entity.name)
            if project_path != new_project_path:
                shutil.move(project_path, new_project_path)

            self.created_projects.append(project_entity)

            # Keep a list with all bursts that were imported, since we will want to also add the
            # workflow steps after we are finished importing the operations and datatypes. We need
            # to first store the bursts, since we need to know which new ids they have for the
            # operations' parent_burst.
            bursts_dict, dt_mappings_dict = self._load_burst_info_from_json(new_project_path)
            burst_ids_mapping = self._import_bursts(project_entity, bursts_dict)

            # Now import project operations
            self.import_project_operations(project_entity, new_project_path, dt_mappings_dict, burst_ids_mapping)
            # Import images
            self._store_imported_images(project_entity)
            # Now we can finally import workflow related entities
            self.import_workflows(project_entity, bursts_dict, burst_ids_mapping)


    def import_workflows(self, project, bursts_dict, burst_ids_mapping):
        """
        Import the workflow entities for all bursts imported in the project.

        :param project: the current project, into which the workflows are imported

        :param bursts_dict: a dictionary that holds all the required information in order to
                            import the bursts from the new project

        :param burst_ids_mapping: a dictionary of the form {old_burst_id : new_burst_id} so we
                                  know what burst to link each workflow to
        """
        for burst_id in bursts_dict:
            workflows_info = bursts_dict[burst_id].get_workflows()
            for one_wf_info in workflows_info:
                # Use the new burst id when creating the workflow
                workflow_entity = model.Workflow(project.id, burst_ids_mapping[int(burst_id)])
                workflow_entity.from_dict(one_wf_info.data)
                workflow_entity = dao.store_entity(workflow_entity)
                wf_steps_info = one_wf_info.get_workflow_steps()
                view_steps_info = one_wf_info.get_view_steps()
                self.import_workflow_steps(workflow_entity, wf_steps_info, view_steps_info)


    def import_workflow_steps(self, workflow, wf_steps, view_steps):
        """
        Import all workflow steps for the given workflow. We create both wf_steps and view_steps
        in the same method, since if a wf_step has to be omitted for some reason, we also need to
        omit that view step.
        :param workflow: a model.Workflow entity from which we need to add workflow steps

        :param wf_steps: a list of WorkflowStepInformation entities, from which we will rebuild 
                         the workflow steps

        :param view_steps: a list of WorkflowViewStepInformation entities, from which we will 
                           rebuild the workflow view steps

        """
        for wf_step in wf_steps:
            try:
                algorithm = wf_step.get_algorithm()
                if algorithm is None:
                    # The algorithm is invalid for some reason; remove the matching view step as well.
                    position = wf_step.index()
                    # Iterate over a copy: removing from the list while iterating it would skip entries.
                    for entry in list(view_steps):
                        if entry.index() == position:
                            view_steps.remove(entry)
                    continue
                wf_step_entity = model.WorkflowStep(algorithm.id)
                wf_step_entity.from_dict(wf_step.data)
                wf_step_entity.fk_workflow = workflow.id
                wf_step_entity.fk_operation = wf_step.get_operation_id()
                dao.store_entity(wf_step_entity)
            except Exception:
                # only log exception and ignore this as it is not very important:
                self.logger.exception("Could not restore WorkflowStep: %s" % wf_step.get_algorithm().name)

        for view_step in view_steps:
            try:
                algorithm = view_step.get_algorithm()
                if algorithm is None:
                    continue
                view_step_entity = model.WorkflowStepView(algorithm.id)
                view_step_entity.from_dict(view_step.data)
                view_step_entity.fk_workflow = workflow.id
                view_step_entity.fk_portlet = view_step.get_portlet().id
                dao.store_entity(view_step_entity)
            except Exception:
                # only log exception and ignore this as it is not very important:
                self.logger.exception("Could not restore WorkflowViewStep " + view_step.get_algorithm().name)


    @staticmethod
    def _append_tmp_to_folders_containing_operations(import_path):
        """
        Find folders containing operations and rename them, return the renamed paths
        """
        pths = []
        for root, _, files in os.walk(import_path):
            if FilesHelper.TVB_OPERARATION_FILE in files:
                # Found an operation folder - append TMP to its name
                tmp_op_folder = root + 'tmp'
                os.rename(root, tmp_op_folder)
                operation_file_path = os.path.join(tmp_op_folder, FilesHelper.TVB_OPERARATION_FILE)
                pths.append(operation_file_path)
        return pths
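        # The "tmp" suffix presumably avoids clashes with folders of operations
        # that were already renamed; the final rename to the stored operation id
        # happens later, in import_project_operations.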


    def _load_operations_from_paths(self, project, op_paths):
        """
        Load operations from paths containing them.
        :returns: Operations ordered by start/creation date, so that data dependencies are resolved correctly
        """
        def by_time(op):
            return op.start_date or op.create_date or datetime.now()

        operations = []

        for operation_file_path in op_paths:
            operation = self.__build_operation_from_file(project, operation_file_path)
            operation.import_file = operation_file_path
            operations.append(operation)

        operations.sort(key=by_time)
        return operations


    def _load_datatypes_from_operation_folder(self, op_path, operation_entity, datatype_group):
        """
        Loads datatypes from operation folder
        :returns: Datatypes ordered by creation date (to solve any dependencies)
        """
        all_datatypes = []
        for file_name in os.listdir(op_path):
            if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                h5_file = os.path.join(op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(op_path, file_name, operation_entity.id, datatype_group)
                    all_datatypes.append(datatype)

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning("Incompatible H5 file will be ignored: %s" % h5_file)
                    self.logger.exception("Incompatibility details ...")

        all_datatypes.sort(key=lambda dt: dt.create_date)
        for dt in all_datatypes:
            self.logger.debug("Import order %s: %s" % (dt.type, dt.gid))
        return all_datatypes


    def _store_imported_datatypes_in_db(self, project, all_datatypes, dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_allready_in_tvb.id], project.id)
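                # Note: a datatype already present in TVB (same gid) is not
                # duplicated; a link into the imported project is created instead.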

    def _store_imported_images(self, project):
        """
        Import all images from project
        """
        images_root = self.files_helper.get_images_folder(project.name)
        for root, _, files in os.walk(images_root):
            for file_name in files:
                if file_name.endswith(FilesHelper.TVB_FILE_EXTENSION):
                    self._populate_image(os.path.join(root, file_name), project.id)


    def import_project_operations(self, project, import_path, dt_burst_mappings=None, burst_ids_mapping=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported.
        """
        op_paths = self._append_tmp_to_folders_containing_operations(import_path)
        operations = self._load_operations_from_paths(project, op_paths)

        imported_operations = []
        datatypes = []

        # Here we process each operation found
        for operation in operations:
            self.logger.debug("Importing operation " + str(operation))
            old_operation_folder, _ = os.path.split(operation.import_file)
            operation_entity, datatype_group = self.__import_operation(operation)

            # Rename operation folder with the ID of the stored operation
            new_operation_path = FilesHelper().get_operation_folder(project.name, operation_entity.id)
            if old_operation_folder != new_operation_path:
                # Delete folder of the new operation, otherwise move will fail
                shutil.rmtree(new_operation_path)
                shutil.move(old_operation_folder, new_operation_path)

            operation_datatypes = self._load_datatypes_from_operation_folder(new_operation_path, operation_entity,
                                                                             datatype_group)
            imported_operations.append(operation_entity)
            datatypes.extend(operation_datatypes)

        self._store_imported_datatypes_in_db(project, datatypes, dt_burst_mappings, burst_ids_mapping)
        return imported_operations


    def _populate_image(self, file_name, project_id):
        """
        Create and store an image entity.
        """
        figure_dict = XMLReader(file_name).read_metadata()
        new_path = os.path.join(os.path.split(file_name)[0], os.path.split(figure_dict['file_path'])[1])
        if not os.path.exists(new_path):
            self.logger.warn("Expected to find image path %s. Skipping." % new_path)
            return

        op = dao.get_operation_by_gid(figure_dict['fk_from_operation'])
        figure_dict['fk_op_id'] = op.id if op is not None else None
        figure_dict['fk_user_id'] = self.user_id
        figure_dict['fk_project_id'] = project_id
        figure_entity = manager_of_class(model.ResultFigure).new_instance()
        figure_entity = figure_entity.from_dict(figure_dict)
        stored_entity = dao.store_entity(figure_entity)

        # Update image meta-data with the new details after import
        figure = dao.load_figure(stored_entity.id)
        self.logger.debug("Store imported figure")
        self.files_helper.write_image_metadata(figure)


    def load_datatype_from_file(self, storage_folder, file_name, op_id, datatype_group=None, move=True):
        """
        Creates an instance of datatype from storage / H5 file 
        :returns: datatype
        """
        self.logger.debug("Loading datatType from file: %s" % file_name)
        storage_manager = HDF5StorageManager(storage_folder, file_name)
        meta_dictionary = storage_manager.get_metadata()
        meta_structure = DataTypeMetaData(meta_dictionary)

        # Now try to determine class and instantiate it
        class_name = meta_structure[DataTypeMetaData.KEY_CLASS_NAME]
        class_module = meta_structure[DataTypeMetaData.KEY_MODULE]
        datatype = __import__(class_module, globals(), locals(), [class_name])
        datatype = getattr(datatype, class_name)
        type_instance = manager_of_class(datatype).new_instance()

        # Now we fill data into instance
        type_instance.type = str(type_instance.__class__.__name__)
        type_instance.module = str(type_instance.__module__)

        # Fill instance with meta data
        type_instance.load_from_metadata(meta_dictionary)

        # Add all the required attributes
        if datatype_group is not None:
            type_instance.fk_datatype_group = datatype_group.id
        type_instance.set_operation_id(op_id)

        # Now move storage file into correct folder if necessary
        current_file = os.path.join(storage_folder, file_name)
        new_file = type_instance.get_storage_file_path()
        if new_file != current_file and move:
            shutil.move(current_file, new_file)

        return type_instance
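        # Dynamic-load illustration (module/class names below are hypothetical):
        # with KEY_MODULE "tvb.datatypes.time_series" and KEY_CLASS_NAME
        # "TimeSeries", the __import__/getattr pair above resolves the class and
        # manager_of_class(...).new_instance() creates a mapped instance of it.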


    def store_datatype(self, datatype):
        """This method stores data type into DB"""
        try:
            self.logger.debug("Store datatype: %s with Gid: %s" % (datatype.__class__.__name__, datatype.gid))
            return dao.store_entity(datatype)
        except MissingDataSetException:
            self.logger.error("Datatype %s has missing data and could not be imported properly." % (datatype,))
            os.remove(datatype.get_storage_file_path())
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = "Could not import data with gid: %s. There is already a one with " \
                        "the same name or gid." % datatype.gid
            # Delete file if can't be imported
            os.remove(datatype.get_storage_file_path())
            raise ProjectImportException(error_msg)


    def __populate_project(self, project_path):
        """
        Create and store a Project entity.
        """
        self.logger.debug("Creating project from path: %s" % project_path)
        project_dict = self.files_helper.read_project_metadata(project_path)

        project_entity = manager_of_class(model.Project).new_instance()
        project_entity = project_entity.from_dict(project_dict, self.user_id)

        try:
            self.logger.debug("Storing imported project")
            return dao.store_entity(project_entity)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = ("Could not import project: %s with gid: %s. There is already a "
                         "project with the same name or gid.") % (project_entity.name, project_entity.gid)
            raise ProjectImportException(error_msg)


    def __build_operation_from_file(self, project, operation_file):
        """
        Create Operation entity from metadata file.
        """
        operation_dict = XMLReader(operation_file).read_metadata()
        operation_entity = manager_of_class(model.Operation).new_instance()
        return operation_entity.from_dict(operation_dict, dao, self.user_id, project.gid)


    @staticmethod
    def __import_operation(operation_entity):
        """
        Store an Operation entity.
        """
        operation_entity = dao.store_entity(operation_entity)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            try:
                datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
            except SQLAlchemyError:
                # If no dataType group present for current op. group, create it.
                operation_group = dao.get_operationgroup_by_id(operation_group_id)
                datatype_group = model.DataTypeGroup(operation_group, operation_id=operation_entity.id)
                datatype_group.state = ADAPTERS['Upload']['defaultdatastate']
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group


    def load_burst_entity(self, json_burst, project_id):
        """
        Load BurstConfiguration from JSON (possibly exported from a different machine).
        Nothing gets persisted in DB or on disk.

        :param json_burst: Burst JSON export
        :param project_id: Current project ID (it will be used if the user later starts this simulation)
        :return: BurstConfiguration filled from JSON
        """

        burst_information = BurstInformation.load_from_dict(json_burst)
        burst_entity = model.BurstConfiguration(project_id)
        burst_entity.from_dict(burst_information.data)
        burst_entity.prepare_after_load()
        burst_entity.reset_tabs()

        workflow_info = burst_information.get_workflows()[0]
        workflow_entity = model.Workflow(project_id, None)
        workflow_entity.from_dict(workflow_info.data)

        view_steps = workflow_info.get_view_steps()
        analyze_steps = workflow_info.get_workflow_steps()

        for view_step in view_steps:
            try:
                algorithm = view_step.get_algorithm()
                portlet = view_step.get_portlet()
                view_step_entity = model.WorkflowStepView(algorithm.id, portlet_id=portlet.id)
                view_step_entity.from_dict(view_step.data)
                view_step_entity.workflow = workflow_entity

                # For each visualize step, also load all of the matching analyze steps.
                analyzers = []
                for an_step in analyze_steps:
                    if (an_step.data["tab_index"] != view_step_entity.tab_index
                            or an_step.data["index_in_tab"] != view_step_entity.index_in_tab):
                        continue
                    algorithm = an_step.get_algorithm()
                    wf_step_entity = model.WorkflowStep(algorithm.id)
                    wf_step_entity.from_dict(an_step.data)
                    wf_step_entity.workflow = workflow_entity
                    analyzers.append(wf_step_entity)

                portlet = PortletConfiguration(portlet.id)
                portlet.set_visualizer(view_step_entity)
                portlet.set_analyzers(analyzers)
                burst_entity.set_portlet(view_step_entity.tab_index, view_step_entity.index_in_tab, portlet)

            except Exception:
                # only log exception and ignore this step from loading
                self.logger.exception("Could not restore Workflow Step " + view_step.get_algorithm().name)

        return burst_entity
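# Minimal usage sketch for ImportService (the archive path and user id below
# are hypothetical; on failure, import_project_structure deletes the project
# folders it created and re-raises as ProjectImportException):
def _example_import_archive(archive_path, user_id):
    service = ImportService()
    service.import_project_structure(archive_path, user_id)
    return service.created_projects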
class FigureService:
    """
    Service layer for Figure entities.
    """
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"

    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources", "branding_bar.svg")

    _DEFAULT_SESSION_NAME = "Default"
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."


    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()


    def store_result_figure(self, project, user, img_type, operation_id, export_data):
        """
        Store the result image into a file and keep a reference to it in the DB.
        """
        # Generate path where to store image
        store_path = self.file_helper.get_images_folder(project.name, operation_id)
        store_path = utils.get_unique_file_name(store_path, FigureService._DEFAULT_IMAGE_FILE_NAME + img_type)[0]
        file_path = os.path.split(store_path)[1]

        if img_type == FigureService._TYPE_PNG:                         # PNG file from canvas
            imgData = base64.b64decode(export_data)                     # decode the image
            fakeImgFile = StringIO(imgData)                             # PIL.Image only opens from file, so fake one
            origImg = Image.open(fakeImgFile)
            brandingBar = Image.open(FigureService._BRANDING_BAR_PNG)

            finalSize = (origImg.size[0],                               # original width
                         origImg.size[1] + brandingBar.size[1])         # original height + brandingBar height
            finalImg = Image.new("RGBA", finalSize)

            finalImg.paste(origImg, (0, 0))                             # add the original image
            finalImg.paste(brandingBar, (0, origImg.size[1]))           # add the branding bar, below the original
                                                                        # the extra width will be discarded

            finalImg.save(store_path)                                   # store to disk

        elif img_type == FigureService._TYPE_SVG:                                   # SVG file from svg viewer
            dom = xml.dom.minidom.parseString(export_data)
            figureSvg = dom.getElementsByTagName('svg')[0]                          # get the original image

            dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)
            brandingSvg = dom.getElementsByTagName('svg')[0]                        # get the branding bar
            brandingSvg.setAttribute("y", figureSvg.getAttribute("height"))         # position it below the figure

            finalSvg = dom.createElement('svg')                                     # prepare the final svg
            width = figureSvg.getAttribute('width').replace('px', '')               # same width as original figure
            finalSvg.setAttribute("width", width)
            height = float(figureSvg.getAttribute('height').replace('px', ''))      # increase original height with
            height += float(brandingSvg.getAttribute('height').replace('px', ''))   # branding bar's height
            finalSvg.setAttribute("height", str(height))

            finalSvg.appendChild(figureSvg)                                         # add the image
            finalSvg.appendChild(brandingSvg)                                       # and the branding bar

            # Write the final SVG to disk
            with open(store_path, 'w') as dest:
                finalSvg.writexml(dest)

        operation = dao.get_operation_by_id(operation_id)
        file_name = 'TVB-%s-%s' % (operation.algorithm.name.replace(' ', '-'), operation_id)    # e.g. TVB-Algo-Name-352

        # Store entity into DB
        entity = model.ResultFigure(operation_id, user.id, project.id, FigureService._DEFAULT_SESSION_NAME,
                                    file_name, file_path, img_type)
        entity = dao.store_entity(entity)

        # Load instance from DB to have lazy fields loaded
        figure = dao.load_figure(entity.id)
        # Write image meta data to disk  
        self.file_helper.write_image_metadata(figure)

        # Force writing operation meta data on disk. 
        # This is important later for operation import
        self.file_helper.write_operation_metadata(operation)


    def retrieve_result_figures(self, project, user, selected_session_name='all_sessions'):
        """
        Retrieve from DB all the stored Displayer previews that belong to the specified session. The
        previews are for the current user and project, grouped by session.
        """
        result, previews_info = dao.get_previews(project.id, user.id, selected_session_name)
        for name in result:
            for figure in result[name]:
                figures_folder = self.file_helper.get_images_folder(project.name, figure.operation.id)
                figure_full_path = os.path.join(figures_folder, figure.file_path)
                # Compute the path 
                figure.file_path = utils.path2url_part(figure_full_path)
        return result, previews_info


    @staticmethod
    def load_figure(figure_id):
        """
        Loads a stored figure by its id.
        """
        return dao.load_figure(figure_id)


    def edit_result_figure(self, figure_id, **data):
        """
        Retrieve and edit a previously stored figure.
        """
        figure = dao.load_figure(figure_id)
        figure.session_name = data['session_name']
        figure.name = data['name']
        dao.store_entity(figure)

        # Load instance from DB to have lazy fields loaded.
        figure = dao.load_figure(figure_id)
        # Store figure meta data in an XML attached to the image.
        self.file_helper.write_image_metadata(figure)


    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(figure.project.name, figure.operation.id)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
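# End-to-end sketch for FigureService (figure_id below is hypothetical; in the
# FigureController above it arrives with the HTTP request):
def _example_rename_figure(figure_id, new_name, new_session):
    service = FigureService()
    service.edit_result_figure(figure_id, name=new_name, session_name=new_session)
    return service.load_figure(figure_id)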