Example #1
class FigureService:
    """
    Service layer for Figure entities.
    """
    _TYPE_PNG = "png"
    _TYPE_SVG = "svg"

    _BRANDING_BAR_PNG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.png")
    _BRANDING_BAR_SVG = os.path.join(os.path.dirname(__file__), "resources",
                                     "branding_bar.svg")

    _DEFAULT_SESSION_NAME = "Default"
    _DEFAULT_IMAGE_FILE_NAME = "snapshot."

    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.storage_interface = StorageInterface()

    def _write_png(self, store_path, export_data):
        img_data = base64.b64decode(export_data)  # decode the image
        final_image = Image.open(BytesIO(img_data))  # place it in a PIL stream

        branding_bar = Image.open(FigureService._BRANDING_BAR_PNG)
        # paste the branding bar at the bottom, using its alpha channel as mask
        final_image.paste(branding_bar,
                          (0, final_image.size[1] - branding_bar.size[1]),
                          branding_bar)

        final_image.save(store_path)  # store to disk as PNG

    def _write_svg(self, store_path, export_data):
        dom = xml.dom.minidom.parseString(export_data)
        figure_svg = dom.getElementsByTagName('svg')[0]  # the original image

        dom = xml.dom.minidom.parse(FigureService._BRANDING_BAR_SVG)

        try:
            width = float(figure_svg.getAttribute('width').replace('px', ''))
            height = float(figure_svg.getAttribute('height').replace('px', ''))
        except ValueError:  # defaults when dimensions are not given
            width = 1024
            height = 768
            figure_svg.setAttribute("width", str(width))
            figure_svg.setAttribute("height", str(height))

        final_svg = dom.createElement('svg')  # prepare the final svg
        branding_svg = dom.getElementsByTagName('svg')[0]  # the branding bar
        branding_svg.setAttribute("y", str(height))  # position it below the figure
        # increase the original height by the branding bar's height
        height += float(branding_svg.getAttribute('height').replace('px', ''))
        final_svg.setAttribute("width", str(width))  # same width as the original figure
        final_svg.setAttribute("height", str(height))

        final_svg.appendChild(figure_svg)  # add the image
        final_svg.appendChild(branding_svg)  # and the branding bar

        # Write the combined SVG to disk
        with open(store_path, 'w') as dest:
            final_svg.writexml(dest)

    def _image_path(self, project_name, img_type):
        "Generate path where to store image"
        images_folder = self.storage_interface.get_images_folder(project_name)
        file_name = FigureService._DEFAULT_IMAGE_FILE_NAME + img_type
        return utils.get_unique_file_name(images_folder, file_name)

    @staticmethod
    def _generate_image_name(project, user, image_name):
        if not image_name:
            # default to a generic name prefix
            image_name = "figure"
        figure_count = dao.get_figure_count(project.id, user.id) + 1
        return 'TVB-%s-%s' % (image_name, figure_count)

    def store_result_figure(self,
                            project,
                            user,
                            img_type,
                            export_data,
                            image_name=None):
        """
        Store the result image into a file and a reference to it in the DB.
        """
        store_path, file_name = self._image_path(project.name, img_type)
        image_name = self._generate_image_name(project, user, image_name)

        if img_type == FigureService._TYPE_PNG:  # PNG file from canvas
            self._write_png(store_path, export_data)
        elif img_type == FigureService._TYPE_SVG:  # SVG file from svg viewer
            self._write_svg(store_path, export_data)

        # Store entity into DB
        entity = ResultFigure(user.id, project.id,
                              FigureService._DEFAULT_SESSION_NAME, image_name,
                              file_name, img_type)
        entity = dao.store_entity(entity)

        # Load instance from DB to have lazy fields loaded
        figure = dao.load_figure(entity.id)
        # Write image meta data to disk
        _, meta_data = figure.to_dict()
        self.storage_interface.write_image_metadata(figure, meta_data)
        self.storage_interface.push_folder_to_sync(project.name)

    def retrieve_result_figures(self,
                                project,
                                user,
                                selected_session_name='all_sessions'):
        """
        Retrieve from the DB all stored displayer previews that belong to the specified session. The
        previews are for the current user and project, grouped by session.
        """
        result, previews_info = dao.get_previews(project.id, user.id,
                                                 selected_session_name)
        figures_folder = self.storage_interface.get_images_folder(project.name)
        for name in result:
            for figure in result[name]:
                figure_full_path = os.path.join(figures_folder,
                                                figure.file_path)
                # Convert the full path into an URL fragment
                figure.file_path = utils.path2url_part(figure_full_path)
        return result, previews_info

    @staticmethod
    def load_figure(figure_id):
        """
        Loads a stored figure by its id.
        """
        return dao.load_figure(figure_id)

    def edit_result_figure(self, figure_id, **data):
        """
        Retrieve and edit a previously stored figure.
        """
        figure = dao.load_figure(figure_id)
        figure.session_name = data['session_name']
        figure.name = data['name']
        dao.store_entity(figure)

        # Load instance from DB to have lazy fields loaded.
        figure = dao.load_figure(figure_id)
        # Store figure meta data in an XML attached to the image.
        _, meta_data = figure.to_dict()
        self.storage_interface.write_image_metadata(figure, meta_data)
        self.storage_interface.push_folder_to_sync(figure.project.name)

    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.storage_interface.get_images_folder(
            figure.project.name)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.storage_interface.remove_image_metadata(figure)
            self.storage_interface.push_folder_to_sync(figure.project.name)
        # Remove figure reference from DB.
        result = dao.remove_entity(ResultFigure, figure_id)
        return result
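
Not part of the original source: a minimal usage sketch for FigureService, assuming an initialized TVB database and web session. Here `project` and `user` stand in for entities already loaded through the DAO, and the base64 payload mimics what the canvas viewer would post.

import base64

service = FigureService()

# Encode a local PNG the same way the web canvas would before posting it
with open("canvas_snapshot.png", "rb") as src:
    export_data = base64.b64encode(src.read())

service.store_result_figure(project, user, "png", export_data,
                            image_name="connectivity")

# Previews come back grouped by session name
figures, sessions_info = service.retrieve_result_figures(project, user)
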
Example #2
class FigureController(ProjectController):
    """
    Resulting Figures are user-saved figures with specific visualizers or TVB pages which are considered important.
    """
    def __init__(self):
        ProjectController.__init__(self)
        self.storage_interface = StorageInterface()
        self.figure_service = FigureService()

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    @context_selected
    def storeresultfigure(self, img_type, **kwargs):
        """Create preview for current displayed canvas and 
        store image in current session, for future comparison."""
        project = common.get_current_project()
        user = common.get_logged_user()
        suggested_name = kwargs.get("suggestedName")
        self.figure_service.store_result_figure(project, user, img_type,
                                                kwargs['export_data'],
                                                suggested_name)

    @expose_page
    @context_selected
    def displayresultfigures(self, selected_session='all_sessions'):
        """ Collect and display saved previews, grouped by session."""
        project = common.get_current_project()
        user = common.get_logged_user()
        data, all_sessions_info = self.figure_service.retrieve_result_figures(
            project, user, selected_session)
        manage_figure_title = "Figures for " + str(
            selected_session) + " category"
        if selected_session == 'all_sessions':
            manage_figure_title = "Figures for all categories"
        template_specification = dict(mainContent="project/figures_display",
                                      title="Stored Visualizer Previews",
                                      controlPage=None,
                                      displayControl=False,
                                      selected_sessions_data=data,
                                      all_sessions_info=all_sessions_info,
                                      selected_session=selected_session,
                                      manageFigureTitle=manage_figure_title)
        template_specification = self.fill_default_attributes(
            template_specification, subsection='figures')
        return template_specification

    @cherrypy.expose
    @handle_error(redirect=True)
    @check_user
    @context_selected
    def editresultfigures(self,
                          remove_figure=False,
                          rename_session=False,
                          remove_session=False,
                          **data):
        """
        This method handles the following actions:
        remove figure, update figure, remove session and update session.
        """
        project = common.get_current_project()
        user = common.get_logged_user()

        redirect_url = '/project/figure/displayresultfigures'
        if "selected_session" in data and data[
                "selected_session"] is not None and len(
                    data["selected_session"]):
            redirect_url += '/' + data["selected_session"]
            del data["selected_session"]
        figure_id = None
        if "figure_id" in data:
            figure_id = data["figure_id"]
            del data["figure_id"]

        if cherrypy.request.method == 'POST' and rename_session:
            successfully_updated = True
            if "old_session_name" in data and "new_session_name" in data:
                figures_dict, _ = self.figure_service.retrieve_result_figures(
                    project, user, data["old_session_name"])
                for _key, value in figures_dict.items():
                    for figure in value:
                        new_data = {
                            "name": figure.name,
                            "session_name": data["new_session_name"]
                        }
                        success = self._update_figure(figure.id, **new_data)
                        if not success:
                            successfully_updated = False
                if successfully_updated:
                    common.set_info_message(
                        "The session was successfully updated!")
                else:
                    common.set_error_message(
                        "The session was not successfully updated! "
                        "There could be some figures that still refer to the old session."
                    )
        elif cherrypy.request.method == 'POST' and remove_session:
            successfully_removed = True
            if "old_session_name" in data:
                figures_dict, _ = self.figure_service.retrieve_result_figures(
                    project, user, data["old_session_name"])
                for _key, value in figures_dict.items():
                    for figure in value:
                        success = self.figure_service.remove_result_figure(
                            figure.id)
                        if not success:
                            successfully_removed = False
                if successfully_removed:
                    common.set_info_message(
                        "The session was removed successfully!")
                else:
                    common.set_error_message(
                        "The session was not entirely removed!")
        elif cherrypy.request.method == 'POST' and remove_figure and figure_id is not None:
            success = self.figure_service.remove_result_figure(figure_id)
            if success:
                common.set_info_message("Figure removed successfully!")
            else:
                common.set_error_message("Figure could not be removed!")
        elif figure_id is not None:
            self._update_figure(figure_id, **data)
        raise cherrypy.HTTPRedirect(redirect_url)

    def _update_figure(self, figure_id, **data):
        """
        Update the figure details with the given data.
        """
        try:
            data = EditPreview().to_python(data)
            self.figure_service.edit_result_figure(figure_id, **data)
            common.set_info_message('Figure details updated successfully.')
            return True
        except formencode.Invalid as excep:
            self.logger.debug(excep)
            common.set_error_message(str(excep))
            return False

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloadimage(self, figure_id):
        """
        Allow a user to download a figure.
        """
        figure = self.figure_service.load_figure(figure_id)
        image_folder = self.storage_interface.get_images_folder(
            figure.project.name)
        figure_path = os.path.join(image_folder, figure.file_path)
        return serve_file(figure_path, "image/" + figure.file_format,
                          "attachment",
                          "%s.%s" % (figure.name, figure.file_format))

    @cherrypy.expose
    @handle_error(redirect=False)
    @using_template("overlay")
    @check_user
    def displayzoomedimage(self, figure_id):
        """
        Displays the image with the specified id in an overlay dialog.
        """
        figure = self.figure_service.load_figure(figure_id)
        figures_folder = self.storage_interface.get_images_folder(
            figure.project.name)
        figure_full_path = os.path.join(figures_folder, figure.file_path)
        figure_file_path = utils.path2url_part(figure_full_path)
        description = figure.session_name + " - " + figure.name
        template_dictionary = dict(figure_file_path=figure_file_path)
        return self.fill_overlay_attributes(template_dictionary, "Detail",
                                            description,
                                            "project/figure_zoom_overlay",
                                            "lightbox")
Example #3
class ImportService(object):
    """
    Service for importing TVB entities into the system.
    It supports TVB-exported H5 files as input, but it should also handle H5 files
    generated outside of TVB, as long as they respect the same structure.
    """
    def __init__(self):
        self.logger = get_logger(__name__)
        self.user_id = None
        self.storage_interface = StorageInterface()
        self.created_projects = []
        self.view_model2adapter = self._populate_view_model2adapter()

    def _download_and_unpack_project_zip(self, uploaded, uq_file_name,
                                         temp_folder):

        if isinstance(uploaded, (FieldStorage, Part)):
            if not uploaded.file:
                raise ImportException(
                    "Please select the archive which contains the project structure."
                )
            with open(uq_file_name, 'wb') as file_obj:
                self.storage_interface.copy_file(uploaded.file, file_obj)
        else:
            shutil.copy2(uploaded, uq_file_name)

        try:
            self.storage_interface.unpack_zip(uq_file_name, temp_folder)
        except FileStructureException as excep:
            self.logger.exception(excep)
            raise ImportException(
                "Bad ZIP archive provided. A TVB exported project is expected!"
            )

    @staticmethod
    def _compute_unpack_path():
        """
        :return: the path of the folder where the uploaded ZIP will be unpacked
        """
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day,
                                             now.hour, now.minute, now.second,
                                             now.microsecond)
        uq_name = "%s-ImportProject" % date_str
        return os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, uq_name)

    @transactional
    def import_project_structure(self, uploaded, user_id):
        """
        Execute import operations:

        1. check if ZIP or folder
        2. find all project nodes
        3. for each project node:
            - create project
            - create all operations and groups
            - import all images
            - create all dataTypes
        """

        self.user_id = user_id
        self.created_projects = []

        # Compute the name of the folder where the uploaded ZIP file will be unpacked
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        try:
            self._download_and_unpack_project_zip(uploaded, uq_file_name,
                                                  temp_folder)
            self._import_project_from_folder(temp_folder)

        except Exception as excep:
            self.logger.exception(
                "Error encountered during import. Deleting projects created during this operation."
            )
            # Remove project folders created so far.
            # Note that using the project service to remove the projects will not work,
            # because we do not have support for nested transaction.
            # Removing from DB is not necessary because in transactional env a simple exception throw
            # will erase everything to be inserted.
            for project in self.created_projects:
                self.storage_interface.remove_project(project)
            raise ImportException(str(excep))

        finally:
            # Delete the uploaded file and the temporary folder where the ZIP was unpacked.
            self.storage_interface.remove_files([uq_file_name, temp_folder])

    def _import_project_from_folder(self, temp_folder):
        """
        Locate the project inside the unpacked archive and import it.
        """
        temp_project_path = None
        for root, _, files in os.walk(temp_folder):
            if StorageInterface.TVB_PROJECT_FILE in files:
                temp_project_path = root
                break

        if temp_project_path is not None:
            update_manager = ProjectUpdateManager(temp_project_path)

            if update_manager.checked_version < 3:
                raise ImportException(
                    'Importing projects with versions older than 3 is not supported in TVB 2! '
                    'Please import the project in TVB 1.5.8 and then launch the current version of '
                    'TVB in order to upgrade this project!')

            update_manager.run_all_updates()
            project = self.__populate_project(temp_project_path)
            # Keep track of the projects created so far, for cleaning up their folders in case of failure
            self.created_projects.append(project)
            # Ensure project final folder exists on disk
            project_path = self.storage_interface.get_project_folder(
                project.name)
            shutil.move(
                os.path.join(temp_project_path,
                             StorageInterface.TVB_PROJECT_FILE), project_path)
            # Now import project operations with their results
            self.import_list_of_operations(project, temp_project_path)
            # Import images and move them from temp into target
            self._store_imported_images(project, temp_project_path,
                                        project.name)
            if StorageInterface.encryption_enabled():
                self.storage_interface.remove_project(project, True)

    def _load_datatypes_from_operation_folder(self, src_op_path,
                                              operation_entity,
                                              datatype_group):
        """
        Loads datatypes from operation folder
        :returns: Datatype entities as dict {original_path: Dt instance}
        """
        all_datatypes = {}
        for file_name in os.listdir(src_op_path):
            if self.storage_interface.ends_with_tvb_storage_file_extension(
                    file_name):
                h5_file = os.path.join(src_op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(
                        h5_file, operation_entity.id, datatype_group,
                        operation_entity.fk_launched_in)
                    all_datatypes[h5_file] = datatype

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning(
                        "Incompatible H5 file will be ignored: %s" % h5_file)
                    self.logger.exception("Incompatibility details ...")
        return all_datatypes

    @staticmethod
    def check_import_references(file_path, datatype):
        h5_class = H5File.h5_class_from_file(file_path)
        reference_list = h5_class(file_path).gather_references()

        for _, reference_gid in reference_list:
            if not reference_gid:
                continue

            ref_index = load.load_entity_by_gid(reference_gid)
            if ref_index is None:
                os.remove(file_path)
                dao.remove_entity(datatype.__class__, datatype.id)
                raise MissingReferenceException(
                    'Imported file depends on datatypes that do not exist. Please upload '
                    'those first!')

    def _store_or_link_burst_config(self, burst_config, bc_path, project_id):
        bc_already_in_tvb = dao.get_generic_entity(BurstConfiguration,
                                                   burst_config.gid, 'gid')
        if len(bc_already_in_tvb) == 0:
            self.store_datatype(burst_config, bc_path)
            return 1
        return 0

    def store_or_link_datatype(self, datatype, dt_path, project_id):
        self.check_import_references(dt_path, datatype)
        stored_dt_count = 0
        datatype_already_in_tvb = load.load_entity_by_gid(datatype.gid)
        if not datatype_already_in_tvb:
            self.store_datatype(datatype, dt_path)
            stored_dt_count = 1
        elif datatype_already_in_tvb.parent_operation.project.id != project_id:
            AlgorithmService.create_link(datatype_already_in_tvb.id,
                                         project_id)
            if datatype_already_in_tvb.fk_datatype_group:
                AlgorithmService.create_link(
                    datatype_already_in_tvb.fk_datatype_group, project_id)
        return stored_dt_count

    def _store_imported_datatypes_in_db(self, project, all_datatypes):
        # type: (Project, dict) -> int
        sorted_dts = sorted(
            all_datatypes.items(),
            key=lambda dt_item: dt_item[1].create_date or datetime.now())
        count = 0
        for dt_path, datatype in sorted_dts:
            count += self.store_or_link_datatype(datatype, dt_path, project.id)
        return count

    def _store_imported_images(self, project, temp_project_path, project_name):
        """
        Import all images from project
        """
        images_root = os.path.join(temp_project_path,
                                   StorageInterface.IMAGES_FOLDER)
        target_images_path = self.storage_interface.get_images_folder(
            project_name)
        for root, _, files in os.walk(images_root):
            for metadata_file in files:
                if self.storage_interface.ends_with_tvb_file_extension(
                        metadata_file):
                    self._import_image(root, metadata_file, project.id,
                                       target_images_path)

    @staticmethod
    def _populate_view_model2adapter():
        if len(VIEW_MODEL2ADAPTER) > 0:
            return VIEW_MODEL2ADAPTER
        view_model2adapter = {}
        algos = dao.get_all_algorithms()
        for algo in algos:
            adapter = ABCAdapter.build_adapter(algo)
            view_model_class = adapter.get_view_model_class()
            view_model2adapter[view_model_class] = algo
        return view_model2adapter

    def _retrieve_operations_in_order(self,
                                      project,
                                      import_path,
                                      importer_operation_id=None):
        # type: (Project, str, int) -> list[Operation2ImportData]
        retrieved_operations = []

        for root, _, files in os.walk(import_path):
            if OPERATION_XML in files:
                # Old Operation XML format, used when uploading projects exported by previous TVB versions
                operation_file_path = os.path.join(root, OPERATION_XML)
                operation, operation_xml_parameters, _ = self.build_operation_from_file(
                    project, operation_file_path)
                operation.import_file = operation_file_path
                self.logger.debug("Found operation in old XML format: " +
                                  str(operation))
                retrieved_operations.append(
                    Operation2ImportData(
                        operation,
                        root,
                        info_from_xml=operation_xml_parameters))

            else:
                # Prefer the new format, with ViewModelH5
                main_view_model = None
                dt_paths = []
                all_view_model_files = []
                for file in files:
                    if self.storage_interface.ends_with_tvb_storage_file_extension(
                            file):
                        h5_file = os.path.join(root, file)
                        try:
                            h5_class = H5File.h5_class_from_file(h5_file)
                            if h5_class is ViewModelH5:
                                all_view_model_files.append(h5_file)
                                if not main_view_model:
                                    view_model = h5.load_view_model_from_file(h5_file)
                                    if type(view_model) in self.view_model2adapter:
                                        main_view_model = view_model
                            else:
                                file_update_manager = FilesUpdateManager()
                                file_update_manager.upgrade_file(h5_file)
                                dt_paths.append(h5_file)
                        except Exception:
                            self.logger.warning(
                                "Unreadable H5 file will be ignored: %s" %
                                h5_file)

                if main_view_model is not None:
                    alg = self.view_model2adapter[type(main_view_model)]
                    op_group_id = None
                    if main_view_model.operation_group_gid:
                        op_group = dao.get_operationgroup_by_gid(
                            main_view_model.operation_group_gid.hex)
                        if not op_group:
                            op_group = OperationGroup(
                                project.id,
                                ranges=json.loads(main_view_model.ranges),
                                gid=main_view_model.operation_group_gid.hex)
                            op_group = dao.store_entity(op_group)
                        op_group_id = op_group.id
                    operation = Operation(
                        main_view_model.gid.hex,
                        project.fk_admin,
                        project.id,
                        alg.id,
                        status=STATUS_FINISHED,
                        user_group=main_view_model.generic_attributes.operation_tag,
                        start_date=datetime.now(),
                        completion_date=datetime.now(),
                        op_group_id=op_group_id,
                        range_values=main_view_model.range_values)
                    operation.create_date = main_view_model.create_date
                    operation.visible = main_view_model.generic_attributes.visible
                    self.logger.debug(
                        "Found main ViewModel to create operation for it: " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, main_view_model,
                                             dt_paths, all_view_model_files))

                elif len(dt_paths) > 0:
                    alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                      TVB_IMPORTER_CLASS)
                    default_adapter = ABCAdapter.build_adapter(alg)
                    view_model = default_adapter.get_view_model_class()()
                    view_model.data_file = dt_paths[0]
                    vm_path = h5.store_view_model(view_model, root)
                    all_view_model_files.append(vm_path)
                    operation = Operation(view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    operation.create_date = datetime.min
                    self.logger.debug(
                        "Found no ViewModel in folder, so we default to " +
                        str(operation))

                    if importer_operation_id:
                        operation.id = importer_operation_id

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, view_model,
                                             dt_paths, all_view_model_files,
                                             True))

        return sorted(retrieved_operations,
                      key=lambda op_data: op_data.order_field)

    def create_view_model(self,
                          operation_entity,
                          operation_data,
                          new_op_folder,
                          generic_attributes=None,
                          add_params=None):
        view_model = self._get_new_form_view_model(
            operation_entity, operation_data.info_from_xml)
        if add_params is not None:
            for element in add_params:
                key_attr = getattr(view_model, element[0])
                setattr(key_attr, element[1], element[2])

        view_model.range_values = operation_entity.range_values
        op_group = dao.get_operationgroup_by_id(
            operation_entity.fk_operation_group)
        if op_group:
            view_model.operation_group_gid = uuid.UUID(op_group.gid)
            view_model.ranges = json.dumps(op_group.range_references)
            view_model.is_metric_operation = 'DatatypeMeasure' in op_group.name

        if generic_attributes is not None:
            view_model.generic_attributes = generic_attributes
        view_model.generic_attributes.operation_tag = operation_entity.user_group

        h5.store_view_model(view_model, new_op_folder)
        view_model_disk_size = StorageInterface.compute_recursive_h5_disk_usage(
            new_op_folder)
        operation_entity.view_model_disk_size = view_model_disk_size
        operation_entity.view_model_gid = view_model.gid.hex
        dao.store_entity(operation_entity)
        return view_model

    def import_list_of_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported.
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, None if is_group else importer_operation_id)

        if is_group and len(ordered_operations) > 0:
            first_op = dao.get_operation_by_id(importer_operation_id)
            vm_path = h5.determine_filepath(first_op.view_model_gid,
                                            os.path.dirname(import_path))
            os.remove(vm_path)

            ordered_operations[0].operation.id = importer_operation_id

        for operation_data in ordered_operations:
            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.storage_interface.get_project_folder(
                    project.name, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                operation_data.operation.create_date = datetime.now()
                operation_data.operation.start_date = datetime.now()
                operation_data.operation.completion_date = datetime.now()

                do_merge = bool(importer_operation_id)
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    imported_operations.append(operation_entity)
                    new_op_folder = self.storage_interface.get_project_folder(
                        project.name, str(operation_entity.id))
                    view_model_disk_size = 0
                    for h5_file in operation_data.all_view_model_files:
                        view_model_disk_size += StorageInterface.compute_size_on_disk(
                            h5_file)
                        shutil.move(h5_file, new_op_folder)
                    operation_entity.view_model_disk_size = view_model_disk_size
                    dao.store_entity(operation_entity)
                except MissingReferenceException as excep:
                    self.storage_interface.remove_operation_data(
                        project.name, operation_entity.id)
                    operation_entity.fk_operation_group = None
                    dao.store_entity(operation_entity)
                    dao.remove_entity(DataTypeGroup, dt_group.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

            # We want importer_operation_id to be kept just for the first operation (the first iteration)
            if is_group:
                importer_operation_id = None

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count

    @staticmethod
    def _get_new_form_view_model(operation, xml_parameters):
        # type: (Operation, str) -> ViewModel
        algo = dao.get_algorithm_by_id(operation.fk_from_algo)
        ad = ABCAdapter.build_adapter(algo)
        view_model = ad.get_view_model_class()()

        if xml_parameters:
            declarative_attrs = type(view_model).declarative_attrs

            if isinstance(xml_parameters, str):
                xml_parameters = json.loads(xml_parameters)
            for param in xml_parameters:
                # Strip a single leading underscore and normalize to lower case
                new_param_name = param[1:] if param.startswith("_") else param
                new_param_name = new_param_name.lower()
                if new_param_name in declarative_attrs:
                    try:
                        setattr(view_model, new_param_name,
                                xml_parameters[param])
                    except (TraitTypeError, TraitAttributeError):
                        pass
        return view_model

    def _import_image(self, src_folder, metadata_file, project_id,
                      target_images_path):
        """
        Create and store a image entity.
        """
        figure_dict = StorageInterface().read_metadata_from_xml(
            os.path.join(src_folder, metadata_file))
        actual_figure = os.path.join(
            src_folder,
            os.path.split(figure_dict['file_path'])[1])
        if not os.path.exists(actual_figure):
            self.logger.warning("Expected to find image path %s .Skipping" %
                                actual_figure)
            return
        figure_dict['fk_user_id'] = self.user_id
        figure_dict['fk_project_id'] = project_id
        figure_entity = manager_of_class(ResultFigure).new_instance()
        figure_entity = figure_entity.from_dict(figure_dict)
        stored_entity = dao.store_entity(figure_entity)

        # Update image meta-data with the new details after import
        figure = dao.load_figure(stored_entity.id)
        shutil.move(actual_figure, target_images_path)
        self.logger.debug("Store imported figure")
        _, meta_data = figure.to_dict()
        self.storage_interface.write_image_metadata(figure, meta_data)

    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Create a datatype instance from a storage / H5 file.
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)

            already_existing_datatype = h5.load_entity_by_gid(datatype.gid)
            if datatype_group is not None and already_existing_datatype is not None:
                raise DatatypeGroupImportException(
                    "The datatype group that you are trying to import"
                    " already exists!")
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            if datatype_group is not None and hasattr(datatype_index, 'fk_source_gid') and \
                    datatype_index.fk_source_gid is not None:
                ts = h5.load_entity_by_gid(datatype_index.fk_source_gid)

                if ts is None:
                    op = dao.get_operations_in_group(
                        datatype_group.fk_operation_group,
                        only_first_operation=True)
                    op.fk_operation_group = None
                    dao.store_entity(op)
                    dao.remove_entity(OperationGroup,
                                      datatype_group.fk_operation_group)
                    dao.remove_entity(DataTypeGroup, datatype_group.id)
                    raise DatatypeGroupImportException(
                        "Please import the time series group before importing the"
                        " datatype measure group!")

            # Add all the required attributes
            if datatype_group:
                datatype_index.fk_datatype_group = datatype_group.id
                if len(datatype_group.subject) == 0:
                    datatype_group.subject = datatype_index.subject
                    dao.store_entity(datatype_group)
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = StorageInterface.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result

    def store_datatype(self, datatype, current_file=None):
        """This method stores data type into DB"""
        try:
            self.logger.debug("Store datatype: %s with Gid: %s" %
                              (datatype.__class__.__name__, datatype.gid))
            # Now move storage file into correct folder if necessary
            if current_file is not None:
                final_path = h5.path_for_stored_index(datatype)
                if final_path != current_file:
                    shutil.move(current_file, final_path)
            stored_entry = load.load_entity_by_gid(datatype.gid)
            if not stored_entry:
                stored_entry = dao.store_entity(datatype)

            return stored_entry
        except MissingDataSetException as e:
            self.logger.exception(e)
            error_msg = "Datatype %s has missing data and could not be imported properly." % (
                datatype, )
            raise ImportException(error_msg)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = "Could not import data with gid: %s. There is already a one with " \
                        "the same name or gid." % datatype.gid
            raise ImportException(error_msg)

    def __populate_project(self, project_path):
        """
        Create and store a Project entity.
        """
        self.logger.debug("Creating project from path: %s" % project_path)
        project_dict = self.storage_interface.read_project_metadata(
            project_path)

        project_entity = manager_of_class(Project).new_instance()
        project_entity = project_entity.from_dict(project_dict, self.user_id)

        try:
            self.logger.debug("Storing imported project")
            return dao.store_entity(project_entity)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = (
                "Could not import project: %s with gid: %s. There is already a "
                "project with the same name or gid.") % (project_entity.name,
                                                         project_entity.gid)
            raise ImportException(error_msg)

    def build_operation_from_file(self, project, operation_file):
        """
        Create Operation entity from metadata file.
        """
        operation_dict = StorageInterface().read_metadata_from_xml(
            operation_file)
        operation_entity = manager_of_class(Operation).new_instance()
        return operation_entity.from_dict(operation_dict, dao, self.user_id,
                                          project.gid)

    @staticmethod
    def import_operation(operation_entity, migration=False):
        """
        Store an Operation entity.
        """
        do_merge = bool(operation_entity.id)
        operation_entity = dao.store_entity(operation_entity, merge=do_merge)
        operation_group_id = operation_entity.fk_operation_group
        datatype_group = None

        if operation_group_id is not None:
            datatype_group = dao.get_datatypegroup_by_op_group_id(
                operation_group_id)

            if datatype_group is None and migration is False:
                # If no DataType group is present for the current operation group, create it.
                operation_group = dao.get_operationgroup_by_id(
                    operation_group_id)
                datatype_group = DataTypeGroup(
                    operation_group, operation_id=operation_entity.id)
                datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate
                datatype_group = dao.store_entity(datatype_group)

        return operation_entity, datatype_group

    def import_simulator_configuration_zip(self, zip_file):
        # Compute the name of the folder where the uploaded ZIP file will be unpacked
        temp_folder = self._compute_unpack_path()
        uq_file_name = temp_folder + ".zip"

        if isinstance(zip_file, (FieldStorage, Part)):
            if not zip_file.file:
                raise ServicesBaseException(
                    "Could not process the given ZIP file...")

            with open(uq_file_name, 'wb') as file_obj:
                self.storage_interface.copy_file(zip_file.file, file_obj)
        else:
            shutil.copy2(zip_file, uq_file_name)

        try:
            self.storage_interface.unpack_zip(uq_file_name, temp_folder)
            return temp_folder
        except FileStructureException as excep:
            raise ServicesBaseException(
                "Could not process the given ZIP file..." + str(excep))

    @staticmethod
    def _update_burst_metric(operation_entity):
        burst_config = dao.get_burst_for_operation_id(operation_entity.id)
        if burst_config and burst_config.ranges:
            if burst_config.fk_metric_operation_group is None:
                burst_config.fk_metric_operation_group = operation_entity.fk_operation_group
            dao.store_entity(burst_config)

    @staticmethod
    def _update_dt_groups(project_id):
        dt_groups = dao.get_datatypegroup_for_project(project_id)
        for dt_group in dt_groups:
            dt_group.count_results = dao.count_datatypes_in_group(dt_group.id)
            dts_in_group = dao.get_datatypes_from_datatype_group(dt_group.id)
            if dts_in_group:
                dt_group.fk_parent_burst = dts_in_group[0].fk_parent_burst
            dao.store_entity(dt_group)

    @staticmethod
    def _update_burst_configurations(project_id):
        burst_configs = dao.get_bursts_for_project(project_id)
        for burst_config in burst_configs:
            burst_config.datatypes_number = dao.count_datatypes_in_burst(
                burst_config.gid)
            dao.store_entity(burst_config)
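
Not part of the original source: a minimal usage sketch for ImportService, assuming an initialized TVB database. "exported_project.zip" is a hypothetical TVB project export and admin_user_id a hypothetical user id.

service = ImportService()
try:
    # A file path or an uploaded FieldStorage/Part object are both accepted
    service.import_project_structure("exported_project.zip", admin_user_id)
    print("Imported projects: %s" % service.created_projects)
except ImportException as excep:
    print("Import failed: %s" % excep)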