Example #1
0
    def _load_datatypes_from_operation_folder(self, op_path, operation_entity,
                                              datatype_group):
        """
        Loads datatypes from an operation folder.
        :returns: Datatypes ordered by creation date (to resolve dependencies)
        """
        all_datatypes = []
        for file_name in os.listdir(op_path):
            if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                h5_file = os.path.join(op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(
                        op_path, file_name, operation_entity.id,
                        datatype_group)
                    all_datatypes.append(datatype)

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning(
                        "Incompatible H5 file will be ignored: %s" % h5_file)
                    self.logger.exception("Incompatibility details ...")

        all_datatypes.sort(key=lambda datatype: datatype.create_date)
        for dt in all_datatypes:
            self.logger.debug("Import order %s: %s" % (dt.type, dt.gid))
        return all_datatypes
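
    # A minimal consumption sketch (an assumption, not part of the original
    # sources): the create_date ordering means a caller can store each DataType
    # as it arrives and trust that referenced entities were stored first.
    # `store_datatype` is used here as the hypothetical persistence step.
    def _example_store_in_order(self, op_path, operation_entity, datatype_group):
        for datatype in self._load_datatypes_from_operation_folder(op_path, operation_entity, datatype_group):
            self.store_datatype(datatype)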
Example #2
0
    def _load_datatypes_from_operation_folder(self, src_op_path,
                                              operation_entity,
                                              datatype_group):
        """
        Loads datatypes from an operation folder.
        :returns: Datatype entities as a dict {original file path: DataType instance}
        """
        all_datatypes = {}
        for file_name in os.listdir(src_op_path):
            if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                h5_file = os.path.join(src_op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(
                        h5_file, operation_entity.id, datatype_group,
                        operation_entity.fk_launched_in)
                    all_datatypes[h5_file] = datatype

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning(
                        "Incompatible H5 file will be ignored: %s" % h5_file)
                    self.logger.exception("Incompatibility details ...")
        return all_datatypes
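
    # A minimal consumption sketch (assumed, not from the sources): the dict
    # form keeps the original H5 path next to each DataType, which helps when
    # the file must be moved or re-read after the DB entity is stored.
    def _example_iterate_with_paths(self, src_op_path, operation_entity, datatype_group):
        dts = self._load_datatypes_from_operation_folder(src_op_path, operation_entity, datatype_group)
        for h5_path, datatype in dts.items():
            self.logger.debug("%s was loaded from %s" % (datatype.gid, h5_path))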
Example #3
0
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format, etc.)
        """
        if data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(
                    current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(
                            folder,
                            h5file,
                            self.operation_id,
                            final_storage=self.storage_path)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If import operation failed delete file from disk.
                        if datatype is not None and os.path.exists(
                                datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException(
                            "Invalid file received as input. Most probably incomplete "
                            "meta-data ...  " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  data_file)
Example #4
0
    def launch(self, view_model):
        # type: (TVBImporterModel) -> list
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format, etc.)
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            if zipfile.is_zipfile(view_model.data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(view_model.data_file, tmp_folder)
                operations = service.import_project_operations(
                    current_op.project, tmp_folder)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                folder, h5file = os.path.split(view_model.data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        service.check_import_references(
                            view_model.data_file, datatype)
                        service.store_datatype(datatype, view_model.data_file)
                        self.nr_of_datatypes += 1
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
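
    # Hedged invocation sketch (names assumed, not part of this adapter): a
    # caller fills the view model with the uploaded file path, then launches.
    #
    #     view_model = TVBImporterModel()
    #     view_model.data_file = "/tmp/upload/datatype.h5"
    #     importer.launch(view_model)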
Example #5
0
def load_entity_by_gid(data_gid):
    """
    Load a generic DataType, specified by GID.
    """
    datatype = dao.get_datatype_by_gid(data_gid)
    if isinstance(datatype, MappedType):
        datatype_path = datatype.get_storage_file_path()
        files_update_manager = FilesUpdateManager()
        if not files_update_manager.is_file_up_to_date(datatype_path):
            datatype.invalid = True
            dao.store_entity(datatype)
            raise FileVersioningException("Encountered DataType with an incompatible storage or data version. "
                                          "The DataType was marked as invalid.")
    return datatype
Example #6
0
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc.)
        """
        if data_file is None:
            raise LaunchException("Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(folder, h5file, self.operation_id)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If import operation failed delete file from disk.
                        if datatype is not None and os.path.exists(datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException("Invalid file received as input. Most probably incomplete "
                                              "meta-data ...  " + str(excep))
                else:
                    raise LaunchException("Uploaded file: %s is neither in ZIP or HDF5 format" % data_file)

        else:
            raise LaunchException("File: %s to import does not exists." % data_file)
Example #7
0
def load_entity_by_gid(data_gid):
    """
    Load a generic DataType, specified by GID.
    """
    datatype = dao.get_datatype_by_gid(data_gid)
    if isinstance(datatype, MappedType):
        datatype_path = datatype.get_storage_file_path()
        files_update_manager = FilesUpdateManager()
        if not files_update_manager.is_file_up_to_date(datatype_path):
            datatype.invalid = True
            dao.store_entity(datatype)
            raise FileVersioningException(
                "Encountered DataType with an incompatible storage or data version. "
                "The DataType was marked as invalid.")
    return datatype
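
# A minimal usage sketch (assumed calling context, not from the TVB sources):
# callers typically catch the versioning error, since the entity has already
# been marked invalid by the time it is raised. `entity_gid` is hypothetical.
def _example_safe_load(entity_gid):
    try:
        return load_entity_by_gid(entity_gid)
    except FileVersioningException:
        # The DataType was flagged invalid; the file needs to be re-imported.
        return None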
Example #8
0
def initialize(introspected_modules, load_xml_events=True):
    """
    Initialize when the application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(TvbProfile.current.db.DB_URL)

    ## Initialize DB
    is_db_empty = initialize_startup()

    ## Create Projects storage root in case it does not exist.
    initialize_storage()

    ## Populate DB algorithms, by introspection
    event_folders = []
    start_introspection_time = datetime.datetime.now()
    for module in introspected_modules:
        introspector = Introspector(module)
        # Introspection is always done, even if DB was not empty.
        introspector.introspect(True)
        event_path = introspector.get_events_path()
        if event_path:
            event_folders.append(event_path)

    # Now remove or mark as removed any unverified Algo-Group, Algo-Category or Portlet
    to_invalidate, to_remove = dao.get_non_validated_entities(
        start_introspection_time)
    for entity in to_invalidate:
        entity.removed = True
    dao.store_entities(to_invalidate)
    for entity in to_remove:
        dao.remove_entity(entity.__class__, entity.id)

    ## Populate events
    if load_xml_events:
        read_events(event_folders)

    if not TvbProfile.is_first_run():
        ## Create default users.
        if is_db_empty:
            dao.store_entity(
                model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None,
                           None, True, None))
            UserService().create_user(
                username=TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                password=TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                email=TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                role=model.ROLE_ADMINISTRATOR)

        ## In case actions related to latest code-changes are needed, make sure they are executed.
        CodeUpdateManager().run_all_updates()

        ## In case the H5 version changed, run updates on all DataTypes
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            thread = threading.Thread(
                target=FilesUpdateManager().run_all_updates)
            thread.start()

        ## Clean tvb-first-time-run temporary folder, as we are no longer at the first run:
        shutil.rmtree(TvbProfile.current.FIRST_RUN_STORAGE, True)
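
# A minimal startup sketch (an assumption, not from the sources): the list of
# introspected modules is application-specific; 'tvb.adapters' stands in as a
# plausible value, and the profile must be set before calling initialize().
#
#     TvbProfile.set_profile(TvbProfile.WEB_PROFILE)
#     initialize(['tvb.adapters'], load_xml_events=True)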
Example #9
0
    def _load_datatypes_from_operation_folder(self, op_path, operation_entity, datatype_group):
        """
        Loads datatypes from an operation folder.
        :returns: Datatypes ordered by creation date (to resolve dependencies)
        """
        all_datatypes = []
        for file_name in os.listdir(op_path):
            if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                h5_file = os.path.join(op_path, file_name)
                try:
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(h5_file)
                    datatype = self.load_datatype_from_file(op_path, file_name, operation_entity.id, datatype_group)
                    all_datatypes.append(datatype)

                except IncompatibleFileManagerException:
                    os.remove(h5_file)
                    self.logger.warning("Incompatible H5 file will be ignored: %s" % h5_file)

        all_datatypes.sort(key=lambda datatype: datatype.create_date)
        for dt in all_datatypes:
            self.logger.debug("Import order %s: %s" % (dt.type, dt.gid))
        return all_datatypes
Example #10
0
def initialize(skip_import=False, skip_updates=False):
    """
    Initialize when the application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(TvbProfile.current.db.DB_URL)

    # Initialize DB
    is_db_empty = initialize_startup()

    # Create Projects storage root in case it does not exist.
    initialize_storage()

    # Populate DB algorithms, by introspection
    start_introspection_time = datetime.now()
    # Introspection is always done, even if DB was not empty.
    introspector = Introspector()
    introspector.introspect()

    to_invalidate = dao.get_non_validated_entities(start_introspection_time)
    for entity in to_invalidate:
        entity.removed = True
    dao.store_entities(to_invalidate)

    if not TvbProfile.is_first_run() and not skip_updates:
        # Create default users.
        if is_db_empty:
            dao.store_entity(
                User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, TvbProfile.current.web.admin.SYSTEM_USER_NAME, None,
                     None, True, None))
            UserService().create_user(username=TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                                      display_name=TvbProfile.current.web.admin.ADMINISTRATOR_DISPLAY_NAME,
                                      password=TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                                      email=TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                                      role=ROLE_ADMINISTRATOR, skip_import=skip_import)

        # In case actions related to latest code-changes are needed, make sure they are executed.
        CodeUpdateManager().run_all_updates()

        # In case the H5 version changed, run updates on all DataTypes
        thread = None
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            thread = threading.Thread(target=FilesUpdateManager().run_all_updates)
            thread.start()

        # Clean tvb-first-time-run temporary folder, as we are no longer at the first run:
        shutil.rmtree(TvbProfile.current.FIRST_RUN_STORAGE, True)
        return thread
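
# A minimal caller sketch (assumption): when the H5 data version changed,
# initialize() returns the background migration thread, which callers can
# join before relying on migrated files (the script in Example #13 does this).
#
#     migration_thread = initialize()
#     if migration_thread is not None:
#         migration_thread.join()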
Example #11
0
    def _retrieve_operations_in_order(self, project, import_path):
        # type: (Project, str) -> list[Operation2ImportData]
        retrieved_operations = []

        for root, _, files in os.walk(import_path):
            if OPERATION_XML in files:
                # Previous Operation format for uploading previous versions of projects
                operation_file_path = os.path.join(root, OPERATION_XML)
                operation, operation_xml_parameters = self.__build_operation_from_file(
                    project, operation_file_path)
                operation.import_file = operation_file_path
                self.logger.debug("Found operation in old XML format: " +
                                  str(operation))
                retrieved_operations.append(
                    Operation2ImportData(
                        operation,
                        root,
                        info_from_xml=operation_xml_parameters))

            else:
                # We strive for the new format with ViewModelH5
                main_view_model = None
                dt_paths = []
                all_view_model_files = []
                for file in files:
                    if file.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                        h5_file = os.path.join(root, file)
                        try:
                            h5_class = H5File.h5_class_from_file(h5_file)
                            if h5_class is ViewModelH5:
                                all_view_model_files.append(h5_file)
                                if not main_view_model:
                                    view_model = h5.load_view_model_from_file(
                                        h5_file)
                                    if type(view_model) in VIEW_MODEL2ADAPTER:
                                        main_view_model = view_model
                            else:
                                file_update_manager = FilesUpdateManager()
                                file_update_manager.upgrade_file(h5_file)
                                dt_paths.append(h5_file)
                        except Exception:
                            self.logger.warning(
                                "Unreadable H5 file will be ignored: %s" %
                                h5_file)

                if main_view_model is not None:
                    alg = VIEW_MODEL2ADAPTER[type(main_view_model)]
                    operation = Operation(main_view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          user_group=main_view_model.generic_attributes.operation_tag,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    operation.create_date = main_view_model.create_date
                    self.logger.debug(
                        "Found main ViewModel to create operation for it: " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, main_view_model,
                                             dt_paths, all_view_model_files))

                elif len(dt_paths) > 0:
                    alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                      TVB_IMPORTER_CLASS)
                    default_adapter = ABCAdapter.build_adapter(alg)
                    view_model = default_adapter.get_view_model_class()()
                    view_model.data_file = dt_paths[0]
                    vm_path = h5.store_view_model(view_model, root)
                    all_view_model_files.append(vm_path)
                    operation = Operation(view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    self.logger.debug(
                        "Found no ViewModel in folder, so we default to " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, view_model,
                                             dt_paths, all_view_model_files,
                                             True))

        return sorted(retrieved_operations,
                      key=lambda op_data: op_data.order_field)
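
    # A minimal consumption sketch (assumed, not from the sources): the result
    # comes back sorted by `order_field`, so imports can be replayed directly.
    # The `operation` attribute name on Operation2ImportData is assumed here.
    #
    #     for op_data in self._retrieve_operations_in_order(project, import_path):
    #         self.logger.debug("Replaying %s" % str(op_data.operation))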
Example #12
0
    def launch(self, view_model):
        # type: (TVBImporterModel) -> list
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format, etc.)
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            current_op = dao.get_operation_by_id(self.operation_id)
            if zipfile.is_zipfile(view_model.data_file):
                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                self.storage_interface.unpack_zip(view_model.data_file,
                                                  tmp_folder)
                is_group = False
                current_op_id = current_op.id
                for file in os.listdir(tmp_folder):
                    # In case we import a DatatypeGroup, we want the default import flow
                    if os.path.isdir(os.path.join(tmp_folder, file)):
                        current_op_id = None
                        is_group = True
                        break
                try:
                    operations, all_dts, stored_dts_count = service.import_project_operations(
                        current_op.project, tmp_folder, is_group,
                        current_op_id)
                    self.nr_of_datatypes += stored_dts_count
                    if stored_dts_count == 0:
                        current_op.additional_info = 'All chosen datatypes already exist!'
                        dao.store_entity(current_op)
                    elif stored_dts_count < all_dts:
                        current_op.additional_info = 'Part of the chosen datatypes already exist!'
                        dao.store_entity(current_op)
                except ImportException as excep:
                    self.log.exception(excep)
                    current_op.additional_info = excep.message
                    current_op.status = STATUS_ERROR
                    raise LaunchException("Invalid file received as input. " +
                                          str(excep))
                finally:
                    shutil.rmtree(tmp_folder)
            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                if self.storage_interface.get_storage_manager(
                        view_model.data_file).is_valid_tvb_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        stored_new_dt = service.store_or_link_datatype(
                            datatype, view_model.data_file,
                            current_op.project.id)
                        if stored_new_dt == 0:
                            current_op.additional_info = 'The chosen datatype already exists!'
                            dao.store_entity(current_op)
                        self.nr_of_datatypes += stored_new_dt
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
Example #13
0
import os
import shutil
import sys

from tvb.basic.profile import TvbProfile
from tvb.config.init.initializer import initialize
from tvb.core.entities.file.files_update_manager import FilesUpdateManager

if __name__ == '__main__':
    """
    Script written for testing the migration from version 1.5.8 to 2.1.0.
    """

    # set web profile
    TvbProfile.set_profile(TvbProfile.WEB_PROFILE)

    # migrate the database and h5 files
    h5_migrating_thread = initialize()

    # wait for the migration thread (if any) to finish before proceeding
    if h5_migrating_thread is not None:
        h5_migrating_thread.join()

    # copy files in tvb_root folder so Jenkins can find them
    shutil.copytree(
        TvbProfile.current.TVB_LOG_FOLDER,
        os.path.join(TvbProfile.current.EXTERNALS_FOLDER_PARENT, 'logs'))

    # test if there are any files which were not migrated
    number_of_unmigrated_files = len(FilesUpdateManager.get_all_h5_paths())
    if number_of_unmigrated_files != 0:
        sys.exit(-1)
    sys.exit(0)
Example #14
0
    def import_project_operations(self, project, import_path, dt_burst_mappings=None, burst_ids_mapping=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported.
        """
        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}
        # Identify folders containing operations
        operations = []
        for root, _, files in os.walk(import_path):
            if FilesHelper.TVB_OPERARATION_FILE in files:
                # Found an operation folder - append TMP to its name
                tmp_op_folder = os.path.join(os.path.split(root)[0], os.path.split(root)[1] + 'tmp')
                os.rename(root, tmp_op_folder)

                operation_file_path = os.path.join(tmp_op_folder, FilesHelper.TVB_OPERARATION_FILE)
                operation = self.__build_operation_from_file(project, operation_file_path)
                operation.import_file = operation_file_path
                operations.append(operation)

        imported_operations = []

        # Now we sort operations by start date, to be sure data dependencies are resolved correctly
        operations = sorted(operations, key=lambda operation: operation.start_date)

        # Here we process each operation found
        for operation in operations:
            old_operation_folder, _ = os.path.split(operation.import_file)
            operation_entity, datatype_group = self.__import_operation(operation)

            # Rename operation folder with the ID of the stored operation 
            new_operation_path = FilesHelper().get_operation_folder(project.name, operation_entity.id)
            if old_operation_folder != new_operation_path:
                # Delete folder of the new operation, otherwise move will fail
                shutil.rmtree(new_operation_path)
                shutil.move(old_operation_folder, new_operation_path)

            # Now process data types for each operation
            all_datatypes = []
            for file_name in os.listdir(new_operation_path):
                if file_name.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                    file_update_manager = FilesUpdateManager()
                    file_update_manager.upgrade_file(os.path.join(new_operation_path, file_name))
                    datatype = self.load_datatype_from_file(new_operation_path, file_name,
                                                            operation_entity.id, datatype_group)
                    all_datatypes.append(datatype)

            # Before inserting into DB sort data types by creation date (to solve any dependencies)
            all_datatypes = sorted(all_datatypes, key=lambda datatype: datatype.create_date)

            # Now store data types into DB
            for datatype in all_datatypes:
                if datatype.gid in dt_burst_mappings:
                    old_burst_id = dt_burst_mappings[datatype.gid]
                    if old_burst_id is not None:
                        datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]
                self.store_datatype(datatype)

            # Now import all images from current operation
            images_root = self.files_helper.get_images_folder(project.name, operation_entity.id)
            if os.path.exists(images_root):
                for root, _, files in os.walk(images_root):
                    for file_name in files:
                        if file_name.endswith(FilesHelper.TVB_FILE_EXTENSION):
                            self.__populate_image(os.path.join(root, file_name), project.id, operation_entity.id)
            imported_operations.append(operation_entity)

        return imported_operations
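
    # Hedged call-site sketch (assumed context): burst mappings collected from
    # a previously exported project let fk_parent_burst be re-linked; both
    # mapping arguments may be omitted for plain imports. `service` and
    # `unpacked_folder` are hypothetical names.
    #
    #     imported = service.import_project_operations(project, unpacked_folder)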