Example #1
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
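            # Remove bursts, datatypes and links owned by the project before deleting the project itself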
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(model.Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
Example #2
 def remove_link(dt_id, project_id):
     """
     Remove the link from the datatype given by dt_id to project given by project_id.
     """
     link = dao.get_link(dt_id, project_id)
     if link is not None:
         dao.remove_entity(model.Links, link.id)
Example #3
    def tearDown(self):
        """
        Revert changed settings and remove recently imported algorithms
        """
        TvbProfile.current.web.CURRENT_DIR = self.old_current_dir
        adapters_init.__xml_folders__ = self.old_xml_path

        for group in dao.get_generic_entity(model.AlgorithmGroup, "simple", "algorithm_param_name"):
            dao.remove_entity(model.AlgorithmGroup, group.id)
Example #4
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return
        user = dao.get_user_for_datatype(datatype.id)
        freed_space = datatype.disk_size or 0
        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            freed_space = dao.get_datatype_group_disk_size(datatype.id)
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)
            freed_space = dao.get_datatype_group_disk_size(datatype.id)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entities in case no other DataType still needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove the Operation in case a DataType still exists referring to it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

        user.used_disk_space = user.used_disk_space - freed_space
        dao.store_entity(user)
Example #5
 def teardown_method(self):
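     # Remove every Algorithm registered under the test category, then the category entity itself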
     all_categories = dao.get_algorithm_categories()
     category_ids = [
         cat.id for cat in all_categories
         if cat.displayname == TestCategory.category_name
     ]
     adapters = dao.get_adapters_from_categories(category_ids)
     for algorithm in adapters:
         dao.remove_entity(Algorithm, algorithm.id)
     dao.remove_entity(AlgorithmCategory, category_ids[0])
Example #6
 def delete_user(self, user_id):
     """
     Delete a user with a given ID.
     Return True when successful, or False."""
     try:
         dao.remove_entity(model.User, user_id)
         return True
     except Exception as excep:
         self.logger.exception(excep)
         return False
Example #7
 def delete_user(self, user_id):
     """
     Delete a user with a given ID.
     Return True when successful, or False."""
     try:
         dao.remove_entity(User, user_id)
         return True
     except Exception as excep:
         self.logger.exception(excep)
         return False
Example #8
    def _remove_project_node_files(self,
                                   project_id,
                                   gid,
                                   skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, a StructureException will be raised.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = Operation(
                        dao.get_system_user().id, links[0].fk_to_project,
                        datatype.parent_operation.fk_from_algo,
                        datatype.parent_operation.parameters,
                        datatype.parent_operation.status,
                        datatype.parent_operation.start_date,
                        datatype.parent_operation.completion_date,
                        datatype.parent_operation.fk_operation_group,
                        datatype.parent_operation.additional_info,
                        datatype.parent_operation.user_group,
                        datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(
                        datatype, to_project, str(new_op.id), full_path)
                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException(
                "Remove operation failed for unknown reasons. Please contact the system administrator."
            )
Example #9
def initialize(introspected_modules, load_xml_events=True):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(TvbProfile.current.db.DB_URL)
    
    ## Initialize DB
    is_db_empty = initialize_startup()
    
    ## Create Projects storage root in case it does not exist.
    initialize_storage()
    
    ## Populate DB algorithms, by introspection
    event_folders = []
    start_introspection_time = datetime.datetime.now()
    for module in introspected_modules:
        introspector = Introspector(module)
        # Introspection is always done, even if DB was not empty.
        introspector.introspect(True)
        event_path = introspector.get_events_path()
        if event_path:
            event_folders.append(event_path)

    # Now remove or mark as removed any unverified Algo-Group, Algo-Category or Portlet
    to_invalidate, to_remove = dao.get_non_validated_entities(start_introspection_time)
    for entity in to_invalidate:
        entity.removed = True
    dao.store_entities(to_invalidate)
    for entity in to_remove:
        dao.remove_entity(entity.__class__, entity.id)
   
    ## Populate events
    if load_xml_events:
        read_events(event_folders)

    if not TvbProfile.is_first_run():
        ## Create default users.
        if is_db_empty:
            dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
            UserService().create_user(username=TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                                      password=TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                                      email=TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                                      role=model.ROLE_ADMINISTRATOR)
        
        ## In case actions related to latest code-changes are needed, make sure they are executed.
        CodeUpdateManager().run_all_updates()

        ## In case the H5 version changed, run updates on all DataTypes
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            thread = threading.Thread(target=FilesUpdateManager().run_all_updates)
            thread.start()

        ## Clean tvb-first-time-run temporary folder, as we are no longer at the first run:
        shutil.rmtree(TvbProfile.current.FIRST_RUN_STORAGE, True)
Example #10
def initialize(skip_import=False, skip_updates=False):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(TvbProfile.current.db.DB_URL)

    # Initialize DB
    is_db_empty = initialize_startup()

    # Create Projects storage root in case it does not exist.
    initialize_storage()

    # Populate DB algorithms, by introspection
    start_introspection_time = datetime.datetime.now()
    # Introspection is always done, even if DB was not empty.
    introspector = Introspector()
    introspector.introspect()

    # Now remove or mark as removed any unverified Algorithm, Algo-Category or Portlet
    to_invalidate, to_remove = dao.get_non_validated_entities(
        start_introspection_time)
    for entity in to_invalidate:
        entity.removed = True
    dao.store_entities(to_invalidate)
    for entity in to_remove:
        dao.remove_entity(entity.__class__, entity.id)

    if not TvbProfile.is_first_run() and not skip_updates:
        # Create default users.
        if is_db_empty:
            dao.store_entity(
                User(TvbProfile.current.web.admin.SYSTEM_USER_NAME,
                     TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None,
                     True, None))
            UserService().create_user(
                username=TvbProfile.current.web.admin.ADMINISTRATOR_NAME,
                display_name=TvbProfile.current.web.admin.
                ADMINISTRATOR_DISPLAY_NAME,
                password=TvbProfile.current.web.admin.ADMINISTRATOR_PASSWORD,
                email=TvbProfile.current.web.admin.ADMINISTRATOR_EMAIL,
                role=ROLE_ADMINISTRATOR,
                skip_import=skip_import)

        # In case actions related to latest code-changes are needed, make sure they are executed.
        CodeUpdateManager().run_all_updates()

        # In case the H5 version changed, run updates on all DataTypes
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            thread = threading.Thread(
                target=FilesUpdateManager().run_all_updates)
            thread.start()

        # Clean tvb-first-time-run temporary folder, as we are no longer at the first run:
        shutil.rmtree(TvbProfile.current.FIRST_RUN_STORAGE, True)
Example #11
 def clear_data_for_portlet(stored_portlet):
     """
     Remove any reference towards a given portlet already selected in a BurstConfiguration.
     """
     view_step = dao.get_configured_portlets_for_id(stored_portlet.id)
     for step in view_step:
         analyzers = dao.get_workflow_steps_for_position(step.fk_workflow, step.tab_index, step.index_in_tab)
         for analyzer in analyzers:
             analyzer.tab_index = None
             analyzer.index_in_tab = None
             dao.store_entity(analyzer)
         dao.remove_entity(step.__class__, step.id)
Example #12
 def _remove_operation_group(self, operation_group_id, project_id,
                             skip_validation, operations_set):
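     # A DataTypeGroup may hold the metrics computed for this OperationGroup;
     # remove its datatypes first, then the DataTypeGroup and OperationGroup entities.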
     metrics_groups = dao.get_generic_entity(DataTypeGroup,
                                             operation_group_id,
                                             'fk_operation_group')
     if len(metrics_groups) > 0:
         metric_datatype_group_id = metrics_groups[0].id
         self._remove_datatype_group_dts(project_id,
                                         metric_datatype_group_id,
                                         skip_validation, operations_set)
         dao.remove_entity(DataTypeGroup, metric_datatype_group_id)
     return dao.remove_entity(OperationGroup, operation_group_id)
Example #13
 def clear_data_for_portlet(stored_portlet):
     """
     Remove any reference towards a given portlet already selected in a BurstConfiguration.
     """
     view_step = dao.get_configured_portlets_for_id(stored_portlet.id)
     for step in view_step:
         analyzers = dao.get_workflow_steps_for_position(step.fk_workflow, step.tab_index, step.index_in_tab)
         for analyzer in analyzers:
             analyzer.tab_index = None
             analyzer.index_in_tab = None
             dao.store_entity(analyzer)
         dao.remove_entity(step.__class__, step.id)
Example #14
 def remove_operation(self, operation_id):
     """
     Remove a given operation 
     """
     operation = dao.get_operation_by_id(operation_id)
     if operation is not None:
         datatypes_for_op = dao.get_results_for_operation(operation_id)
         for dt in datatypes_for_op:
             self.remove_datatype(operation.project.id, dt.gid, True)
         dao.remove_entity(model.Operation, operation.id)
     else:
         self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)
Example #15
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if burst is still running) or Remove the burst given by burst_id.
        :returns: True when the Remove operation was done and False when only a Cancel was performed
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in current burst.
        ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get operations linked to current burst before removing the burst or else
        ##    the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove burst first to delete work-flow steps which still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid,
                                    False)

        ## Remove all Operations remained.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation was already removed by cascade.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup,
                                                      oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(
                        model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(
                project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException(
                "Could not remove Burst because a linked operation could not be dropped!!"
            )
        return True
Example #16
    def _remove_project_node_files(self, project_id, gid, links, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, a StructureException will be raised.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)

            if links:
                op = dao.get_operation_by_id(datatype.fk_from_operation)
                # Instead of deleting, we copy the datatype to the linked project
                # We also clone the operation
                new_operation = self.__copy_linked_datatype_before_delete(op, datatype, project,
                                                                          links[0].fk_to_project)

                # If there is a datatype group and operation group and they were not moved yet to the linked project,
                # then do it
                if datatype.fk_datatype_group is not None:
                    dt_group_op = dao.get_operation_by_id(datatype.fk_from_operation)
                    op_group = dao.get_operationgroup_by_id(dt_group_op.fk_operation_group)
                    op_group.fk_launched_in = links[0].fk_to_project
                    dao.store_entity(op_group)

                    burst = dao.get_burst_for_operation_id(op.id)
                    if burst is not None:
                        burst.fk_project = links[0].fk_to_project
                        dao.store_entity(burst)

                    dt_group = dao.get_datatypegroup_by_op_group_id(op_group.id)
                    dt_group.parent_operation = new_operation
                    dt_group.fk_from_operation = new_operation.id
                    dao.store_entity(dt_group)

            else:
                # There is no link for this datatype so it has to be deleted
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)

                # Remove burst if dt has one and it still exists
                if datatype.fk_parent_burst is not None and datatype.is_ts:
                    burst = dao.get_burst_for_operation_id(datatype.fk_from_operation)

                    if burst is not None:
                        dao.remove_entity(BurstConfiguration, burst.id)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons. Please contact the system administrator.")
Example #17
    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, a StructureException will be raised.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(model.Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = model.Operation(dao.get_system_user().id,
                                             links[0].fk_to_project,
                                             datatype.parent_operation.fk_from_algo,
                                             datatype.parent_operation.parameters,
                                             datatype.parent_operation.meta_data,
                                             datatype.parent_operation.method_name,
                                             datatype.parent_operation.status,
                                             datatype.parent_operation.start_date,
                                             datatype.parent_operation.completion_date,
                                             datatype.parent_operation.fk_operation_group,
                                             datatype.parent_operation.additional_info,
                                             datatype.parent_operation.user_group,
                                             datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id))
                    datatype.set_operation_id(new_op.id)
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(model.Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                self.structure_helper.remove_datatype(datatype)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons. Please contact the system administrator.")
Example #18
 def remove_operation(self, operation_id):
     """
     Remove a given operation
     """
     operation = dao.try_get_operation_by_id(operation_id)
     if operation is not None:
         self.logger.debug("Deleting operation %s " % operation)
         datatypes_for_op = dao.get_results_for_operation(operation_id)
         for dt in reversed(datatypes_for_op):
             self.remove_datatype(operation.project.id, dt.gid, False)
         dao.remove_entity(model.Operation, operation.id)
         self.logger.debug("Finished deleting operation %s " % operation)
     else:
         self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)
Example #19
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entities in case no other DataType still needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove the Operation in case a DataType still exists referring to it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
Example #20
 def remove_operation(self, operation_id):
     """
     Remove a given operation
     """
     operation = dao.try_get_operation_by_id(operation_id)
     if operation is not None:
         self.logger.debug("Deleting operation %s " % operation)
         datatypes_for_op = dao.get_results_for_operation(operation_id)
         for dt in reversed(datatypes_for_op):
             self.remove_datatype(operation.project.id, dt.gid, False)
         dao.remove_entity(model.Operation, operation.id)
         self.logger.debug("Finished deleting operation %s " % operation)
     else:
         self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)
Example #21
    def check_import_references(file_path, datatype):
        h5_class = H5File.h5_class_from_file(file_path)
        reference_list = h5_class(file_path).gather_references()

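        # Every GID referenced by the imported H5 file must already exist in the DB;
        # otherwise roll back by deleting the file and its index entry.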
        for _, reference_gid in reference_list:
            if not reference_gid:
                continue

            ref_index = load.load_entity_by_gid(reference_gid)
            if ref_index is None:
                os.remove(file_path)
                dao.remove_entity(datatype.__class__, datatype.id)
                raise MissingReferenceException(
                    'Imported file depends on datatypes that do not exist. Please upload '
                    'those first!')
Example #22
    def _remove_operation_group(self, operation_group_id, project_id, skip_validation, operations_set, links):
        metrics_groups = dao.get_generic_entity(DataTypeGroup, operation_group_id,
                                                'fk_operation_group')
        if len(metrics_groups) > 0:
            metric_datatype_group_id = metrics_groups[0].id
            self._remove_datatype_group_dts(project_id, metric_datatype_group_id, skip_validation,
                                            operations_set, links)
            datatypes = dao.get_datatype_in_group(metric_datatype_group_id)

            # If this condition is false, then there is a link for the datatype group and we don't need to delete it
            if len(datatypes) == 0:
                dao.remove_entity(DataTypeGroup, metric_datatype_group_id)
                return dao.remove_entity(OperationGroup, operation_group_id)
            else:
                return True
Example #23
 def stop_operation(operation_id, is_group=False, remove_after_stop=False):
     # type: (int, bool, bool) -> bool
     """
     Stop (also named Cancel) the operation given by operation_id,
     and potentially also remove it after (with all linked data).
     In case the Operation has a linked Burst, remove that too.
     :param operation_id: ID for Operation (or OperationGroup) to be canceled/removed
     :param is_group: When true stop all the operations from that group.
     :param remove_after_stop: if True, also remove the operation(s) after stopping
     :returns: True if the stop step was successful
     """
     result = False
     if is_group:
         op_group = ProjectService.get_operation_group_by_id(operation_id)
         operations_in_group = ProjectService.get_operations_in_group(
             op_group)
         for operation in operations_in_group:
             result = OperationService.stop_operation(
                 operation.id, False, remove_after_stop) or result
     elif dao.try_get_operation_by_id(operation_id) is not None:
         result = BackendClientFactory.stop_operation(operation_id)
         if remove_after_stop:
             burst_config = dao.get_burst_for_direct_operation_id(
                 operation_id)
             ProjectService().remove_operation(operation_id)
             if burst_config is not None:
                 result = dao.remove_entity(BurstConfiguration,
                                            burst_config.id) or result
     return result
Example #24
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if burst is still running) or Remove the burst given by burst_id.
        :returns: True when the Remove operation was done and False when only a Cancel was performed
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in current burst.
        ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get operations linked to current burst before removing the burst or else
        ##    the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove burst first to delete work-flow steps which still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid, False)

        ## Remove all Operations remained.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation was already removed by cascade.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!")
        return True
Example #25
def initialize(introspected_modules, load_xml_events=True):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(cfg.DB_URL)

    ## Initialize DB
    is_db_empty = initialize_startup()

    ## Create Projects storage root in case it does not exist.
    initialize_storage()

    ## Populate DB algorithms, by introspection
    event_folders = []
    start_introspection_time = datetime.datetime.now()
    for module in introspected_modules:
        introspector = Introspector(module)
        # Introspection is always done, even if DB was not empty.
        introspector.introspect(True)
        event_path = introspector.get_events_path()
        if event_path:
            event_folders.append(event_path)
    # Now remove any unverified Algo-Groups, categories or Portlets
    invalid_stored_entities = dao.get_non_validated_entities(
        start_introspection_time)
    for entity in invalid_stored_entities:
        dao.remove_entity(entity.__class__, entity.id)

    ## Populate events
    if load_xml_events:
        eventhandler.read_events(event_folders)

    ## Make sure DB events are linked.
    db_events.attach_db_events()

    ## Create default users.
    if is_db_empty:
        dao.store_entity(
            model.User(cfg.SYSTEM_USER_NAME, None, None, True, None))
        UserService().create_user(username=cfg.ADMINISTRATOR_NAME,
                                  password=cfg.ADMINISTRATOR_PASSWORD,
                                  email=cfg.ADMINISTRATOR_EMAIL,
                                  role=ROLE_ADMINISTRATOR)

    ## In case actions related to latest code-changes are needed, make sure they are executed.
    CodeUpdateManager().update_all()
Example #26
    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)
        view_step = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter2",
            "TestAdapter2", {"test2": 2}, {},
            0,
            0,
            0,
            0,
            is_view_step=True)
        view_step.fk_portlet = test_portlet.id
        workflow_step_list.append(view_step)

        workflows = self.workflow_service.create_and_store_workflow(
            self.test_project.id, burst_config.id, 0, first_step_algorithm.id,
            operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        ### Now fire the workflow and also update and store the burst configuration ##
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
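        # Remove the operation and datatype that were stored only to feed the first workflow step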
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list
Example #27
 def remove_result_figure(self, figure_id):
     """
     Remove figure from DB and file storage.
     """
     figure = dao.load_figure(figure_id)
     self.storage_interface.remove_figure(figure)
     # Remove figure reference from DB.
     result = dao.remove_entity(ResultFigure, figure_id)
     return result
Example #28
 def remove_operation(self, operation_id):
     """
     Remove a given operation
     """
     operation = dao.try_get_operation_by_id(operation_id)
     if operation is not None:
         self.logger.debug("Deleting operation %s " % operation)
         datatypes_for_op = dao.get_results_for_operation(operation_id)
         for dt in reversed(datatypes_for_op):
             self.remove_datatype(operation.project.id, dt.gid, False)
         # Here the Operation is most probably already removed - in case DTs were found inside
         # but we still remove it for the case when no DTs exist
         dao.remove_entity(Operation, operation.id)
         self.structure_helper.remove_operation_data(operation.project.name, operation_id)
         encryption_handler.push_folder_to_sync(self.structure_helper.get_project_folder(operation.project))
         self.logger.debug("Finished deleting operation %s " % operation)
     else:
         self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)
Example #29
def initialize(introspected_modules, load_xml_events=True):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(cfg.DB_URL)
    
    ## Initialize DB
    is_db_empty = initialize_startup()
    
    ## Create Projects storage root in case it does not exist.
    initialize_storage()
    
    ## Populate DB algorithms, by introspection
    event_folders = []
    start_introspection_time = datetime.datetime.now()
    for module in introspected_modules:
        introspector = Introspector(module)
        # Introspection is always done, even if DB was not empty.
        introspector.introspect(True)
        event_path = introspector.get_events_path()
        if event_path:
            event_folders.append(event_path)
    # Now remove any unverified Algo-Groups, categories or Portlets
    invalid_stored_entities = dao.get_non_validated_entities(start_introspection_time)
    for entity in invalid_stored_entities:
        dao.remove_entity(entity.__class__, entity.id)
   
    ## Populate events
    if load_xml_events:
        read_events(event_folders)
    
    ## Make sure DB events are linked.
    db_events.attach_db_events()
    
    ## Create default users.
    if is_db_empty:
        dao.store_entity(model.User(cfg.SYSTEM_USER_NAME, None, None, True, None))
        UserService().create_user(username=cfg.ADMINISTRATOR_NAME, password=cfg.ADMINISTRATOR_PASSWORD,
                                  email=cfg.ADMINISTRATOR_EMAIL, role=model.ROLE_ADMINISTRATOR)
        
    ## In case actions related to latest code-changes are needed, make sure they are executed.
    CodeUpdateManager().update_all()
Example #30
    def _add_links_for_datatype_references(datatype, fk_to_project, link_to_delete, existing_dt_links):
        # If we found a datatype that has links, we need to link those as well to the linked project
        # so they can be also copied

        linked_datatype_paths = []
        h5_file = h5.h5_file_for_index(datatype)
        h5.gather_all_references_by_index(h5_file, linked_datatype_paths)

        for h5_path in linked_datatype_paths:
            if existing_dt_links is not None and h5_path in existing_dt_links:
                continue

            gid = H5File.get_metadata_param(h5_path, 'gid')
            dt_index = h5.load_entity_by_gid(uuid.UUID(gid))
            new_link = Links(dt_index.id, fk_to_project)
            dao.store_entity(new_link)

        dao.remove_entity(Links, link_to_delete)
        return linked_datatype_paths
Example #31
 def remove_project(self, project_id):
     """
     Remove Project from DB and File Storage.
     """
     try:
         project2delete = dao.get_project_by_id(project_id)
         project_bursts = dao.get_bursts_for_project(project_id)
         for burst in project_bursts:
             dao.remove_entity(burst.__class__, burst.id)
         project_datatypes = dao.get_datatypes_info_for_project(project_id)
         for one_data in project_datatypes:
             self.remove_datatype(project_id, one_data[9], True)
         self.structure_helper.remove_project_structure(project2delete.name)
         name = project2delete.name
         dao.delete_project(project_id)
         self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + name)
     except RemoveDataTypeError as excep:
         self.logger.error("Invalid DataType to remove!")
         self.logger.exception(excep)
         raise ProjectServiceException(excep.message)
Example #32
def remove_visualizer_references():
    """
    As we removed an algorithm, remove left-overs.
    """

    LOGGER.info("Starting to remove references towards old viewer ....")

    pearson_group = dao.find_group(
        "tvb.adapters.visualizers.cross_correlation", "PearsonCorrelationCoefficientVisualizer"
    )
    pearson_algorithm = dao.get_algorithm_by_group(pearson_group.id)

    pearson_operations = dao.get_generic_entity(model.Operation, pearson_algorithm.id, "fk_from_algo")
    for op in pearson_operations:
        dao.remove_entity(model.Operation, op.id)

    pearson_workflows = dao.get_generic_entity(model.WorkflowStepView, pearson_algorithm.id, "fk_algorithm")
    for ws in pearson_workflows:
        dao.remove_entity(model.WorkflowStepView, ws.id)

    LOGGER.info("References removed.")
Example #33
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_datatypes = dao.get_datatypes_in_project(project_id)
            project_datatypes.sort(key=lambda dt: dt.create_date, reverse=True)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_folder = self.structure_helper.get_project_folder(project2delete)
            self.structure_helper.remove_project_structure(project2delete.name)
            encrypted_path = encryption_handler.compute_encrypted_folder_path(project_folder)
            if os.path.exists(encrypted_path):
                self.structure_helper.remove_folder(encrypted_path)
            if os.path.exists(encryption_handler.project_key_path(project_id)):
                os.remove(encryption_handler.project_key_path(project_id))
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))
Example #34
    def _prepare_and_launch_sync_burst(self):
        """
        Private method to launch a dummy burst. Return the burst loaded after the launch finished
        as well as the workflow steps that initially formed the burst.
        NOTE: the burst launched by this method is a `dummy` one, meaning we do not use an actual
        simulation, but instead test adapters.
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        workflow_step_list = []
        test_portlet = dao.get_portlet_by_identifier(self.PORTLET_ID)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                      first_step_algorithm,
                                                                      first_step_algorithm.algorithm_category,
                                                                      metadata, **kwargs)
        view_step = TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
                                                     {"test2": 2}, {}, 0, 0, 0, 0, is_view_step=True)
        view_step.fk_portlet = test_portlet.id
        workflow_step_list.append(view_step)

        workflows = self.workflow_service.create_and_store_workflow(self.test_project.id, burst_config.id, 0,
                                                                    first_step_algorithm.id, operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, self.test_user.id,
                                                                    burst_config.id, self.test_project.id, group,
                                                                    operations)
        ### Now fire the workflow and also update and store the burst configuration ##
        self.operation_service.launch_operation(operations[0].id, False)
        loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
        import_operation = dao.get_operation_by_id(stored_dt.fk_from_operation)
        dao.remove_entity(import_operation.__class__, import_operation.id)
        dao.remove_datatype(stored_dt.gid)
        return loaded_burst, workflow_step_list
Example #35
    def tearDown(self):
        for algo in [self.algorithm1, self.algorithm_v]:
            dao.remove_entity(model.Algorithm, algo.id)

        for group in [self.algo_group1, self.algo_group2, self.algo_group3, self.algorithm_v]:
            dao.remove_entity(model.AlgorithmGroup, group.id)

        for categ in [self.categ1, self.categ2]:
            dao.remove_entity(model.AlgorithmCategory, categ.id)
Example #36
    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(figure.project.name)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
Example #37
    def remove_result_figure(self, figure_id):
        """
        Remove figure from DB and file storage.
        """
        figure = dao.load_figure(figure_id)

        # Delete all figure related files from disk.
        figures_folder = self.file_helper.get_images_folder(
            figure.project.name)
        path2figure = os.path.join(figures_folder, figure.file_path)
        if os.path.exists(path2figure):
            os.remove(path2figure)
            self.file_helper.remove_image_metadata(figure)

        # Remove figure reference from DB.
        result = dao.remove_entity(model.ResultFigure, figure_id)
        return result
Example #38
    def tearDown(self):
        for algo in [self.algorithm1, self.algorithm_v]:
            dao.remove_entity(model.Algorithm, algo.id)

        for group in [
                self.algo_group1, self.algo_group2, self.algo_group3,
                self.algorithm_v
        ]:
            dao.remove_entity(model.AlgorithmGroup, group.id)

        for categ in [self.categ1, self.categ2]:
            dao.remove_entity(model.AlgorithmCategory, categ.id)
Example #39
    def import_project_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, importer_operation_id)

        for operation_data in ordered_operations:

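            # Old-form exports are rebuilt from the operation folder; newer exports
            # are restored from their serialized ViewModel and DataType files.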
            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.files_helper.get_project_folder(
                    project, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    # TODO: TVB-2849 to review these flags and simplify condition
                    if stored_dts_count > 0 or (
                            not operation_data.is_self_generated and
                            not is_group) or importer_operation_id is not None:
                        imported_operations.append(operation_entity)
                        new_op_folder = self.files_helper.get_project_folder(
                            project, str(operation_entity.id))
                        view_model_disk_size = 0
                        for h5_file in operation_data.all_view_model_files:
                            view_model_disk_size += FilesHelper.compute_size_on_disk(
                                h5_file)
                            shutil.move(h5_file, new_op_folder)
                        operation_entity.view_model_disk_size = view_model_disk_size
                        dao.store_entity(operation_entity)
                    else:
                        # In case all Dts under the current operation were Links and the ViewModel is dummy,
                        # don't keep the Operation empty in DB
                        dao.remove_entity(Operation, operation_entity.id)
                        self.files_helper.remove_operation_data(
                            project.name, operation_entity.id)
                except MissingReferenceException as excep:
                    dao.remove_entity(Operation, operation_entity.id)
                    self.files_helper.remove_operation_data(
                        project.name, operation_entity.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
Example #40
 def tearDown(self):
     dao.remove_entity(model.Algorithm, self.algorithm)
Example #41
 def tearDown(self):
     dao.remove_entity(model.Algorithm, self.algorithm)
Example #42
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        datatype_group = None
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            datatype_group = datatype
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype_group = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            operations_set = [datatype_group.fk_from_operation]
            self.logger.debug("Removing datatype group %s" % datatype_group)
            if datatype_group.fk_parent_burst:
                burst = dao.get_generic_entity(BurstConfiguration, datatype_group.fk_parent_burst, 'gid')[0]
                dao.remove_entity(BurstConfiguration, burst.id)
                if burst.fk_metric_operation_group:
                    correct = correct and self._remove_operation_group(burst.fk_metric_operation_group, project_id,
                                                                       skip_validation, operations_set)

                if burst.fk_operation_group:
                    correct = correct and self._remove_operation_group(burst.fk_operation_group, project_id,
                                                                       skip_validation, operations_set)

            else:
                self._remove_datatype_group_dts(project_id, datatype_group.id, skip_validation, operations_set)

                datatype_group = dao.get_datatype_group_by_gid(datatype_group.gid)
                dao.remove_entity(DataTypeGroup, datatype_group.id)
                correct = correct and dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        # Remove the Operation entities when no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove the Operation in case a DataType still exists referring to it.
                continue
            op_burst = dao.get_burst_for_operation_id(operation_id)
            if op_burst:
                correct = correct and dao.remove_entity(BurstConfiguration, op_burst.id)
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, operation_id)

        encryption_handler.push_folder_to_sync(self.structure_helper.get_project_folder(project))
        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
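
A minimal usage sketch for the method above, assuming project_service is an instance of the surrounding service class and that project, datatype and logger are already available in the caller (all four names are placeholders, not part of the example):

    # Hypothetical caller: delete one DataType and log the failure, if any.
    try:
        project_service.remove_datatype(project.id, datatype.gid, skip_validation=True)
    except RemoveDataTypeException:
        logger.exception("Could not remove DataType %s" % datatype.gid)
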
Example #43
 def remove_burst_configuration(burst_config_id):
     # type: (int) -> None
     dao.remove_entity(BurstConfiguration, burst_config_id)
Example #44
    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, a StructureException will be raised.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)

            op = dao.get_operation_by_id(datatype.fk_from_operation)
            adapter = ABCAdapter.build_adapter(op.algorithm)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    # There is no view_model so the view_model_gid is None

                    new_op = Operation(op.view_model_gid,
                                       dao.get_system_user().id,
                                       links[0].fk_to_project,
                                       datatype.parent_operation.fk_from_algo,
                                       datatype.parent_operation.status,
                                       datatype.parent_operation.start_date,
                                       datatype.parent_operation.completion_date,
                                       datatype.parent_operation.fk_operation_group,
                                       datatype.parent_operation.additional_info,
                                       datatype.parent_operation.user_group,
                                       datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project)
                    to_project_path = self.structure_helper.get_project_folder(to_project)

                    encryption_handler.set_project_active(to_project)
                    encryption_handler.sync_folders(to_project_path)
                    to_project_name = to_project.name

                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(datatype, to_project_name, str(new_op.id), full_path)
                    # Move also the ViewModel H5
                    old_folder = self.structure_helper.get_project_folder(project, str(op.id))
                    view_model = adapter.load_view_model(op)
                    vm_full_path = h5.determine_filepath(op.view_model_gid, old_folder)
                    self.structure_helper.move_datatype(view_model, to_project_name, str(new_op.id), vm_full_path)

                    encryption_handler.sync_folders(to_project_path)
                    encryption_handler.set_project_inactive(to_project)

                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)
                encryption_handler.push_folder_to_sync(self.structure_helper.get_project_folder_from_h5(h5_path))

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons. Please contact the system administrator.")
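
The branch above is dense; the helper below condenses the same decision into a hypothetical sketch (the function name and return values are illustrative only, not part of the example):

    def _plan_node_removal(datatype, project, links):
        # Hypothetical summary of the branch in the example above.
        if not links:
            # Nobody else references the data: run the type-specific remover and delete the H5 file.
            return 'delete_files'
        if any(link.fk_to_project == project.id for link in links):
            # This project only holds a Link to the data: drop the Link row.
            return 'remove_link_only'
        # The data lives here but other projects link to it: clone the Operation
        # into the first linked project and move the H5 files there.
        return 'relocate_to_linked_project'
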
Example #45
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False, existing_dt_links=None):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        if datatype.parent_operation.fk_launched_in != int(project_id):
            self.logger.warning("Datatype with GUID [%s] has been moved to another project and does "
                                "not need to be deleted anymore." % datatype_gid)
            return

        is_datatype_group = False
        datatype_group = None
        new_dt_links = []

        # Datatype Groups were already handled when the first DatatypeMeasureIndex was found
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            datatype_group = datatype
        # Found the first DatatypeMeasureIndex from a group
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            # We load it this way to make sure we have the 'fk_operation_group' in every case
            datatype_group_gid = dao.get_datatype_by_id(datatype.fk_datatype_group).gid
            datatype_group = h5.load_entity_by_gid(datatype_group_gid)

        operations_set = [datatype.fk_from_operation]
        correct = True

        if is_datatype_group:
            operations_set = [datatype_group.fk_from_operation]
            self.logger.debug("Removing datatype group %s" % datatype_group)

            datatypes = self.get_all_datatypes_from_data(datatype_group)
            first_datatype = datatypes[0]

            if hasattr(first_datatype, 'fk_source_gid'):
                ts = h5.load_entity_by_gid(first_datatype.fk_source_gid)
                ts_group = dao.get_datatypegroup_by_op_group_id(ts.parent_operation.fk_operation_group)
                dm_group = datatype_group
            else:
                dt_measure_index = get_class_by_name("{}.{}".format(DATATYPE_MEASURE_INDEX_MODULE,
                                                                    DATATYPE_MEASURE_INDEX_CLASS))
                dm_group = dao.get_datatype_measure_group_from_ts_from_pse(first_datatype.gid, dt_measure_index)
                ts_group = datatype_group

            links = []

            if ts_group:
                links.extend(dao.get_links_for_datatype(ts_group.id))
                correct = correct and self._remove_operation_group(ts_group.fk_operation_group, project_id,
                                                                   skip_validation, operations_set, links)

            if dm_group:
                links.extend(dao.get_links_for_datatype(dm_group.id))
                correct = correct and self._remove_operation_group(dm_group.fk_operation_group, project_id,
                                                                   skip_validation, operations_set, links)

            if len(links) > 0:
                # We want to get the links for the first TSIndex directly
                # This code works for all cases
                datatypes = dao.get_datatype_in_group(ts_group.id)
                ts = datatypes[0]

                new_dt_links = self._add_links_for_datatype_references(ts, links[0].fk_to_project, links[0].id,
                                                                       existing_dt_links)

        else:
            self.logger.debug("Removing datatype %s" % datatype)
            links = dao.get_links_for_datatype(datatype.id)

            if len(links) > 0:
                new_dt_links = self._add_links_for_datatype_references(datatype, links[0].fk_to_project, links[0].id,
                                                                       existing_dt_links)

            self._remove_project_node_files(project_id, datatype.gid, links, skip_validation)

        # Remove the Operation entities when no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                # Do not remove the Operation in case a DataType still exists referring to it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            # Make sure Operation folder is removed
            self.storage_interface.remove_operation_data(project.name, operation_id)

        self.storage_interface.push_folder_to_sync(project.name)
        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
        return new_dt_links
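
Because this variant returns the Links it creates, a caller removing several datatypes in sequence would typically thread that list back in through existing_dt_links, so references are only re-linked once. A minimal sketch under that assumption (project_service, project and datatype_gids_to_delete are placeholders):

    # Hypothetical batch removal that reuses the links created by earlier removals.
    existing_links = []
    for gid in datatype_gids_to_delete:
        new_links = project_service.remove_datatype(project.id, gid, skip_validation=True,
                                                    existing_dt_links=existing_links)
        if new_links:
            existing_links.extend(new_links)
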
Example #46
    def import_project_operations(self, project, import_path):
        """
        This method scans the provided folder and identifies all operations that need to be imported.
        """
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path)
        success_no = 0

        for operation_data in ordered_operations:

            if operation_data.is_old_form:
                operation_entity, datatype_group = self.__import_operation(
                    operation_data.operation)
                new_op_folder = self.files_helper.get_project_folder(
                    project, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    view_model = self._get_new_form_view_model(
                        operation_entity, operation_data.info_from_xml)
                    h5.store_view_model(view_model, new_op_folder)
                    operation_entity.view_model_gid = view_model.gid.hex
                    dao.store_entity(operation_entity)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                    success_no = success_no + 1
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                operation_entity = dao.store_entity(operation_data.operation)
                dt_group = None  # TODO
                # Store the DataTypes in db
                dts = {}
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        dao.store_entity(dt)
                    else:
                        dts[dt_path] = dt
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)

                    if stored_dts_count > 0 or not operation_data.is_self_generated:
                        imported_operations.append(operation_entity)
                        new_op_folder = self.files_helper.get_project_folder(
                            project, str(operation_entity.id))
                        for h5_file in operation_data.all_view_model_files:
                            shutil.move(h5_file, new_op_folder)
                    else:
                        # In case all DTs under the current operation were Links and the ViewModel is a dummy,
                        # don't keep the empty Operation in the DB
                        dao.remove_entity(Operation, operation_entity.id)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

        self.logger.warning(
            "Project has been only partially imported because of some missing dependent datatypes. "
            + "%d files were successfully imported from a total of %d!" %
            (success_no, len(ordered_operations)))
        return imported_operations
Example #47
 def transactional_teardown_method(self):
     dao.remove_entity(model_operation.Algorithm, self.algorithm)
Example #48
 def remove_datatype(self, skip_validation=False):
     """
     Perform the basic operation; specific implementations should override this method.
     """
     dao.remove_entity(self.handled_datatype.__class__,
                       self.handled_datatype.id)
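
Specific removers are expected to override this method and add their own validation before delegating to the base behaviour. A rough sketch, assuming the method above sits on a base class called ABCRemover and that dependents can be looked up through dao.get_generic_entity as in the other examples (the class name and the 'fk_source_gid' lookup are assumptions for illustration):

    class ConnectivityRemover(ABCRemover):
        """Hypothetical specific remover with an extra dependency check."""

        def remove_datatype(self, skip_validation=False):
            if not skip_validation:
                # Refuse removal while other datatypes still reference this one
                # (the 'fk_source_gid' lookup is illustrative only).
                dependents = dao.get_generic_entity(DataType, self.handled_datatype.gid, 'fk_source_gid')
                if dependents:
                    raise RemoveDataTypeException("DataType is still referenced by %d entities." % len(dependents))
            super(ConnectivityRemover, self).remove_datatype(skip_validation)
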