def pipelines_delete(project_uuid, pipeline_uuid):
    try:
        with TwoPhaseExecutor(db.session) as tpe:
            DeletePipeline(tpe).transaction(project_uuid, pipeline_uuid)
    except Exception as e:
        return {"message": str(e)}, 500

    return jsonify({"success": True})
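# For context, a minimal sketch of the two-phase pattern that the
# endpoint above relies on. The base-class details are assumptions
# inferred from how DeletePipeline(tpe).transaction(...) and self.tpe
# are used in this section: phase one stages db changes in a shared
# session, phase two runs side effects only after the commit succeeds.


class TwoPhaseExecutor:
    def __init__(self, session):
        self.session = session
        self.functions = []

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is not None:
            # Any failure during the transaction phase rolls back
            # every staged change at once.
            self.session.rollback()
            return False
        self.session.commit()
        # Side effects (filesystem, containers, etc.) run only once
        # the db state is safely committed.
        for function in self.functions:
            function.collateral()


class TwoPhaseFunction:
    def __init__(self, tpe):
        self.tpe = tpe
        tpe.functions.append(self)

    def transaction(self, *args, **kwargs):
        return self._transaction(*args, **kwargs)

    def collateral(self):
        self._collateral()

    def _transaction(self, *args, **kwargs):
        raise NotImplementedError

    def _collateral(self):
        # Optional: subclasses override to perform side effects.
        pass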
def _transaction(self, project_uuid):
    """Synchronizes the state of the pipelines of a project.

    Synchronizes the state of the filesystem with the db when it
    comes to the pipelines of a project. Pipelines removed from the
    filesystem are removed from the db, and new pipelines (or
    pipelines that appeared after, for example, a project import)
    are registered in the db.

    Args:
        project_uuid: UUID of the project to synchronize.

    Raises:
        FileNotFoundError: If the project directory is not found.
    """
    project_path = project_uuid_to_path(project_uuid)
    project_dir = os.path.join(
        current_app.config["USER_DIR"], "projects", project_path
    )

    if not os.path.isdir(project_dir):
        raise FileNotFoundError("Project directory not found")

    # Find all pipelines in the project directory.
    pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)

    # Cleanup pipelines that have been manually removed.
    fs_removed_pipelines = (
        Pipeline.query.filter(Pipeline.path.notin_(pipeline_paths))
        .filter(Pipeline.project_uuid == project_uuid)
        .all()
    )
    for pip in fs_removed_pipelines:
        DeletePipeline(self.tpe).transaction(pip.project_uuid, pip.uuid)

    # Identify all pipeline paths that are not yet a pipeline, that
    # is, pipelines that were added through the filesystem.
    existing_pipeline_paths = [
        pipeline.path
        for pipeline in Pipeline.query.filter(
            Pipeline.path.in_(pipeline_paths)
        )
        .filter(Pipeline.project_uuid == project_uuid)
        .all()
    ]

    # TODO: handle existing pipeline assignments.
    new_pipelines_from_fs = set(pipeline_paths) - set(existing_pipeline_paths)
    for path in new_pipelines_from_fs:
        AddPipelineFromFS(self.tpe).transaction(project_uuid, path)
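# The sync above depends on find_pipelines_in_dir to enumerate
# pipeline definition files relative to the project root. A plausible
# sketch, assuming pipeline definitions are JSON files with an
# ".orchest" extension (the extension and the relative-path handling
# are assumptions, not confirmed by this section):


import os


def find_pipelines_in_dir(path, root_dir):
    # Walk `path` and return pipeline definition paths relative to
    # `root_dir`, so they are comparable to the Pipeline.path values
    # stored in the db.
    pipeline_paths = []
    for dirpath, _, filenames in os.walk(path):
        for filename in filenames:
            if filename.endswith(".orchest"):
                full_path = os.path.join(dirpath, filename)
                pipeline_paths.append(os.path.relpath(full_path, root_dir))
    return pipeline_paths


# Note that this version of _transaction is racy: two concurrent
# syncs (or a sync racing a pipeline move) can each observe a path as
# missing and delete its record, or re-register a file that is merely
# being moved. The revised version below addresses this with a
# row-level project lock and a pipeline status column.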
def _transaction(self, project_uuid):
    """Synchronizes the state of the pipelines of a project.

    Synchronizes the state of the filesystem with the db when it
    comes to the pipelines of a project. Pipelines removed from the
    filesystem are removed from the db, and new pipelines (or
    pipelines that appeared after, for example, a project import)
    are registered in the db.

    Args:
        project_uuid: UUID of the project to synchronize.

    Raises:
        FileNotFoundError: If the project directory is not found.
    """
    project_path = project_uuid_to_path(project_uuid)
    project_dir = safe_join(
        current_app.config["USER_DIR"], "projects", project_path
    )

    # Lock the project row to avoid race conditions in pipeline
    # deletion or creation.
    Project.query.with_for_update().filter_by(uuid=project_uuid).one()

    if not os.path.isdir(project_dir):
        raise FileNotFoundError("Project directory not found")

    # Find all pipelines in the project directory.
    pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)

    # Cleanup pipelines that have been manually removed. Only READY
    # pipelines are considered, so that pipelines in the middle of a
    # move are not deleted.
    fs_removed_pipelines = (
        Pipeline.query.filter(Pipeline.path.notin_(pipeline_paths))
        .filter(
            Pipeline.project_uuid == project_uuid,
            Pipeline.status == "READY",
        )
        .all()
    )
    for pip in fs_removed_pipelines:
        # The file is already gone from the filesystem, so only the
        # db record needs to be cleaned up.
        DeletePipeline(self.tpe).transaction(
            pip.project_uuid, pip.uuid, remove_file=False
        )

    # Identify all pipeline paths that are not yet a pipeline, that
    # is, pipelines that were added through the filesystem.
    existing_pipeline_paths = [
        pipeline.path
        for pipeline in Pipeline.query.filter(
            Pipeline.path.in_(pipeline_paths)
        )
        .filter(Pipeline.project_uuid == project_uuid)
        .all()
    ]

    # TODO: handle existing pipeline assignments.
    new_pipelines_from_fs = set(pipeline_paths) - set(existing_pipeline_paths)

    for path in new_pipelines_from_fs:
        pipeline_json_path = get_pipeline_path(
            None, project_uuid, pipeline_path=path
        )
        with open(pipeline_json_path, "r") as json_file:
            pipeline_uuid = json.load(json_file)["uuid"]

        # If a pipeline with this uuid is in the MOVING state, this
        # is not a new pipeline: the pipeline is being moved.
        is_moving = (
            Pipeline.query.filter_by(
                project_uuid=project_uuid,
                uuid=pipeline_uuid,
                status="MOVING",
            ).count()
            > 0
        )
        if not is_moving:
            AddPipelineFromFS(self.tpe).transaction(project_uuid, path)
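# Example invocation, as a sketch: the sync runs like any other
# two-phase function, mirroring the pipelines_delete endpoint at the
# top of this section. SyncProjectPipelines is a hypothetical name
# for the class that owns the _transaction above.
def project_pipelines_sync(project_uuid):
    try:
        with TwoPhaseExecutor(db.session) as tpe:
            SyncProjectPipelines(tpe).transaction(project_uuid)
    except FileNotFoundError as e:
        return {"message": str(e)}, 404
    except Exception as e:
        return {"message": str(e)}, 500

    return jsonify({"success": True})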