def _transaction(self, project_uuid: str, pipeline_name: str,
                 pipeline_path: str):
    # It is important to normalize the path because
    # find_pipelines_in_dir will return normalized paths as well,
    # which are used to detect pipelines that were deleted through
    # the file system in SyncProjectPipelinesDBState.
    pipeline_path = normalize_project_relative_path(pipeline_path)

    # Reject creation if a pipeline with this path exists already.
    if (Pipeline.query.filter(
            Pipeline.project_uuid == project_uuid).filter(
            Pipeline.path == pipeline_path).count() > 0):
        raise FileExistsError(
            f"Pipeline already exists at path {pipeline_path}.")

    pipeline_uuid = str(uuid.uuid4())
    pipeline = Pipeline(path=pipeline_path, uuid=pipeline_uuid,
                        project_uuid=project_uuid)
    db.session.add(pipeline)

    # To be used by the collateral and revert functions.
    self.collateral_kwargs["project_uuid"] = project_uuid
    self.collateral_kwargs["pipeline_uuid"] = pipeline_uuid
    self.collateral_kwargs["pipeline_name"] = pipeline_name
    self.collateral_kwargs["pipeline_path"] = pipeline_path

    return pipeline_uuid
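# The method above is the first half of a two-phase pattern: _transaction
# stages the db change and stashes arguments in self.collateral_kwargs for
# the collateral (post-commit) and revert steps. Below is a minimal
# self-contained sketch of that flow, assuming a base class that commits
# and then calls _collateral; all names here are illustrative, not the
# actual API of the source.


class TwoPhaseSketch:
    def __init__(self):
        self.collateral_kwargs = {}

    def _transaction(self, pipeline_name):
        # Stage state; the real code adds rows to db.session here.
        self.collateral_kwargs["pipeline_name"] = pipeline_name

    def _collateral(self, pipeline_name):
        # Runs after the db commit; the real code touches the filesystem.
        print(f"collateral step for {pipeline_name}")

    def run(self, pipeline_name):
        self._transaction(pipeline_name)
        # db.session.commit() would happen here, with a revert on failure.
        self._collateral(**self.collateral_kwargs)


# TwoPhaseSketch().run("my pipeline") prints "collateral step for my pipeline".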
def new_pipeline():
    """Creates a mock Pipeline record for testing."""
    pipeline = Pipeline(id=1,
                        pipeline_id=12,
                        owner_id=1,
                        name='Freesurfer',
                        version='1.9',
                        is_private=False,
                        date_created=datetime.now(),
                        date_updated=datetime.now())
    return pipeline
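# A hedged example of how the mock above might be exercised in a test; the
# test function and assertions are assumptions based only on the fields set
# in new_pipeline, not taken from the source.


def test_new_pipeline_defaults():
    pipeline = new_pipeline()
    assert pipeline.name == 'Freesurfer'
    assert pipeline.version == '1.9'
    assert pipeline.is_private is False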
def _transaction(self, project_uuid: str, pipeline_path: str):
    pipeline_json_path = get_pipeline_path(
        None, project_uuid, pipeline_path=pipeline_path)

    # Check the uuid of the pipeline. If the uuid is taken by
    # another pipeline in the project then generate a new uuid for
    # the pipeline.
    with open(pipeline_json_path, "r") as json_file:
        pipeline_json = json.load(json_file)

    file_pipeline_uuid = pipeline_json.get("uuid")

    self.collateral_kwargs["new_uuid"] = None
    self.collateral_kwargs["project_uuid"] = None
    self.collateral_kwargs["pipeline_uuid"] = None
    self.collateral_kwargs["pipeline_path"] = None
    self.collateral_kwargs["pipeline_json"] = None

    # If the pipeline has its own uuid and the uuid is not in the DB
    # already then the pipeline does not need to have a new uuid
    # assigned and written to disk.
    if (file_pipeline_uuid is not None
            and Pipeline.query.filter_by(project_uuid=project_uuid,
                                         uuid=file_pipeline_uuid,
                                         status="READY").count() == 0):
        self.collateral_kwargs["new_uuid"] = False
    else:
        self.collateral_kwargs["new_uuid"] = True
        # Generate a new uuid for the pipeline.
        file_pipeline_uuid = str(uuid.uuid4())

    self.collateral_kwargs["project_uuid"] = project_uuid
    self.collateral_kwargs["pipeline_uuid"] = file_pipeline_uuid
    self.collateral_kwargs["pipeline_path"] = pipeline_path
    self.collateral_kwargs["pipeline_json"] = pipeline_json

    # Add the pipeline to the db.
    new_pipeline = Pipeline(
        uuid=file_pipeline_uuid,
        path=pipeline_path,
        project_uuid=project_uuid,
    )
    db.session.add(new_pipeline)
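# For context, a hypothetical collateral counterpart to the _transaction
# above, inferred from the kwargs it stores: when new_uuid is True, the
# freshly generated uuid still has to be written back into the pipeline
# file on disk. This is an assumption about the collateral step, not the
# source's actual implementation.


def _collateral_sketch(new_uuid, project_uuid, pipeline_uuid, pipeline_path,
                       pipeline_json):
    if new_uuid:
        pipeline_json["uuid"] = pipeline_uuid
        pipeline_json_path = get_pipeline_path(
            None, project_uuid, pipeline_path=pipeline_path)
        with open(pipeline_json_path, "w") as json_file:
            json.dump(pipeline_json, json_file, indent=4)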
def pipelines_create(project_uuid):
    pipeline_path = request.json["pipeline_path"]

    if (Pipeline.query.filter(
            Pipeline.project_uuid == project_uuid).filter(
            Pipeline.path == pipeline_path).count() == 0):
        pipeline_uuid = str(uuid.uuid4())
        pipeline = Pipeline(path=pipeline_path, uuid=pipeline_uuid,
                            project_uuid=project_uuid)
        db.session.add(pipeline)
        db.session.commit()

        pipeline_dir = get_pipeline_directory(pipeline_uuid, project_uuid)
        pipeline_json_path = get_pipeline_path(pipeline_uuid, project_uuid)

        os.makedirs(pipeline_dir, exist_ok=True)

        # Generate a clean pipeline.json.
        pipeline_json = {
            "name": request.json["name"],
            "version": "1.0.0",
            "uuid": pipeline_uuid,
            "settings": {
                "auto_eviction": False,
                "data_passing_memory_size": "1GB",
            },
            "steps": {},
        }

        with open(pipeline_json_path, "w") as pipeline_json_file:
            pipeline_json_file.write(json.dumps(pipeline_json, indent=4))

        return jsonify({"success": True})
    else:
        return (
            jsonify({
                "message": "Pipeline already exists at path '%s'." %
                           pipeline_path
            }),
            409,
        )
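# A hedged sketch of a client call to the handler above. The JSON keys
# ("name", "pipeline_path") come from the handler itself; the URL is
# illustrative, since the route registration is not shown in this snippet.

import requests

project_uuid = "..."  # uuid of an existing project (placeholder)
response = requests.post(
    f"http://localhost/async/pipelines/create/{project_uuid}",
    json={"name": "My pipeline", "pipeline_path": "my-pipeline.orchest"},
)
# A 200 response carries {"success": true}; a 409 means the path is taken.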
def insert_sample_pipelines(self):
    with open(self.pipelines_file, 'r') as pipelines_file:
        reader = csv.reader(pipelines_file)
        # Skip the header row.
        next(reader)
        for row in reader:
            pipeline = Pipeline(id=row[0],
                                pipeline_id=row[1],
                                owner_id=row[2],
                                name=row[3],
                                version=row[4],
                                is_private=row[5] == 'True',
                                date_created=datetime.now(),
                                date_updated=datetime.now())
            db.session.add(pipeline)
        db.session.commit()
    # No explicit close() is needed: the with block closes the file.
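# The loader above expects a header row followed by one pipeline per line,
# with columns matching the positional indices used in the loop:
# id, pipeline_id, owner_id, name, version, is_private. An illustrative
# pipelines.csv (the values are made up):
#
#   id,pipeline_id,owner_id,name,version,is_private
#   1,12,1,Freesurfer,1.9,False
#   2,13,2,fMRIPrep,20.2,True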
def _transaction(self, project_uuid: str, pipeline_name: str,
                 pipeline_path: str):
    # Reject creation if a pipeline with this path exists already.
    if (Pipeline.query.filter(
            Pipeline.project_uuid == project_uuid).filter(
            Pipeline.path == pipeline_path).count() > 0):
        raise FileExistsError(
            f"Pipeline already exists at path {pipeline_path}.")

    pipeline_uuid = str(uuid.uuid4())
    pipeline = Pipeline(path=pipeline_path, uuid=pipeline_uuid,
                        project_uuid=project_uuid)
    db.session.add(pipeline)

    # To be used by the collateral and revert functions.
    self.collateral_kwargs["project_uuid"] = project_uuid
    self.collateral_kwargs["pipeline_uuid"] = pipeline_uuid
    self.collateral_kwargs["pipeline_name"] = pipeline_name
    self.collateral_kwargs["pipeline_path"] = pipeline_path
def sync_project_pipelines_db_state(project_uuid):
    """Synchronizes the state of the pipelines of a project (fs/db).

    Synchronizes the state of the filesystem with the db when it comes
    to the pipelines of a project. Pipelines removed from the
    filesystem are removed from the db, while new pipelines (e.g.
    pipelines that appeared after a project import) are registered in
    the db.

    Args:
        project_uuid: UUID of the project to synchronize.

    Raises:
        FileNotFoundError: If the project directory is not found.
    """
    project_path = project_uuid_to_path(project_uuid)
    project_dir = os.path.join(app.config["USER_DIR"], "projects",
                               project_path)
    if not os.path.isdir(project_dir):
        raise FileNotFoundError("Project directory not found")

    # Find all pipelines in the project directory.
    pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)

    # Cleanup pipelines that have been manually removed.
    fs_removed_pipelines = Pipeline.query.filter(
        Pipeline.path.notin_(pipeline_paths)).filter(
            Pipeline.project_uuid == project_uuid).all()
    for fs_removed_pipeline in fs_removed_pipelines:
        cleanup_pipeline_from_orchest(fs_removed_pipeline)

    # Identify all pipeline paths that are not yet tracked in the db.
    existing_pipeline_paths = [
        pipeline.path
        for pipeline in Pipeline.query.filter(
            Pipeline.path.in_(pipeline_paths)).filter(
                Pipeline.project_uuid == project_uuid).all()
    ]

    # TODO: handle existing pipeline assignments
    new_pipeline_paths = set(pipeline_paths) - set(existing_pipeline_paths)

    for new_pipeline_path in new_pipeline_paths:
        # Write pipeline uuid to file.
        pipeline_json_path = get_pipeline_path(
            None, project_uuid, pipeline_path=new_pipeline_path)
        try:
            with open(pipeline_json_path, "r") as json_file:
                pipeline_json = json.load(json_file)

            file_pipeline_uuid = pipeline_json.get("uuid")
            new_pipeline_uuid = file_pipeline_uuid

            # Generate a new uuid if the file has no uuid, an empty
            # one, or one that is already taken in this project. The
            # falsiness check runs first so a missing uuid (None) does
            # not crash a len() call.
            if (not file_pipeline_uuid or Pipeline.query.filter(
                    Pipeline.uuid == file_pipeline_uuid).filter(
                        Pipeline.project_uuid == project_uuid).count() > 0):
                new_pipeline_uuid = str(uuid.uuid4())
                with open(pipeline_json_path, "w") as json_file:
                    pipeline_json["uuid"] = new_pipeline_uuid
                    json_file.write(json.dumps(pipeline_json, indent=4))

            # Only commit if writing succeeds.
            new_pipeline = Pipeline(
                uuid=new_pipeline_uuid,
                path=new_pipeline_path,
                project_uuid=project_uuid,
            )
            db.session.add(new_pipeline)
            db.session.commit()
        except Exception as e:
            logging.info(e)
def pipelines_get(project_uuid):
    project_path = project_uuid_to_path(project_uuid)
    project_dir = os.path.join(app.config["USER_DIR"], "projects",
                               project_path)

    if not os.path.isdir(project_dir):
        return jsonify({"message": "Project directory not found."}), 404

    # Find all pipelines in the project directory.
    pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)

    # Identify all pipeline paths that are not yet tracked in the db.
    existing_pipeline_paths = [
        pipeline.path
        for pipeline in Pipeline.query.filter(
            Pipeline.path.in_(pipeline_paths)).filter(
                Pipeline.project_uuid == project_uuid).all()
    ]

    # TODO: handle existing pipeline assignments
    new_pipeline_paths = set(pipeline_paths) - set(existing_pipeline_paths)

    for new_pipeline_path in new_pipeline_paths:
        # Write pipeline uuid to file.
        pipeline_json_path = get_pipeline_path(
            None, project_uuid, pipeline_path=new_pipeline_path)
        try:
            with open(pipeline_json_path, "r") as json_file:
                pipeline_json = json.load(json_file)

            file_pipeline_uuid = pipeline_json.get("uuid")
            new_pipeline_uuid = file_pipeline_uuid

            # Generate a new uuid if the file has no uuid, an empty
            # one, or one that is already taken in this project. The
            # falsiness check runs first so a missing uuid (None) does
            # not crash a len() call.
            if (not file_pipeline_uuid or Pipeline.query.filter(
                    Pipeline.uuid == file_pipeline_uuid).filter(
                        Pipeline.project_uuid == project_uuid).count() > 0):
                new_pipeline_uuid = str(uuid.uuid4())
                with open(pipeline_json_path, "w") as json_file:
                    pipeline_json["uuid"] = new_pipeline_uuid
                    json_file.write(json.dumps(pipeline_json, indent=2))

            # Only commit if writing succeeds.
            new_pipeline = Pipeline(
                uuid=new_pipeline_uuid,
                path=new_pipeline_path,
                project_uuid=project_uuid,
            )
            db.session.add(new_pipeline)
            db.session.commit()
        except Exception as e:
            logging.info(e)

    pipelines = Pipeline.query.filter(
        Pipeline.project_uuid == project_uuid).all()

    pipelines_augmented = []
    for pipeline in pipelines:
        pipeline_json_path = get_pipeline_path(pipeline.uuid,
                                               pipeline.project_uuid)

        pipeline_augmented = {
            "uuid": pipeline.uuid,
            "path": pipeline.path,
        }
        if os.path.isfile(pipeline_json_path):
            with open(pipeline_json_path, "r") as json_file:
                pipeline_json = json.load(json_file)
                pipeline_augmented["name"] = pipeline_json["name"]
        else:
            pipeline_augmented["name"] = (
                "Warning: pipeline file was not found.")

        pipelines_augmented.append(pipeline_augmented)

    json_string = json.dumps({
        "success": True,
        "result": pipelines_augmented
    })

    return json_string, 200, {"content-type": "application/json"}
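# For reference, the response body assembled above has this shape (the
# values are illustrative):
#
#   {
#     "success": true,
#     "result": [
#       {
#         "uuid": "0f2b...",
#         "path": "my-pipeline.orchest",
#         "name": "My pipeline"
#       }
#     ]
#   }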