Example #1
    def _transaction(self, project_uuid):
        """Synchronizes the state of the pipelines of a project.

        Synchronizes the state of the filesystem with the db when it
        comes to the pipelines of a project. Pipelines removed from the
        filesystem are removed from the db, and new pipelines (or
        pipelines that appeared afterwards, e.g. through a project
        import) are registered in the db.

        Args:
            project_uuid: UUID of the project to synchronize.

        Raises:
            FileNotFoundError: If the project directory is not found.
        """

        project_path = project_uuid_to_path(project_uuid)
        project_dir = os.path.join(
            current_app.config["USER_DIR"], "projects", project_path
        )

        if not os.path.isdir(project_dir):
            raise FileNotFoundError("Project directory not found")

        # Find all pipelines in the project directory.
        pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)
        # Cleanup pipelines that have been manually removed.
        fs_removed_pipelines = (
            Pipeline.query.filter(Pipeline.path.notin_(pipeline_paths))
            .filter(Pipeline.project_uuid == project_uuid)
            .all()
        )
        for pip in fs_removed_pipelines:
            DeletePipeline(self.tpe).transaction(pip.project_uuid, pip.uuid)

        # Identify all pipeline paths that are not yet a pipeline, that
        # is, pipelines that were added through the filesystem.
        existing_pipeline_paths = [
            pipeline.path
            for pipeline in Pipeline.query.filter(Pipeline.path.in_(pipeline_paths))
            .filter(Pipeline.project_uuid == project_uuid)
            .all()
        ]
        # TODO: handle existing pipeline assignments.
        new_pipelines_from_fs = set(pipeline_paths) - set(existing_pipeline_paths)
        for path in new_pipelines_from_fs:
            AddPipelineFromFS(self.tpe).transaction(project_uuid, path)
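
The diffing itself is just set arithmetic over pipeline paths: whatever is in the db but no longer on disk gets deleted, whatever is on disk but not in the db gets registered. A minimal self-contained sketch of that step, with the filesystem scan and the db query results replaced by plain lists (all paths here are made up):

    # Hypothetical stand-ins for find_pipelines_in_dir() and the db query.
    fs_paths = ["main.orchest", "sub/new.orchest"]
    db_paths = ["main.orchest", "stale.orchest"]

    # In the db but no longer on disk: delete from the db.
    removed = set(db_paths) - set(fs_paths)
    # On disk but not yet in the db: register in the db.
    added = set(fs_paths) - set(db_paths)

    assert removed == {"stale.orchest"}
    assert added == {"sub/new.orchest"}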
Example #2
    def sync_project_pipelines_db_state(project_uuid):
        """Synchronizes the state of the pipelines of a project (fs/db).

        Synchronizes the state of the filesystem with the db
        when it comes to the pipelines of a project. Pipelines removed
        from the filesystem are removed from the db, and new pipelines
        (or pipelines that appeared afterwards, e.g. through a project
        import) are registered in the db.

        Args:
            project_uuid: UUID of the project to synchronize.

        Raises:
            FileNotFoundError: If the project directory is not found.
        """
        project_path = project_uuid_to_path(project_uuid)
        project_dir = os.path.join(app.config["USER_DIR"], "projects",
                                   project_path)

        if not os.path.isdir(project_dir):
            raise FileNotFoundError("Project directory not found")

        # find all pipelines in project dir
        pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)

        # cleanup pipelines that have been manually removed
        fs_removed_pipelines = Pipeline.query.filter(
            Pipeline.path.notin_(pipeline_paths)).filter(
                Pipeline.project_uuid == project_uuid).all()
        for fs_removed_pipeline in fs_removed_pipelines:
            cleanup_pipeline_from_orchest(fs_removed_pipeline)

        # identify all pipeline paths that are not yet a pipeline
        existing_pipeline_paths = [
            pipeline.path for pipeline in Pipeline.query.filter(
                Pipeline.path.in_(pipeline_paths)).filter(
                    Pipeline.project_uuid == project_uuid).all()
        ]

        # TODO: handle existing pipeline assignments
        new_pipeline_paths = set(pipeline_paths) - set(existing_pipeline_paths)

        for new_pipeline_path in new_pipeline_paths:

            # write pipeline uuid to file
            pipeline_json_path = get_pipeline_path(
                None, project_uuid, pipeline_path=new_pipeline_path)

            try:
                with open(pipeline_json_path, "r") as json_file:
                    pipeline_json = json.load(json_file)

                # default to "" so a missing "uuid" key does not raise
                # a TypeError in the length check below
                file_pipeline_uuid = pipeline_json.get("uuid", "")

                new_pipeline_uuid = file_pipeline_uuid

                # generate a new uuid if the file has none or if it is
                # already taken within this project
                if (len(file_pipeline_uuid) == 0 or Pipeline.query.filter(
                        Pipeline.uuid == file_pipeline_uuid).filter(
                            Pipeline.project_uuid == project_uuid).count()
                        > 0):
                    new_pipeline_uuid = str(uuid.uuid4())

                with open(pipeline_json_path, "w") as json_file:
                    pipeline_json["uuid"] = new_pipeline_uuid
                    json_file.write(json.dumps(pipeline_json, indent=4))

                # only commit if writing succeeds
                new_pipeline = Pipeline(
                    uuid=new_pipeline_uuid,
                    path=new_pipeline_path,
                    project_uuid=project_uuid,
                )
                db.session.add(new_pipeline)
                db.session.commit()

            except Exception as e:
                # log at error level: silently failing to register a
                # pipeline should not go unnoticed
                logging.error(e)
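
The uuid handling in the loop above reduces to: keep the uuid stored in the pipeline file unless it is empty or already taken within the project, otherwise mint a fresh one. A sketch of just that rule, with the db lookup replaced by a plain set membership test (the function and variable names are hypothetical):

    import uuid

    def resolve_pipeline_uuid(file_uuid, taken_uuids):
        # Stand-in for the Pipeline.query ... count() > 0 check above.
        if not file_uuid or file_uuid in taken_uuids:
            return str(uuid.uuid4())
        return file_uuid

    assert resolve_pipeline_uuid("abc", {"abc"}) != "abc"  # collision
    assert resolve_pipeline_uuid("def", {"abc"}) == "def"  # kept as-is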
Example #3
    def pipelines_get(project_uuid):
        """Returns the pipelines of a project as an augmented listing.

        New pipelines found on the filesystem are registered in the db
        before the listing is built.
        """
        project_path = project_uuid_to_path(project_uuid)
        project_dir = os.path.join(app.config["USER_DIR"], "projects",
                                   project_path)

        if not os.path.isdir(project_dir):
            return jsonify({"message": "Project directory not found."}), 404

        # find all pipelines in project dir
        pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)

        # identify all pipeline paths that are not yet a pipeline
        existing_pipeline_paths = [
            pipeline.path for pipeline in Pipeline.query.filter(
                Pipeline.path.in_(pipeline_paths)).filter(
                    Pipeline.project_uuid == project_uuid).all()
        ]

        # TODO: handle existing pipeline assignments
        new_pipeline_paths = set(pipeline_paths) - set(existing_pipeline_paths)

        for new_pipeline_path in new_pipeline_paths:

            # write pipeline uuid to file
            pipeline_json_path = get_pipeline_path(
                None, project_uuid, pipeline_path=new_pipeline_path)

            try:
                with open(pipeline_json_path, "r") as json_file:
                    pipeline_json = json.load(json_file)

                # default to "" so a missing "uuid" key does not raise
                # a TypeError in the length check below
                file_pipeline_uuid = pipeline_json.get("uuid", "")

                new_pipeline_uuid = file_pipeline_uuid

                # generate a new uuid if the file has none or if it is
                # already taken within this project
                if (len(file_pipeline_uuid) == 0 or Pipeline.query.filter(
                        Pipeline.uuid == file_pipeline_uuid).filter(
                            Pipeline.project_uuid == project_uuid).count()
                        > 0):
                    new_pipeline_uuid = str(uuid.uuid4())

                with open(pipeline_json_path, "w") as json_file:
                    pipeline_json["uuid"] = new_pipeline_uuid
                    json_file.write(json.dumps(pipeline_json, indent=2))

                # only commit if writing succeeds
                new_pipeline = Pipeline(
                    uuid=new_pipeline_uuid,
                    path=new_pipeline_path,
                    project_uuid=project_uuid,
                )
                db.session.add(new_pipeline)
                db.session.commit()

            except Exception as e:
                # log at error level: silently failing to register a
                # pipeline should not go unnoticed
                logging.error(e)

        pipelines = Pipeline.query.filter(
            Pipeline.project_uuid == project_uuid).all()
        pipelines_augmented = []

        for pipeline in pipelines:

            pipeline_json_path = get_pipeline_path(pipeline.uuid,
                                                   pipeline.project_uuid)

            pipeline_augmented = {
                "uuid": pipeline.uuid,
                "path": pipeline.path,
            }
            if os.path.isfile(pipeline_json_path):
                with open(pipeline_json_path, "r") as json_file:
                    pipeline_json = json.load(json_file)
                    pipeline_augmented["name"] = pipeline_json["name"]
            else:
                pipeline_augmented[
                    "name"] = "Warning: pipeline file was not found."

            pipelines_augmented.append(pipeline_augmented)

        json_string = json.dumps({
            "success": True,
            "result": pipelines_augmented
        })

        return json_string, 200, {"content-type": "application/json"}
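
For reference, a successful response from this endpoint serializes to JSON of the following shape; the uuids, paths, and names below are illustrative, not taken from the example:

    # Illustrative response body (before json.dumps); values are made up.
    example_response = {
        "success": True,
        "result": [
            {"uuid": "uuid-1", "path": "pipeline.orchest",
             "name": "My pipeline"},
            {"uuid": "uuid-2", "path": "gone/other.orchest",
             "name": "Warning: pipeline file was not found."},
        ],
    }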
Example #4
    def _transaction(self, project_uuid):
        """Synchronizes the state of the pipelines of a project.

        Synchronizes the state of the filesystem with the db when it
        comes to the pipelines of a project. Pipelines removed from the
        filesystem are removed from the db, and new pipelines (or
        pipelines that appeared afterwards, e.g. through a project
        import) are registered in the db.

        Args:
            project_uuid: UUID of the project to synchronize.

        Raises:
            FileNotFoundError: If the project directory is not found.
        """

        project_path = project_uuid_to_path(project_uuid)
        project_dir = safe_join(current_app.config["USER_DIR"], "projects",
                                project_path)

        # Lock the project to avoid race conditions in pipeline deletion
        # or creation.
        Project.query.with_for_update().filter_by(uuid=project_uuid).one()

        if not os.path.isdir(project_dir):
            raise FileNotFoundError("Project directory not found")

        # Find all pipelines in the project directory.
        pipeline_paths = find_pipelines_in_dir(project_dir, project_dir)
        # Cleanup pipelines that have been manually removed.
        fs_removed_pipelines = Pipeline.query.filter(
            Pipeline.path.notin_(pipeline_paths)).filter(
                Pipeline.project_uuid == project_uuid,
                Pipeline.status == "READY",
            ).all()
        for pip in fs_removed_pipelines:
            DeletePipeline(self.tpe).transaction(pip.project_uuid,
                                                 pip.uuid,
                                                 remove_file=False)

        # Identify all pipeline paths that are not yet a pipeline, that
        # is, pipelines that were added through the filesystem.
        existing_pipeline_paths = [
            pipeline.path for pipeline in Pipeline.query.filter(
                Pipeline.path.in_(pipeline_paths)).filter(
                    Pipeline.project_uuid == project_uuid).all()
        ]
        # TODO: handle existing pipeline assignments.
        new_pipelines_from_fs = set(pipeline_paths) - set(
            existing_pipeline_paths)

        for path in new_pipelines_from_fs:
            pipeline_json_path = get_pipeline_path(None,
                                                   project_uuid,
                                                   pipeline_path=path)
            with open(pipeline_json_path, "r") as json_file:
                pipeline_uuid = json.load(json_file)["uuid"]
            # This is not a new pipeline, the pipeline is being moved.
            is_moving = (Pipeline.query.filter_by(project_uuid=project_uuid,
                                                  uuid=pipeline_uuid,
                                                  status="MOVING").count() > 0)
            if not is_moving:
                AddPipelineFromFS(self.tpe).transaction(project_uuid, path)
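
The with_for_update() call above is plain SQLAlchemy row locking (SELECT ... FOR UPDATE): a concurrent transaction that tries to lock the same project row blocks until the first transaction commits or rolls back, which is what serializes concurrent syncs. A minimal self-contained sketch of the pattern; the model definition, DSN, and session setup here are hypothetical stand-ins, not Orchest's own:

    from sqlalchemy import Column, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Project(Base):
        __tablename__ = "projects"
        uuid = Column(String, primary_key=True)

    engine = create_engine("postgresql:///example")  # hypothetical DSN

    with Session(engine) as session, session.begin():
        # Acquires a row-level lock; a concurrent sync on the same
        # project blocks here until this transaction ends.
        session.query(Project).with_for_update().filter_by(
            uuid="some-project-uuid").one()
        # ... reconcile fs/db state while holding the lock.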