Example #1
    def filemanager_delete():
        root = request.args.get("root")
        path = request.args.get("path")
        project_uuid = request.args.get("project_uuid")

        try:
            root_dir_path, _ = process_request(root=root,
                                               path=path,
                                               project_uuid=project_uuid)
        except Exception as e:
            return jsonify({"message": str(e)}), 400

        # The client sends an absolute path; strip the leading slash
        # so safe_join treats it as relative to the root directory.
        target_path = safe_join(root_dir_path, path[1:])

        if target_path == root_dir_path:
            return (
                jsonify({
                    "message": ("It is not allowed to delete roots "
                                "through the file-manager.")
                }),
                403,
            )

        if os.path.exists(target_path):
            try:
                rmtree(target_path)
            except Exception:
                return jsonify({"message": "Deletion failed."}), 500
        else:
            return jsonify(
                {"message": "No file or directory at path %s" % path}), 500

        return jsonify({"message": "Success"})
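
The handler above relies on two separate protections: the explicit equality check stops root deletion, while safe_join itself rejects path traversal. A minimal sketch of the latter, assuming Werkzeug's safe_join (which returns None for unsafe paths in Werkzeug 2.x; the paths are hypothetical):

from werkzeug.utils import safe_join

base = "/userdir/projects"
# A well-formed relative path resolves inside the base directory.
print(safe_join(base, "my-project/data.csv"))
# A traversal attempt is rejected: safe_join returns None.
print(safe_join(base, "../etc/passwd"))
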
Example #2
def remove_project_jobs_directories(project_uuid):

    project_jobs_path = safe_join(current_app.config["USER_DIR"], "jobs",
                                  project_uuid)

    if os.path.isdir(project_jobs_path):
        rmtree(project_jobs_path, ignore_errors=True)
Example #3
def remove_job_pipeline_run_directory(run_uuid, job_uuid, pipeline_uuid,
                                      project_uuid):

    job_project_path = safe_join(current_app.config["USER_DIR"], "jobs",
                                 project_uuid)
    job_pipeline_path = safe_join(job_project_path, pipeline_uuid)
    job_path = safe_join(job_pipeline_path, job_uuid)
    job_pipeline_run_path = safe_join(job_path, run_uuid)

    if os.path.isdir(job_pipeline_run_path):
        rmtree(job_pipeline_run_path, ignore_errors=True)
Example #4
def remove_job_directory(job_uuid, pipeline_uuid, project_uuid):

    job_project_path = safe_join(current_app.config["USER_DIR"], "jobs",
                                 project_uuid)
    job_pipeline_path = safe_join(job_project_path, pipeline_uuid)
    job_path = safe_join(job_pipeline_path, job_uuid)

    if os.path.isdir(job_path):
        rmtree(job_path, ignore_errors=True)

    # Clean up parent directory if this job removal created empty
    # directories.
    remove_dir_if_empty(job_pipeline_path)
    remove_dir_if_empty(job_project_path)
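
Note the bottom-up order above: the deepest directory is removed first, then remove_dir_if_empty (defined in Example #9 below) prunes the parents only if the removal left them empty. A hypothetical generalization of the same pattern:

import os
from shutil import rmtree

def remove_leaf_and_prune(leaf_dir, stop_at):
    # Remove the leaf directory, then walk upwards and delete every
    # ancestor that the removal left empty, stopping at `stop_at`.
    rmtree(leaf_dir, ignore_errors=True)
    parent = os.path.dirname(leaf_dir)
    while (parent != stop_at and os.path.isdir(parent)
           and not os.listdir(parent)):
        os.rmdir(parent)
        parent = os.path.dirname(parent)
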
Example #5
def clear_folder(folder):
    try:
        for filename in os.listdir(folder):
            file_path = safe_join(folder, filename)
            try:
                if os.path.isfile(file_path) or os.path.islink(file_path):
                    os.unlink(file_path)
                elif os.path.isdir(file_path):
                    rmtree(file_path)
            except Exception as e:
                current_app.logger.error("Failed to delete %s. Reason: %s" %
                                         (file_path, e))
    except FileNotFoundError as e:
        current_app.logger.error("Failed to delete %s. Reason: %s" %
                                 (folder, e))
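
Unlike rmtree(folder), clear_folder empties a directory while keeping the directory itself, so code holding the path does not need to re-create it. A minimal usage sketch with a hypothetical path:

# Deletes the contents of the uploads directory but leaves the
# directory itself in place for later uploads.
clear_folder("/userdir/.orchest/uploads")
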
Example #6
def delete_environment(app, project_uuid, environment_uuid):
    """Delete an environment from disk and from the runtime environment

    Args:
        app:
        project_uuid:
        environment_uuid:

    Returns:

    """
    url = (f"http://{app.config['ORCHEST_API_ADDRESS']}"
           f"/api/environments/{project_uuid}/{environment_uuid}")
    app.config["SCHEDULER"].add_job(requests.delete, args=[url])

    environment_dir = get_environment_directory(environment_uuid, project_uuid)
    rmtree(environment_dir)
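
Note that this rmtree call has neither an existence check nor ignore_errors, so a directory already removed by a concurrent request would raise FileNotFoundError. A defensive variant, as a sketch reusing the example's helper rather than the project's actual code:

import os
from shutil import rmtree

environment_dir = get_environment_directory(environment_uuid, project_uuid)
# Guard the removal so a missing directory does not raise.
if os.path.isdir(environment_dir):
    rmtree(environment_dir, ignore_errors=True)
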
Example #7
def prepare_build_context(task_uuid):
    """Prepares the build context for building the Jupyter image.

    Prepares the build context by copying the JupyterLab fine tune bash
    script.

    Args:
        task_uuid:

    Returns:
        Dictionary containing build context details.

    """
    # resolve the user's JupyterLab setup script inside the userdir
    jupyterlab_setup_script = os.path.join("/userdir",
                                           _config.JUPYTER_SETUP_SCRIPT)
    jupyter_image_builds_dir = _config.USERDIR_JUPYTER_IMG_BUILDS
    snapshot_path = f"{jupyter_image_builds_dir}/{task_uuid}"

    if os.path.isdir(snapshot_path):
        rmtree(snapshot_path)

    os.system('mkdir "%s"' % (snapshot_path))

    dockerfile_name = ".orchest-reserved-jupyter-dockerfile"
    bash_script_name = ".orchest-reserved-jupyter-setup.sh"
    write_jupyter_dockerfile(
        "tmp/jupyter",
        bash_script_name,
        os.path.join(snapshot_path, dockerfile_name),
    )

    if os.path.isfile(jupyterlab_setup_script):
        # copy the setup_script into the context
        os.system('cp "%s" "%s"' % (
            jupyterlab_setup_script,
            os.path.join(snapshot_path, bash_script_name),
        ))
    else:
        # create empty shell script if no setup_script exists
        os.system('touch "%s"' % os.path.join(snapshot_path, bash_script_name))

    return {
        "snapshot_path": snapshot_path,
        "base_image": f"orchest/jupyter-server:{CONFIG_CLASS.ORCHEST_VERSION}",
        "dockerfile_path": dockerfile_name,
    }
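
The returned dictionary is the contract with the build task: Example #10 below passes it to build_image and later reads snapshot_path back out of it to clean up. A sketch of that round trip:

# Prepare the context, run the build (elided here), then remove the
# snapshot directory the context was based on.
build_context = prepare_build_context(task_uuid)
rmtree(build_context["snapshot_path"])
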
Example #8
    def _collateral(self, project_uuid: str):
        """Remove a project from the fs and the orchest-api"""

        # Delete the project directory.
        project_path = project_uuid_to_path(project_uuid)
        # The project has been deleted by a concurrent deletion request.
        if project_path is None:
            return
        full_project_path = safe_join(current_app.config["PROJECTS_DIR"],
                                      project_path)
        rmtree(full_project_path)

        # Remove jobs directories related to project.
        remove_project_jobs_directories(project_uuid)

        # Issue project deletion to the orchest-api.
        url = (
            f"http://{current_app.config['ORCHEST_API_ADDRESS']}/api/projects/"
            f"{project_uuid}")
        current_app.config["SCHEDULER"].add_job(requests.delete, args=[url])

        # Will delete cascade pipeline, pipeline run.
        Project.query.filter_by(uuid=project_uuid).delete()
        db.session.commit()
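
A subtlety behind the "delete cascade" comment: Query.delete() issues a bulk DELETE that bypasses ORM-level cascades, so the cascading removal of pipelines and pipeline runs must come from database-level foreign keys. A hypothetical sketch of what that implies for the models (names assumed, not taken from the source):

class Pipeline(db.Model):
    # ondelete="CASCADE" makes the database remove pipeline rows when
    # their project row is deleted; a bulk Query.delete() skips ORM
    # cascades, so this constraint is what does the cascading.
    project_uuid = db.Column(
        db.String(36),
        db.ForeignKey("project.uuid", ondelete="CASCADE"),
    )
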
Example #9
def remove_dir_if_empty(path):
    if os.path.isdir(path) and not any(True for _ in os.scandir(path)):
        rmtree(path, ignore_errors=True)
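
The any(True for _ in os.scandir(path)) idiom tests for emptiness lazily, stopping at the first entry instead of listing the whole directory. A minimal usage sketch with hypothetical paths:

import os

os.makedirs("/tmp/example/inner", exist_ok=True)
remove_dir_if_empty("/tmp/example/inner")  # empty, so it is removed
remove_dir_if_empty("/tmp/example")        # now empty as well, removed
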
Example #10
def build_jupyter_image_task(task_uuid: str, image_tag: str):
    """Function called by the celery task to build Jupyter image.

    Builds a Jupyter image given the arguments; the logs produced by
    the user-provided script are forwarded to a SocketIO server and
    namespace defined in the orchest internals config.

    Args:
        task_uuid:
        image_tag:

    Returns:

    """
    with requests.sessions.Session() as session:

        try:
            update_jupyter_image_build_status("STARTED", session, task_uuid)

            # Prepare the project snapshot with the correctly placed
            # dockerfile, scripts, etc.
            build_context = prepare_build_context(task_uuid)

            # Use the agreed upon pattern for the image name.
            image_name = _config.JUPYTER_IMAGE_NAME

            if not os.path.exists(__JUPYTER_BUILD_FULL_LOGS_DIRECTORY):
                os.mkdir(__JUPYTER_BUILD_FULL_LOGS_DIRECTORY)
            # place the logs in the celery container
            complete_logs_path = os.path.join(
                __JUPYTER_BUILD_FULL_LOGS_DIRECTORY, image_name)

            status = SioStreamedTask.run(
                # What we are actually running/doing in this task,
                task_lambda=lambda user_logs_fo: build_image(
                    task_uuid,
                    image_name,
                    image_tag,
                    build_context,
                    user_logs_fo,
                    complete_logs_path,
                ),
                identity="jupyter",
                server=_config.ORCHEST_SOCKETIO_SERVER_ADDRESS,
                namespace=(
                    _config.ORCHEST_SOCKETIO_JUPYTER_IMG_BUILDING_NAMESPACE
                ),
                # note: using task.is_aborted() could be an option,
                # but it was giving some issues related to
                # multithreading/processing. Moreover, passing just
                # the task_uuid to this function means there is less
                # information to rely on, which is good.
                abort_lambda=(
                    lambda: AbortableAsyncResult(task_uuid).is_aborted()
                ),
            )

            # cleanup
            rmtree(build_context["snapshot_path"])

            update_jupyter_image_build_status(status, session, task_uuid)

        # Catch all exceptions because we need to make sure to set the
        # build state to failed.
        except Exception as e:
            update_jupyter_image_build_status("FAILURE", session, task_uuid)
            logger.error(e)
            raise e
        finally:
            # We get here either because the task was successful or
            # was aborted; in any case, delete the workflow.
            k8s_custom_obj_api.delete_namespaced_custom_object(
                "argoproj.io",
                "v1alpha1",
                _config.ORCHEST_NAMESPACE,
                "workflows",
                f"image-build-task-{task_uuid}",
            )

    # The status of the Celery task is SUCCESS since it has finished
    # running. Not related to the actual state of the build, e.g.
    # FAILURE.
    return "SUCCESS"
Example #11
def prepare_build_context(task_uuid, project_uuid, environment_uuid,
                          project_path):
    """Prepares the build context for a given environment.

    Prepares the build context by taking a snapshot of the project
    directory and using this snapshot as the context in which the
    ad-hoc Dockerfile is placed. The Dockerfile is written so as to
    respect the environment properties (base image, user bash script,
    etc.) while also making it possible to log only the messages
    related to the user script during the image build.

    Args:
        task_uuid:
        project_uuid:
        environment_uuid:
        project_path:

    Returns:
        Dictionary containing build context details.

    Raises:
        See the check_environment_correctness function.
    """
    # the project path we receive is relative to the projects directory
    userdir_project_path = os.path.join(_config.USERDIR_PROJECTS, project_path)

    # sanity checks, if not respected exception will be raised
    check_environment_correctness(project_uuid, environment_uuid,
                                  userdir_project_path)

    env_builds_dir = _config.USERDIR_ENV_IMG_BUILDS
    # K8S_TODO: remove this?
    Path(env_builds_dir).mkdir(parents=True, exist_ok=True)
    # Make a snapshot of the project state, used for the context.
    snapshot_path = f"{env_builds_dir}/{task_uuid}"
    if os.path.isdir(snapshot_path):
        rmtree(snapshot_path)
    copytree(userdir_project_path, snapshot_path, use_gitignore=True)
    # take the environment from the snapshot
    environment_path = os.path.join(
        snapshot_path, f".orchest/environments/{environment_uuid}")

    # Build the docker file and move it to the context.
    with open(os.path.join(environment_path, "properties.json")) as json_file:
        environment_properties = json.load(json_file)

        # use the task_uuid to avoid clashing with user stuff
        dockerfile_name = (
            f".orchest-reserved-env-dockerfile-{project_uuid}-{environment_uuid}"
        )
        bash_script_name = (
            f".orchest-reserved-env-setup-script-{project_uuid}-{environment_uuid}.sh"
        )

        base_image = environment_properties["base_image"]
        # Temporary workaround for common.tsx not using the orchest
        # version.
        if ":" not in base_image:
            base_image = f"{base_image}:{CONFIG_CLASS.ORCHEST_VERSION}"
        write_environment_dockerfile(
            base_image,
            project_uuid,
            environment_uuid,
            _config.PROJECT_DIR,
            bash_script_name,
            os.path.join(snapshot_path, dockerfile_name),
        )

        # Move the startup script to the context.
        os.system('cp "%s" "%s"' % (
            os.path.join(environment_path, _config.ENV_SETUP_SCRIPT_FILE_NAME),
            os.path.join(snapshot_path, bash_script_name),
        ))

    # hide stuff from the user
    with open(os.path.join(snapshot_path, ".dockerignore"),
              "w") as docker_ignore:
        docker_ignore.write(".dockerignore\n")
        docker_ignore.write(".orchest\n")
        docker_ignore.write("%s\n" % dockerfile_name)

    return {
        "snapshot_path": snapshot_path,
        "base_image": base_image,
        "dockerfile_path": dockerfile_name,
    }
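
The .dockerignore written near the end keeps the reserved files out of the build context sent to the builder; for the snapshot above its contents would be (UUIDs filled in at runtime):

.dockerignore
.orchest
.orchest-reserved-env-dockerfile-<project_uuid>-<environment_uuid>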