Example 1
def delete_deployment(deployment_id):
    """Delete
    Args:
        deployment_id (str): deployment id.
    """
    kfp_client = init_pipeline_client()

    # Get all SeldonDeployment resources.
    load_kube_config()
    custom_api = client.CustomObjectsApi()
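    # Positional args of list_namespaced_custom_object: group, version,
    # namespace ("deployments"), plural.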
    ret = custom_api.list_namespaced_custom_object(
        "machinelearning.seldon.io",
        "v1alpha2",
        "deployments",
        "seldondeployments"
    )
    deployments = ret['items']

    # Delete SeldonDeployment resource.
    if deployments:
        for deployment in deployments:
            if deployment['metadata']['name'] == deployment_id:
                delete_deployment_resource(deployment)

    # Delete deployment run
    deployment_run_id = get_deployment_by_id(deployment_id)['runId']
    kfp_client.runs.delete_run(deployment_run_id)

    return {
        "message": "Deployment deleted."
    }
Example 2
def get_experiment_run_history(experiment_id):
    """Get experiment run history.
    Args:
        experiment_id (str): PlatIA experiment_id.
    Returns:
       Experiment run history.
    """
    try:
        client = init_pipeline_client()

        experiment = client.get_experiment(experiment_name=experiment_id)

        experiment_runs = client.list_runs(page_size='100',
                                           sort_by=created_at_desc,
                                           experiment_id=experiment.id)

        response = []
        for run in experiment_runs.runs:
            workflow_manifest = json.loads(run.pipeline_spec.workflow_manifest)
            if workflow_manifest['metadata'][
                    'generateName'] == 'common-pipeline-':
                run_id = run.id
                run_details = client.get_run(run_id)
                formatted_operators = format_run_operators(run_details)
                if formatted_operators:
                    resp = {}
                    resp['runId'] = run_id
                    resp['createdAt'] = run.created_at
                    resp['operators'] = formatted_operators
                    response.append(resp)
    except Exception:
        return []

    return response
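Note that created_at_desc is used here (and in Examples 5, 6 and 8) but is not defined within the snippet; presumably it is a module-level constant holding the same sort expression that Example 3 passes as a literal, roughly as in this assumed sketch:

# Assumed module-level constant (not shown in these snippets); Example 3 passes
# the equivalent literal 'created_at desc' to sort_by directly.
created_at_desc = 'created_at desc'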
Example 3
def get_deployments():
    """Get deployments list.

    Returns:
        Deployments list.
    """
    kfp_client = init_pipeline_client()
    token = ''

    deployment_runs = []

    protocol = get_protocol()
    ip = get_cluster_ip()

    while True:
        list_runs = kfp_client.list_runs(
            page_token=token, sort_by='created_at desc', page_size=100)

        if list_runs.runs:
            runs = get_deployment_details(list_runs.runs, ip, protocol)
            deployment_runs.extend(runs)

            token = list_runs.next_page_token
            if token is None:
                break
        else:
            break

    return deployment_runs
Example 4
    def __init__(self, experiment_id, name, operators):
        """Create a new instance of Pipeline.

        Args:
            experiment_id (str): PlatIAgro experiment's uuid.
            name (str): deployment name.
            operators (list): list of pipeline operators.
        """
        self._roots = []
        self._operators = {}
        self._edges = defaultdict(list)  # source: [destinations]
        self._inverted_edges = defaultdict(list)  # destination: [sources]

        self._experiment_id = experiment_id
        self._name = name

        self._client = init_pipeline_client()
        self._experiment = self._client.create_experiment(name=experiment_id)

        for operator in operators:
            self._add_operator(operator)

        # Verify if the given pipeline has cycles
        if self._is_cyclic():
            raise BadRequest('The given pipeline has cycles.')
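The constructor above calls self._is_cyclic(), whose implementation is not part of this example. As a hypothetical sketch only (assuming _add_operator fills self._edges with operator-id adjacency lists; this is not taken from the project's code), a depth-first search with three-color marking could detect a cycle like this:

    def _is_cyclic(self):
        # Hypothetical sketch: DFS with WHITE/GREY/BLACK coloring over the
        # self._edges adjacency map; reaching a GREY node again means a back
        # edge, i.e. a cycle.
        WHITE, GREY, BLACK = 0, 1, 2
        color = {operator_id: WHITE for operator_id in self._operators}

        def visit(operator_id):
            color[operator_id] = GREY
            for destination in self._edges[operator_id]:
                if color.get(destination) == GREY:
                    return True
                if color.get(destination) == WHITE and visit(destination):
                    return True
            color[operator_id] = BLACK
            return False

        return any(color[operator_id] == WHITE and visit(operator_id)
                   for operator_id in self._operators)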
Example 5
def retry_experiment_run(experiment_id):
    """Re-initiate a failed or terminated experiment run.
    Args:
        experiment_id (str): PlatIA experiment_id.
    Returns:
       Experiment run details.
    """
    client = init_pipeline_client()
    experiment = client.get_experiment(experiment_name=experiment_id)
    experiment_runs = client.list_runs(page_size='1',
                                       sort_by=created_at_desc,
                                       experiment_id=experiment.id)
    retry = False

    for run in experiment_runs.runs:
        if run.status == 'Failed':
            init_pipeline_client().runs.retry_run(run_id=run.id)
            retry = True
    if not retry:
        raise NotFound('There is no failed experiment run')
    run_details = client.get_run(run.id)
    return format_pipeline_run_details(run_details)
Example 6
def terminate_experiment_run(experiment_id):
    """Terminate experiment run.
    Args:
        experiment_id (str): PlatIA experiment_id.
    Returns:
       Deletion message.
    """
    client = init_pipeline_client()
    experiment = client.get_experiment(experiment_name=experiment_id)
    experiment_runs = client.list_runs(page_size='1',
                                       sort_by=created_at_desc,
                                       experiment_id=experiment.id)

    for run in experiment_runs.runs:
        client.runs.terminate_run(run_id=run.id)
    response = {"message": "Training deleted."}
    return response
Example 7
def delete_deployment_resource(deployment_resource):
    """Delete deployment resource."""
    kfp_client = init_pipeline_client()

    @dsl.pipeline(name='Undeploy')
    def undeploy():
        dsl.ResourceOp(
            name='undeploy',
            k8s_resource=deployment_resource,
            action='delete'
        )

    kfp_client.create_run_from_pipeline_func(
        undeploy,
        {},
        run_name='undeploy',
        namespace=KF_PIPELINES_NAMESPACE,
    )
Example 8
def get_experiment_run(experiment_id, pretty=True):
    """Get experiment run details.
    Args:
        experiment_id (str): PlatIA experiment_id.
        pretty (bool): whether to return a well-formatted response.
    Returns:
       Run details.
    """
    run_details = ''
    try:
        client = init_pipeline_client()

        experiment = client.get_experiment(experiment_name=experiment_id)

        # lists runs for trainings and deployments of an experiment
        experiment_runs = client.list_runs(page_size='100',
                                           sort_by=created_at_desc,
                                           experiment_id=experiment.id)

        # find the latest training run
        latest_training_run = None
        for run in experiment_runs.runs:
            workflow_manifest = json.loads(run.pipeline_spec.workflow_manifest)
            if workflow_manifest['metadata'][
                    'generateName'] == 'common-pipeline-':
                latest_training_run = run
                break

        if latest_training_run:
            run_id = latest_training_run.id
            run_details = client.get_run(run_id)
        else:
            return {}
    except Exception:
        return {}

    if pretty:
        return format_pipeline_run_details(run_details)
    else:
        return run_details
Example 9
def retry_run_deployment(deployment_id):
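    """Retry the run of a deployment.
    Args:
        deployment_id (str): deployment id.
    Returns:
       Retried run details.
    """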
    deployment = list(filter(
        lambda d: d['experimentId'] == deployment_id, get_deployments()))[0]
    run = init_pipeline_client().runs.retry_run(run_id=deployment['runId'])
    return run
Example 10
    def setUp(self):
        # Run a default pipeline for tests
        client = init_pipeline_client()
        experiment = client.create_experiment(name=MOCKED_TRAINING_ID)
        client.run_pipeline(experiment.id, MOCKED_TRAINING_ID,
                            "tests/resources/mocked_training.yaml")

        conn = engine.connect()
        text = (
            f"INSERT INTO tasks (uuid, name, description, image, commands, arguments, tags, experiment_notebook_path, deployment_notebook_path, is_default, created_at, updated_at) "
            f"VALUES ('{TASK_ID}', 'name', 'desc', '{IMAGE}', '{COMMANDS_JSON}', '{ARGUMENTS_JSON}', '{TAGS_JSON}', '{EX_NOTEBOOK_PATH}', '{DEPLOY_NOTEBOOK_PATH}', 0, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)

        text = (
            f"INSERT INTO projects (uuid, name, created_at, updated_at) "
            f"VALUES ('{PROJECT_ID}', 'name', '{CREATED_AT}', '{UPDATED_AT}')")
        conn.execute(text)

        # Experiment 1
        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_1}', 'ex1', '{PROJECT_ID}', '0', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)
        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_1_1}', '{EX_ID_1}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_EMPTY_JSON}')"
        )
        conn.execute(text)
        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_1_2}', '{EX_ID_1}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_ID_1_1_JSON}')"
        )
        conn.execute(text)

        # Experiment 2
        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_2}', 'ex2', '{PROJECT_ID}', '1', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)

        # Experiment 3
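        # (its operator is seeded with an invalid dependency, DEP_OP_INVALID_JSON)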
        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_3}', 'ex3', '{PROJECT_ID}', '2', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)
        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_3_1}', '{EX_ID_3}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_INVALID_JSON}')"
        )
        conn.execute(text)

        # Experiment 4
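        # (its two operators are seeded with mutual dependencies, forming a cycle)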
        text = (
            f"INSERT INTO experiments (uuid, name, project_id, position, is_active, created_at, updated_at) "
            f"VALUES ('{EX_ID_4}', 'ex4', '{PROJECT_ID}', '3', 1, '{CREATED_AT}', '{UPDATED_AT}')"
        )
        conn.execute(text)
        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_4_1}', '{EX_ID_4}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_ID_4_2_JSON}')"
        )
        conn.execute(text)
        text = (
            f"INSERT INTO operators (uuid, experiment_id, task_id, parameters, created_at, updated_at, dependencies) "
            f"VALUES ('{OP_ID_4_2}', '{EX_ID_4}', '{TASK_ID}', '{PARAMETERS_JSON}', '{CREATED_AT}', '{UPDATED_AT}', '{DEP_OP_ID_4_1_JSON}')"
        )
        conn.execute(text)
        conn.close()