Example 1
def run_pipeline(request, pipeline_metadata, pipeline_package_path=None,
                 pipeline_id=None):
    """Run a pipeline."""
    client = _get_client(pipeline_metadata.get("kfp_host", None))
    experiment = client.create_experiment(pipeline_metadata["experiment_name"])
    run_name = kfp_utils.generate_run_name(pipeline_metadata["pipeline_name"])
    run = client.run_pipeline(experiment.id, run_name,
                              pipeline_package_path=pipeline_package_path,
                              pipeline_id=pipeline_id)
    return {"id": run.id, "name": run.name, "status": run.status}
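
Example 1 depends on two helpers that are not shown: _get_client and
kfp_utils.generate_run_name. A minimal sketch of what they might look like,
assuming _get_client simply wraps kfp.Client and generate_run_name appends a
timestamp suffix (both bodies are assumptions, not Kale's actual code):

import datetime

import kfp


def _get_client(host=None):
    # Assumed helper: host=None lets the SDK resolve the default
    # (e.g. in-cluster) KFP API endpoint.
    return kfp.Client(host=host)


def generate_run_name(pipeline_name):
    # Assumed helper: make run names unique with a timestamp suffix.
    timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    return "%s-%s" % (pipeline_name, timestamp)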
Example 2
    results_task = results_op()\
        .add_pvolumes(pvolumes_dict)\
        .after(randomforest_task, logisticregression_task,
               naivebayes_task, svm_task, decisiontree_task)
    results_task.container.working_dir = "/kale"
    results_task.container.set_security_context(
        k8s_client.V1SecurityContext(run_as_user=0))
    output_artifacts = {}
    output_artifacts.update(
        {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'})
    output_artifacts.update({'results': '/results.html'})
    results_task.output_artifact_paths.update(output_artifacts)


if __name__ == "__main__":
    pipeline_func = auto_generated_pipeline
    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
    import kfp.compiler as compiler
    compiler.Compiler().compile(pipeline_func, pipeline_filename)

    # Get or create an experiment
    import kfp
    client = kfp.Client()
    experiment = client.create_experiment('titanic')

    # Submit a pipeline run
    from kale.utils.kfp_utils import generate_run_name
    run_name = generate_run_name('titanic-ml-rnd')
    run_result = client.run_pipeline(experiment.id, run_name,
                                     pipeline_filename, {})
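
run_pipeline returns as soon as the run is submitted; it does not wait for
the run to finish. If the caller needs the terminal status, the v1 SDK can
block on the run. A short sketch continuing the snippet above (the
3600-second timeout is an arbitrary choice):

    # Block until the run reaches a terminal state, then inspect it.
    run_detail = client.wait_for_run_completion(run_result.id, timeout=3600)
    print(run_detail.run.status)  # e.g. "Succeeded" or "Failed"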
Example 3
    output_artifacts = {}
    output_artifacts.update(
        {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'})
    output_artifacts.update({'explain': '/explain.html'})
    explain_task.output_artifact_paths.update(output_artifacts)
    explain_task.add_pod_label("pipelines.kubeflow.org/metadata_written",
                               "true")
    dep_names = explain_task.dependent_names + volume_step_names
    explain_task.add_pod_annotation("kubeflow-kale.org/dependent-templates",
                                    json.dumps(dep_names))
    if volume_name_parameters:
        explain_task.add_pod_annotation(
            "kubeflow-kale.org/volume-name-parameters",
            json.dumps(volume_name_parameters))


if __name__ == "__main__":
    pipeline_func = auto_generated_pipeline
    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
    import kfp.compiler as compiler
    compiler.Compiler().compile(pipeline_func, pipeline_filename)

    # Get or create an experiment
    import kfp
    client = kfp.Client()
    experiment = client.create_experiment('seldon-e2e-adult')

    # Submit a pipeline run
    from kale.utils.kfp_utils import generate_run_name
    run_name = generate_run_name('seldon-e2e-adult-ttonn')
    run_result = client.run_pipeline(experiment.id, run_name,
                                     pipeline_filename, {})
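
Examples 3 and 4 repeat the same metadata pattern: a
pipelines.kubeflow.org/metadata_written label plus Kale-specific annotations
that record each step's dependent templates and volume name parameters,
JSON-encoded because annotation values must be plain strings. The pattern in
isolation on a bare v1 ContainerOp (image and names are placeholders):

import json

import kfp.dsl as dsl


@dsl.pipeline(name="annotation-demo")
def annotation_demo():
    task = dsl.ContainerOp(name="step", image="alpine:3.18",
                           command=["sh", "-c", "echo done"])
    # Labels hold simple queryable flags; structured data goes into
    # annotations as JSON strings.
    task.add_pod_label("pipelines.kubeflow.org/metadata_written", "true")
    task.add_pod_annotation("kubeflow-kale.org/dependent-templates",
                            json.dumps(["step-a", "step-b"]))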
Example 4
    output_artifacts = {}
    output_artifacts.update(
        {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'})
    output_artifacts.update({'results': '/results.html'})
    results_task.output_artifact_paths.update(output_artifacts)
    results_task.add_pod_label(
        "pipelines.kubeflow.org/metadata_written", "true")
    dep_names = results_task.dependent_names + volume_step_names
    results_task.add_pod_annotation(
        "kubeflow-kale.org/dependent-templates", json.dumps(dep_names))
    if volume_name_parameters:
        results_task.add_pod_annotation(
            "kubeflow-kale.org/volume-name-parameters",
            json.dumps(volume_name_parameters))


if __name__ == "__main__":
    pipeline_func = auto_generated_pipeline
    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
    import kfp.compiler as compiler
    compiler.Compiler().compile(pipeline_func, pipeline_filename)

    # Get or create an experiment
    import kfp
    client = kfp.Client()
    experiment = client.create_experiment('titanic')

    # Submit a pipeline run
    from kale.utils.kfp_utils import generate_run_name
    run_name = generate_run_name('titanic-ml-gxj28')
    run_result = client.run_pipeline(
        experiment.id, run_name, pipeline_filename, {})
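
The compiled .pipeline.tar.gz package is an ordinary gzipped tarball wrapping
the Argo workflow spec, so it can be inspected locally before submission. A
small sketch using only the standard library:

    import tarfile

    # List the package contents; v1 packages typically contain a
    # single pipeline.yaml with the Argo workflow definition.
    with tarfile.open(pipeline_filename, "r:gz") as tar:
        for member in tar.getmembers():
            print(member.name)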
Example 5

    output_artifacts = {}
    output_artifacts.update(
        {'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json'})
    output_artifacts.update({'sum_matrix': '/sum_matrix.html'})
    sum_matrix_task.output_artifact_paths.update(output_artifacts)
    sum_matrix_task.add_pod_label("pipelines.kubeflow.org/metadata_written",
                                  "true")
    dep_names = sum_matrix_task.dependent_names + volume_step_names
    sum_matrix_task.add_pod_annotation("kubeflow-kale.org/dependent-templates",
                                       json.dumps(dep_names))
    if volume_name_parameters:
        sum_matrix_task.add_pod_annotation(
            "kubeflow-kale.org/volume-name-parameters",
            json.dumps(volume_name_parameters))


if __name__ == "__main__":
    pipeline_func = auto_generated_pipeline
    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
    import kfp.compiler as compiler
    compiler.Compiler().compile(pipeline_func, pipeline_filename)

    # Get or create an experiment
    import kfp
    client = kfp.Client()
    experiment = client.create_experiment('hp-tuning')

    # Submit a pipeline run
    from kale.utils.kfp_utils import generate_run_name
    run_name = generate_run_name('hp-test-rnd')
    run_result = client.run_pipeline(experiment.id, run_name,
                                     pipeline_filename, {})
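
In every example the final argument to run_pipeline is an empty dict: the
params mapping from pipeline-parameter names to values. Parameters declared
by auto_generated_pipeline can be overridden per run there; a sketch with a
hypothetical parameter name (not taken from these pipelines):

    # "vol_shared_volume" is an illustrative parameter name, not one
    # defined by the generated pipelines above.
    run_result = client.run_pipeline(
        experiment.id, run_name, pipeline_filename,
        {"vol_shared_volume": "workspace-pvc"})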