Example no. 1
0
def run_v2_pipeline(
    client: kfp.Client,
    fn: Callable,
    driver_image: Optional[str],
    launcher_v2_image: Optional[str],
    pipeline_root: Optional[str],
    enable_caching: bool,
    arguments: dict[str, str],
):
    """Compile ``fn`` with the KFP v2 compiler and run it through ``client``.

    The v2 pipeline spec is compiled to JSON, wrapped into a pipeline-job
    dict (with caching options and runtime parameters injected), lowered to
    an Argo workflow via the ``kfp-v2-compiler`` CLI, and finally submitted.

    Args:
        client: KFP client used to submit the run.
        fn: Pipeline function to compile.
        driver_image: Optional override for the v2 driver image.
        launcher_v2_image: Optional override for the v2 launcher image.
        pipeline_root: Optional pipeline root passed to the backend compiler.
        enable_caching: Caching flag forced onto every task in the job spec.
        arguments: Runtime parameter values keyed by parameter name.

    Returns:
        The run object returned by ``client.create_run_from_pipeline_package``.
    """
    import os
    import subprocess
    import tempfile

    def _tmp_path(suffix: str, prefix: str = 'tmp') -> str:
        # tempfile.mktemp is deprecated and race-prone (the name can be
        # claimed by another process before we open it); mkstemp creates
        # the file atomically. We only need the path, so close the fd.
        fd, path = tempfile.mkstemp(suffix=suffix, prefix=prefix)
        os.close(fd)
        return path

    original_pipeline_spec = _tmp_path('.json', 'original_pipeline_spec')
    kfp.v2.compiler.Compiler().compile(pipeline_func=fn,
                                       package_path=original_pipeline_spec)

    # TODO: remove this overriding logic once create_run_from_job_spec is
    # used to trigger the KFP pipeline run directly.
    with open(original_pipeline_spec) as f:
        pipeline_job_dict = {
            'pipelineSpec': json.load(f),
            'runtimeConfig': {},
        }

    # Force the requested caching behavior onto every task in the root DAG
    # and in every nested component DAG.
    for component in [pipeline_job_dict['pipelineSpec']['root']] + list(
            pipeline_job_dict['pipelineSpec']['components'].values()):
        if 'dag' in component:
            for task in component['dag']['tasks'].values():
                task['cachingOptions'] = {'enableCache': enable_caching}

    # Only create the parameterValues section when there are arguments,
    # matching the original behavior of leaving runtimeConfig empty otherwise.
    if arguments:
        pipeline_job_dict['runtimeConfig']['parameterValues'] = dict(arguments)

    pipeline_job = _tmp_path('.json', 'pipeline_job')
    with open(pipeline_job, 'w') as f:
        json.dump(pipeline_job_dict, f)

    argo_workflow_spec = _tmp_path('.yaml')
    with open(argo_workflow_spec, 'w') as f:
        args = [
            'kfp-v2-compiler',
            '--job',
            pipeline_job,
        ]
        if driver_image:
            args += ['--driver', driver_image]
        if launcher_v2_image:
            args += ['--launcher', launcher_v2_image]
        if pipeline_root:
            args += ['--pipeline_root', pipeline_root]
        # Call the v2 backend compiler CLI to compile the pipeline job spec
        # into an Argo workflow.
        subprocess.check_call(args, stdout=f)
    return client.create_run_from_pipeline_package(
        pipeline_file=argo_workflow_spec,
        arguments={},
        enable_caching=enable_caching)
Example no. 2
0
def run_v2_pipeline(client: kfp.Client, fn: Callable, driver_image: str,
                    launcher_v2_image: str):
    """Compile ``fn`` with the KFP v2 compiler and run it through ``client``.

    The v2 pipeline spec is compiled to JSON, lowered to an Argo workflow
    via the ``kfp-v2-compiler`` CLI, and submitted as a run.

    Args:
        client: KFP client used to submit the run.
        fn: Pipeline function to compile.
        driver_image: v2 driver image passed to the backend compiler.
        launcher_v2_image: v2 launcher image passed to the backend compiler.

    Returns:
        The run object returned by ``client.create_run_from_pipeline_package``.
    """
    import os
    import subprocess
    import tempfile

    # tempfile.mktemp is deprecated and race-prone; mkstemp creates the
    # file atomically. We only need the paths, so close the descriptors.
    fd, pipeline_spec = tempfile.mkstemp(suffix='.json')
    os.close(fd)
    kfp.v2.compiler.Compiler().compile(pipeline_func=fn,
                                       package_path=pipeline_spec)
    fd, argo_workflow_spec = tempfile.mkstemp(suffix='.yaml')
    os.close(fd)
    with open(argo_workflow_spec, 'w') as f:
        args = [
            'kfp-v2-compiler', '--spec', pipeline_spec, '--driver',
            driver_image, '--launcher', launcher_v2_image
        ]
        # Call the v2 backend compiler CLI to compile the pipeline spec
        # into an Argo workflow.
        subprocess.check_call(args, stdout=f)
    return client.create_run_from_pipeline_package(
        pipeline_file=argo_workflow_spec, arguments={})