Code example #1
def _run_compiled_code(self, script_path: str):
    _name = self.pipeline.config.pipeline_name
    pipeline_yaml_path = kfputils.compile_pipeline(script_path, _name)
    kfputils.upload_pipeline(pipeline_yaml_path, _name)
    run_name = kfputils.generate_run_name(_name)
    kfputils.run_pipeline(
        run_name=run_name,
        experiment_name=self.pipeline.config.experiment_name,
        pipeline_package_path=pipeline_yaml_path)
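
This method chains Kale's three deployment steps: compile the generated DSL script into a KFP package, register it, and start a run. A minimal standalone sketch of the same flow, assuming a Kale-generated pipeline script already exists on disk (the paths and names below are placeholders, not from the project; the kfputils calls and signatures are the ones shown in the examples on this page):

from kale.common import kfputils

script_path = "pipeline.py"    # placeholder: Kale-generated DSL script
pipeline_name = "my-pipeline"  # placeholder

# Compile the DSL script into a KFP package.
package_path = kfputils.compile_pipeline(script_path, pipeline_name)
# Register the package; returns the new pipeline and version ids.
pipeline_id, version_id = kfputils.upload_pipeline(package_path, pipeline_name)
# Start a run under an experiment, with a generated unique run name.
run_name = kfputils.generate_run_name(pipeline_name)
kfputils.run_pipeline(run_name=run_name,
                      experiment_name="my-experiment",  # placeholder
                      pipeline_package_path=package_path)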
Code example #2
File: kfp.py Project: srinivasav22/kale
def upload_pipeline(request, pipeline_package_path, pipeline_metadata):
    """Upload a KFP package as a new pipeline."""
    pipeline_name = pipeline_metadata["pipeline_name"]
    pid, vid = kfputils.upload_pipeline(pipeline_package_path, pipeline_name)
    return {
        "pipeline": {
            "pipelineid": pid,
            "versionid": vid,
            "name": pipeline_name
        }
    }
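
This is the backend handler that Kale's JupyterLab extension calls to upload a compiled package; the `request` argument is presumably the RPC request object and is unused here. A hypothetical invocation and the response shape (all values are placeholders):

response = upload_pipeline(request=None,
                           pipeline_package_path="pipeline.yaml",
                           pipeline_metadata={"pipeline_name": "my-pipeline"})
# response == {"pipeline": {"pipelineid": <pid>, "versionid": <vid>,
#                           "name": "my-pipeline"}}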
Code example #3
File: cli.py Project: gbrlins/kale-1
def main():
    """Entry-point of CLI command."""
    parser = argparse.ArgumentParser(description=ARGS_DESC,
                                     formatter_class=RawTextHelpFormatter)
    general_group = parser.add_argument_group('General')
    general_group.add_argument('--nb',
                               type=str,
                               help='Path to source JupyterNotebook',
                               required=True)
    # Use store_const instead of store_true because we want None instead
    # of False when the flag is missing.
    general_group.add_argument('--upload_pipeline',
                               action='store_const',
                               const=True)
    general_group.add_argument('--run_pipeline',
                               action='store_const',
                               const=True)
    general_group.add_argument('--debug', action='store_true')

    metadata_group = parser.add_argument_group('Notebook Metadata Overrides',
                                               METADATA_GROUP_DESC)
    metadata_group.add_argument('--experiment_name',
                                type=str,
                                help='Name of the created experiment')
    metadata_group.add_argument('--pipeline_name',
                                type=str,
                                help='Name of the deployed pipeline')
    metadata_group.add_argument('--pipeline_description',
                                type=str,
                                help='Description of the deployed pipeline')
    metadata_group.add_argument('--docker_image',
                                type=str,
                                help='Docker base image used to build the '
                                'pipeline steps')
    metadata_group.add_argument('--kfp_host',
                                type=str,
                                help='KFP endpoint. Provide address as '
                                '<host>:<port>.')
    metadata_group.add_argument('--storage-class-name',
                                type=str,
                                help='The storage class name for the created'
                                ' volumes')
    metadata_group.add_argument('--volume-access-mode',
                                type=str,
                                help='The access mode for the created volumes')

    args = parser.parse_args()

    # get the notebook metadata args group
    mt_overrides_group = next(
        filter(lambda x: x.title == 'Notebook Metadata Overrides',
               parser._action_groups))
    # get the single args of that group
    mt_overrides_group_dict = {
        a.dest: getattr(args, a.dest, None)
        for a in mt_overrides_group._group_actions
        if getattr(args, a.dest, None) is not None
    }

    # FIXME: We are removing the `debug` arg. This shouldn't be an issue
    processor = NotebookProcessor(args.nb, mt_overrides_group_dict)
    pipeline = processor.run()
    dsl_script_path = Compiler(pipeline).compile()
    pipeline_name = pipeline.config.pipeline_name
    pipeline_package_path = kfputils.compile_pipeline(dsl_script_path,
                                                      pipeline_name)

    if args.upload_pipeline:
        kfputils.upload_pipeline(pipeline_package_path=pipeline_package_path,
                                 pipeline_name=pipeline_name,
                                 host=pipeline.config.kfp_host)

    if args.run_pipeline:
        run_name = kfputils.generate_run_name(pipeline_name)
        kfputils.run_pipeline(run_name=run_name,
                              experiment_name=pipeline.config.experiment_name,
                              pipeline_package_path=pipeline_package_path,
                              host=pipeline.config.kfp_host)
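
The notable trick here is how the CLI collects only the metadata overrides the user actually passed: it reaches into argparse's private `_action_groups` and `_group_actions` attributes to enumerate the group's options and keeps the non-None values. A self-contained sketch of the same pattern (toy parser and flag names, not from Kale); note that it depends on argparse internals that are not part of the public API:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_argument_group('Overrides')
group.add_argument('--alpha', type=str)
group.add_argument('--beta', type=str)
args = parser.parse_args(['--alpha', 'x'])

# Find the argument group by title, then keep only the flags that were set.
overrides_group = next(g for g in parser._action_groups
                       if g.title == 'Overrides')
overrides = {a.dest: getattr(args, a.dest)
             for a in overrides_group._group_actions
             if getattr(args, a.dest, None) is not None}
print(overrides)  # {'alpha': 'x'}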
Code example #4
def main():
    """Entry-point of CLI command."""
    parser = argparse.ArgumentParser(description=ARGS_DESC,
                                     formatter_class=RawTextHelpFormatter)
    general_group = parser.add_argument_group('General')
    general_group.add_argument('--nb',
                               type=str,
                               help='Path to source JupyterNotebook',
                               required=True)
    # Use store_const instead of store_true because we want None instead
    # of False when the flag is missing.
    general_group.add_argument('--upload_pipeline',
                               action='store_const',
                               const=True)
    general_group.add_argument('--run_pipeline',
                               action='store_const',
                               const=True)
    general_group.add_argument('--debug', action='store_true')

    metadata_group = parser.add_argument_group('Notebook Metadata Overrides',
                                               METADATA_GROUP_DESC)
    metadata_group.add_argument('--experiment_name',
                                type=str,
                                help='Name of the created experiment')
    metadata_group.add_argument('--pipeline_name',
                                type=str,
                                help='Name of the deployed pipeline')
    metadata_group.add_argument('--pipeline_description',
                                type=str,
                                help='Description of the deployed pipeline')
    metadata_group.add_argument('--docker_image',
                                type=str,
                                help='Docker base image used to build the '
                                'pipeline steps')
    metadata_group.add_argument('--kfp_host',
                                type=str,
                                help='KFP endpoint. Provide address as '
                                '<host>:<port>.')

    args = parser.parse_args()

    # get the notebook metadata args group
    mt_overrides_group = next(
        filter(lambda x: x.title == 'Notebook Metadata Overrides',
               parser._action_groups))
    # get the single args of that group
    mt_overrides_group_dict = {
        a.dest: getattr(args, a.dest, None)
        for a in mt_overrides_group._group_actions
        if getattr(args, a.dest, None) is not None
    }

    kale = Kale(source_notebook_path=args.nb,
                notebook_metadata_overrides=mt_overrides_group_dict,
                debug=args.debug)
    pipeline_graph, pipeline_parameters = kale.notebook_to_graph()
    script_path = kale.generate_kfp_executable(pipeline_graph,
                                               pipeline_parameters)
    # compile the pipeline to kfp tar package
    pipeline_name = kale.pipeline_metadata['pipeline_name']
    pipeline_package_path = kfputils.compile_pipeline(script_path,
                                                      pipeline_name)

    if args.upload_pipeline:
        kfputils.upload_pipeline(
            pipeline_package_path=pipeline_package_path,
            pipeline_name=kale.pipeline_metadata['pipeline_name'],
            host=kale.pipeline_metadata.get('kfp_host', None))

    if args.run_pipeline:
        run_name = kfputils.generate_run_name(
            kale.pipeline_metadata['pipeline_name'])
        kfputils.run_pipeline(
            run_name=run_name,
            experiment_name=kale.pipeline_metadata['experiment_name'],
            pipeline_package_path=pipeline_package_path,
            host=kale.pipeline_metadata.get('kfp_host', None))
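
Examples #3 and #4 are two revisions of the same CLI entry point. Example #4 appears to be the earlier one: it drives the monolithic `Kale` class (`notebook_to_graph`, `generate_kfp_executable`) and reads settings from the `kale.pipeline_metadata` dict, while example #3 works with the refactored `NotebookProcessor`/`Compiler` objects, reads settings from `pipeline.config`, and adds the `--storage-class-name` and `--volume-access-mode` overrides.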
Code example #5
    _kale_sum_matrix_task.add_pod_label(
        "pipelines.kubeflow.org/metadata_written", "true")
    _kale_dep_names = (_kale_sum_matrix_task.dependent_names +
                       _kale_volume_step_names)
    _kale_sum_matrix_task.add_pod_annotation(
        "kubeflow-kale.org/dependent-templates", json.dumps(_kale_dep_names))
    if _kale_volume_name_parameters:
        _kale_sum_matrix_task.add_pod_annotation(
            "kubeflow-kale.org/volume-name-parameters",
            json.dumps(_kale_volume_name_parameters))


if __name__ == "__main__":
    pipeline_func = auto_generated_pipeline
    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
    import kfp.compiler as compiler
    compiler.Compiler().compile(pipeline_func, pipeline_filename)

    # Get or create an experiment and submit a pipeline run
    import kfp
    client = kfp.Client()
    experiment = client.create_experiment('hp-tuning')

    # Submit a pipeline run
    from kale.common import kfputils
    pipeline_id, version_id = kfputils.upload_pipeline(pipeline_filename,
                                                       "hp-test")
    run_result = kfputils.run_pipeline(experiment_name=experiment.name,
                                       pipeline_id=pipeline_id,
                                       version_id=version_id)
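
Examples #5 and #6 are the tail ends of Kale-generated KFP DSL scripts: the pod labels and annotations let Kale's tooling track metadata and inter-step volume dependencies, and the `__main__` block compiles and submits the pipeline when the script is run directly. Note that `kfputils.run_pipeline` is called here with the `pipeline_id`/`version_id` returned by the upload rather than with a package path, the other calling convention seen in examples #1, #3, and #4. For comparison, a sketch of roughly the same submission using only the plain KFP v1 SDK client (a sketch assuming a reachable KFP endpoint; the run name is a placeholder):

import kfp

client = kfp.Client()
experiment = client.create_experiment('hp-tuning')
# Submit the compiled package directly, skipping the separate upload step;
# `pipeline_filename` is the package compiled above.
run = client.run_pipeline(experiment_id=experiment.id,
                          job_name='hp-tuning-run',
                          pipeline_package_path=pipeline_filename)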
Code example #6
    _kale_results_task.output_artifact_paths.update(_kale_output_artifacts)
    _kale_results_task.add_pod_label(
        "pipelines.kubeflow.org/metadata_written", "true")
    _kale_dep_names = (_kale_results_task.dependent_names +
                       _kale_volume_step_names)
    _kale_results_task.add_pod_annotation(
        "kubeflow-kale.org/dependent-templates", json.dumps(_kale_dep_names))
    if _kale_volume_name_parameters:
        _kale_results_task.add_pod_annotation(
            "kubeflow-kale.org/volume-name-parameters",
            json.dumps(_kale_volume_name_parameters))


if __name__ == "__main__":
    pipeline_func = auto_generated_pipeline
    pipeline_filename = pipeline_func.__name__ + '.pipeline.tar.gz'
    import kfp.compiler as compiler
    compiler.Compiler().compile(pipeline_func, pipeline_filename)

    # Get or create an experiment and submit a pipeline run
    import kfp
    client = kfp.Client()
    experiment = client.create_experiment('titanic')

    # Submit a pipeline run
    from kale.common import kfputils
    pipeline_id, version_id = kfputils.upload_pipeline(
        pipeline_filename, "titanic-ml")
    run_result = kfputils.run_pipeline(experiment_name=experiment.name,
                                       pipeline_id=pipeline_id,
                                       version_id=version_id)
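
Example #6 differs from #5 in one line: `_kale_results_task.output_artifact_paths.update(_kale_output_artifacts)` registers extra output artifacts on the step's `ContainerOp` (in the KFP v1 SDK, `output_artifact_paths` maps artifact names to in-container paths), which is presumably how Kale surfaces the step's results in the KFP UI.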