def compile_notebook(request, source_notebook_path,
                     notebook_metadata_overrides=None, debug=False):
    """Compile a source notebook into a KFP DSL pipeline package.

    Args:
        request: RPC request object; its ``log`` attribute, when present,
            replaces the module-level logger on the Kale instance.
        source_notebook_path: Path to the notebook to compile.
        notebook_metadata_overrides: Optional dict overriding the
            notebook's Kale metadata.
        debug: Enable debug behavior on the Kale instance.

    Returns:
        dict with ``pipeline_package_path`` (relative path to the compiled
        package) and ``pipeline_metadata`` (the instance's metadata).
    """
    kale = Kale(source_notebook_path, notebook_metadata_overrides, debug)
    # Prefer the request-scoped logger when the RPC layer provides one.
    kale.logger = getattr(request, "log", logger)
    graph, params = kale.notebook_to_graph()
    dsl_script = kale.generate_kfp_executable(graph, params)
    package_path = kfputils.compile_pipeline(
        dsl_script, kale.pipeline_metadata["pipeline_name"])
    return {
        "pipeline_package_path": os.path.relpath(package_path),
        "pipeline_metadata": kale.pipeline_metadata,
    }
def test_pipeline_generation_from_local(random_string, abs_working_dir):
    """Test code generation end to end from notebook to DSL."""
    abs_working_dir.return_value = '/kale'
    random_string.return_value = 'rnd'
    notebook_path = "../assets/notebooks/pipeline_parameters_and_metrics.ipynb"
    notebook_path = os.path.join(THIS_DIR, notebook_path)
    kale = Kale(source_notebook_path=notebook_path)
    kale.logger = logging.getLogger(__name__)
    kale.logger.setLevel(logging.DEBUG)
    pipeline_graph, pipeline_parameters = kale.notebook_to_graph()
    script_path = kale.generate_kfp_executable(pipeline_graph,
                                               pipeline_parameters,
                                               save_to_tmp=True)
    target_asset = os.path.join(THIS_DIR, '../assets/kfp_dsl/',
                                'pipeline_parameters_and_metrics.py')
    # Use context managers so file handles are closed deterministically
    # (the original open(...).read() leaked them).
    with open(target_asset) as f:
        expected_result = f.read()
    with open(script_path) as f:
        result = f.read()
    assert result == expected_result
def test_pipeline_generation_from_github(random_string, abs_working_dir):
    """Test code generation end to end from notebook to DSL.

    NOTE(review): renamed from ``..._from_gtihub`` (typo); pytest discovers
    tests by the ``test_`` prefix, so the rename is safe.
    """
    abs_working_dir.return_value = '/kale'
    random_string.return_value = 'rnd'
    notebook_url = EX_REPO + "titanic-ml-dataset/titanic_dataset_ml.ipynb"
    # download notebook to tmp dir
    notebook_path, response = urlretrieve(notebook_url)
    kale = Kale(source_notebook_path=notebook_path)
    kale.logger = logging.getLogger(__name__)
    kale.logger.setLevel(logging.DEBUG)
    pipeline_graph, pipeline_parameters = kale.notebook_to_graph()
    script_path = kale.generate_kfp_executable(pipeline_graph,
                                               pipeline_parameters,
                                               save_to_tmp=True)
    target_asset = os.path.join(THIS_DIR, '../assets/kfp_dsl/', 'titanic.py')
    # Use context managers so file handles are closed deterministically
    # (the original open(...).read() leaked them).
    with open(target_asset) as f:
        expected_result = f.read()
    with open(script_path) as f:
        result = f.read()
    assert result == expected_result
def compile_notebook(request, source_notebook_path,
                     notebook_metadata_overrides=None, debug=False,
                     auto_snapshot=False):
    """Compile a source notebook into a KFP DSL pipeline package.

    Args:
        request: RPC request object; its ``log`` attribute, when present,
            replaces the module-level logger on the Kale instance.
        source_notebook_path: Path to the notebook to compile.
        notebook_metadata_overrides: Optional dict overriding the
            notebook's Kale metadata.
        debug: Enable debug behavior on the Kale instance.
        auto_snapshot: Forwarded to the Kale constructor.

    Returns:
        dict with ``pipeline_package_path`` (path to the compiled package)
        and ``pipeline_metadata`` (the instance's metadata).
    """
    kale = Kale(source_notebook_path, notebook_metadata_overrides, debug,
                auto_snapshot)
    # Prefer the request-scoped logger when the RPC layer provides one.
    kale.logger = getattr(request, "log", logger)
    graph, params = kale.notebook_to_graph()
    dsl_script = kale.generate_kfp_executable(graph, params)
    package_path = kfp_utils.compile_pipeline(
        dsl_script, kale.pipeline_metadata["pipeline_name"])
    return {
        "pipeline_package_path": package_path,
        "pipeline_metadata": kale.pipeline_metadata,
    }