def testUpdatePipeline(self):
  """Create a pipeline from one DSL file, then update it from another."""
  # Create the pipeline from the first DSL file.
  create_flags = {
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: os.path.join(
          self.chicago_taxi_pipeline_dir, 'test_pipeline_1.py'),
  }
  kubeflow_v2_handler.KubeflowV2Handler(create_flags).create_pipeline()

  # Update the pipeline using the second DSL file.
  update_flags = {
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: os.path.join(
          self.chicago_taxi_pipeline_dir, 'test_pipeline_2.py'),
  }
  handler = kubeflow_v2_handler.KubeflowV2Handler(update_flags)
  handler.update_pipeline()

  # The persisted pipeline args should still exist after the update.
  pipeline_dir = os.path.join(handler._handler_home_dir,
                              self.pipeline_args[labels.PIPELINE_NAME], '')
  self.assertTrue(
      fileio.exists(os.path.join(pipeline_dir, 'pipeline_args.json')))
def compile_pipeline(ctx: Context, pipeline_path: Text) -> None:
  """Command definition to compile a pipeline."""
  click.echo('Compiling pipeline')
  # Record the engine and DSL path, then delegate to the handler.
  ctx.flags_dict.update({
      labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE,
      labels.PIPELINE_DSL_PATH: pipeline_path,
  })
  handler = kubeflow_v2_handler.KubeflowV2Handler(ctx.flags_dict)
  handler.compile_pipeline()
def update_pipeline(ctx: Context, pipeline_path: Text,
                    skaffold_cmd: Text) -> None:
  """Command definition to update a pipeline."""
  click.echo('Updating pipeline')
  # Record the engine, DSL path and skaffold command, then delegate.
  ctx.flags_dict.update({
      labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE,
      labels.PIPELINE_DSL_PATH: pipeline_path,
      labels.SKAFFOLD_CMD: skaffold_cmd,
  })
  handler = kubeflow_v2_handler.KubeflowV2Handler(ctx.flags_dict)
  handler.update_pipeline()
def update_pipeline(ctx: Context, pipeline_path: Text,
                    build_image: bool) -> None:
  """Command definition to update a pipeline."""
  click.echo('Updating pipeline')
  # Record the engine, DSL path and image-build flag, then delegate.
  ctx.flags_dict.update({
      labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE,
      labels.PIPELINE_DSL_PATH: pipeline_path,
      labels.BUILD_IMAGE: build_image,
  })
  handler = kubeflow_v2_handler.KubeflowV2Handler(ctx.flags_dict)
  handler.update_pipeline()
def create_pipeline(ctx: Context, pipeline_path: Text, build_target_image: Text,
                    skaffold_cmd: Text, build_base_image: Text) -> None:
  """Command definition to create a pipeline."""
  click.echo('Creating pipeline')
  # Record engine, DSL path and container-image settings, then delegate.
  ctx.flags_dict.update({
      labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE,
      labels.PIPELINE_DSL_PATH: pipeline_path,
      kubeflow_labels.TFX_IMAGE_ENV: build_target_image,
      labels.BASE_IMAGE: build_base_image,
      labels.SKAFFOLD_CMD: skaffold_cmd,
  })
  handler = kubeflow_v2_handler.KubeflowV2Handler(ctx.flags_dict)
  handler.create_pipeline()
def compile_pipeline(ctx: Context, pipeline_path: Text, target_image: Text,
                     project_id: Text) -> None:
  """Command definition to compile a pipeline."""
  click.echo('Compiling pipeline')
  # Record engine, DSL path, target image and GCP project, then delegate.
  ctx.flags_dict.update({
      labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE,
      labels.PIPELINE_DSL_PATH: pipeline_path,
      kubeflow_labels.TFX_IMAGE_ENV: target_image,
      kubeflow_labels.GCP_PROJECT_ID_ENV: project_id,
  })
  handler = kubeflow_v2_handler.KubeflowV2Handler(ctx.flags_dict)
  handler.compile_pipeline()
def testDeletePipeline(self):
  """Deleting an existing pipeline removes its folder."""
  # Create a pipeline first.
  kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
  }).create_pipeline()

  # Delete it and check that the pipeline folder is gone.
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_NAME: self.pipeline_name,
  })
  handler.delete_pipeline()
  pipeline_dir = os.path.join(handler._handler_home_dir,
                              self.pipeline_args[labels.PIPELINE_NAME], '')
  self.assertFalse(fileio.exists(pipeline_dir))
def testCompilePipeline(self):
  """Compiling a valid DSL prints a success message naming the pipeline."""
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
  })
  with self.captureWritesToStream(sys.stdout) as captured:
    handler.compile_pipeline()
  self.assertIn(f'Pipeline {self.pipeline_name} compiled successfully',
                captured.contents())
def testCompilePipeline(self):
  """Compiling a well-formed DSL prints a success message."""
  compile_flags = {
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
      kubeflow_labels.TFX_IMAGE_ENV: _TEST_TFX_IMAGE,
      kubeflow_labels.GCP_PROJECT_ID_ENV: _TEST_PROJECT_1,
  }
  handler = kubeflow_v2_handler.KubeflowV2Handler(compile_flags)
  with self.captureWritesToStream(sys.stdout) as captured:
    _ = handler.compile_pipeline()
  self.assertIn('Pipeline compiled successfully', captured.contents())
def testDeletePipelineNonExistentPipeline(self):
  """Deleting a pipeline that was never created exits with an error."""
  delete_flags = {
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_NAME: self.pipeline_name,
  }
  handler = kubeflow_v2_handler.KubeflowV2Handler(delete_flags)
  with self.assertRaises(SystemExit) as err:
    handler.delete_pipeline()
  expected_message = 'Pipeline "{}" does not exist.'.format(
      delete_flags[labels.PIPELINE_NAME])
  self.assertEqual(str(err.exception), expected_message)
def testCompilePipelineNoPipelineArgs(self):
  """An ill-formed pipeline DSL makes compilation exit via SystemExit."""
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.bad_pipeline_path,
      kubeflow_labels.TFX_IMAGE_ENV: _TEST_TFX_IMAGE,
      kubeflow_labels.GCP_PROJECT_ID_ENV: _TEST_PROJECT_1,
  })
  # The compilation failure surfaces from the subprocess as SystemExit.
  with self.assertRaises(SystemExit):
    _ = handler.compile_pipeline()
def testSavePipeline(self):
  """_save_pipeline persists the pipeline under the handler home dir."""
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
  })
  extracted_args = handler._extract_pipeline_args()
  handler._save_pipeline(extracted_args)
  saved_dir = os.path.join(handler._handler_home_dir,
                           self.pipeline_args[labels.PIPELINE_NAME])
  self.assertTrue(fileio.exists(saved_dir))
def testCreatePipeline(self):
  """create_pipeline writes pipeline_args.json into the pipeline folder."""
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
  })
  handler.create_pipeline()
  pipeline_dir = os.path.join(handler._handler_home_dir,
                              self.pipeline_args[labels.PIPELINE_NAME], '')
  self.assertTrue(
      fileio.exists(os.path.join(pipeline_dir, 'pipeline_args.json')))
def testUpdatePipelineNoPipeline(self):
  """Updating before creating exits with a does-not-exist message."""
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
  })
  with self.assertRaises(SystemExit) as err:
    handler.update_pipeline()
  expected_message = 'Pipeline "{}" does not exist.'.format(
      self.pipeline_args[labels.PIPELINE_NAME])
  self.assertEqual(str(err.exception), expected_message)
def testCreatePipelineExistentPipeline(self):
  """Creating the same pipeline twice exits with an already-exists error."""
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
  })
  handler.create_pipeline()
  # A second create of the same pipeline must fail.
  with self.assertRaises(SystemExit) as err:
    handler.create_pipeline()
  expected_message = 'Pipeline "{}" already exists.'.format(
      self.pipeline_args[labels.PIPELINE_NAME])
  self.assertEqual(str(err.exception), expected_message)
def testCreatePipeline(self):
  """create_pipeline emits the compiled pipeline output file."""
  handler = kubeflow_v2_handler.KubeflowV2Handler({
      labels.ENGINE_FLAG: self.engine,
      labels.PIPELINE_DSL_PATH: self.pipeline_path,
  })
  handler.create_pipeline()
  pipeline_dir = os.path.join(handler._handler_home_dir, self.pipeline_name)
  output_path = os.path.join(
      pipeline_dir, kubeflow_v2_dag_runner_patcher._OUTPUT_FILENAME)
  self.assertTrue(fileio.exists(output_path))
def testListPipelinesNonEmpty(self):
  """list_pipelines prints every pipeline directory under the home dir."""
  # Seed the handler home with two pipeline directories.
  home_dir = os.environ['KUBEFLOW_V2_HOME']
  for pipeline_name in ('pipeline_1', 'pipeline_2'):
    fileio.makedirs(os.path.join(home_dir, pipeline_name))

  # Now, list the pipelines and verify both names are reported.
  handler = kubeflow_v2_handler.KubeflowV2Handler(
      {labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE})
  with self.captureWritesToStream(sys.stdout) as captured:
    handler.list_pipelines()
  self.assertIn('pipeline_1', captured.contents())
  self.assertIn('pipeline_2', captured.contents())
def delete_pipeline(ctx: Context, pipeline_name: Text) -> None:
  """Command definition to delete a pipeline."""
  click.echo('Deleting pipeline')
  # Record the engine and pipeline name, then delegate to the handler.
  ctx.flags_dict.update({
      labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE,
      labels.PIPELINE_NAME: pipeline_name,
  })
  handler = kubeflow_v2_handler.KubeflowV2Handler(ctx.flags_dict)
  handler.delete_pipeline()
def list_pipelines(ctx: Context) -> None:
  """Command definition to list pipelines."""
  click.echo('Listing all pipelines')
  # Only the engine flag is needed to enumerate pipelines.
  ctx.flags_dict[labels.ENGINE_FLAG] = kubeflow_labels.KUBEFLOW_V2_ENGINE
  handler = kubeflow_v2_handler.KubeflowV2Handler(ctx.flags_dict)
  handler.list_pipelines()
def testListPipelinesEmpty(self):
  """With no pipelines created, listing reports an empty message."""
  handler = kubeflow_v2_handler.KubeflowV2Handler(
      {labels.ENGINE_FLAG: kubeflow_labels.KUBEFLOW_V2_ENGINE})
  with self.captureWritesToStream(sys.stdout) as captured:
    handler.list_pipelines()
  self.assertIn('No pipelines to display.', captured.contents())