def testPipelineCompile(self):
  """Checks `pipeline compile` on a bad DSL, a wrong-runner DSL, and a valid one."""
  # An invalid DSL file must make the CLI exit with an error.
  bad_dsl_path = os.path.join(self._testdata_dir, 'test_pipeline_flink.py')
  with self.assertRaises(subprocess.CalledProcessError) as err:
    test_utils.run_cli([
        'pipeline', 'compile', '--engine', 'kubeflow', '--pipeline_path',
        bad_dsl_path
    ])
  self.assertIn(f'Invalid pipeline path: {bad_dsl_path}', err.exception.output)

  # A DSL written for a different runner must also fail to compile.
  wrong_runner_path = os.path.join(self._testdata_dir,
                                   'test_pipeline_airflow_1.py')
  with self.assertRaises(subprocess.CalledProcessError) as err:
    test_utils.run_cli([
        'pipeline', 'compile', '--engine', 'kubeflow', '--pipeline_path',
        wrong_runner_path
    ])

  # A valid Kubeflow DSL compiles cleanly.
  result = test_utils.run_cli([
      'pipeline', 'compile', '--engine', 'kubeflow', '--pipeline_path',
      self._pipeline_path
  ])
  absl.logging.info('pipeline compile result:%s', result)
  self.assertIn('Compiling pipeline', result)
  self.assertIn('Pipeline compiled successfully', result)
def testRunCreate(self):
  """Checks `run create` both before and after the pipeline is registered."""
  # Creating a run for an unknown pipeline must fail.
  with self.assertRaises(subprocess.CalledProcessError) as err:
    test_utils.run_cli([
        'run', 'create', '--engine', 'kubeflow', '--pipeline_name',
        self._pipeline_name, '--endpoint', self._endpoint
    ])
  self.assertIn(f'Cannot find pipeline "{self._pipeline_name}".',
                err.exception.output)

  # Register the pipeline; the same command now succeeds.
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)
  result = test_utils.run_cli([
      'run', 'create', '--engine', 'kubeflow', '--pipeline_name',
      self._pipeline_name, '--endpoint', self._endpoint
  ])
  self.assertIn(f'Creating a run for pipeline: {self._pipeline_name}', result)
  self.assertNotIn(f'Pipeline "{self._pipeline_name}" does not exist.', result)
  self.assertIn(f'Run created for pipeline: {self._pipeline_name}', result)
def testPipelineCreate(self):
  """Checks that creating an already-existing pipeline is rejected."""
  # First creation succeeds.
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)

  # A second creation under the same name must fail.
  with self.assertRaises(subprocess.CalledProcessError) as err:
    test_utils.run_cli([
        'pipeline', 'create', '--engine', 'kubeflow', '--pipeline_path',
        self._pipeline_path, '--endpoint', self._endpoint
    ])
  self.assertIn(f'Pipeline "{self._pipeline_name}" already exists.',
                err.exception.output)
def testPipelineCreateAutoDetectFail(self):
  """Auto engine detection must error out when several orchestrators exist."""
  # Ambiguity requires at least two orchestrators, so Airflow must be present.
  if labels.AIRFLOW_PACKAGE_NAME not in self._pip_list:
    self.skipTest(
        'Airflow doesn\'t exist. Airflow needed to make auto detect fail.')
  with self.assertRaises(subprocess.CalledProcessError) as err:
    test_utils.run_cli([
        'pipeline', 'create', '--engine', 'auto', '--pipeline_path',
        self._pipeline_path, '--endpoint', self._endpoint
    ])
  self.assertIn('Multiple orchestrators found. Choose one using --engine flag.',
                err.exception.output)
def _valid_create_and_check(self, pipeline_path: Text,
                            pipeline_name: Text) -> None:
  """Creates `pipeline_name` from `pipeline_path` and asserts CLI success."""
  output = test_utils.run_cli([
      'pipeline', 'create', '--engine', 'kubeflow', '--pipeline_path',
      pipeline_path, '--endpoint', self._endpoint
  ])
  absl.logging.info('[CLI] %s', output)
  self.assertIn('Creating pipeline', output)
  self.assertIn(f'Pipeline "{pipeline_name}" created successfully.', output)
def testPipelineDelete(self):
  """Checks `pipeline delete` for a missing and then an existing pipeline."""
  # Deleting a pipeline that was never created must fail.
  with self.assertRaises(subprocess.CalledProcessError) as err:
    test_utils.run_cli([
        'pipeline', 'delete', '--engine', 'kubeflow', '--pipeline_name',
        self._pipeline_name, '--endpoint', self._endpoint
    ])
  self.assertIn(f'Cannot find pipeline "{self._pipeline_name}".',
                err.exception.output)

  # Create the pipeline; deletion now succeeds.
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)
  result = test_utils.run_cli([
      'pipeline', 'delete', '--engine', 'kubeflow', '--pipeline_name',
      self._pipeline_name, '--endpoint', self._endpoint
  ])
  self.assertIn('Deleting pipeline', result)
  self.assertIn(f'Pipeline {self._pipeline_name} deleted successfully.',
                result)
def testPipelineUpdate(self):
  """Checks `pipeline update` for a missing and then an existing pipeline."""
  # Updating a pipeline that was never created must fail.
  with self.assertRaises(subprocess.CalledProcessError) as err:
    test_utils.run_cli([
        'pipeline', 'update', '--engine', 'kubeflow', '--pipeline_path',
        self._pipeline_path, '--endpoint', self._endpoint
    ])
  self.assertIn(f'Cannot find pipeline "{self._pipeline_name}".',
                err.exception.output)

  # Create the pipeline; updating now succeeds.
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)
  result = test_utils.run_cli([
      'pipeline', 'update', '--engine', 'kubeflow', '--pipeline_path',
      self._pipeline_path, '--endpoint', self._endpoint
  ])
  self.assertIn('Updating pipeline', result)
  self.assertIn(f'Pipeline "{self._pipeline_name}" updated successfully.',
                result)
def testPipelineCreateAutoDetectSuccess(self):
  """Auto engine detection resolves to Kubeflow when it is the only engine.

  Skipped when Airflow is also installed, because then detection is ambiguous
  (that case is covered by testPipelineCreateAutoDetectFail).
  """
  if labels.AIRFLOW_PACKAGE_NAME in self._pip_list:
    # Fixed skip message: the original said "to make auto detect fail", but
    # this test needs detection to *succeed*, which requires that Kubeflow be
    # the only installed orchestrator.
    self.skipTest(
        'Airflow exists. Airflow should not exist to make auto detect '
        'succeed.')
  result = test_utils.run_cli([
      'pipeline', 'create', '--engine', 'auto', '--pipeline_path',
      self._pipeline_path, '--endpoint', self._endpoint
  ])
  self.assertIn(
      'Pipeline "{}" created successfully.'.format(self._pipeline_name),
      result)
def testPipelineList(self):
  """Checks that `pipeline list` reports every registered pipeline."""
  # Register two pipelines; the second needs its own cleanup hook.
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)
  self._valid_create_and_check(self._pipeline_path_v2, self._pipeline_name_v2)
  self.addCleanup(self._cleanup_kfp_server, self._pipeline_name_v2)

  # Both names must appear in the listing.
  listing = test_utils.run_cli([
      'pipeline', 'list', '--engine', 'kubeflow', '--endpoint', self._endpoint
  ])
  self.assertIn('Listing all pipelines', listing)
  self.assertIn(self._pipeline_name, listing)
  self.assertIn(self._pipeline_name_v2, listing)
def testRunTerminate(self):
  """Checks that `run terminate` stops a run started via the KFP client."""
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)

  # Start a run through the KFP client so its run id is known up front.
  run = self._run_pipeline_using_kfp_client(self._pipeline_name)
  absl.logging.info('Terminating run: %s', run.id)

  output = test_utils.run_cli([
      'run', 'terminate', '--engine', 'kubeflow', '--endpoint',
      self._endpoint, '--run_id', run.id
  ])
  self.assertIn('Terminating run.', output)
  self.assertIn('Run terminated.', output)
def testPipelineCreateAutoDetect(self):
  """Checks `pipeline create --engine auto` under both detection outcomes.

  With several orchestrators installed, auto detection is ambiguous and the
  CLI exits with an error; with a single orchestrator the creation succeeds.
  """
  cmd = [
      'pipeline', 'create', '--engine', 'auto', '--pipeline_path',
      self._pipeline_path, '--endpoint', self._endpoint
  ]
  if (labels.AIRFLOW_PACKAGE_NAME in self._pip_list and
      labels.KUBEFLOW_PACKAGE_NAME in self._pip_list):
    # Fixed: on ambiguous engines the CLI exits non-zero, so run_cli raises
    # CalledProcessError instead of returning output (as demonstrated by
    # testPipelineCreateAutoDetectFail); the original unconditional
    # `result = run_cli(...)` could never reach its assertions in this case.
    with self.assertRaises(subprocess.CalledProcessError) as cm:
      test_utils.run_cli(cmd)
    self.assertIn(
        'Multiple orchestrators found. Choose one using --engine flag.',
        cm.exception.output)
  else:
    result = test_utils.run_cli(cmd)
    self.assertIn('Creating pipeline', result)
    self.assertIn(
        'Pipeline "{}" created successfully.'.format(self._pipeline_name),
        result)
def testRunStatus(self):
  """Checks `run status` for a run started via the KFP client."""
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)

  # Start a run through the KFP client so its run id is known up front.
  run = self._run_pipeline_using_kfp_client(self._pipeline_name)
  absl.logging.info('Retrieving run status: %s(%s)', run.id,
                    self._pipeline_name)

  output = test_utils.run_cli([
      'run', 'status', '--engine', 'kubeflow', '--pipeline_name',
      self._pipeline_name, '--endpoint', self._endpoint, '--run_id', run.id
  ])
  self.assertIn('Retrieving run status.', output)
  self.assertIn(str(run.id), output)
  self.assertIn(self._pipeline_name, output)
def testRunList(self):
  """Checks that `run list` reports every run of a pipeline."""
  self._valid_create_and_check(self._pipeline_path, self._pipeline_name)

  # Start two runs through the KFP client so their ids are known up front.
  first_run = self._run_pipeline_using_kfp_client(self._pipeline_name)
  second_run = self._run_pipeline_using_kfp_client(self._pipeline_name)

  # Both run ids must appear in the listing.
  listing = test_utils.run_cli([
      'run', 'list', '--engine', 'kubeflow', '--pipeline_name',
      self._pipeline_name, '--endpoint', self._endpoint
  ])
  self.assertIn(f'Listing all runs of pipeline: {self._pipeline_name}',
                listing)
  self.assertIn(str(first_run.id), listing)
  self.assertIn(str(second_run.id), listing)
  self.assertIn(self._pipeline_name, listing)
def _runCli(self, args: List[str]) -> str:
  """Runs the tfx CLI with `args` and returns its captured output.

  NOTE(review): this wrapper delegates to `cli_test_utils.run_cli` while the
  test methods above call `test_utils.run_cli` directly — presumably both
  names resolve to the same helper; confirm and unify on one of them.
  """
  return cli_test_utils.run_cli(args)