def teardown(shared_state):
    """Cancel, await, and delete the training pipeline recorded in shared_state.

    Runs after the dependent test finishes (everything below the yield is
    teardown). Expects shared_state["training_pipeline_name"] to hold the full
    resource name of the pipeline to clean up.
    """
    yield

    pipeline_id = shared_state["training_pipeline_name"].split("/")[-1]

    # Request cancellation of the (possibly still-running) training pipeline.
    cancel_training_pipeline_sample.cancel_training_pipeline_sample(
        project=PROJECT_ID, training_pipeline_id=pipeline_id
    )

    pipeline_client = aiplatform.gapic.PipelineServiceClient(
        client_options={"api_endpoint": "us-central1-aiplatform.googleapis.com"}
    )

    # Block until the pipeline reaches the CANCELLED state so that the
    # delete request below does not race an in-flight cancellation.
    helpers.wait_for_job_state(
        get_job_method=pipeline_client.get_training_pipeline,
        name=shared_state["training_pipeline_name"],
    )

    # Finally remove the pipeline resource itself.
    delete_training_pipeline_sample.delete_training_pipeline_sample(
        project=PROJECT_ID, training_pipeline_id=pipeline_id
    )
def training_pipeline_id(capsys):
    """Create a temporary training pipeline, yield its id, then delete it.

    The create sample prints the new pipeline's full resource name to stdout;
    the id is recovered from that captured output. Everything after the yield
    is teardown.
    """
    create_training_pipeline_sample.create_training_pipeline_sample(
        project=PROJECT_ID,
        display_name=DISPLAY_NAME,
        training_task_definition=TRAINING_DEFINITION_GCS_PATH,
        dataset_id=DATASET_ID,
        model_display_name=f"Temp Model for {DISPLAY_NAME}",
    )

    # Recover the created pipeline's resource name from the sample's output.
    captured, _ = capsys.readouterr()
    pipeline_name = helpers.get_name(captured)
    assert "/" in pipeline_name  # sanity check: a resource path was printed

    pipeline_id = pipeline_name.split("/")[-1]
    yield pipeline_id

    # Teardown: remove the pipeline created above.
    delete_training_pipeline_sample.delete_training_pipeline_sample(
        project=PROJECT_ID, training_pipeline_id=pipeline_id
    )