def make_pipeline_job(state):
    """Build a v1 PipelineJob proto in the given *state* from the shared test constants."""
    job_kwargs = {
        "name": _TEST_PIPELINE_JOB_NAME,
        "state": state,
        "create_time": _TEST_PIPELINE_CREATE_TIME,
        "service_account": _TEST_SERVICE_ACCOUNT,
        "network": _TEST_NETWORK,
    }
    return gca_pipeline_job_v1.PipelineJob(**job_kwargs)
def mock_pipeline_service_create():
    """Yield a mock of ``PipelineServiceClient.create_pipeline_job``.

    The mocked RPC returns a PipelineJob in the SUCCEEDED state built from
    the shared test constants, so tests can assert on the create request
    without contacting the real service.

    NOTE(review): in the wider suite this generator appears to be consumed
    as a pytest fixture — confirm the ``@pytest.fixture`` decorator is
    applied where it is defined.

    Yields:
        mock.MagicMock: the patched ``create_pipeline_job`` method.
    """
    with mock.patch.object(
        pipeline_service_client_v1.PipelineServiceClient, "create_pipeline_job"
    ) as mock_create_pipeline_job:
        # Reuse the shared factory instead of duplicating the PipelineJob
        # literal; make_pipeline_job builds the exact same proto for the
        # SUCCEEDED state, keeping this fixture consistent with it.
        mock_create_pipeline_job.return_value = make_pipeline_job(
            gca_pipeline_state_v1.PipelineState.PIPELINE_STATE_SUCCEEDED
        )
        yield mock_create_pipeline_job
def test_submit_call_pipeline_service_pipeline_job_create_legacy(
    self,
    mock_pipeline_service_create,
    mock_pipeline_service_get,
    job_spec_json,
    mock_load_json,
):
    """submit() with legacy parameters sends the expected create request,
    and the job state is only polled once the caller invokes wait()."""
    aiplatform.init(
        project=_TEST_PROJECT,
        staging_bucket=_TEST_GCS_BUCKET_NAME,
        location=_TEST_LOCATION,
        credentials=_TEST_CREDENTIALS,
    )

    job = pipeline_jobs.PipelineJob(
        display_name=_TEST_PIPELINE_JOB_DISPLAY_NAME,
        template_path=_TEST_TEMPLATE_PATH,
        job_id=_TEST_PIPELINE_JOB_ID,
        parameter_values=_TEST_PIPELINE_PARAMETER_VALUES_LEGACY,
        enable_caching=True,
    )
    job.submit(service_account=_TEST_SERVICE_ACCOUNT, network=_TEST_NETWORK)

    # Legacy parameters are serialized under "parameters" with typed values
    # (stringValue), unlike the newer "parameterValues" form.
    runtime_config = gca_pipeline_job_v1.PipelineJob.RuntimeConfig()._pb
    json_format.ParseDict(
        {
            "parameters": {"string_param": {"stringValue": "hello"}},
            "gcsOutputDirectory": _TEST_GCS_BUCKET_NAME,
        },
        runtime_config,
    )

    # The fixture may provide either a wrapped ("pipelineSpec") or bare spec.
    spec = job_spec_json.get("pipelineSpec") or job_spec_json
    expected_job = gca_pipeline_job_v1.PipelineJob(
        display_name=_TEST_PIPELINE_JOB_DISPLAY_NAME,
        pipeline_spec={
            "components": {},
            "pipelineInfo": spec["pipelineInfo"],
            "root": spec["root"],
            "schemaVersion": "2.0.0",
        },
        runtime_config=runtime_config,
        service_account=_TEST_SERVICE_ACCOUNT,
        network=_TEST_NETWORK,
    )
    mock_pipeline_service_create.assert_called_once_with(
        parent=_TEST_PARENT,
        pipeline_job=expected_job,
        pipeline_job_id=_TEST_PIPELINE_JOB_ID,
    )

    # submit() must not block on job completion; polling happens in wait().
    assert not mock_pipeline_service_get.called
    job.wait()
    mock_pipeline_service_get.assert_called_with(
        name=_TEST_PIPELINE_JOB_NAME, retry=base._DEFAULT_RETRY
    )
    assert job._gca_resource == make_pipeline_job(
        gca_pipeline_state_v1.PipelineState.PIPELINE_STATE_SUCCEEDED
    )
def test_run_call_pipeline_service_create_with_timeout(
    self,
    mock_pipeline_service_create,
    mock_pipeline_service_get,
    job_spec,
    mock_load_yaml_and_json,
    sync,
):
    """run() with create_request_timeout forwards the timeout to the
    create_pipeline_job RPC along with the expected job payload."""
    aiplatform.init(
        project=_TEST_PROJECT,
        staging_bucket=_TEST_GCS_BUCKET_NAME,
        location=_TEST_LOCATION,
        credentials=_TEST_CREDENTIALS,
    )

    job = pipeline_jobs.PipelineJob(
        display_name=_TEST_PIPELINE_JOB_DISPLAY_NAME,
        template_path=_TEST_TEMPLATE_PATH,
        job_id=_TEST_PIPELINE_JOB_ID,
        parameter_values=_TEST_PIPELINE_PARAMETER_VALUES,
        enable_caching=True,
    )
    job.run(
        service_account=_TEST_SERVICE_ACCOUNT,
        network=_TEST_NETWORK,
        sync=sync,
        create_request_timeout=180.0,
    )
    # In async mode the create call happens in a background thread; wait()
    # ensures it has completed before asserting on the mock.
    if not sync:
        job.wait()

    runtime_config = gca_pipeline_job_v1.PipelineJob.RuntimeConfig()._pb
    json_format.ParseDict(
        {
            "gcsOutputDirectory": _TEST_GCS_BUCKET_NAME,
            "parameterValues": _TEST_PIPELINE_PARAMETER_VALUES,
        },
        runtime_config,
    )

    # The fixture may provide either a wrapped ("pipelineSpec") or bare spec.
    parsed = yaml.safe_load(job_spec)
    spec = parsed.get("pipelineSpec") or parsed
    expected_job = gca_pipeline_job_v1.PipelineJob(
        display_name=_TEST_PIPELINE_JOB_DISPLAY_NAME,
        pipeline_spec={
            "components": {},
            "pipelineInfo": spec["pipelineInfo"],
            "root": spec["root"],
            "schemaVersion": "2.1.0",
        },
        runtime_config=runtime_config,
        service_account=_TEST_SERVICE_ACCOUNT,
        network=_TEST_NETWORK,
    )
    mock_pipeline_service_create.assert_called_once_with(
        parent=_TEST_PARENT,
        pipeline_job=expected_job,
        pipeline_job_id=_TEST_PIPELINE_JOB_ID,
        timeout=180.0,
    )