def test_launcher_on_custom_job_type(self):
    """main() should route a CustomJob request to the mapped action with all CLI args."""
    job_type = 'CustomJob'
    payload = (
        '{"display_name": "ContainerComponent", "job_spec": '
        '{"worker_pool_specs": [{"machine_spec": {"machine_type": '
        '"n1-standard-4"}, "replica_count": 1, "container_spec": '
        '{"image_uri": "google/cloud-sdk:latest", "command": ["sh", '
        '"-c", "set -e -x\\necho \\"$0, this is an output '
        'parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", '
        '"{{$.outputs.parameters[\'output_value\'].output_file}}"]}}]}}')
    argv = [
        '--type', job_type,
        '--project', self._project,
        '--location', self._location,
        '--payload', payload,
        '--gcp_resources', self._gcp_resources,
        # An argument the launcher does not know about; it must be ignored,
        # not forwarded to the action.
        '--extra_arg', 'extra_arg_value',
    ]
    custom_job_action = mock.Mock()
    patched_map = {job_type: custom_job_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(argv)
    custom_job_action.assert_called_once_with(
        type=job_type,
        project=self._project,
        location=self._location,
        payload=payload,
        gcp_resources=self._gcp_resources)
def test_launcher_on_batch_prediction_job_type(self):
    """main() should route a BatchPredictionJob request, forwarding executor_input."""
    job_type = 'BatchPredictionJob'
    payload = (
        '{"batchPredictionJob": {"displayName": '
        '"BatchPredictionComponentName", "model": '
        '"projects/test/locations/test/models/test-model","inputConfig":'
        ' {"instancesFormat": "CSV","gcsSource": {"uris": '
        '["test_gcs_source"]}}, "outputConfig": {"predictionsFormat": '
        '"CSV", "gcsDestination": {"outputUriPrefix": '
        '"test_gcs_destination"}}}}')
    argv = [
        '--type', job_type,
        '--project', self._project,
        '--location', self._location,
        '--payload', payload,
        '--gcp_resources', self._gcp_resources,
        '--executor_input', 'executor_input',
    ]
    batch_prediction_action = mock.Mock()
    patched_map = {job_type: batch_prediction_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(argv)
    batch_prediction_action.assert_called_once_with(
        type=job_type,
        project=self._project,
        location=self._location,
        payload=payload,
        gcp_resources=self._gcp_resources,
        executor_input='executor_input')
def test_launcher_on_delete_endpoint_type(self):
    """main() should dispatch DeleteEndpoint; project/location default to empty strings."""
    delete_endpoint_action = mock.Mock()
    patched_map = {'DeleteEndpoint': delete_endpoint_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(self._input_args)
    delete_endpoint_action.assert_called_once_with(
        type='DeleteEndpoint',
        project='',
        location='',
        payload='test_payload',
        gcp_resources=self._gcp_resources)
def test_launcher_on_upload_model_type(self):
    """main() should dispatch UploadModel with the fixture args plus executor_input."""
    upload_model_action = mock.Mock()
    patched_map = {'UploadModel': upload_model_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(self._input_args)
    upload_model_action.assert_called_once_with(
        type='UploadModel',
        project='test_project',
        location='us_central1',
        payload='test_payload',
        gcp_resources=self._gcp_resources,
        executor_input='executor_input')
def test_launcher_on_create_endpoint_type(self):
    """main() should dispatch CreateEndpoint with the fixture args plus executor_input."""
    create_endpoint_action = mock.Mock()
    patched_map = {'CreateEndpoint': create_endpoint_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(self._input_args)
    create_endpoint_action.assert_called_once_with(
        type='CreateEndpoint',
        project='test_project',
        location='us_central1',
        payload='test_payload',
        gcp_resources=self._gcp_resources,
        executor_input='executor_input')
def test_launcher_on_dataproc_create_pysparkbatch_job_type(self):
    """main() should dispatch a Dataproc batch job, including batch_id.

    NOTE(review): the method name says "pysparkbatch" but the job type under
    test is 'DataprocSparkSqlBatch' — confirm which was intended.
    """
    dataproc_batch_action = mock.Mock()
    patched_map = {'DataprocSparkSqlBatch': dataproc_batch_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(self._input_args)
    dataproc_batch_action.assert_called_once_with(
        type='DataprocSparkSqlBatch',
        project='test_project',
        location='us_central1',
        batch_id='test_batch_id',
        payload='test_payload',
        gcp_resources=self._gcp_resources)
def test_launcher_on_bigquery_query_job_type(self):
    """main() should dispatch BigqueryQueryJob with the query-override argument."""
    bq_query_action = mock.Mock()
    patched_map = {'BigqueryQueryJob': bq_query_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(self._input_args)
    bq_query_action.assert_called_once_with(
        type='BigqueryQueryJob',
        project='test_project',
        location='us_central1',
        payload='test_payload',
        job_configuration_query_override='{}',
        gcp_resources=self._gcp_resources,
        executor_input='executor_input')
def test_launcher_on_bigquery_export_model_job_type(self):
    """main() should dispatch BigqueryExportModelJob with model/destination args."""
    bq_export_model_action = mock.Mock()
    patched_map = {'BigqueryExportModelJob': bq_export_model_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(self._input_args)
    bq_export_model_action.assert_called_once_with(
        type='BigqueryExportModelJob',
        project='test_project',
        location='us_central1',
        payload='test_payload',
        model_name='test_model_name',
        model_destination_path='gs://testbucket/testpath',
        exported_model_path='exported_model_path',
        gcp_resources=self._gcp_resources,
        executor_input='executor_input')
def test_launcher_on_bigquery_evaluate_model_job_type(self):
    """main() should dispatch BigqueryEvaluateModelJob with model/table/threshold args."""
    bq_evaluate_model_action = mock.Mock()
    patched_map = {'BigqueryEvaluateModelJob': bq_evaluate_model_action}
    with mock.patch.dict(launcher._JOB_TYPE_TO_ACTION_MAP, patched_map):
        launcher.main(self._input_args)
    bq_evaluate_model_action.assert_called_once_with(
        type='BigqueryEvaluateModelJob',
        project='test_project',
        location='us_central1',
        model_name='test_model',
        table_name='test_table',
        query_statement='',
        threshold=0.5,
        payload='test_payload',
        job_configuration_query_override='{}',
        gcp_resources=self._gcp_resources,
        executor_input='executor_input')
def test_launcher_unsupported_job_type(self):
    """main() should raise ValueError with a descriptive message for unknown job types."""
    with self.assertRaises(ValueError) as raised:
        launcher.main(self._input_args)
    self.assertEqual('Unsupported job type: Unknown', str(raised.exception))