import importlib.util


def dataflow(service_config, no_cache, update, dataflow_job_only, image_uri):
    """Deploy a Python app to Google Dataflow as an Octue service or digital twin."""
    if bool(importlib.util.find_spec("apache_beam")):
        # Import the Dataflow deployer only if the `apache-beam` package is available (due to installing `octue`
        # with the `dataflow` extras option).
        from octue.cloud.deployment.google.dataflow.deployer import DataflowDeployer
    else:
        raise ImportWarning(
            "To use this CLI command, you must install `octue` with the `dataflow` option e.g. "
            "`pip install octue[dataflow]`."
        )

    deployer = DataflowDeployer(service_config)

    if dataflow_job_only:
        deployer.create_streaming_dataflow_job(image_uri=image_uri, update=update)
        return

    deployer.deploy(no_cache=no_cache, update=update)
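# A minimal sketch (not part of the Octue codebase) of the optional-dependency guard used above:
# `importlib.util.find_spec` returns None when a package is not installed, so the import of the heavy deployment
# machinery can be deferred until the relevant extra (e.g. `pip install octue[dataflow]`) is known to be present.
# The package name `some_extra` and the helper below are hypothetical stand-ins for illustration only.
import importlib
import importlib.util


def load_optional_feature(package_name="some_extra"):
    """Import an optional dependency lazily, failing with a helpful message if it isn't installed."""
    if importlib.util.find_spec(package_name) is None:
        raise ImportError(f"Install the optional extra providing {package_name!r} to use this feature.")

    return importlib.import_module(package_name)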
def test_deploy_with_cloud_build_file_provided(self):
    """Test deploying to Dataflow with a `cloudbuild.yaml` path provided in the `octue.yaml` file."""
    with tempfile.TemporaryDirectory() as temporary_directory:
        octue_configuration_path = self._create_octue_configuration_file(
            OCTUE_CONFIGURATION_WITH_CLOUD_BUILD_PATH,
            temporary_directory,
        )

        deployer = DataflowDeployer(octue_configuration_path, image_uri_template="blah")

        with patch("subprocess.run", return_value=Mock(returncode=0)) as mock_run:
            mock_build_id = "my-build-id"

            with patch(
                "json.loads",
                return_value={
                    "metadata": {"build": {"images": [deployer.image_uri_template], "id": mock_build_id}},
                    "status": "SUCCESS",
                },
            ):
                with patch("octue.cloud.deployment.google.dataflow.pipeline.DataflowRunner"):
                    deployer.deploy()

        # Test the build trigger creation request.
        self.assertEqual(
            mock_run.call_args_list[0].args[0],
            EXPECTED_BUILD_TRIGGER_CREATION_COMMAND
            + [
                f"--build-config={OCTUE_CONFIGURATION_WITH_CLOUD_BUILD_PATH['services'][0]['cloud_build_configuration_path']}"
            ],
        )

        # Test the build trigger run request.
        self.assertEqual(
            mock_run.call_args_list[1].args[0],
            [
                "gcloud",
                f"--project={OCTUE_CONFIGURATION_WITH_CLOUD_BUILD_PATH['services'][0]['project_name']}",
                "--format=json",
                "beta",
                "builds",
                "triggers",
                "run",
                OCTUE_CONFIGURATION_WITH_CLOUD_BUILD_PATH["services"][0]["name"],
                "--branch=my-branch",
            ],
        )

        # Test waiting for the build trigger run to complete.
        self.assertEqual(
            mock_run.call_args_list[2].args[0],
            [
                "gcloud",
                f'--project={SERVICE["project_name"]}',
                "--format=json",
                "builds",
                "describe",
                mock_build_id,
            ],
        )
def test_deploy(self):
    """Test that the build trigger creation and run are requested correctly."""
    with tempfile.TemporaryDirectory() as temporary_directory:
        octue_configuration_path = self._create_octue_configuration_file(OCTUE_CONFIGURATION, temporary_directory)
        deployer = DataflowDeployer(octue_configuration_path)

        with patch("subprocess.run", return_value=Mock(returncode=0)) as mock_run:
            mock_build_id = "my-build-id"

            with patch(
                "json.loads",
                return_value={
                    "metadata": {"build": {"images": [deployer.image_uri_template], "id": mock_build_id}},
                    "status": "SUCCESS",
                },
            ):
                temporary_file = tempfile.NamedTemporaryFile(delete=False)

                with patch("tempfile.NamedTemporaryFile", return_value=temporary_file):
                    deployer.deploy()

        # Test the build trigger creation request.
        self.assertEqual(
            mock_run.call_args_list[0].args[0],
            EXPECTED_BUILD_TRIGGER_CREATION_COMMAND + [f"--inline-config={temporary_file.name}"],
        )

        # Test the build trigger run request.
        self.assertEqual(
            mock_run.call_args_list[1].args[0],
            [
                "gcloud",
                f"--project={SERVICE['project_name']}",
                "--format=json",
                "beta",
                "builds",
                "triggers",
                "run",
                SERVICE["name"],
                "--branch=my-branch",
            ],
        )

        # Test waiting for the build trigger run to complete.
        self.assertEqual(
            mock_run.call_args_list[2].args[0],
            [
                "gcloud",
                f'--project={SERVICE["project_name"]}',
                "--format=json",
                "builds",
                "describe",
                mock_build_id,
            ],
        )
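# A minimal sketch (not one of the Octue tests) of the mocking pattern the tests above rely on: patching
# `subprocess.run` with a `Mock` records every call made during deployment, and `call_args_list[n].args[0]`
# recovers the n-th command as the argument list that would have been passed to `subprocess.run`. The `gcloud`
# arguments below are placeholders, not real deployment commands.
import subprocess
from unittest.mock import Mock, patch


def run_gcloud_commands():
    """Run a couple of shell commands, standing in for the deployer's `gcloud` calls."""
    subprocess.run(["gcloud", "builds", "list"])
    subprocess.run(["gcloud", "builds", "describe", "some-build-id"])


with patch("subprocess.run", return_value=Mock(returncode=0)) as mock_run:
    run_gcloud_commands()

# Each recorded call can be checked in order, exactly as the assertions in the tests above do.
assert mock_run.call_args_list[0].args[0] == ["gcloud", "builds", "list"]
assert mock_run.call_args_list[1].args[0] == ["gcloud", "builds", "describe", "some-build-id"]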