def execute(self, context):
    """Launch the templated Dataflow job through the Dataflow hook."""
    dataflow_hook = DataFlowHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        poll_sleep=self.poll_sleep,
    )
    dataflow_hook.start_template_dataflow(
        self.job_name,
        self.dataflow_default_options,
        self.parameters,
        self.template,
    )
class TestDataFlowTemplateHook(unittest.TestCase):
    """Tests for DataFlowHook.start_template_dataflow."""

    def setUp(self):
        # Stub the GCP base-hook __init__ so the hook can be built
        # without real credentials or a connection lookup.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch(DATAFLOW_STRING.format('DataFlowHook._start_template_dataflow'))
    def test_start_template_dataflow(self, internal_dataflow_mock):
        """The internal helper receives a defaulted 'region' and the
        variables with 'project' removed (project is passed separately)."""
        self.dataflow_hook.start_template_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_TEMPLATE,
            parameters=PARAMETERS, dataflow_template=TEMPLATE)
        # Expected variables: the user options plus the default region...
        options_with_region = {'region': 'us-central1'}
        options_with_region.update(DATAFLOW_OPTIONS_TEMPLATE)
        options_with_region_without_project = copy.deepcopy(
            options_with_region)
        # ...minus 'project', which travels as its own positional argument.
        del options_with_region_without_project['project']
        # NOTE(review): the expected project is read from
        # DATAFLOW_OPTIONS_JAVA rather than DATAFLOW_OPTIONS_TEMPLATE —
        # presumably both constants carry the same 'project' value; confirm.
        internal_dataflow_mock.assert_called_once_with(
            mock.ANY, options_with_region_without_project, PARAMETERS,
            TEMPLATE, DATAFLOW_OPTIONS_JAVA['project'])

    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_template_dataflow_with_runtime_env(self, mock_conn,
                                                      mock_dataflowjob):
        """Runtime-environment keys in the variables are forwarded in the
        'environment' field of the templates.launch request body."""
        dataflow_options_template = copy.deepcopy(DATAFLOW_OPTIONS_TEMPLATE)
        options_with_runtime_env = copy.deepcopy(RUNTIME_ENV)
        options_with_runtime_env.update(dataflow_options_template)

        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        # Handle to the mocked
        # projects().locations().templates().launch() endpoint.
        method = (mock_conn.return_value.projects.return_value.locations.
                  return_value.templates.return_value.launch)

        self.dataflow_hook.start_template_dataflow(
            job_name=JOB_NAME, variables=options_with_runtime_env,
            parameters=PARAMETERS, dataflow_template=TEMPLATE)

        body = {
            "jobName": mock.ANY,  # hook appends a generated suffix
            "parameters": PARAMETERS,
            "environment": RUNTIME_ENV
        }
        method.assert_called_once_with(
            projectId=options_with_runtime_env['project'],
            location='us-central1',
            gcsPath=TEMPLATE,
            body=body,
        )
def execute(self, context):
    """Execute the python dataflow job.

    Downloads the pipeline file from GCS if needed, merges default and
    per-task options (converting lowerCamelCase keys to snake_case, as
    expected by the Python SDK), and starts the job via the hook.
    """
    bucket_helper = GoogleCloudBucketHelper(
        self.gcp_conn_id, self.delegate_to)
    # Resolve a gs:// path to a local file so the SDK can run it.
    self.py_file = bucket_helper.google_cloud_to_local(self.py_file)
    hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
                        delegate_to=self.delegate_to,
                        poll_sleep=self.poll_sleep)
    dataflow_options = self.dataflow_default_options.copy()
    dataflow_options.update(self.options)

    # PEP 8: a named def instead of a lambda bound to a name.
    def camel_to_snake(name):
        """Convert a lowerCamelCase option name to snake_case."""
        return re.sub(r'[A-Z]', lambda x: '_' + x.group(0).lower(), name)

    # Iterate items() directly instead of re-indexing by key.
    formatted_options = {camel_to_snake(key): value
                         for key, value in dataflow_options.items()}
    hook.start_python_dataflow(
        self.job_name, formatted_options,
        self.py_file, self.py_options,
        py_interpreter=self.py_interpreter)
def execute(self, context):
    """Start the Java Dataflow job, honoring the check-if-running policy."""
    hook = DataFlowHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        poll_sleep=self.poll_sleep,
    )
    dataflow_options = copy.copy(self.dataflow_default_options)
    dataflow_options.update(self.options)

    job_already_running = False
    if self.check_if_running != CheckJobRunning.IgnoreJob:
        job_already_running = hook.is_job_dataflow_running(
            self.job_name, dataflow_options)
        # In WaitForRun mode, keep polling until the existing job is gone.
        while (job_already_running
               and self.check_if_running == CheckJobRunning.WaitForRun):
            job_already_running = hook.is_job_dataflow_running(
                self.job_name, dataflow_options)

    # Guard clause: a job with this name is still running, so do nothing.
    if job_already_running:
        return

    bucket_helper = GoogleCloudBucketHelper(
        self.gcp_conn_id, self.delegate_to)
    self.jar = bucket_helper.google_cloud_to_local(self.jar)
    hook.start_java_dataflow(self.job_name, dataflow_options,
                             self.jar, self.job_class, True,
                             self.multiple_jobs)
def setUp(self):
    """Build a DataFlowHook with the base GCP hook __init__ stubbed out."""
    patcher = mock.patch(
        BASE_STRING.format('GoogleCloudBaseHook.__init__'), new=mock_init)
    with patcher:
        self.dataflow_hook = DataFlowHook(gcp_conn_id='test')
class TestDataFlowHook(unittest.TestCase):
    """Tests for DataFlowHook command construction and job-name rules."""

    def setUp(self):
        # Stub the GCP base-hook __init__ so no real connection is needed.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch("airflow.gcp.hooks.dataflow.DataFlowHook._authorize")
    @mock.patch("airflow.gcp.hooks.dataflow.build")
    def test_dataflow_client_creation(self, mock_build, mock_authorize):
        """get_conn builds an authorized dataflow v1b3 API client."""
        result = self.dataflow_hook.get_conn()
        mock_build.assert_called_once_with(
            'dataflow', 'v1b3', http=mock_authorize.return_value,
            cache_discovery=False)
        self.assertEqual(mock_build.return_value, result)

    @parameterized.expand([('default_to_python2', "python2"),
                           ('major_version_2', 'python2'),
                           ('major_version_3', 'python3'),
                           ('minor_version', 'python3.6')])
    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_python_dataflow(self, name, py, mock_conn,
                                   mock_dataflow, mock_dataflowjob,
                                   mock_uuid):
        """The command line built for a Python job matches the given
        interpreter, options and generated job name."""
        del name  # unused variable
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_python_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_PY,
            dataflow=PY_FILE, py_options=PY_OPTIONS,
            py_interpreter=py)
        expected_interpreter = py if py else DEFAULT_PY_INTERPRETER
        expected_cmd = [
            expected_interpreter, '-m', PY_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner', '--project=test',
            '--labels=foo=bar',
            '--staging_location=gs://test/staging',
            '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID)]
        # Order-insensitive comparison: option ordering is not guaranteed.
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected_cmd))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow(self, mock_conn, mock_dataflow,
                                 mock_dataflowjob, mock_uuid):
        """A Java job without a job class is launched via 'java -jar'."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_JAVA,
            jar=JAR_FILE)
        expected_cmd = [
            'java', '-jar', JAR_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner', '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID)]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected_cmd))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow_with_job_class(self, mock_conn,
                                                mock_dataflow,
                                                mock_dataflowjob,
                                                mock_uuid):
        """A Java job with a job class is launched via 'java -cp'."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_JAVA,
            jar=JAR_FILE, job_class=JOB_CLASS)
        expected_cmd = [
            'java', '-cp', JAR_FILE, JOB_CLASS,
            '--region=us-central1',
            '--runner=DataflowRunner', '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID)]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected_cmd))

    @mock.patch('airflow.gcp.hooks.dataflow._Dataflow.log')
    @mock.patch('subprocess.Popen')
    @mock.patch('select.select')
    def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen,
                                            mock_logging):
        """The launch command is logged and a failing subprocess makes
        wait_for_done raise."""
        mock_logging.info = MagicMock()
        mock_logging.warning = MagicMock()
        mock_proc = MagicMock()
        mock_proc.stderr = MagicMock()
        mock_proc.stderr.readlines = MagicMock(
            return_value=['test\n', 'error\n'])
        mock_stderr_fd = MagicMock()
        mock_proc.stderr.fileno = MagicMock(return_value=mock_stderr_fd)
        mock_proc_poll = MagicMock()
        mock_select.return_value = [[mock_stderr_fd]]

        def poll_resp_error():
            # Second poll: report a non-zero return code (job failed).
            mock_proc.return_code = 1
            return True
        mock_proc_poll.side_effect = [None, poll_resp_error]
        mock_proc.poll = mock_proc_poll
        mock_popen.return_value = mock_proc
        dataflow = _Dataflow(['test', 'cmd'])
        mock_logging.info.assert_called_once_with('Running command: %s',
                                                  'test cmd')
        self.assertRaises(Exception, dataflow.wait_for_done)

    def test_valid_dataflow_job_name(self):
        """A valid name passes through unchanged when not appending."""
        job_name = self.dataflow_hook._build_dataflow_job_name(
            job_name=JOB_NAME, append_job_name=False)
        self.assertEqual(job_name, JOB_NAME)

    def test_fix_underscore_in_job_name(self):
        """Underscores are rewritten to hyphens to satisfy Dataflow rules."""
        job_name_with_underscore = 'test_example'
        fixed_job_name = job_name_with_underscore.replace('_', '-')
        job_name = self.dataflow_hook._build_dataflow_job_name(
            job_name=job_name_with_underscore, append_job_name=False)
        self.assertEqual(job_name, fixed_job_name)

    def test_invalid_dataflow_job_name(self):
        """A name starting with a digit is rejected with a ValueError
        that mentions the (underscore-fixed) name."""
        invalid_job_name = '9test_invalid_name'
        fixed_name = invalid_job_name.replace('_', '-')
        with self.assertRaises(ValueError) as e:
            self.dataflow_hook._build_dataflow_job_name(
                job_name=invalid_job_name, append_job_name=False)
        # Test whether the job_name is present in the Error msg
        self.assertIn('Invalid job_name ({})'.format(fixed_name),
                      str(e.exception))

    def test_dataflow_job_regex_check(self):
        """Spot-check the job-name regex: lowercase/digit/hyphen names
        pass; leading digits and special characters raise ValueError."""
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='df-job-1', append_job_name=False),
            'df-job-1')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='df-job', append_job_name=False),
            'df-job')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='dfjob', append_job_name=False),
            'dfjob')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='dfjob1', append_job_name=False),
            'dfjob1')
        self.assertRaises(ValueError,
                          self.dataflow_hook._build_dataflow_job_name,
                          job_name='1dfjob', append_job_name=False)
        self.assertRaises(ValueError,
                          self.dataflow_hook._build_dataflow_job_name,
                          job_name='dfjob@', append_job_name=False)
        self.assertRaises(ValueError,
                          self.dataflow_hook._build_dataflow_job_name,
                          job_name='df^jo', append_job_name=False)
class TestDataFlowHook(unittest.TestCase):
    """Tests for DataFlowHook command construction and job-name rules."""

    def setUp(self):
        # Stub the base-hook __init__ so no real connection is needed.
        with mock.patch(BASE_STRING.format('CloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch("airflow.gcp.hooks.dataflow.DataFlowHook._authorize")
    @mock.patch("airflow.gcp.hooks.dataflow.build")
    def test_dataflow_client_creation(self, mock_build, mock_authorize):
        """get_conn builds an authorized dataflow v1b3 API client."""
        result = self.dataflow_hook.get_conn()
        mock_build.assert_called_once_with(
            'dataflow', 'v1b3', http=mock_authorize.return_value,
            cache_discovery=False)
        self.assertEqual(mock_build.return_value, result)

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_python_dataflow(self, mock_conn, mock_dataflow,
                                   mock_dataflowjob, mock_uuid):
        """With no interpreter given, the command defaults to python2."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_python_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_PY,
            dataflow=PY_FILE, py_options=PY_OPTIONS)
        expected_cmd = [
            "python2", '-m', PY_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner', '--project=test',
            '--labels=foo=bar',
            '--staging_location=gs://test/staging',
            '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID)]
        # Order-insensitive comparison: option ordering is not guaranteed.
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected_cmd))

    @parameterized.expand([('default_to_python2', "python2"),
                           ('major_version_2', 'python2'),
                           ('major_version_3', 'python3'),
                           ('minor_version', 'python3.6')])
    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_python_dataflow_with_custom_interpreter(
            self, name, py_interpreter, mock_conn, mock_dataflow,
            mock_dataflowjob, mock_uuid):
        """The requested interpreter leads the generated command line."""
        del name  # unused variable
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_python_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_PY,
            dataflow=PY_FILE, py_options=PY_OPTIONS,
            py_interpreter=py_interpreter)
        expected_cmd = [
            py_interpreter, '-m', PY_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner', '--project=test',
            '--labels=foo=bar',
            '--staging_location=gs://test/staging',
            '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID)]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected_cmd))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow(self, mock_conn, mock_dataflow,
                                 mock_dataflowjob, mock_uuid):
        """A Java job without a job class is launched via 'java -jar'."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_JAVA,
            jar=JAR_FILE)
        expected_cmd = [
            'java', '-jar', JAR_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner', '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID)]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected_cmd))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowRunner'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow_with_job_class(self, mock_conn,
                                                mock_dataflow,
                                                mock_dataflowjob,
                                                mock_uuid):
        """A Java job with a job class is launched via 'java -cp'."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_JAVA,
            jar=JAR_FILE, job_class=JOB_CLASS)
        expected_cmd = [
            'java', '-cp', JAR_FILE, JOB_CLASS,
            '--region=us-central1',
            '--runner=DataflowRunner', '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID)]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected_cmd))

    # Tuples are (expected_result, input_job_name, append_job_name).
    @parameterized.expand([
        (JOB_NAME, JOB_NAME, False),
        ('test-example', 'test_example', False),
        ('test-dataflow-pipeline-12345678', JOB_NAME, True),
        ('test-example-12345678', 'test_example', True),
        ('df-job-1', 'df-job-1', False),
        ('df-job', 'df-job', False),
        ('dfjob', 'dfjob', False),
        ('dfjob1', 'dfjob1', False),
    ])
    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'), return_value=MOCK_UUID)
    def test_valid_dataflow_job_name(self, expected_result, job_name,
                                     append_job_name, mock_uuid4):
        """Valid names are normalized (underscores to hyphens) and get a
        deterministic uuid suffix when append_job_name is True."""
        job_name = self.dataflow_hook._build_dataflow_job_name(
            job_name=job_name, append_job_name=append_job_name)
        self.assertEqual(expected_result, job_name)

    @parameterized.expand([("1dfjob@", ), ("dfjob@", ), ("df^jo", )])
    def test_build_dataflow_job_name_with_invalid_value(self, job_name):
        """Names with leading digits or special characters raise."""
        self.assertRaises(ValueError,
                          self.dataflow_hook._build_dataflow_job_name,
                          job_name=job_name, append_job_name=False)