class DataFlowHookTest(unittest.TestCase):
    """Checks that DataFlowHook delegates Python job launches correctly."""

    def setUp(self):
        # Stub out the base hook's __init__ so no real GCP connection
        # lookup happens while constructing the hook under test.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch(DATAFLOW_STRING.format('DataFlowHook._start_dataflow'))
    def test_start_python_dataflow(self, start_dataflow_mock):
        """start_python_dataflow must forward its arguments to _start_dataflow."""
        self.dataflow_hook.start_python_dataflow(
            task_id=TASK_ID,
            variables=OPTIONS,
            dataflow=PY_FILE,
            py_options=PY_OPTIONS)
        start_dataflow_mock.assert_called_once_with(
            TASK_ID, OPTIONS, PY_FILE, mock.ANY, ['python'] + PY_OPTIONS)
def execute(self, context):
    """Execute the python dataflow job.

    Merges ``dataflow_default_options`` with the per-task ``options``,
    converts option keys from lowerCamelCase to snake_case (the naming
    the Python SDK expects), and delegates the launch to DataFlowHook.
    """
    hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
                        delegate_to=self.delegate_to)
    dataflow_options = self.dataflow_default_options.copy()
    dataflow_options.update(self.options)

    # PEP8 E731: a named helper instead of a lambda assigned to a name.
    def camel_to_snake(name):
        # Convert argument names from lowerCamelCase to snake case.
        return re.sub(r'[A-Z]', lambda match: '_' + match.group(0).lower(), name)

    formatted_options = {camel_to_snake(key): value
                         for key, value in dataflow_options.items()}
    hook.start_python_dataflow(self.task_id, formatted_options,
                               self.py_file, self.py_options)
def execute(self, context):
    """Execute the python dataflow job.

    Downloads the GCS-hosted entry file to a local path, merges default
    and per-task options, snake_cases the option keys, and launches the
    job through DataFlowHook.
    """
    bucket_helper = GoogleCloudBucketHelper(
        self.gcp_conn_id, self.delegate_to)
    # The Python SDK needs a local file; fetch it from GCS first.
    self.py_file = bucket_helper.google_cloud_to_local(self.py_file)
    hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
                        delegate_to=self.delegate_to)
    dataflow_options = self.dataflow_default_options.copy()
    dataflow_options.update(self.options)

    # PEP8 E731: a named helper instead of a lambda assigned to a name.
    def camel_to_snake(name):
        # Convert argument names from lowerCamelCase to snake case.
        return re.sub(r'[A-Z]', lambda match: '_' + match.group(0).lower(), name)

    formatted_options = {camel_to_snake(key): value
                         for key, value in dataflow_options.items()}
    hook.start_python_dataflow(
        self.task_id, formatted_options,
        self.py_file, self.py_options)
def execute(self, context):
    """Execute the python dataflow job.

    Downloads the GCS-hosted entry file to a local path, merges default
    and per-task options, snake_cases the option keys, and launches the
    job (named ``self.job_name``) through DataFlowHook, polling at
    ``self.poll_sleep`` intervals.
    """
    bucket_helper = GoogleCloudBucketHelper(
        self.gcp_conn_id, self.delegate_to)
    # The Python SDK needs a local file; fetch it from GCS first.
    self.py_file = bucket_helper.google_cloud_to_local(self.py_file)
    hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
                        delegate_to=self.delegate_to,
                        poll_sleep=self.poll_sleep)
    dataflow_options = self.dataflow_default_options.copy()
    dataflow_options.update(self.options)

    # PEP8 E731: a named helper instead of a lambda assigned to a name.
    def camel_to_snake(name):
        # Convert argument names from lowerCamelCase to snake case.
        return re.sub(r'[A-Z]', lambda match: '_' + match.group(0).lower(), name)

    formatted_options = {camel_to_snake(key): value
                         for key, value in dataflow_options.items()}
    hook.start_python_dataflow(
        self.job_name, formatted_options,
        self.py_file, self.py_options)
class DataFlowHookTest(unittest.TestCase):
    """Covers Python-job delegation and stderr log forwarding."""

    def setUp(self):
        # Stub the base hook constructor so no GCP connection is needed.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch(DATAFLOW_STRING.format('DataFlowHook._start_dataflow'))
    def test_start_python_dataflow(self, start_dataflow_mock):
        """Arguments must be handed through to _start_dataflow unchanged."""
        self.dataflow_hook.start_python_dataflow(
            task_id=TASK_ID,
            variables=OPTIONS,
            dataflow=PY_FILE,
            py_options=PY_OPTIONS)
        start_dataflow_mock.assert_called_once_with(
            TASK_ID, OPTIONS, PY_FILE, mock.ANY, ['python'] + PY_OPTIONS)

    @mock.patch('airflow.contrib.hooks.gcp_dataflow_hook._Dataflow.log')
    @mock.patch('subprocess.Popen')
    @mock.patch('select.select')
    def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen,
                                            mock_logging):
        """stderr lines of the child process must be logged as warnings."""
        mock_logging.info = MagicMock()
        mock_logging.warning = MagicMock()

        # Fake subprocess whose stderr yields two lines.
        fake_proc = MagicMock()
        fake_proc.stderr = MagicMock()
        fake_proc.stderr.readlines = MagicMock(
            return_value=['test\n', 'error\n'])
        fake_stderr_fd = MagicMock()
        fake_proc.stderr.fileno = MagicMock(return_value=fake_stderr_fd)
        poll_mock = MagicMock()
        mock_select.return_value = [[fake_stderr_fd]]

        def poll_resp_error():
            # Second poll(): pretend the process died with a failure code.
            fake_proc.return_code = 1
            return True

        poll_mock.side_effect = [None, poll_resp_error]
        fake_proc.poll = poll_mock
        mock_popen.return_value = fake_proc

        dataflow = _Dataflow(['test', 'cmd'])
        mock_logging.info.assert_called_with('Running command: %s', 'test cmd')
        self.assertRaises(Exception, dataflow.wait_for_done)
        mock_logging.warning.assert_has_calls([call('test'), call('error')])
class DataFlowHookTest(unittest.TestCase):
    """Exercises hook delegation and the _Dataflow stderr log path."""

    def setUp(self):
        # Avoid touching real GCP credentials when the hook is built.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch(DATAFLOW_STRING.format('DataFlowHook._start_dataflow'))
    def test_start_python_dataflow(self, internal_mock):
        """start_python_dataflow forwards everything to _start_dataflow."""
        self.dataflow_hook.start_python_dataflow(
            task_id=TASK_ID,
            variables=OPTIONS,
            dataflow=PY_FILE,
            py_options=PY_OPTIONS)
        internal_mock.assert_called_once_with(
            TASK_ID, OPTIONS, PY_FILE, mock.ANY, ['python'] + PY_OPTIONS)

    @mock.patch('airflow.contrib.hooks.gcp_dataflow_hook._Dataflow.log')
    @mock.patch('subprocess.Popen')
    @mock.patch('select.select')
    def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen,
                                            mock_logging):
        """Child-process stderr output should surface via log.warning."""
        mock_logging.info = MagicMock()
        mock_logging.warning = MagicMock()

        # Build a fake Popen result exposing stderr with two lines.
        process_stub = MagicMock()
        process_stub.stderr = MagicMock()
        process_stub.stderr.readlines = MagicMock(
            return_value=['test\n', 'error\n'])
        stderr_fd_stub = MagicMock()
        process_stub.stderr.fileno = MagicMock(return_value=stderr_fd_stub)
        poll_stub = MagicMock()
        mock_select.return_value = [[stderr_fd_stub]]

        def poll_resp_error():
            # Simulate the process terminating unsuccessfully.
            process_stub.return_code = 1
            return True

        poll_stub.side_effect = [None, poll_resp_error]
        process_stub.poll = poll_stub
        mock_popen.return_value = process_stub

        dataflow = _Dataflow(['test', 'cmd'])
        mock_logging.info.assert_called_with('Running command: %s', 'test cmd')
        self.assertRaises(Exception, dataflow.wait_for_done)
        mock_logging.warning.assert_has_calls([call('test'), call('error')])
class DataFlowPythonHookTest(unittest.TestCase):
    """Verifies the command lines DataFlowHook builds for Python and Java
    jobs, and that the spawned process's stderr is forwarded to the log."""

    def setUp(self):
        # Stub the base hook's __init__ so no real GCP connection is made.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_python_dataflow(self, mock_conn,
                                   mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `python -m` command."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_python_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_PY,
            dataflow=PY_FILE, py_options=PY_OPTIONS)
        # Job name gets the mocked uuid appended for uniqueness.
        EXPECTED_CMD = [
            'python', '-m', PY_FILE,
            '--runner=DataflowRunner',
            '--project=test',
            '--labels=foo=bar',
            '--staging_location=gs://test/staging',
            '--job_name={}-{}'.format(TASK_ID, MOCK_UUID)
        ]
        # Flag order is not guaranteed, so compare sorted command lists.
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow(self, mock_conn,
                                 mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `java -jar` command."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_JAVA,
            dataflow=JAR_FILE)
        # Note the Java SDK uses camelCase flags (stagingLocation, jobName).
        EXPECTED_CMD = [
            'java', '-jar', JAR_FILE,
            '--runner=DataflowRunner',
            '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(TASK_ID, MOCK_UUID)
        ]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch('airflow.contrib.hooks.gcp_dataflow_hook._Dataflow.log')
    @mock.patch('subprocess.Popen')
    @mock.patch('select.select')
    def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen,
                                            mock_logging):
        """stderr lines of the subprocess must be logged as warnings."""
        mock_logging.info = MagicMock()
        mock_logging.warning = MagicMock()
        # Fake Popen process whose stderr produces two lines.
        mock_proc = MagicMock()
        mock_proc.stderr = MagicMock()
        mock_proc.stderr.readlines = MagicMock(
            return_value=['test\n', 'error\n'])
        mock_stderr_fd = MagicMock()
        mock_proc.stderr.fileno = MagicMock(return_value=mock_stderr_fd)
        mock_proc_poll = MagicMock()
        mock_select.return_value = [[mock_stderr_fd]]

        def poll_resp_error():
            # Second poll(): simulate a non-zero exit code.
            mock_proc.return_code = 1
            return True

        mock_proc_poll.side_effect = [None, poll_resp_error]
        mock_proc.poll = mock_proc_poll
        mock_popen.return_value = mock_proc
        dataflow = _Dataflow(['test', 'cmd'])
        mock_logging.info.assert_called_with('Running command: %s', 'test cmd')
        self.assertRaises(Exception, dataflow.wait_for_done)
        mock_logging.warning.assert_has_calls([call('test'), call('error')])
class DataFlowHookTest(unittest.TestCase):
    """Verifies command construction (with region flag), stderr logging,
    and job-name sanitization/validation in DataFlowHook."""

    def setUp(self):
        # Stub the base hook's __init__ so no real GCP connection is made.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_python_dataflow(self, mock_conn,
                                   mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `python2 -m` command."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_python_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_PY,
            dataflow=PY_FILE, py_options=PY_OPTIONS)
        # Job name gets the mocked uuid appended for uniqueness.
        EXPECTED_CMD = [
            'python2', '-m', PY_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner',
            '--project=test',
            '--labels=foo=bar',
            '--staging_location=gs://test/staging',
            '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID)
        ]
        # Flag order is not guaranteed, so compare sorted command lists.
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow(self, mock_conn,
                                 mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `java -jar` command."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_JAVA,
            dataflow=JAR_FILE)
        # Note the Java SDK uses camelCase flags (stagingLocation, jobName).
        EXPECTED_CMD = [
            'java', '-jar', JAR_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner',
            '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID)
        ]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow_with_job_class(
            self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid):
        """With job_class set, the command switches to `java -cp jar class`."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            job_name=JOB_NAME, variables=DATAFLOW_OPTIONS_JAVA,
            dataflow=JAR_FILE, job_class=JOB_CLASS)
        EXPECTED_CMD = [
            'java', '-cp', JAR_FILE, JOB_CLASS,
            '--region=us-central1',
            '--runner=DataflowRunner',
            '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID)
        ]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch('airflow.contrib.hooks.gcp_dataflow_hook._Dataflow.log')
    @mock.patch('subprocess.Popen')
    @mock.patch('select.select')
    def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen,
                                            mock_logging):
        """A failing subprocess must make wait_for_done raise."""
        mock_logging.info = MagicMock()
        mock_logging.warning = MagicMock()
        # Fake Popen process whose stderr produces two lines.
        mock_proc = MagicMock()
        mock_proc.stderr = MagicMock()
        mock_proc.stderr.readlines = MagicMock(
            return_value=['test\n', 'error\n'])
        mock_stderr_fd = MagicMock()
        mock_proc.stderr.fileno = MagicMock(return_value=mock_stderr_fd)
        mock_proc_poll = MagicMock()
        mock_select.return_value = [[mock_stderr_fd]]

        def poll_resp_error():
            # Second poll(): simulate a non-zero exit code.
            mock_proc.return_code = 1
            return True

        mock_proc_poll.side_effect = [None, poll_resp_error]
        mock_proc.poll = mock_proc_poll
        mock_popen.return_value = mock_proc
        dataflow = _Dataflow(['test', 'cmd'])
        mock_logging.info.assert_called_with('Running command: %s', 'test cmd')
        self.assertRaises(Exception, dataflow.wait_for_done)

    def test_valid_dataflow_job_name(self):
        """A compliant name with append disabled passes through untouched."""
        job_name = self.dataflow_hook._build_dataflow_job_name(
            job_name=JOB_NAME, append_job_name=False)
        self.assertEqual(job_name, JOB_NAME)

    def test_fix_underscore_in_job_name(self):
        """Underscores (invalid in Dataflow names) are replaced by dashes."""
        job_name_with_underscore = 'test_example'
        fixed_job_name = job_name_with_underscore.replace('_', '-')
        job_name = self.dataflow_hook._build_dataflow_job_name(
            job_name=job_name_with_underscore, append_job_name=False)
        self.assertEqual(job_name, fixed_job_name)

    def test_invalid_dataflow_job_name(self):
        """A name starting with a digit is rejected with ValueError."""
        invalid_job_name = '9test_invalid_name'
        fixed_name = invalid_job_name.replace('_', '-')
        with self.assertRaises(ValueError) as e:
            self.dataflow_hook._build_dataflow_job_name(
                job_name=invalid_job_name, append_job_name=False)
        # Test whether the job_name is present in the Error msg
        self.assertIn('Invalid job_name ({})'.format(fixed_name),
                      str(e.exception))

    def test_dataflow_job_regex_check(self):
        """Spot-check the job-name regex: valid shapes pass, bad chars fail."""
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='df-job-1', append_job_name=False),
            'df-job-1')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='df-job', append_job_name=False),
            'df-job')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='dfjob', append_job_name=False),
            'dfjob')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                job_name='dfjob1', append_job_name=False),
            'dfjob1')
        self.assertRaises(
            ValueError,
            self.dataflow_hook._build_dataflow_job_name,
            job_name='1dfjob', append_job_name=False)
        self.assertRaises(
            ValueError,
            self.dataflow_hook._build_dataflow_job_name,
            job_name='dfjob@', append_job_name=False)
        self.assertRaises(
            ValueError,
            self.dataflow_hook._build_dataflow_job_name,
            job_name='df^jo', append_job_name=False)
class DataFlowHookTest(unittest.TestCase):
    """Verifies command construction (with region flag), stderr logging,
    and task-id-based job-name sanitization in DataFlowHook.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual`` throughout; no behavior change.
    """

    def setUp(self):
        # Stub the base hook's __init__ so no real GCP connection is made.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_python_dataflow(self, mock_conn,
                                   mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `python -m` command."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_python_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_PY,
            dataflow=PY_FILE, py_options=PY_OPTIONS)
        EXPECTED_CMD = [
            'python', '-m', PY_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner',
            '--project=test',
            '--labels=foo=bar',
            '--staging_location=gs://test/staging',
            '--job_name={}-{}'.format(TASK_ID, MOCK_UUID)
        ]
        # Flag order is not guaranteed, so compare sorted command lists.
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow(self, mock_conn,
                                 mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `java -jar` command."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_JAVA,
            dataflow=JAR_FILE)
        # Note the Java SDK uses camelCase flags (stagingLocation, jobName).
        EXPECTED_CMD = [
            'java', '-jar', JAR_FILE,
            '--region=us-central1',
            '--runner=DataflowRunner',
            '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(TASK_ID, MOCK_UUID)
        ]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow_with_job_class(
            self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid):
        """With job_class set, the command switches to `java -cp jar class`."""
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        dataflow_instance = mock_dataflow.return_value
        dataflow_instance.wait_for_done.return_value = None
        dataflowjob_instance = mock_dataflowjob.return_value
        dataflowjob_instance.wait_for_done.return_value = None
        self.dataflow_hook.start_java_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_JAVA,
            dataflow=JAR_FILE, job_class=JOB_CLASS)
        EXPECTED_CMD = [
            'java', '-cp', JAR_FILE, JOB_CLASS,
            '--region=us-central1',
            '--runner=DataflowRunner',
            '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(TASK_ID, MOCK_UUID)
        ]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(EXPECTED_CMD))

    @mock.patch('airflow.contrib.hooks.gcp_dataflow_hook._Dataflow.log')
    @mock.patch('subprocess.Popen')
    @mock.patch('select.select')
    def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen,
                                            mock_logging):
        """stderr lines of the subprocess must be logged as warnings."""
        mock_logging.info = MagicMock()
        mock_logging.warning = MagicMock()
        # Fake Popen process whose stderr produces two lines.
        mock_proc = MagicMock()
        mock_proc.stderr = MagicMock()
        mock_proc.stderr.readlines = MagicMock(
            return_value=['test\n', 'error\n'])
        mock_stderr_fd = MagicMock()
        mock_proc.stderr.fileno = MagicMock(return_value=mock_stderr_fd)
        mock_proc_poll = MagicMock()
        mock_select.return_value = [[mock_stderr_fd]]

        def poll_resp_error():
            # Second poll(): simulate a non-zero exit code.
            mock_proc.return_code = 1
            return True

        mock_proc_poll.side_effect = [None, poll_resp_error]
        mock_proc.poll = mock_proc_poll
        mock_popen.return_value = mock_proc
        dataflow = _Dataflow(['test', 'cmd'])
        mock_logging.info.assert_called_with('Running command: %s', 'test cmd')
        self.assertRaises(Exception, dataflow.wait_for_done)
        mock_logging.warning.assert_has_calls([call('test'), call('error')])

    def test_valid_dataflow_job_name(self):
        """A compliant task id with append disabled passes through untouched."""
        job_name = self.dataflow_hook._build_dataflow_job_name(
            task_id=TASK_ID, append_job_name=False)
        self.assertEqual(job_name, TASK_ID)

    def test_fix_underscore_in_task_id(self):
        """Underscores (invalid in Dataflow names) are replaced by dashes."""
        task_id_with_underscore = 'test_example'
        fixed_job_name = task_id_with_underscore.replace('_', '-')
        job_name = self.dataflow_hook._build_dataflow_job_name(
            task_id=task_id_with_underscore, append_job_name=False)
        self.assertEqual(job_name, fixed_job_name)

    def test_invalid_dataflow_job_name(self):
        """A task id starting with a digit is rejected with AssertionError."""
        invalid_job_name = '9test_invalid_name'
        fixed_name = invalid_job_name.replace('_', '-')
        with self.assertRaises(AssertionError) as e:
            self.dataflow_hook._build_dataflow_job_name(
                task_id=invalid_job_name, append_job_name=False)
        # Test whether the job_name is present in the Error msg
        self.assertIn('Invalid job_name ({})'.format(fixed_name),
                      str(e.exception))

    def test_dataflow_job_regex_check(self):
        """Spot-check the job-name regex: valid shapes pass, bad chars fail."""
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                task_id='df-job-1', append_job_name=False),
            'df-job-1')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                task_id='df-job', append_job_name=False),
            'df-job')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                task_id='dfjob', append_job_name=False),
            'dfjob')
        self.assertEqual(
            self.dataflow_hook._build_dataflow_job_name(
                task_id='dfjob1', append_job_name=False),
            'dfjob1')
        self.assertRaises(
            AssertionError,
            self.dataflow_hook._build_dataflow_job_name,
            task_id='1dfjob', append_job_name=False)
        self.assertRaises(
            AssertionError,
            self.dataflow_hook._build_dataflow_job_name,
            task_id='dfjob@', append_job_name=False)
        self.assertRaises(
            AssertionError,
            self.dataflow_hook._build_dataflow_job_name,
            task_id='df^jo', append_job_name=False)
class DataFlowHookTest(unittest.TestCase):
    """Checks the command lines DataFlowHook builds (Python, Java, and
    Java-with-main-class variants) plus stderr log forwarding."""

    def setUp(self):
        # Stub the base hook constructor so no GCP connection is needed.
        with mock.patch(BASE_STRING.format('GoogleCloudBaseHook.__init__'),
                        new=mock_init):
            self.dataflow_hook = DataFlowHook(gcp_conn_id='test')

    @staticmethod
    def _prepare_launch_mocks(mock_conn, mock_dataflow, mock_dataflowjob,
                              mock_uuid):
        # Shared mock wiring: fixed uuid, no connection, no-op waits.
        mock_uuid.return_value = MOCK_UUID
        mock_conn.return_value = None
        mock_dataflow.return_value.wait_for_done.return_value = None
        mock_dataflowjob.return_value.wait_for_done.return_value = None

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_python_dataflow(self, mock_conn,
                                   mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `python -m` command."""
        self._prepare_launch_mocks(mock_conn, mock_dataflow,
                                   mock_dataflowjob, mock_uuid)
        self.dataflow_hook.start_python_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_PY,
            dataflow=PY_FILE, py_options=PY_OPTIONS)
        expected = [
            'python', '-m', PY_FILE,
            '--runner=DataflowRunner',
            '--project=test',
            '--labels=foo=bar',
            '--staging_location=gs://test/staging',
            '--job_name={}-{}'.format(TASK_ID, MOCK_UUID),
        ]
        # Flag order is not guaranteed, so compare sorted command lists.
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow(self, mock_conn,
                                 mock_dataflow, mock_dataflowjob, mock_uuid):
        """The hook must assemble the expected `java -jar` command."""
        self._prepare_launch_mocks(mock_conn, mock_dataflow,
                                   mock_dataflowjob, mock_uuid)
        self.dataflow_hook.start_java_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_JAVA,
            dataflow=JAR_FILE)
        # Note the Java SDK uses camelCase flags (stagingLocation, jobName).
        expected = [
            'java', '-jar', JAR_FILE,
            '--runner=DataflowRunner',
            '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(TASK_ID, MOCK_UUID),
        ]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected))

    @mock.patch(DATAFLOW_STRING.format('uuid.uuid1'))
    @mock.patch(DATAFLOW_STRING.format('_DataflowJob'))
    @mock.patch(DATAFLOW_STRING.format('_Dataflow'))
    @mock.patch(DATAFLOW_STRING.format('DataFlowHook.get_conn'))
    def test_start_java_dataflow_with_job_class(
            self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid):
        """With job_class set, the command switches to `java -cp jar class`."""
        self._prepare_launch_mocks(mock_conn, mock_dataflow,
                                   mock_dataflowjob, mock_uuid)
        self.dataflow_hook.start_java_dataflow(
            task_id=TASK_ID, variables=DATAFLOW_OPTIONS_JAVA,
            dataflow=JAR_FILE, job_class=JOB_CLASS)
        expected = [
            'java', '-cp', JAR_FILE, JOB_CLASS,
            '--runner=DataflowRunner',
            '--project=test',
            '--stagingLocation=gs://test/staging',
            '--labels={"foo":"bar"}',
            '--jobName={}-{}'.format(TASK_ID, MOCK_UUID),
        ]
        self.assertListEqual(sorted(mock_dataflow.call_args[0][0]),
                             sorted(expected))

    @mock.patch('airflow.contrib.hooks.gcp_dataflow_hook._Dataflow.log')
    @mock.patch('subprocess.Popen')
    @mock.patch('select.select')
    def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen,
                                            mock_logging):
        """stderr output of the child process must surface via log.warning."""
        mock_logging.info = MagicMock()
        mock_logging.warning = MagicMock()

        # Fake Popen process whose stderr yields two lines.
        fake_proc = MagicMock()
        fake_proc.stderr = MagicMock()
        fake_proc.stderr.readlines = MagicMock(
            return_value=['test\n', 'error\n'])
        fake_stderr_fd = MagicMock()
        fake_proc.stderr.fileno = MagicMock(return_value=fake_stderr_fd)
        poll_mock = MagicMock()
        mock_select.return_value = [[fake_stderr_fd]]

        def poll_resp_error():
            # Second poll(): simulate the process exiting with failure.
            fake_proc.return_code = 1
            return True

        poll_mock.side_effect = [None, poll_resp_error]
        fake_proc.poll = poll_mock
        mock_popen.return_value = fake_proc

        dataflow = _Dataflow(['test', 'cmd'])
        mock_logging.info.assert_called_with('Running command: %s', 'test cmd')
        self.assertRaises(Exception, dataflow.wait_for_done)
        mock_logging.warning.assert_has_calls([call('test'), call('error')])