def test_create_beam_job_run_result_model(self) -> None:
    """Tests that a result model built via the service layer round-trips
    through get_beam_job_run_result().

    Fix: added the `-> None` return annotation for consistency with the
    sibling test methods in this file.
    """
    model = beam_job_services.create_beam_job_run_result_model(
        '123', 'abc', '123')
    model.put()

    result = beam_job_services.get_beam_job_run_result('123')

    self.assertEqual(result.stdout, 'abc')
    self.assertEqual(result.stderr, '123')
def test_get_beam_run_result(self) -> None:
    """Checks that stdout/stderr stored on a result model are returned
    unchanged by get_beam_job_run_result().
    """
    result_model = beam_job_models.BeamJobRunResultModel(
        job_id='123', stdout='abc', stderr='def')
    result_model.put()

    fetched = beam_job_services.get_beam_job_run_result('123')

    self.assertEqual(fetched.stdout, 'abc')
    self.assertEqual(fetched.stderr, 'def')
def test_job_with_failed_status(self) -> None:
    """A Dataflow job reporting JOB_STATE_FAILED should flip the run model
    to FAILED and record the Dataflow job id in the result's stderr.
    """
    self.run_model.latest_job_state = 'RUNNING'
    self.dataflow_job.current_state = dataflow.JobState.JOB_STATE_FAILED

    jobs_manager.refresh_state_of_beam_job_run_model(self.run_model)

    self.assertEqual(self.run_model.latest_job_state, 'FAILED')
    job_result = beam_job_services.get_beam_job_run_result(
        self.run_model.id)
    self.assertIn(self.dataflow_job.id, job_result.stderr)
def test_failing_sync_job(self) -> None:
    """Running FailingJob synchronously ends FAILED and surfaces the
    error text ('uh-oh') in the stored result's stderr.
    """
    job_run = jobs_manager.run_job(
        FailingJob, True, namespace=self.namespace)

    self.assertEqual(job_run.latest_job_state, 'FAILED')

    stored_model = beam_job_models.BeamJobRunModel.get(job_run.id)
    self.assertEqual(job_run, stored_model)

    job_result = beam_job_services.get_beam_job_run_result(job_run.id)
    self.assertIn('uh-oh', job_result.stderr)
def test_working_sync_job(self) -> None:
    """Running WorkingJob synchronously ends DONE and stores the job's
    stdout/stderr ('o'/'e') in its result.
    """
    job_run = jobs_manager.run_job(
        WorkingJob, True, namespace=self.namespace)

    self.assertEqual(job_run.latest_job_state, 'DONE')

    stored_model = beam_job_models.BeamJobRunModel.get(job_run.id)
    self.assertEqual(job_run, stored_model)

    job_result = beam_job_services.get_beam_job_run_result(job_run.id)
    self.assertEqual(
        job_result.to_dict(), {'stdout': 'o', 'stderr': 'e'})
def test_get_beam_run_result_with_result_batches(self) -> None:
    """Results split across several models are merged into one result,
    with each stream's lines joined by newlines (in any order).
    """
    beam_job_models.BeamJobRunResultModel(
        job_id='123', stdout='abc').put()
    beam_job_models.BeamJobRunResultModel(
        job_id='123', stderr='123').put()
    beam_job_models.BeamJobRunResultModel(
        job_id='123', stdout='def', stderr='456').put()

    merged = beam_job_services.get_beam_job_run_result('123')

    self.assertItemsEqual(  # type: ignore[no-untyped-call]
        merged.stdout.split('\n'), ['abc', 'def'])
    self.assertItemsEqual(  # type: ignore[no-untyped-call]
        merged.stderr.split('\n'), ['123', '456'])
def test_single_output(self) -> None:
    """A single JobRunResult written through job_io.PutResults is stored
    and retrievable verbatim.
    """
    results_to_put = [
        job_run_result.JobRunResult(
            stdout='Hello, World!', stderr='Uh-oh, World!'),
    ]

    self.assert_pcoll_empty(
        self.pipeline
        | beam.Create(results_to_put)
        | job_io.PutResults(self.JOB_ID))

    stored = beam_job_services.get_beam_job_run_result(self.JOB_ID)
    self.assertEqual(stored.stdout, 'Hello, World!')
    self.assertEqual(stored.stderr, 'Uh-oh, World!')
def test_sharded_output(self) -> None:
    """Results that exceed MAX_OUTPUT_BYTES are sharded on write, then
    merged back (order-independently) on read.

    Fix: added the `-> None` return annotation for consistency with the
    sibling test methods in this file.
    """
    messages = [
        job_run_result.JobRunResult(stdout='abc', stderr='123'),
        job_run_result.JobRunResult(stdout='def', stderr='456'),
        job_run_result.JobRunResult(stdout='ghi', stderr='789'),
    ]

    # Shrink the output limit so three small results force sharding.
    with self.swap(job_run_result, 'MAX_OUTPUT_BYTES', 11):
        self.assert_pcoll_empty(
            self.pipeline
            | beam.Create(messages)
            | job_io.PutResults(self.JOB_ID))

    result = beam_job_services.get_beam_job_run_result(self.JOB_ID)
    self.assertItemsEqual(result.stdout.split('\n'), ['abc', 'def', 'ghi'])
    self.assertItemsEqual(result.stderr.split('\n'), ['123', '456', '789'])
def test_failing_job(self) -> None:
    """run_job_sync('FailingJob') ends FAILED, its model matches the
    returned run, and the error text appears in the result's stderr.
    """
    job_run = jobs_manager.run_job_sync(
        'FailingJob', [], namespace=self.namespace)

    self.assertEqual(job_run.job_state, 'FAILED')

    stored_model = beam_job_models.BeamJobRunModel.get(
        job_run.job_id)  # type: ignore[attr-defined]
    self.assertEqual(
        job_run.to_dict(),
        beam_job_services.get_beam_job_run_from_model(
            stored_model).to_dict())

    job_result = beam_job_services.get_beam_job_run_result(job_run.job_id)
    self.assertIn('uh-oh', job_result.stderr)
def test_working_job(self) -> None:
    """run_job_sync('WorkingJob') ends DONE, its model matches the
    returned run, and its stdout/stderr ('o'/'e') are stored.
    """
    job_run = jobs_manager.run_job_sync(
        'WorkingJob', [], namespace=self.namespace)

    self.assertEqual(job_run.job_state, 'DONE')

    stored_model = beam_job_models.BeamJobRunModel.get(
        job_run.job_id)  # type: ignore[attr-defined]
    self.assertEqual(
        job_run.to_dict(),
        beam_job_services.get_beam_job_run_from_model(
            stored_model).to_dict())

    job_result = beam_job_services.get_beam_job_run_result(job_run.job_id)
    self.assertEqual(
        job_result.to_dict(), {'stdout': 'o', 'stderr': 'e'})
def test_async_job_that_does_not_start(self) -> None:
    """If the pipeline's run() yields a result with no job attached, the
    run is marked FAILED with a deployment-failure message in stderr.
    """
    # Simulate a Beam run result that never acquired an underlying job.
    mock_run_result = mock.Mock()
    mock_run_result.has_job = False
    mock_run_result.job_id.return_value = None
    mock_run_result.state = 'UNKNOWN'

    pipeline = beam.Pipeline(
        runner=runners.DirectRunner(),
        options=job_options.JobOptions(namespace=self.namespace))

    with self.swap_to_always_return(pipeline, 'run', value=mock_run_result):
        job_run = jobs_manager.run_job(WorkingJob, False, pipeline=pipeline)

    self.assertIsNone(job_run.dataflow_job_id)
    self.assertEqual(job_run.latest_job_state, 'FAILED')

    job_result = beam_job_services.get_beam_job_run_result(job_run.id)
    self.assertIn('Failed to deploy WorkingJob', job_result.stderr)
def get(self) -> None:
    """Renders the stored result of the requested Apache Beam job as JSON."""
    job_id = self.request.get('job_id')
    # NOTE(review): presumably get_beam_job_run_result() never returns
    # None here; if it can (e.g. for an unknown job_id), to_dict() would
    # raise — confirm the service's contract.
    job_result = beam_job_services.get_beam_job_run_result(job_id)
    self.render_json(job_result.to_dict())
def test_get_beam_run_result_with_no_results(self):
    """With no result models stored, both output streams are empty
    strings.
    """
    empty_result = beam_job_services.get_beam_job_run_result('123')

    self.assertEqual(empty_result.stdout, '')
    self.assertEqual(empty_result.stderr, '')
def test_get_beam_run_result_with_no_results(self):
    """With no result models stored for the job id, None is returned."""
    missing_result = beam_job_services.get_beam_job_run_result('123')
    self.assertIsNone(missing_result)