def test_can_push_with_tags(self):
    """After pushing, the upstream-provided sgr_tags should be applied to HEAD."""
    self.tag_repo(['1.0.0', '1', '1.0', '1.0.0+20200228.blue-ivory'])
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc=f'sgr://{remote_name}/abc/1234?tag=1',
        ),
    )
    push_task = PushRepoTask(workspaces=checkout_task.run())
    self.repo.commit()
    task_runner = TaskRunner(task=push_task)
    # Feed the push task its `sgr_tags` input via an upstream edge.
    tags_edge = Edge(Task(), push_task, key='sgr_tags')
    tag_state = Success(result=ConstantResult(
        value=dict(abc=['foo', 'bar', 'tag1_w_upstream'])))
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run(upstream_states={tags_edge: tag_state})
        if final_state.is_failed():
            print(final_state)
            self.fail()
    self.assertCountEqual(
        self.repo.head.get_tags(),
        ['HEAD', 'foo', 'bar', 'tag1_w_upstream'])
def test_can_clone_with_hourly_prerelease_tags(self):
    """Checkout of an hourly prerelease tag resolves to the newest hourly build."""
    self.tag_repo([
        '1.0.0', '1', '1.0', '1.0.0+20200228.blue-ivory',
        '1.0.1', '1.0.1+20200307.pink-bear',
        '1-hourly', '1.0-hourly',
        '1.0.2-hourly.1+20200301.blue-ivory',
        '1.0.2-hourly.2+20200301.green-monster',
    ])
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc='sgr:///abc/1234?tag=1.1-hourly',
        ),
    )
    task_runner = TaskRunner(task=checkout_task)
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run()
        if final_state.is_failed():
            print(final_state)
            self.fail()
    checked_out = final_state.result['abc']
    self.assertEqual(
        checked_out['version'],
        Version('1.0.2-hourly.2+20200301.green-monster'))
def test_can_semantic_bump_prerelease(self):
    """Bumping a prerelease increments the hourly counter and stamps run metadata."""
    semantic_bump = SemanticBumpTask()
    task_runner = TaskRunner(task=semantic_bump)
    workspaces_edge = Edge(Task(), semantic_bump, key='workspaces')
    workspaces_state = Success(result=ConstantResult(value=dict(abc=dict(
        repo_uri='sgr:///abc/1234?tag=1-hourly',
        version=Version('1.0.1-hourly.4+2021-03-08.zip-fur'),
    ))))
    date = datetime.utcnow()
    flow_run_name = 'testflow1'
    with raise_on_exception(), prefect.context(date=date, flow_run_name=flow_run_name):
        final_state = task_runner.run(
            upstream_states={workspaces_edge: workspaces_state})
        if final_state.is_failed():
            print(final_state)
            self.fail()
    # Expect the prerelease counter bumped 4 -> 5 plus build metadata from context.
    self.assertEqual(
        final_state.result,
        dict(abc=[
            '1-hourly', '1.0-hourly',
            f'1.0.1-hourly.5+{date:%Y-%m-%dT%H}.{date:%M}.{flow_run_name}'
        ]))
def test_download_data():
    """download_dataset succeeds for a known dataset id and a since-date."""
    today_str = datetime.today().strftime('%Y-%m-%d')
    with prefect.context(today=today_str):
        result_state = TaskRunner(task=socrata.download_dataset).get_task_run_state(
            state=Running(),
            inputs={
                "dataset": Result("rq3b-xjk8"),
                "since": Result(parse('2020-09-21', tz=None)),
            })
    assert result_state.is_successful()
def test_checkout_new_prerelease_fails(self):
    """Checking out a prerelease tag on an uninitialized repo must fail."""
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc='sgr:///abc/1234?tag=1-hourly',
        ),
    )
    with prefect.context():
        final_state = TaskRunner(task=checkout_task).run()
    self.assertTrue(
        final_state.is_failed(),
        'A repo must first be initialized with a non-prerelease tag.')
def test_can_checkout_new_tag(self):
    """Checkout of a tag with no prior versions yields a workspace with version None."""
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc='sgr:///abc/1234?tag=1',
        ),
    )
    task_runner = TaskRunner(task=checkout_task)
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run()
        if final_state.is_failed():
            print(final_state)
            self.fail()
    checked_out = final_state.result['abc']
    self.assertIsNone(checked_out['version'])
def test_version_to_date(self):
    """VersionToDateTask extracts the build-metadata timestamp from a Version."""
    version_to_date = VersionToDateTask()
    task_runner = TaskRunner(task=version_to_date)
    version_edge = Edge(Task(), version_to_date, key='version')
    version_state = Success(result=ConstantResult(
        value=Version('1.0.0+2021-03-03T00.stinky-fish')))
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run(
            upstream_states={version_edge: version_state})
        if final_state.is_failed():
            print(final_state)
            self.fail()
    self.assertEqual(final_state.result, pendulum.parse('2021-03-03T00'))
def test_can_build_splitfile(self):
    """A packaged example splitfile builds successfully against the test repo."""
    splitfile_text = pkgutil.get_data(
        __package__, "example.splitfile").decode("utf-8")
    splitfile_task = SplitfileTask(
        upstream_repos=dict(
            test='sgr:///integration-tests/splitfile-test?tag=1'
        ),
        output=Workspace(
            repo_uri='sgr:///integration-tests/splitfile-test?tag=1',
            image_hash=None,
        ),
        splitfile_commands=splitfile_text,
    )
    task_runner = TaskRunner(task=splitfile_task)
    with raise_on_exception(), prefect.context(today_nodash="20210226"):
        final_state = task_runner.run()
    self.assertTrue(final_state.is_successful())
def test_can_import_df(self):
    """A DataFrameToTableRequest materializes its frame as a table in the repo."""
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc='sgr:///abc/1234?tag=1',
        ),
    )
    checkout_task.run()
    df_to_table_task = DataFrameToTableRequestTask(
        repo_uri='sgr:///abc/1234?tag=1',
    )
    task_runner = TaskRunner(task=df_to_table_task)
    request_edge = Edge(Task(), df_to_table_task, key='request')
    request_state = Success(result=ConstantResult(
        value=DataFrameToTableRequest(
            data_frame=fake_data(10), table='footable1')))
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run(
            upstream_states={request_edge: request_state})
        if final_state.is_failed():
            print(final_state)
            self.fail()
    self.assertTrue(table_exists_at(self.repo, 'footable1'))
def test_can_checkout_with_new_major(self):
    """Requesting a not-yet-released major resolves to the latest existing version."""
    self.tag_repo([
        '1', '1.0', '1.1',
        '1.0.0+20200228.blue-ivory',
        '1.0.1+20200228.silver-fish',
        '1.1.1+20200228.blue-moon',
    ])
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc='sgr:///abc/1234?tag=2',
        ),
    )
    task_runner = TaskRunner(task=checkout_task)
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run()
        if final_state.is_failed():
            print(final_state)
            self.fail()
    checked_out = final_state.result['abc']
    self.assertEqual(
        checked_out['version'], Version('1.1.1+20200228.blue-moon'))
def test_skip_if_already_run(monkeypatch, test_logger, state, is_skipped):
    """
    Test that the skip_if_already_run task skips if the workflow's most recent
    state is 'running' or 'success', and does not skip if the state is None
    (i.e. not run before) or 'failed'.
    """
    session_mock = Mock()
    recent_state_mock = Mock(return_value=state)
    monkeypatch.setattr("autoflow.utils.get_session", session_mock)
    monkeypatch.setattr(
        "autoflow.sensor.WorkflowRuns.get_most_recent_state", recent_state_mock)

    task_runner = TaskRunner(task=skip_if_already_run)
    workflow_edge = Edge(
        prefect.Task(), skip_if_already_run, key="parametrised_workflow")
    workflow_state = Success(result=(
        prefect.Flow(name="DUMMY_WORFLOW_NAME"),
        {"DUMMY_PARAM": "DUMMY_VALUE"},
    ))
    with set_temporary_config({"db_uri": "DUMMY_DB_URI"}):
        task_state = task_runner.run(
            upstream_states={workflow_edge: workflow_state},
            context=dict(logger=test_logger),
        )

    session_mock.assert_called_once_with("DUMMY_DB_URI")
    recent_state_mock.assert_called_once_with(
        workflow_name="DUMMY_WORFLOW_NAME",
        parameters={"DUMMY_PARAM": "DUMMY_VALUE"},
        session=session_mock.return_value,
    )
    assert task_state.is_successful()
    assert is_skipped == task_state.is_skipped()
def test_run_workflow_fails(test_logger):
    """
    Test that the run_workflow task fails if the workflow fails.
    """
    failing_function = create_autospec(
        lambda dummy_param: None, side_effect=Exception("Workflow failed"))
    with prefect.Flow("Dummy workflow") as dummy_workflow:
        dummy_param = prefect.Parameter("dummy_param")
        FunctionTask(failing_function)(dummy_param=dummy_param)

    task_runner = TaskRunner(task=run_workflow)
    workflow_edge = Edge(
        prefect.Task(), run_workflow, key="parametrised_workflow")
    task_state = task_runner.run(
        upstream_states={
            workflow_edge: Success(
                result=(dummy_workflow, dict(dummy_param="DUMMY_VALUE")))
        },
        context=dict(logger=test_logger),
    )
    assert task_state.is_failed()
def test_can_commit(self):
    """Committing after a table write produces a new HEAD image hash."""
    previous_image_hash = self.repo.head.image_hash
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc='sgr:///abc/1234?tag=1',
        ),
    )
    commit_task = CommitTask(workspaces=checkout_task.run())
    df_to_table(
        fake_data(10), repository=self.repo,
        table="unit_test", if_exists='replace')
    task_runner = TaskRunner(task=commit_task)
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run()
        if final_state.is_failed():
            print(final_state)
            self.fail()
    self.assertNotEqual(self.repo.head.image_hash, previous_image_hash)
def test_run_workflow(test_logger):
    """
    Test that the run_workflow task runs a workflow with the given parameters.
    """
    workflow_function = create_autospec(lambda dummy_param: None)
    with prefect.Flow("Dummy workflow") as dummy_workflow:
        dummy_param = prefect.Parameter("dummy_param")
        FunctionTask(workflow_function)(dummy_param=dummy_param)

    task_runner = TaskRunner(task=run_workflow)
    workflow_edge = Edge(
        prefect.Task(), run_workflow, key="parametrised_workflow")
    task_state = task_runner.run(
        upstream_states={
            workflow_edge: Success(
                result=(dummy_workflow, dict(dummy_param="DUMMY_VALUE")))
        },
        context=dict(logger=test_logger),
    )
    assert task_state.is_successful()
    workflow_function.assert_called_once_with(dummy_param="DUMMY_VALUE")
def test_can_push(self):
    """A committed workspace can be pushed to the remote without error."""
    checkout_task = SemanticCheckoutTask(
        upstream_repos=dict(
            abc=f'sgr://{remote_name}/abc/1234?tag=1',
        ),
    )
    push_task = PushRepoTask(workspaces=checkout_task.run())
    df_to_table(
        fake_data(10), repository=self.repo,
        table="unit_test", if_exists='replace')
    self.repo.commit()
    task_runner = TaskRunner(task=push_task)
    with raise_on_exception(), prefect.context():
        final_state = task_runner.run()
        if final_state.is_failed():
            print(final_state)
            self.fail()
def test_raise_on_exception_works_at_the_task_level_with_error():
    """raise_on_exception re-raises a ZeroDivisionError from inside a task run."""
    task_runner = TaskRunner(task=MathTask())
    with pytest.raises(ZeroDivisionError):
        with raise_on_exception():
            task_runner.run()
def test_raise_on_exception_works_at_the_task_level_with_signal():
    """raise_on_exception re-raises a FAIL signal, preserving its message."""
    task_runner = TaskRunner(task=BusinessTask())
    with pytest.raises(prefect.engine.signals.FAIL) as exc_info:
        with raise_on_exception():
            task_runner.run()
    assert "needs more blockchain!" in str(exc_info.value)
def test_complete_load():
    """complete_load runs to a successful state."""
    final_state = TaskRunner(task=postgres.complete_load).run()
    assert final_state.is_successful()
def test_load_datafile():
    """load_datafile succeeds when given a known data file name."""
    task_runner = TaskRunner(task=postgres.load_datafile)
    result_state = task_runner.get_task_run_state(
        state=Running(),
        inputs={"datafile": Result("rq3b-xjk8-full.csv")})
    assert result_state.is_successful()
def test_prep_load():
    """prep_load runs to a successful state."""
    final_state = TaskRunner(task=postgres.prep_load).run()
    assert final_state.is_successful()
def test_last_updated():
    """get_last_updated succeeds and returns a timestamp after 2020-01-01."""
    final_state = TaskRunner(task=postgres.get_last_updated).run()
    assert final_state.is_successful()
    assert final_state.result > parse('2020-01-01', tz=None)