def test_docker_agent_deploy_flow_run_config(api, image_on_run_config):
    """The deployed container uses the run-config image when one is set,
    otherwise the image baked into the flow's Docker storage."""
    if image_on_run_config:
        # Image supplied directly on the run config; storage contributes nothing.
        storage = Local()
        expected_image = "on-run-config"
        run_config = DockerRun(image=expected_image, env={"TESTING": "VALUE"})
    else:
        # No image on the run config: fall back to the Docker storage image.
        storage = Docker(
            registry_url="testing", image_name="on-storage", image_tag="tag"
        )
        expected_image = "testing/on-storage:tag"
        run_config = DockerRun(env={"TESTING": "VALUE"})

    flow = GraphQLResult(
        {
            "id": "foo",
            "storage": storage.serialize(),
            "run_config": run_config.serialize(),
            "core_version": "0.13.11",
        }
    )
    flow_run = GraphQLResult({"flow": flow, "id": "id", "name": "name"})

    agent = DockerAgent()
    agent.deploy_flow(flow_run=flow_run)

    assert api.create_container.called
    positional, keyword = api.create_container.call_args
    assert positional[0] == expected_image
    assert keyword["environment"]["TESTING"] == "VALUE"
def test_run_flow(monkeypatch, tmpdir, job_spec_file):
    """run_flow loads the flow from storage, hands it to the default flow
    runner class, and runs it with the environment's executor."""
    environment = KubernetesJobEnvironment(job_spec_file=job_spec_file)

    runner = MagicMock()
    runner_cls = MagicMock(return_value=runner)
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=runner_cls),
    )

    storage = Local(str(tmpdir))
    storage.add_flow(prefect.Flow("name"))

    # Stub the GraphQL client so the flow-run query returns our local flow.
    graphql = MagicMock(
        return_value=MagicMock(
            data=MagicMock(
                flow_run=[
                    GraphQLResult(
                        {
                            "flow": GraphQLResult(
                                {"name": "name", "storage": storage.serialize()}
                            )
                        }
                    )
                ],
            )
        )
    )
    client = MagicMock()
    client.return_value.graphql = graphql
    monkeypatch.setattr("prefect.environments.execution.base.Client", client)

    with set_temporary_config({"cloud.auth_token": "test"}), prefect.context(
        {"flow_run_id": "id"}
    ):
        environment.run_flow()

    assert runner_cls.call_args[1]["flow"].name == "name"
    assert runner.run.call_args[1]["executor"] is environment.executor
def build_flow_run(self, config, storage=None):
    """Build a minimal flow-run GraphQLResult carrying the given run config.

    Defaults to empty ``Local()`` storage when none is provided.
    """
    storage = Local() if storage is None else storage
    flow = GraphQLResult(
        {
            "storage": storage.serialize(),
            "run_config": RunConfigSchema().dump(config),
            "id": "new_id",
            "core_version": "0.13.0",
        }
    )
    return GraphQLResult({"flow": flow, "id": "id"})
def test_run_flow_calls_callbacks(monkeypatch):
    """run_flow invokes the environment's on_start and on_exit callbacks."""
    on_start = MagicMock()
    on_exit = MagicMock()
    environment = DaskKubernetesEnvironment(on_start=on_start, on_exit=on_exit)

    runner = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=runner),
    )
    # Dask cluster creation must not touch a real Kubernetes API.
    monkeypatch.setattr("dask_kubernetes.KubeCluster", MagicMock())

    with tempfile.TemporaryDirectory() as tmp:
        storage = Local(tmp)
        storage.add_flow(prefect.Flow("name"))

        # Stub the GraphQL client so the flow-run query returns our local flow.
        graphql = MagicMock(
            return_value=MagicMock(
                data=MagicMock(
                    flow_run=[
                        GraphQLResult(
                            {
                                "flow": GraphQLResult(
                                    {"name": "name", "storage": storage.serialize()}
                                )
                            }
                        )
                    ],
                )
            )
        )
        client = MagicMock()
        client.return_value.graphql = graphql
        monkeypatch.setattr("prefect.environments.execution.dask.k8s.Client", client)

        with set_temporary_config({"cloud.auth_token": "test"}), prefect.context(
            {"flow_run_id": "id"}
        ):
            environment.run_flow()

        assert runner.call_args[1]["flow"].name == "name"
        assert on_start.called
        assert on_exit.called
def generate_task_definition(self, run_config, storage=None, **kwargs):
    """Generate an ECS task definition for a synthetic flow run.

    ``kwargs`` are forwarded to the ``ECSAgent`` constructor; ``storage``
    defaults to empty ``Local()`` storage.
    """
    if storage is None:
        storage = Local()
    flow = GraphQLResult(
        {
            "storage": storage.serialize(),
            "run_config": run_config.serialize(),
            "id": "flow-id",
            "version": 1,
            "name": "Test Flow",
            "core_version": "0.13.0",
        }
    )
    flow_run = GraphQLResult({"flow": flow, "id": "flow-run-id"})
    return ECSAgent(**kwargs).generate_task_definition(flow_run, run_config)
def build_flow_run(self, config, storage=None):
    """Build a minimal flow-run GraphQLResult for the given run config.

    ``config`` may be ``None`` (no run config on the flow); ``storage``
    defaults to empty ``Local()`` storage.
    """
    storage = Local() if storage is None else storage
    serialized_config = config.serialize() if config is not None else None
    flow = GraphQLResult(
        {
            "storage": storage.serialize(),
            "run_config": serialized_config,
            "id": "new_id",
            "core_version": "0.13.0",
        }
    )
    return GraphQLResult({"flow": flow, "id": "id"})
def test_run_flow(monkeypatch):
    """run_flow builds the default runner and executor classes, forwarding
    the environment's executor kwargs."""
    environment = FargateTaskEnvironment(executor_kwargs={"test": "here"})

    runner = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=runner),
    )
    executor = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_executor_class",
        MagicMock(return_value=executor),
    )

    with tempfile.TemporaryDirectory() as tmp:
        storage = Local(tmp)
        storage.add_flow(prefect.Flow("name"))

        # Stub the GraphQL client so the flow-run query returns our local flow.
        graphql = MagicMock(
            return_value=MagicMock(
                data=MagicMock(
                    flow_run=[
                        GraphQLResult(
                            {
                                "flow": GraphQLResult(
                                    {"name": "name", "storage": storage.serialize()}
                                )
                            }
                        )
                    ],
                )
            )
        )
        client = MagicMock()
        client.return_value.graphql = graphql
        monkeypatch.setattr("prefect.environments.execution.base.Client", client)

        with set_temporary_config({"cloud.auth_token": "test"}), prefect.context(
            {"flow_run_id": "id"}
        ):
            environment.run_flow()

        assert runner.call_args[1]["flow"].name == "name"
        assert executor.call_args[1] == {"test": "here"}
def test_load_and_run_flow(monkeypatch, tmpdir):
    """load_and_run_flow fetches the flow from storage and calls its
    environment's ``run`` method."""
    myflow = Flow("test-flow")

    # This is gross. Since the flow is pickled/unpickled, there's no easy way
    # to access the same object to set a flag. Resort to setting an environment
    # variable as a global flag that won't get copied eagerly through
    # cloudpickle.
    monkeypatch.setenv("TEST_RUN_CALLED", "FALSE")

    class MyEnvironment(Environment):
        def run(self, flow):
            assert flow is myflow
            os.environ["TEST_RUN_CALLED"] = "TRUE"

    myflow.environment = MyEnvironment()
    storage = Local(str(tmpdir))
    myflow.storage = storage
    storage.add_flow(myflow)

    # Stub the GraphQL client so the flow-run query returns our stored flow.
    graphql = MagicMock(
        return_value=MagicMock(
            data=MagicMock(
                flow_run=[
                    GraphQLResult(
                        {
                            "flow": GraphQLResult(
                                {"name": myflow.name, "storage": storage.serialize()}
                            )
                        }
                    )
                ],
            )
        )
    )
    client = MagicMock()
    client.return_value.graphql = graphql
    monkeypatch.setattr("prefect.environments.execution.base.Client", client)

    with set_temporary_config({"cloud.auth_token": "test"}), prefect.context(
        {"flow_run_id": "id"}
    ):
        load_and_run_flow()

    assert os.environ["TEST_RUN_CALLED"] == "TRUE"
def test_run_flow_calls_callbacks(monkeypatch, tmpdir):
    """run_flow fires the on_start and on_exit callbacks around execution."""
    on_start = MagicMock()
    on_exit = MagicMock()
    # Reuse the job spec shipped alongside the dask-k8s environment module.
    spec_dir = os.path.dirname(prefect.environments.execution.dask.k8s.__file__)
    environment = KubernetesJobEnvironment(
        os.path.join(spec_dir, "job.yaml"), on_start=on_start, on_exit=on_exit
    )

    runner = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=runner),
    )

    storage = Local(str(tmpdir))
    storage.add_flow(prefect.Flow("name"))

    # Stub the GraphQL client so the flow-run query returns our local flow.
    graphql = MagicMock(
        return_value=MagicMock(
            data=MagicMock(
                flow_run=[
                    GraphQLResult(
                        {
                            "flow": GraphQLResult(
                                {"name": "name", "storage": storage.serialize()}
                            )
                        }
                    )
                ],
            )
        )
    )
    client = MagicMock()
    client.return_value.graphql = graphql
    monkeypatch.setattr("prefect.environments.execution.base.Client", client)

    with set_temporary_config({"cloud.auth_token": "test"}), prefect.context(
        {"flow_run_id": "id"}
    ):
        environment.run_flow()

    assert runner.call_args[1]["flow"].name == "name"
    assert on_start.called
    assert on_exit.called
def test_entire_environment_process_together(monkeypatch):
    """End-to-end pass over FargateTaskEnvironment: credential pickup from
    env vars, setup() registering a task definition, execute() launching the
    task, and run_flow() handing the stored flow to the flow runner."""
    # Fake boto3: task definition lookup fails so setup() must register one.
    boto3_client = MagicMock()
    boto3_client.describe_task_definition.side_effect = ClientError({}, None)
    boto3_client.register_task_definition.return_value = {}
    boto3_client.run_task.return_value = {}
    monkeypatch.setattr("boto3.client", MagicMock(return_value=boto3_client))

    flow_runner = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=flow_runner),
    )

    # AWS credentials are read from the environment by the environment object.
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", "id")
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "secret")
    monkeypatch.setenv("AWS_SESSION_TOKEN", "session")
    monkeypatch.setenv("REGION_NAME", "region")

    with prefect.context({"flow_run_id": "id"}), set_temporary_config(
        {"cloud.auth_token": "test", "logging.extra_loggers": "['test_logger']"}
    ):
        storage = Docker(registry_url="test", image_name="image", image_tag="tag")
        flow = Flow("name", storage=storage)
        environment = FargateTaskEnvironment(
            containerDefinitions=[
                {
                    "name": "flow-container",
                    "image": "image",
                    "command": [],
                    "environment": [],
                    "essential": True,
                }
            ],
            cluster="test",
            family="test",
            taskDefinition="test",
        )

        # Credentials were picked up from the monkeypatched env vars.
        assert environment
        assert environment.aws_access_key_id == "id"
        assert environment.aws_secret_access_key == "secret"
        assert environment.aws_session_token == "session"
        assert environment.region_name == "region"

        environment.setup(flow=flow)

        # setup() registered a task definition populated from the flow's
        # Docker storage image plus the standard Prefect cloud env vars.
        assert boto3_client.describe_task_definition.called
        assert boto3_client.register_task_definition.called
        register_kwargs = boto3_client.register_task_definition.call_args[1]
        assert register_kwargs["family"] == "test"
        assert register_kwargs["containerDefinitions"] == [
            {
                "name": "flow-container",
                "image": "test/image:tag",
                "command": [
                    "/bin/sh",
                    "-c",
                    "python -c 'import prefect; prefect.environments.FargateTaskEnvironment().run_flow()'",
                ],
                "environment": [
                    {
                        "name": "PREFECT__CLOUD__GRAPHQL",
                        "value": prefect.config.cloud.graphql,
                    },
                    {"name": "PREFECT__CLOUD__USE_LOCAL_SECRETS", "value": "false"},
                    {
                        "name": "PREFECT__ENGINE__FLOW_RUNNER__DEFAULT_CLASS",
                        "value": "prefect.engine.cloud.CloudFlowRunner",
                    },
                    {
                        "name": "PREFECT__ENGINE__TASK_RUNNER__DEFAULT_CLASS",
                        "value": "prefect.engine.cloud.CloudTaskRunner",
                    },
                    {"name": "PREFECT__LOGGING__LOG_TO_CLOUD", "value": "true"},
                    {
                        "name": "PREFECT__LOGGING__EXTRA_LOGGERS",
                        "value": "['test_logger']",
                    },
                ],
                "essential": True,
            }
        ]

        environment.execute(flow=flow)

        # execute() launched the registered task with per-run overrides.
        assert boto3_client.run_task.called
        run_task_kwargs = boto3_client.run_task.call_args[1]
        assert run_task_kwargs["taskDefinition"] == "test"
        assert run_task_kwargs["overrides"] == {
            "containerOverrides": [
                {
                    "name": "flow-container",
                    "environment": [
                        {
                            "name": "PREFECT__CLOUD__AUTH_TOKEN",
                            "value": prefect.config.cloud.get("auth_token"),
                        },
                        {"name": "PREFECT__CONTEXT__FLOW_RUN_ID", "value": "id"},
                        {"name": "PREFECT__CONTEXT__IMAGE", "value": "test/image:tag"},
                    ],
                }
            ]
        }
        assert run_task_kwargs["launchType"] == "FARGATE"
        assert run_task_kwargs["cluster"] == "test"

        with tempfile.TemporaryDirectory() as directory:
            local_storage = Local(directory)
            local_storage.add_flow(prefect.Flow("name"))

            # Stub the GraphQL client so run_flow() finds our local flow.
            graphql = MagicMock(
                return_value=MagicMock(
                    data=MagicMock(
                        flow_run=[
                            GraphQLResult(
                                {
                                    "flow": GraphQLResult(
                                        {
                                            "name": "name",
                                            "storage": local_storage.serialize(),
                                        }
                                    )
                                }
                            )
                        ],
                    )
                )
            )
            client = MagicMock()
            client.return_value.graphql = graphql
            monkeypatch.setattr("prefect.environments.execution.base.Client", client)

            with set_temporary_config({"cloud.auth_token": "test"}):
                environment.run_flow()

            assert flow_runner.call_args[1]["flow"].name == "name"