def test_build_no_upload_if_file(monkeypatch):
    storage = Azure(container="container", stored_as_script=True)
    with pytest.raises(ValueError):
        storage.build()

    storage = Azure(container="container", stored_as_script=True, blob_name="flow.py")
    assert storage == storage.build()


def test_get_flow_from_file_azure_runs(monkeypatch):
    client = MagicMock(download_blob=MagicMock())
    service = MagicMock(get_blob_client=MagicMock(return_value=client))
    monkeypatch.setattr(
        "prefect.environments.storage.Azure._azure_block_blob_service", service
    )

    f = Flow("test")
    monkeypatch.setattr(
        "prefect.environments.storage.azure.extract_flow_from_file",
        MagicMock(return_value=f),
    )

    storage = Azure(container="container", stored_as_script=True)

    assert f.name not in storage
    flow_location = storage.add_flow(f)

    new_flow = storage.get_flow(flow_location)
    assert client.download_blob.called
    assert f.name in storage

    assert isinstance(new_flow, Flow)
    assert new_flow.name == "test"
    assert len(new_flow.tasks) == 0

    state = new_flow.run()
    assert state.is_successful()


def test_add_flow_to_azure():
    storage = Azure(container="test")
    f = Flow("test")
    assert f.name not in storage
    assert storage.add_flow(f)
    assert f.name in storage


def test_create_azure_storage_init_args():
    storage = Azure(container="test", connection_string="conn", blob_name="name")
    assert storage
    assert storage.flows == dict()
    assert storage.container == "test"
    assert storage.connection_string == "conn"
    assert storage.blob_name == "name"


def test_add_multiple_flows_to_Azure():
    storage = Azure(container="container")
    f = Flow("test")
    g = Flow("testg")
    assert f.name not in storage
    assert storage.add_flow(f)
    assert storage.add_flow(g)
    assert f.name in storage
    assert g.name in storage


def test_blob_service_client_property(monkeypatch):
    connection = MagicMock()
    azure = MagicMock(from_connection_string=connection)
    monkeypatch.setattr("azure.storage.blob.BlobServiceClient", azure)

    storage = Azure(container="test", connection_string="conn")
    azure_client = storage._azure_block_blob_service
    assert azure_client
    connection.assert_called_with(conn_str="conn")


def test_add_flow_to_azure_already_added(monkeypatch):
    storage = Azure(container="container")
    f = Flow("test")
    assert f.name not in storage
    assert storage.add_flow(f)
    assert f.name in storage

    with pytest.raises(ValueError):
        storage.add_flow(f)


def test_upload_flow_to_azure_blob_name(monkeypatch):
    client = MagicMock(upload_blob=MagicMock())
    service = MagicMock(get_blob_client=MagicMock(return_value=client))
    monkeypatch.setattr(
        "prefect.environments.storage.Azure._azure_block_blob_service", service
    )

    storage = Azure(container="container", blob_name="name")
    f = Flow("test")
    assert storage.add_flow(f)
    assert storage.build()

    assert service.get_blob_client.call_args[1]["container"] == "container"
    assert service.get_blob_client.call_args[1]["blob"] == "name"


def test_upload_flow_to_azure(monkeypatch):
    client = MagicMock(upload_blob=MagicMock())
    service = MagicMock(get_blob_client=MagicMock(return_value=client))
    monkeypatch.setattr(
        "prefect.environments.storage.Azure._azure_block_blob_service", service
    )

    storage = Azure(container="container")
    f = Flow("test")
    assert f.name not in storage
    assert storage.add_flow(f)
    assert storage.build()
    assert client.upload_blob.called
    assert f.name in storage


def test_local_agent_deploy_processes_azure_storage(monkeypatch, runner_token):
    popen = MagicMock()
    monkeypatch.setattr("prefect.agent.local.agent.Popen", popen)

    agent = LocalAgent()
    agent.deploy_flow(
        flow_run=GraphQLResult(
            {
                "flow": GraphQLResult({"storage": Azure(container="test").serialize()}),
                "id": "id",
            }
        )
    )

    assert popen.called
    assert len(agent.processes) == 1


def test_get_flow_azure_bucket_key(monkeypatch):
    client = MagicMock(download_blob=MagicMock())
    service = MagicMock(get_blob_client=MagicMock(return_value=client))
    monkeypatch.setattr(
        "prefect.environments.storage.Azure._azure_block_blob_service", service
    )

    f = Flow("test")
    monkeypatch.setattr("cloudpickle.loads", MagicMock(return_value=f))

    storage = Azure(container="container", blob_name="name")
    assert f.name not in storage
    flow_location = storage.add_flow(f)
    assert storage.get_flow(flow_location)

    assert service.get_blob_client.call_args[1]["container"] == "container"
    assert service.get_blob_client.call_args[1]["blob"] == flow_location


def test_get_flow_azure(monkeypatch):
    client = MagicMock(download_blob=MagicMock())
    service = MagicMock(get_blob_client=MagicMock(return_value=client))
    monkeypatch.setattr(
        "prefect.environments.storage.Azure._azure_block_blob_service", service
    )

    f = Flow("test")
    monkeypatch.setattr("cloudpickle.loads", MagicMock(return_value=f))

    storage = Azure(container="container")

    with pytest.raises(ValueError):
        storage.get_flow()

    assert f.name not in storage
    flow_location = storage.add_flow(f)
    assert storage.get_flow(flow_location)
    assert client.download_blob.called
    assert f.name in storage


def test_serialize_azure_storage():
    storage = Azure(container="test")
    serialized_storage = storage.serialize()

    assert serialized_storage["type"] == "Azure"


def test_create_azure_storage():
    storage = Azure(container="test")
    assert storage
    assert storage.logger


            }
        ),
        run,
    )

    assert env_vars["KEY1"] == "VAL1"
    assert env_vars["KEY2"] == "OVERRIDE"
    assert env_vars["PREFECT__LOGGING__LEVEL"] == "TEST"
    assert working_dir in env_vars["PYTHONPATH"]


@pytest.mark.parametrize(
    "storage",
    [
        Local(directory="test"),
        GCS(bucket="test"),
        S3(bucket="test"),
        Azure(container="test"),
        GitLab("test/repo", path="path/to/flow.py"),
        Bitbucket(project="PROJECT", repo="test-repo", path="test-flow.py"),
        CodeCommit("test/repo", path="path/to/flow.py"),
        Webhook(
            build_request_kwargs={"url": "test-service/upload"},
            build_request_http_method="POST",
            get_flow_request_kwargs={"url": "test-service/download"},
            get_flow_request_http_method="GET",
        ),
    ],
)
def test_local_agent_deploy_processes_valid_storage(storage, monkeypatch):
    popen = MagicMock()
    monkeypatch.setattr("prefect.agent.local.agent.Popen", popen)