def test_build_no_upload_if_file_and_no_local_script_path(self, google_client):
    """Script-based storage without a key or local script path cannot build."""
    # Neither `key` nor `local_script_path` is given, so nothing identifies
    # the script to store -- build() must reject this configuration.
    incomplete = GCS(bucket="awesome-bucket", stored_as_script=True)
    with pytest.raises(ValueError):
        incomplete.build()

    # A key alone is sufficient: build() performs no upload and returns
    # the (unchanged) storage object itself.
    complete = GCS(bucket="awesome-bucket", stored_as_script=True, key="myflow.py")
    assert complete.build() == complete
def test_upload_multiple_flows_to_gcs(self, google_client):
    """build() uploads one blob per flow added to the storage."""
    blob_mock = MagicMock()
    bucket_mock = MagicMock(blob=MagicMock(return_value=blob_mock))
    google_client.return_value.get_bucket = MagicMock(return_value=bucket_mock)

    storage = GCS(bucket="awesome-bucket")
    flows = (Flow("awesome-flow-1"), Flow("awesome-flow-2"))
    for flow in flows:
        storage.add_flow(flow)

    assert storage.build()
    assert bucket_mock.blob.call_count == 2
    assert blob_mock.upload_from_string.call_count == 2

    expected_blob_calls = [call(blob_name=storage.flows[flow.name]) for flow in flows]
    expected_upload_calls = [call(flow_to_bytes_pickle(flow)) for flow in flows]
    # Uploads happen only inside build(), which iterates a dict -- dict
    # order is not guaranteed on older Pythons, so order is not asserted.
    bucket_mock.blob.assert_has_calls(expected_blob_calls, any_order=True)
    blob_mock.upload_from_string.assert_has_calls(expected_upload_calls, any_order=True)
def test_upload_script_if_path(self, google_client, tmpdir):
    """Script-based storage with a local_script_path uploads the file on build()."""
    blob_mock = MagicMock()
    bucket_mock = MagicMock(blob=MagicMock(return_value=blob_mock))
    google_client.return_value.get_bucket = MagicMock(return_value=bucket_mock)

    # Write a throwaway script file for the storage to pick up.
    with open(f"{tmpdir}/flow.py", "w") as tmpfile:
        tmpfile.write("foo")

    storage = GCS(
        bucket="awesome-bucket",
        stored_as_script=True,
        local_script_path=f"{tmpdir}/flow.py",
        key="key",
    )
    f = Flow("awesome-flow")
    assert f.name not in storage
    assert storage.add_flow(f)
    assert f.name in storage
    assert storage.build()

    bucket_mock.blob.assert_called_with(blob_name=storage.flows[f.name])
    # Fix: the original line was the bare expression
    # `blob_mock.upload_from_file.called`, which evaluates the attribute and
    # discards it -- it asserted nothing. Assert the call actually happened.
    blob_mock.upload_from_file.assert_called()
    assert blob_mock.upload_from_file.call_args[0]
def test_upload_single_flow_with_custom_key_to_gcs(self, google_client):
    """A user-supplied `key` overrides the generated blob name on upload."""
    blob_mock = MagicMock()
    bucket_mock = MagicMock(blob=MagicMock(return_value=blob_mock))
    google_client.return_value.get_bucket = MagicMock(return_value=bucket_mock)

    storage = GCS(bucket="awesome-bucket", key="the-best-key")
    flow = Flow("awesome-flow")

    # Membership flips from absent to present after add_flow().
    assert flow.name not in storage
    assert storage.add_flow(flow)
    assert flow.name in storage

    assert storage.build()
    # The custom key, not a generated name, is used for the blob.
    bucket_mock.blob.assert_called_with(blob_name="the-best-key")
    blob_mock.upload_from_string.assert_called_with(cloudpickle.dumps(flow))
def test_upload_single_flow_to_gcs(self, google_client):
    """A single flow is pickled and uploaded under its generated blob name."""
    blob_mock = MagicMock()
    bucket_mock = MagicMock(blob=MagicMock(return_value=blob_mock))
    google_client.return_value.get_bucket = MagicMock(return_value=bucket_mock)

    storage = GCS(bucket="awesome-bucket")
    flow = Flow("awesome-flow")

    # Membership flips from absent to present after add_flow().
    assert flow.name not in storage
    assert storage.add_flow(flow)
    assert flow.name in storage

    assert storage.build()
    bucket_mock.blob.assert_called_with(blob_name=storage.flows[flow.name])
    blob_mock.upload_from_string.assert_called_with(flow_to_bytes_pickle(flow))
def test_put_get_and_run_single_flow_to_gcs(self, google_client):
    """The bytes handed to GCS round-trip back into a runnable flow."""
    blob_mock = MagicMock()
    bucket_mock = MagicMock(blob=MagicMock(return_value=blob_mock))
    google_client.return_value.get_bucket = MagicMock(return_value=bucket_mock)

    storage = GCS(bucket="awesome-bucket")
    flow = Flow("awesome-flow")

    assert flow.name not in storage
    assert storage.add_flow(flow)
    assert flow.name in storage
    assert storage.build()

    # Capture the payload that would have been written to the bucket and
    # deserialize it, as a download would.
    uploaded_bytes = blob_mock.upload_from_string.call_args[0][0]
    restored_flow = cloudpickle.loads(uploaded_bytes)
    assert restored_flow.name == "awesome-flow"

    # The restored flow must still execute successfully.
    final_state = restored_flow.run()
    assert final_state.is_successful()