def test_unregister_resets_default():
    """Unregistering a backend unsets it as the default backend (if it was set)."""

    class CountingCollector(object):
        CONSTRUCTED_COUNT = 0

        def __init__(self):
            CountingCollector.CONSTRUCTED_COUNT += 1

    Collector.register_backend("counter", CountingCollector)
    Collector.set_default_backend("counter")

    # Calling get() should construct custom backend once.
    Collector.get()
    assert CountingCollector.CONSTRUCTED_COUNT == 1

    # Calling get() should construct custom backend again.
    Collector.get()
    assert CountingCollector.CONSTRUCTED_COUNT == 2

    Collector.register_backend("counter", None)

    # Calling get() after unregister should succeed
    assert Collector.get()

    # And it should not have constructed the custom backend
    assert CountingCollector.CONSTRUCTED_COUNT == 2
def test_set_default():
    """Can set and use a default collector."""

    class MyCollector(object):
        INSTANCES = []

        def __init__(self):
            MyCollector.INSTANCES.append(self)
            self.pushed = []

        def update_push_items(self, items):
            self.pushed.extend(items)

    Collector.register_backend("my-collector", MyCollector)
    Collector.set_default_backend("my-collector")

    items = [
        {"filename": "file1", "state": "PENDING"},
        {"filename": "file2", "state": "PENDING"},
    ]

    # Updating push items through default collector should succeed
    Collector.get().update_push_items(items).result()

    # It should have used the class we installed as the default
    assert len(MyCollector.INSTANCES) == 1
    assert MyCollector.INSTANCES[0].pushed == items
def test_get_missing():
    """Can't get a collector using an unregistered backend."""
    with pytest.raises(ValueError) as excinfo:
        Collector.get("not-registered")

    value = excinfo.value
    assert "No registered pushcollector backend: 'not-registered'" in str(value)
def _load_metadata(self, topdir):
    # Load the top-level metadata file in the staging directory, if any.
    for candidate in METADATA_FILES:
        metadata_file = os.path.join(topdir, candidate)
        if os.path.exists(metadata_file):
            break
    else:
        # no metadata file
        return StagingMetadata()

    basename = os.path.basename(metadata_file)

    with open(metadata_file, "rt") as f:
        content = f.read()

    # Save a copy of the file for later reference
    Collector.get().attach_file(basename, content).result()

    if metadata_file.endswith(".json"):
        metadata = json.loads(content)
    else:
        metadata = yaml.safe_load(content)

    return StagingMetadata.from_data(metadata, os.path.basename(metadata_file))
def test_local_saves_to_artifacts(caplog, tmpdir, monkeypatch):
    """local collector can be obtained and used successfully,
    and writes data under 'artifacts' dir as expected."""
    monkeypatch.chdir(tmpdir)
    caplog.set_level(logging.INFO)

    collector = Collector.get("local")

    collector.update_push_items(
        [
            {"filename": "file1", "state": "PUSHED"},
            {"filename": "somedir/file2", "state": "INVALIDFILE"},
        ]
    ).result()
    collector.update_push_items([{"filename": "file3", "state": "MISSING"}]).result()

    collector.attach_file("some-file.txt", "Hello, world\n").result()
    collector.attach_file("some-file.txt", "Hello again\n").result()
    collector.attach_file("some-file.bin", b"\x00\x01\x02").result()

    collector.append_file("appended-file.txt", "chunk 1").result()
    collector.append_file("appended-file.txt", b"\nchunk 2\n").result()

    # It should have created an "artifacts/latest" directory/symlink
    artifactsdir = tmpdir.join("artifacts", "latest")
    assert artifactsdir.check(dir=True, link=True)

    # Resolve symlink for later comparison with log messages
    artifactsdir = artifactsdir.realpath()

    # It should have saved push item data as JSONL
    assert (
        artifactsdir.join("pushitems.jsonl").open().read()
        == textwrap.dedent(
            """
            {"filename": "file1", "state": "PUSHED"}
            {"filename": "somedir/file2", "state": "INVALIDFILE"}
            {"filename": "file3", "state": "MISSING"}
            """
        ).lstrip()
    )

    # It should have saved the text file with requested content
    assert artifactsdir.join("some-file.txt").open().read() == "Hello again\n"

    # It should have saved the binary file with requested content
    assert artifactsdir.join("some-file.bin").open("rb").read() == b"\x00\x01\x02"

    # It should have saved the appended-text file with requested content
    assert artifactsdir.join("appended-file.txt").open().read() == "chunk 1\nchunk 2\n"

    # It should have logged about the created files
    assert caplog.messages == [
        "Logging to %s" % artifactsdir.join("pushitems.jsonl"),
        "Logging to %s" % artifactsdir.join("some-file.txt"),
        "Logging to %s" % artifactsdir.join("some-file.bin"),
        "Logging to %s" % artifactsdir.join("appended-file.txt"),
    ]
def test_bad_obj_push():
    """Passing a push item of an unsupported object type raises an error."""
    collector = Collector.get("dummy")
    pushitem = object()
    with pytest.raises(AttributeError):
        collector.update_push_items([pushitem])
def test_bad_push_items():
    """Passing push items with incorrect data to update_push_items raises a validation error."""
    coll = Collector.get("dummy")
    with pytest.raises(jsonschema.ValidationError):
        coll.update_push_items([{"foo": "bar"}])
def test_pushitem_obj_push():
    """A PushItem object passed to update_push_items works fine."""
    collector = Collector.get("dummy")
    pushitem = PushItem(name="test_pushitem")
    ret_val = collector.update_push_items([pushitem])
    assert ret_val.result() is None
def test_base_class_context_manager():
    """Exercise the __enter__ and __exit__ methods of Collector."""
    collector = Collector()

    # use my-collector name because the autouse fixture `reset_backend`
    # will clean it up for us
    Collector.register_backend("my-collector", lambda: collector)
    Collector.set_default_backend("my-collector")

    # empty with-block just to exercise __enter__ and __exit__
    with Collector.get():
        pass
def test_local_dir_sequence(tmpdir, monkeypatch):
    """local collector creates timestamped directories per run,
    with a 'latest' symlink pointing at the most recent one."""
    monkeypatch.chdir(tmpdir)

    # Use first collector
    monkeypatch.setattr(LocalCollector, "timestamp", lambda cls: "time1")
    Collector.get("local").update_push_items(
        [{"filename": "file1", "state": "PUSHED"}]
    ).result()

    # Use another collector a few seconds later
    monkeypatch.setattr(LocalCollector, "timestamp", lambda cls: "time2")
    Collector.get("local").update_push_items(
        [{"filename": "file1", "state": "PUSHED"}]
    ).result()

    # And another even later
    monkeypatch.setattr(LocalCollector, "timestamp", lambda cls: "time3")
    Collector.get("local").update_push_items(
        [{"filename": "file1", "state": "PUSHED"}]
    ).result()

    # It should have created these paths:
    artifactsdir = tmpdir.join("artifacts")
    assert artifactsdir.check(dir=True)
    assert artifactsdir.join("time1").check(dir=True)
    assert artifactsdir.join("time2").check(dir=True)
    assert artifactsdir.join("time3").check(dir=True)
    assert artifactsdir.join("latest").check(dir=True, link=True)

    # and latest should be a symlink to the last created timestamp dir
    assert artifactsdir.join("latest").readlink() == "time3"
def test_context_manager_backend():
    """A backend's context manager protocol methods are called properly."""
    collector = MagicMock(spec=Collector)

    # use my-collector name because the autouse fixture `reset_backend`
    # will clean it up for us
    Collector.register_backend("my-collector", lambda: collector)
    Collector.set_default_backend("my-collector")

    with Collector.get():
        pass

    collector.__enter__.assert_called_once()
    collector.__exit__.assert_called_once_with(None, None, None)
def test_can_use_dummy():
    """Dummy collector can be obtained and used successfully."""
    coll = Collector.get("dummy")

    coll.update_push_items(
        [
            {"filename": "file1", "state": "PUSHED"},
            {"filename": "file2", "state": "UNKNOWN"},
        ]
    ).result()

    coll.attach_file("somefile.txt", "hello, world").result()
    coll.append_file("otherfile.txt", "line of text\n").result()
def test_always_returns_future():
    """Collector interface returns futures regardless of backend return type."""
    return_value = None

    class TestCollector(object):
        def update_push_items(self, items):
            return return_value

        def attach_file(self, filename, content):
            return return_value

        def append_file(self, filename, content):
            return return_value

    Collector.register_backend("test", TestCollector)
    collector = Collector.get("test")

    # If backend returns a successful future (of any value),
    # interface returns an empty future
    return_value = f_return("abc")
    assert collector.update_push_items([]).result() is None
    assert collector.attach_file("somefile", "").result() is None
    assert collector.append_file("somefile", "").result() is None

    # If backend returns a failed future,
    # interface returns a failed future with error propagated
    error = RuntimeError("oops")
    return_value = f_return_error(error)
    assert collector.update_push_items([]).exception() is error
    assert collector.attach_file("somefile", "").exception() is error
    assert collector.append_file("somefile", "").exception() is error

    # If backend returns a non-future,
    # interface returns an empty future
    return_value = "abc"
    assert collector.update_push_items([]).result() is None
    assert collector.attach_file("somefile", "").result() is None
    assert collector.append_file("somefile", "").result() is None
def test_minimal_pushitem_obj():
    """A PushItem object with minimal attributes is translated to a pushitem dict
    with dest and checksums set to None, and the other attributes as set on the
    object."""
    mock = Mock()
    Collector.register_backend("mock", lambda: mock)
    collector = Collector.get("mock")

    pushitem = PushItem(name="test_push")
    collector.update_push_items([pushitem])

    update_push_item_args = mock.update_push_items.call_args[0][0]
    assert len(update_push_item_args) == 1

    push_args = update_push_item_args[0]
    assert push_args["filename"] == pushitem.name
    assert push_args["state"] == pushitem.state
    assert push_args["src"] == pushitem.src
    assert push_args["dest"] is None
    assert push_args["checksums"] is None
    assert push_args["origin"] == pushitem.origin
    assert push_args["build"] == pushitem.build
    assert push_args["signing_key"] == pushitem.signing_key
def test_pushitem_obj_attributes():
    """PushItem object attributes are translated and available as expected in the
    pushitem dict passed to update_push_items. A pushitem is generated for each
    destination."""
    mock = Mock()
    Collector.register_backend("mock", lambda: mock)
    collector = Collector.get("mock")

    pushitem = PushItem(
        name="test_push",
        origin="some_origin",
        src="source",
        dest=["dest1", "dest2"],
        md5sum="bb1b0d528129f47798006e73307ba7a7",
        sha256sum="4fd23ae44f3366f12f769f82398e96dce72adab8e45dea4d721ddf43fdce31e2",
        build="test_build-1.0.0-1",
        signing_key="FD431D51",
    )
    collector.update_push_items([pushitem])

    update_push_item_args = mock.update_push_items.call_args[0][0]

    # One pushitem dict should have been generated per destination
    assert len(update_push_item_args) == 2

    # Check every generated pushitem, one per destination
    for i in range(len(update_push_item_args)):
        push_args = update_push_item_args[i]
        assert push_args["filename"] == pushitem.name
        assert push_args["state"] == pushitem.state
        assert push_args["src"] == pushitem.src
        assert push_args["dest"] == pushitem.dest[i]
        assert push_args["checksums"] == {
            "md5": pushitem.md5sum,
            "sha256": pushitem.sha256sum,
        }
        assert push_args["origin"] == pushitem.origin
        assert push_args["build"] == pushitem.build
        assert push_args["signing_key"] == pushitem.signing_key
def test_get_default():
    """Can get a default collector."""
    collector = Collector.get()
    assert collector