def test_user_metadata_fields():
    """FileUnit.from_data parses pulp_user_metadata fields OK"""
    sha256 = "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063"

    unit = FileUnit.from_data(
        {
            "_content_type_id": "iso",
            "name": "my-file",
            "checksum": sha256,
            "size": 123,
            "pulp_user_metadata": {
                "description": "The best file I ever saw",
                "cdn_path": "/some/path/to/my-file",
                "cdn_published": "2021-04-01T01:08:26",
            },
        }
    )

    # Every pulp_user_metadata field should land on the matching FileUnit
    # attribute, with cdn_published parsed from ISO8601 into a datetime.
    assert unit == FileUnit(
        path="my-file",
        size=123,
        sha256sum=sha256,
        content_type_id="iso",
        description="The best file I ever saw",
        cdn_path="/some/path/to/my-file",
        cdn_published=datetime.datetime(2021, 4, 1, 1, 8, 26),
    )
def test_can_upload_units(tmpdir):
    """repo.upload_file() succeeds with fake client and populates units."""
    ctrl = FakeController()
    ctrl.insert_repository(FileRepository(id="repo1"))

    client = ctrl.client
    repo1 = client.get_repository("repo1").result()

    # Prepare two local files to upload: one with binary content, one text.
    binary_file = tmpdir.join("some-file.txt")
    binary_file.write(b"there is some binary data:\x00\x01\x02")
    text_file = tmpdir.join("another.txt")
    text_file.write("ahoy there")

    uploads = [
        repo1.upload_file(str(binary_file)),
        repo1.upload_file(str(text_file), relative_url="another/path.txt"),
    ]

    # Each upload future should resolve to a successful task.
    for upload in uploads:
        assert upload.result()[0].succeeded

    # Searching for content in that repo, and content across all repos...
    units_in_repo = sorted(repo1.search_content().result(), key=lambda u: u.sha256sum)
    units_all = sorted(client.search_content().result(), key=lambda u: u.sha256sum)

    # ...should yield identical results,
    assert units_all == units_in_repo

    # ...and exactly the two files just uploaded.
    assert units_in_repo == [
        FileUnit(
            path="another/path.txt",
            size=10,
            sha256sum="94c0c9d847ecaa45df01999676db772e5cb69cc54e1ff9db31d02385c56a86e1",
            repository_memberships=["repo1"],
            unit_id="d4713d60-c8a7-0639-eb11-67b367a9c378",
        ),
        FileUnit(
            path="some-file.txt",
            size=29,
            sha256sum="fad3fc1e6d583b2003ec0a5273702ed8fcc2504271c87c40d9176467ebe218cb",
            repository_memberships=["repo1"],
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
        ),
    ]
def test_replace_file(tmpdir):
    """repo.upload_file() behaves as expected when replacing a file of the same name."""
    ctrl = FakeController()
    ctrl.insert_repository(FileRepository(id="repo1"))

    client = ctrl.client
    repo1 = client.get_repository("repo1").result()

    first_file = tmpdir.join("some-file.txt")
    first_file.write(b"there is some binary data:\x00\x01\x02")
    second_file = tmpdir.join("another.txt")
    second_file.write("ahoy there")

    # Upload both files, using the same relative_url for each.
    repo1.upload_file(str(first_file), relative_url="darmok-jalad.txt").result()
    repo1.upload_file(str(second_file), relative_url="darmok-jalad.txt").result()

    # Search for content in that repo, and content across all repos.
    by_checksum = lambda u: u.sha256sum
    units_in_repo = sorted(repo1.search_content().result(), key=by_checksum)
    units_all = sorted(client.search_content().result(), key=by_checksum)

    # Only the second uploaded file should still be present in the repo.
    assert units_in_repo == [
        FileUnit(
            path="darmok-jalad.txt",
            size=10,
            sha256sum="94c0c9d847ecaa45df01999676db772e5cb69cc54e1ff9db31d02385c56a86e1",
            repository_memberships=["repo1"],
            unit_id="d4713d60-c8a7-0639-eb11-67b367a9c378",
        )
    ]

    # However, both units should still exist in the system; the first uploaded
    # unit has become an orphan.
    assert units_all == units_in_repo + [
        FileUnit(
            path="darmok-jalad.txt",
            size=29,
            sha256sum="fad3fc1e6d583b2003ec0a5273702ed8fcc2504271c87c40d9176467ebe218cb",
            content_type_id="iso",
            repository_memberships=[],
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
        )
    ]
def test_clear_file_skip_publish(command_tester):
    """Clearing a repo with file content while skipping publish succeeds."""
    task_instance = FakeClearRepo()

    repo = FileRepository(
        id="some-filerepo",
        eng_product_id=123,
        relative_url="some/publish/url",
        mutable_urls=[],
    )
    task_instance.pulp_client_controller.insert_repository(repo)
    task_instance.pulp_client_controller.insert_units(
        repo, [FileUnit(path="hello.txt", size=123, sha256sum="a" * 64)]
    )

    # It should run with expected output.
    command_tester.test(
        task_instance.main,
        [
            "test-clear-repo",
            "--pulp-url",
            "https://pulp.example.com/",
            "--skip",
            "foo,publish,bar",
            "some-filerepo",
        ],
    )

    # "publish" was in the skip list, so no Pulp repo should be published.
    assert task_instance.pulp_client_controller.publish_history == []
def make_iso_unit(self, unit_key, unit_metadata):
    """Construct a FileUnit from a pulp iso unit_key, applying any
    pulp_user_metadata fields found in unit_metadata as extra kwargs."""
    metadata = unit_metadata or {}
    user_fields = metadata.get("pulp_user_metadata") or {}
    return FileUnit(
        unit_id=self.next_unit_id(),
        path=unit_key["name"],
        size=unit_key["size"],
        sha256sum=unit_key["checksum"],
        **user_fields
    )
def test_noninteger_size():
    """FileUnit.from_data accepts a floating point size."""
    sha256 = "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063"

    unit = FileUnit.from_data(
        {
            "_content_type_id": "iso",
            "name": "my-impossible-file",
            "checksum": sha256,
            "size": 123.4,
        }
    )

    # The fractional size should be truncated to an integer.
    assert unit == FileUnit(
        path="my-impossible-file",
        size=123,
        sha256sum=sha256,
        content_type_id="iso",
    )
def test_zero_size():
    """FileUnit.from_data accepts a size of zero."""
    # SHA256 of empty content.
    sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"

    unit = FileUnit.from_data(
        {
            "_content_type_id": "iso",
            "name": "my-empty-file",
            "checksum": sha256,
            "size": 0,
        }
    )

    assert unit == FileUnit(
        path="my-empty-file",
        size=0,
        sha256sum=sha256,
        content_type_id="iso",
    )
def test_update_checks_state():
    """Update fails if update apparently succeeded in pulp client, yet the item
    doesn't match the desired state."""
    checksum = "49ae93732fcf8d63fe1cce759664982dbd5b23161f007dba8561862adc96d063"

    pulp_unit = FileUnit(
        unit_id="some-file-unit",
        path="some/file.txt",
        size=5,
        sha256sum=checksum,
        description="a test file",
        repository_memberships=["some-repo"],
    )

    pulp_ctrl = FakeController()
    repo = FileRepository(id="some-repo")
    pulp_ctrl.insert_repository(repo)
    pulp_ctrl.insert_units(repo, [pulp_unit])

    item = NeverUpToDateItem(
        pushsource_item=FilePushItem(
            name="some/file.txt",
            sha256sum=checksum,
            dest=["some-repo"],
        ),
        pulp_unit=pulp_unit,
        pulp_state=State.NEEDS_UPDATE,
    )

    # Try updating it; the attempt should fail.
    update_f = item.ensure_uptodate(pulp_ctrl.client)
    exc = update_f.exception()

    # The error should tell us why it failed...
    assert (
        "item supposedly updated successfully, but actual and desired state still differ:"
        in str(exc)
    )

    # ...and which item we failed to process.
    assert "item: FilePushItem(name='some/file.txt'" in str(exc)

    # It should show the current and desired field values:
    # The 'current unit', i.e. the state after we updated, reversed the original
    # description.
    assert re.search(r"current unit: FileUnit.*elif tset a", str(exc))

    # The 'desired unit', i.e. the reason we still don't consider the unit
    # up-to-date, wants to reverse the description back again...
    assert re.search(r"desired unit: FileUnit.*a test file", str(exc))
def test_can_insert_orphans():
    """insert_units with a null repo inserts units as orphans."""
    sha256 = "b1a6cb41223dcd02f208827517fe4b59a12684b76e15dbee645e9f9a9daa952e"
    units = [
        FileUnit(path="bar", size=0, sha256sum=sha256),
        FileUnit(path="quux", size=0, sha256sum=sha256),
    ]

    ctrl = FakeController()

    # Inserting with repo=None should be permitted.
    ctrl.insert_units(None, units)

    # Now search for all content.
    found = list(ctrl.client.search_content())

    for unit in found:
        # Each unit should have been assigned an ID...
        assert unit.unit_id
        # ...and memberships [] (no repos) rather than None (unknown repos).
        assert unit.repository_memberships == []

    # Other than those two fields, results should be identical to the input.
    normalized = [
        attr.evolve(u, unit_id=None, repository_memberships=None) for u in found
    ]
    assert sorted(normalized, key=repr) == units
def test_update_no_id(requests_mocker, client):
    """update_content raises immediately for a unit with no unit_id."""
    unit = FileUnit(
        path="x",
        size=0,
        sha256sum="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
    )

    # With no ID we can't even attempt an update, so this should fail
    # up-front by raising (no future is returned).
    with pytest.raises(ValueError) as excinfo:
        client.update_content(unit)

    # It should tell us why.
    assert "unit_id missing on call to update_content()" in str(excinfo.value)
def test_order_out_of_range():
    """FileUnit rejects an order out of the allowed range."""
    with pytest.raises(ValueError) as excinfo:
        FileUnit(
            path="my-empty-file",
            size=0,
            sha256sum="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
            content_type_id="iso",
            # Far outside the permitted bounds.
            display_order=1e17,
        )

    # Validation should name the offending field and the allowed range.
    assert "display_order must be within range -99999 .. 99999" in str(excinfo.value)
def test_mixed_search(client, requests_mocker):
    """Searching with a criteria mixing several fields works correctly"""
    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/search/units/",
        json=[
            {
                "metadata": {
                    "_content_type_id": "iso",
                    "name": "hello.txt",
                    "size": 23,
                    "checksum": "a" * 64,
                }
            }
        ],
    )

    # A criteria combining a content-type match with a field match.
    crit = Criteria.and_(
        Criteria.with_field_in("content_type_id", ["rpm", "iso"]),
        Criteria.with_field("name", "hello.txt"),
    )

    found = list(repo.search_content(crit))
    assert found == [FileUnit(path="hello.txt", size=23, sha256sum="a" * 64)]

    # There should have been just one request...
    history = requests_mocker.request_history
    assert len(history) == 1

    # ...whose body maps content_type_id onto type_ids and translates the
    # name match into a unit filter.
    assert history[0].json() == {
        "criteria": {
            "type_ids": ["rpm", "iso"],
            "skip": 0,
            "limit": 2000,
            "filters": {"unit": {"name": {"$eq": "hello.txt"}}},
        }
    }
def test_can_update_content(requests_mocker, client):
    """update_content PUTs all usermeta fields to the pulp_user_metadata API."""
    api_url = (
        "https://pulp.example.com/pulp/api/v2/content/units/iso/"
        "some-unit/pulp_user_metadata/"
    )

    requests_mocker.put(
        api_url,
        # Note: passing json=None here doesn't work as requests_mocker seems
        # unable to differentiate between "json should be None" and "no json
        # response is specified".
        text="null",
        headers={"Content-Type": "application/json"},
    )

    unit = FileUnit(
        unit_id="some-unit",
        description="A unit I'm about to update",
        cdn_published=datetime.datetime(2021, 12, 6, 11, 19, 0),
        path="x",
        version="1.0.0",
        # display_order will tolerate strings
        display_order="2.3",
        size=0,
        sha256sum="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
    )

    # It should succeed.
    client.update_content(unit).result()

    # It should have done a single request.
    assert len(requests_mocker.request_history) == 1
    req = requests_mocker.request_history[0]

    # Should have been a PUT to the appropriate API.
    assert req.method == "PUT"
    assert req.url == api_url

    # Should have included all the usermeta fields in request body, with
    # cdn_published serialized and display_order coerced to a number.
    assert req.json() == {
        "cdn_path": None,
        "cdn_published": "2021-12-06T11:19:00Z",
        "description": "A unit I'm about to update",
        "display_order": 2.3,
        "version": "1.0.0",
    }
def test_update_content_fails(requests_mocker, client):
    """update_content yields a failed future when the HTTP request fails."""
    requests_mocker.put(
        "https://pulp.example.com/pulp/api/v2/content/units/iso/some-unit/pulp_user_metadata/",
        status_code=400,
    )

    unit = FileUnit(
        unit_id="some-unit",
        description="A unit I'm about to update",
        cdn_path="/some/path.txt",
        path="x",
        size=0,
        sha256sum="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
    )

    update_f = client.update_content(unit)

    # The future should fail, since the HTTP request failed with a 400.
    assert "400 Client Error" in str(update_f.exception())
def test_upload_file_verylarge():
    """Fake client can upload a 2GB file successfully."""
    ctrl = FakeController()
    ctrl.insert_repository(FileRepository(id="repo1"))

    client = ctrl.client
    repo1 = client.get_repository("repo1").result()

    # A file-like object producing 2GB of zeroes without allocating them.
    file_size = 2000000000
    big_file = ZeroesIO(file_size)

    # The upload future should resolve to a successful task.
    tasks = repo1.upload_file(big_file, relative_url="big-file").result()
    assert tasks[0].succeeded

    # The corresponding unit should now be findable.
    units_all = sorted(client.search_content().result(), key=lambda u: u.sha256sum)

    assert units_all == [
        FileUnit(
            path="big-file",
            size=2000000000,
            # If you want to verify this checksum, try:
            #
            #   dd if=/dev/zero bs=1000000 count=2000 status=none | sha256sum
            #
            sha256sum="2e0c654b6cba3a1e816726bae0eac481eb7fd0351633768c3c18392e0f02b619",
            repository_memberships=["repo1"],
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
        )
    ]
def test_push_copy_fails(
    fake_controller, fake_nocopy_push, fake_state_path, command_tester, caplog
):
    """Test that push detects and fails in the case where a Pulp content copy
    claims to succeed, but doesn't put expected content in the target repo.

    While not expected to happen under normal conditions, there have historically
    been a handful of Pulp bugs or operational issues which can trigger this.
    """
    client = fake_controller.client
    iso_dest1 = client.get_repository("iso-dest1").result()
    iso_dest2 = client.get_repository("iso-dest2").result()

    # Make this file exist but not in all the desired repos.
    existing_file = FileUnit(
        path="some-file",
        sha256sum="db68c8a70f8383de71c107dca5fcfe53b1132186d1a6681d9ee3f4eea724fabb",
        size=46,
    )
    fake_controller.insert_units(iso_dest1, [existing_file])
    # Unit is now in iso-dest1.

    # Set up a pushsource backend which requests push of the same content
    # to both (iso-dest1, iso-dest2).
    Source.register_backend(
        "test",
        lambda: [
            FilePushItem(
                # Note: a real push item would have to have 'src' pointing at an
                # existing file here. It's OK to omit that if the checksum exactly
                # matches something already in Pulp.
                name="some-file",
                sha256sum="db68c8a70f8383de71c107dca5fcfe53b1132186d1a6681d9ee3f4eea724fabb",
                dest=["iso-dest1", "iso-dest2"],
            )
        ],
    )

    args = [
        "",
        "--source",
        "test:",
        "--pulp-url",
        "https://pulp.example.com/",
    ]

    # fake_nocopy_push is presumably a push task whose copy operation is a
    # no-op, so the copy to iso-dest2 "succeeds" without taking effect.
    run = functools.partial(entry_point, cls=lambda: fake_nocopy_push)

    # Ask it to push.
    with pytest.raises(SystemExit) as excinfo:
        command_tester.test(
            run,
            args,
            # Can't guarantee a stable log order.
            compare_plaintext=False,
            compare_jsonl=False,
        )

    # It should have failed.
    assert excinfo.value.code == 59

    # It should tell us why it failed.
    msg = (
        "Fatal error: Pulp unit not present in repo(s) iso-dest2 "
        "after copy: FileUnit(path='some-file'"
    )
    assert msg in caplog.text
def test_negative_size():
    """Can't have a FileUnit with a size less than 0."""
    with pytest.raises(ValueError) as error:
        FileUnit(path="hello.txt", sha256sum="a" * 64, size=-30)

    # Validation should complain specifically about the size.
    assert "Not a valid size" in str(error.value)
def test_remove_loads_units(fast_poller, requests_mocker, client):
    """Remove returns unit info loaded from units_successful."""
    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    # Mock the unassociate API to spawn a single task.
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/unassociate/",
        [{"json": {"spawned_tasks": [{"task_id": "task1"}]}}],
    )

    # A mixture of well-known unit types plus one unknown type, as Pulp
    # would report them in units_successful.
    unit_data = [
        {
            "type_id": "iso",
            "unit_key": {"name": "hello.txt", "size": 23, "checksum": "a" * 64},
        },
        {
            "type_id": "rpm",
            "unit_key": {
                "name": "bash",
                "epoch": "0",
                "version": "4.0",
                "release": "1",
                "arch": "x86_64",
            },
        },
        {
            "type_id": "modulemd",
            "unit_key": {
                "name": "module",
                "stream": "s1",
                "version": 1234,
                "context": "a1b2c3",
                "arch": "s390x",
            },
        },
        {
            "type_id": "yum_repo_metadata_file",
            "unit_key": {"data_type": "productid", "repo_id": "some-repo"},
        },
        {"type_id": "bizarre_type", "unit_key": {"whatever": "data"}},
    ]

    # Mock the task search API to report the task finished with the above units.
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            {
                "json": [
                    {
                        "task_id": "task1",
                        "state": "finished",
                        "result": {"units_successful": unit_data},
                    }
                ]
            }
        ],
    )

    tasks = repo.remove_content().result()

    # It should return one task
    assert len(tasks) == 1
    task = tasks[0]

    # It should be the expected successful task
    assert task.id == "task1"
    assert task.completed
    assert task.succeeded

    # It should have loaded expected units from the units_successful dict;
    # the unknown type falls back to a plain Unit.
    assert set(task.units) == set(
        [
            FileUnit(path="hello.txt", size=23, sha256sum="a" * 64),
            RpmUnit(
                name="bash",
                epoch="0",
                version="4.0",
                release="1",
                arch="x86_64",
                sourcerpm=None,
            ),
            ModulemdUnit(
                name="module",
                stream="s1",
                version=1234,
                context="a1b2c3",
                arch="s390x",
            ),
            YumRepoMetadataFileUnit(
                data_type="productid", content_type_id="yum_repo_metadata_file"
            ),
            Unit(content_type_id="bizarre_type"),
        ]
    )
def test_update_push(
    fake_controller, data_path, fake_push, fake_state_path, command_tester, monkeypatch
):
    """Test a more complex push where items already exist in Pulp in a variety
    of different states.
    """
    # For this test we'll force an abnormally small queue size.
    # This will verify that nothing breaks in edge cases such as the queue size
    # being smaller than the batch size.
    monkeypatch.setattr(context, "QUEUE_SIZE", 2)

    # Sanity check that the Pulp server is, initially, empty.
    client = fake_controller.client
    assert list(client.search_content()) == []

    all_rpm_content = client.get_repository("all-rpm-content").result()
    iso_dest1 = client.get_repository("iso-dest1").result()
    dest1 = client.get_repository("dest1").result()

    # Make this RPM exist, but not in all the desired repos.
    existing_rpm = RpmUnit(
        cdn_published=datetime.datetime(2021, 12, 14, 9, 59),
        arch="src",
        filename="test-srpm01-1.0-1.src.rpm",
        md5sum="ba9257ced24f77f4d777e399e67924f5",
        name="test-srpm01",
        version="1.0",
        release="1",
        provides=[],
        requires=[
            RpmDependency(
                epoch="0",
                version="4.6.0",
                release="1",
                flags="LE",
                name="rpmlib(FileDigests)",
            ),
            RpmDependency(
                epoch="0",
                version="3.0.4",
                release="1",
                flags="LE",
                name="rpmlib(CompressedFileNames)",
            ),
        ],
        sha1sum="d9629c034fed3a2f47870fc6fdc78a30c5556e1d",
        sha256sum="54cc4713fe704dfc7a4fd5b398f834ceb6a692f53b0c6aefaf89d88417b4c51d",
        unit_id="existing-rpm-id1",
    )
    fake_controller.insert_units(all_rpm_content, [existing_rpm])

    # Make this file exist, but with an outdated description.
    existing_file = FileUnit(
        cdn_path="/content/origin/files/sha256/db/db68c8a70f8383de71c107dca5fcfe53b1132186d1a6681d9ee3f4eea724fabb/some-iso",
        cdn_published=datetime.datetime(2021, 12, 14, 9, 59),
        description="A wrong description",
        path="some-iso",
        sha256sum="db68c8a70f8383de71c107dca5fcfe53b1132186d1a6681d9ee3f4eea724fabb",
        size=46,
        unit_id="existing-file-id1",
    )
    fake_controller.insert_units(iso_dest1, [existing_file])

    # Make this file exist, but in no repos at all, making it an orphan
    orphan_file = FileUnit(
        cdn_path="/content/origin/files/sha256/d8/d8301c5f72f16455dbc300f3d1bef8972424255caad103cc6c7ba7dc92d90ca8/test.txt",
        cdn_published=datetime.datetime(2021, 12, 14, 9, 59),
        path="test.txt",
        sha256sum="d8301c5f72f16455dbc300f3d1bef8972424255caad103cc6c7ba7dc92d90ca8",
        size=33,
        unit_id="orphan-file-id1",
    )
    fake_controller.insert_units(None, [orphan_file])

    # Make this erratum exist, but with most fields missing
    existing_erratum = ErratumUnit(
        id="RHSA-2020:0509",
        unit_id="existing-erratum-id1",
        # make this have a non-integral version right now so usual bumping
        # does not work
        version="oops-not-integer",
    )
    fake_controller.insert_units(dest1, [existing_erratum])

    # Set it up to find content from our staging dir, which contains a mixture
    # of just about every content type
    stagedir = os.path.join(data_path, "staged-mixed")

    compare_extra = {
        "pulp.yaml": {
            "filename": fake_state_path,
            "normalize": hide_unit_ids,
        }
    }
    args = [
        "",
        "--source",
        "staged:%s" % stagedir,
        "--allow-unsigned",
        "--pulp-url",
        "https://pulp.example.com/",
    ]

    run = functools.partial(entry_point, cls=lambda: fake_push)

    # It should be able to run without crashing.
    command_tester.test(
        run,
        args,
        compare_plaintext=False,
        compare_jsonl=False,
        # This will ensure the Pulp state matches the baseline.
        compare_extra=compare_extra,
    )

    # Pulp state is covered by compare_extra, but let's also explicitly compare
    # the changes we expect on those existing units...
    def find_by_unit_id(unit_id):
        # Helper: fetch exactly one unit by its unit_id.
        found = list(client.search_content(Criteria.with_field("unit_id", unit_id)))
        assert len(found) == 1
        return found[0]

    updated_rpm = find_by_unit_id(existing_rpm.unit_id)
    updated_file = find_by_unit_id(existing_file.unit_id)
    updated_orphan_file = find_by_unit_id(orphan_file.unit_id)
    updated_erratum = find_by_unit_id(existing_erratum.unit_id)

    # RPM after push should be as it was before except that dest1 was added into
    # repository_memberships.
    assert updated_rpm == attr.evolve(
        existing_rpm, repository_memberships=["all-rpm-content", "dest1"]
    )

    # File after push should have the description updated to the desired value.
    #
    # FIX: this previously asserted
    #   updated_file == attr.evolve(updated_file, description="My wonderful ISO")
    # which compares the unit against an evolved copy of ITSELF, so it only
    # ever checked the description field while appearing to check the whole
    # unit. The equivalent check is stated directly here.
    assert updated_file.description == "My wonderful ISO"

    # Orphaned file after push should be as it was before except no longer an orphan.
    assert updated_orphan_file == attr.evolve(
        orphan_file,
        repository_memberships=["iso-dest1", "iso-dest2"],
    )

    # Erratum after push should be updated. The full update will not be tested here
    # as it's extremely verbose, we'll just sample some fields. But, critically,
    # the 'version' field (which was not an integer in pulp) should have been
    # simply overwritten with the input rather than bumped.
    assert updated_erratum.title == "Important: sudo security update"
    assert updated_erratum.pkglist
    assert updated_erratum.version == "3"
def test_clear_file_repo(command_tester, fake_collector):
    """Clearing a repo with file content succeeds."""
    task_instance = FakeClearRepo()

    repo = FileRepository(
        id="some-filerepo",
        eng_product_id=123,
        relative_url="some/publish/url",
        mutable_urls=["mutable1", "mutable2"],
    )

    fakepulp = task_instance.pulp_client_controller
    fakepulp.insert_repository(repo)
    fakepulp.insert_units(
        repo,
        [
            FileUnit(path="hello.txt", size=123, sha256sum="a" * 64),
            FileUnit(path="with/subdir.json", size=0, sha256sum="b" * 64),
        ],
    )

    # It should run with expected output.
    command_tester.test(
        task_instance.main,
        [
            "test-clear-repo",
            "--pulp-url",
            "https://pulp.example.com/",
            "--pulp-insecure",
            "--fastpurge-host",
            "fakehost-xxx.example.net",
            "--fastpurge-client-secret",
            "abcdef",
            "--fastpurge-client-token",
            "efg",
            "--fastpurge-access-token",
            "tok",
            "--fastpurge-root-url",
            "https://cdn.example.com/",
            "--udcache-url",
            "https://ud.example.com/",
            "some-filerepo",
        ],
    )

    # It should record that it removed these push items:
    assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
        {
            "state": "DELETED",
            "origin": "pulp",
            "filename": "hello.txt",
            "checksums": {"sha256": "a" * 64},
        },
        {
            "state": "DELETED",
            "origin": "pulp",
            "filename": "with/subdir.json",
            "checksums": {"sha256": "b" * 64},
        },
    ]

    # It should have published the Pulp repo
    assert [hist.repository.id for hist in fakepulp.publish_history] == [
        "some-filerepo"
    ]

    # It should have flushed these URLs
    assert sorted(task_instance.fastpurge_client.purged_urls) == [
        "https://cdn.example.com/some/publish/url/mutable1",
        "https://cdn.example.com/some/publish/url/mutable2",
    ]

    # It should have flushed these UD objects
    assert task_instance.udcache_client.flushed_repos == ["some-filerepo"]
    assert task_instance.udcache_client.flushed_products == [123]
def test_delete_files(command_tester, fake_collector, monkeypatch):
    """Deleting files from repos succeeds"""
    repo1 = FileRepository(
        id="some-filerepo",
        eng_product_id=123,
        relative_url="some/publish/url",
        mutable_urls=["mutable1", "mutable2"],
    )
    repo2 = FileRepository(
        id="other-filerepo",
        eng_product_id=123,
        relative_url="other/publish/url",
        mutable_urls=["mutable1", "mutable2"],
    )

    files1 = [
        FileUnit(path="hello.iso", size=123, sha256sum="a" * 64, unit_id="files1_f1"),
        FileUnit(path="some.iso", size=454435, sha256sum="b" * 64, unit_id="files1_f2"),
    ]
    files2 = [
        FileUnit(path="other.iso", size=123, sha256sum="a" * 64, unit_id="files2_f1")
    ]

    with FakeDeletePackages() as task_instance:
        task_instance.pulp_client_controller.insert_repository(repo1)
        task_instance.pulp_client_controller.insert_repository(repo2)
        task_instance.pulp_client_controller.insert_units(repo1, files1)
        task_instance.pulp_client_controller.insert_units(repo2, files2)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        # Note: --repo only names some-filerepo, so only files from that repo
        # should end up deleted; "random.txt" does not exist anywhere.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-filerepo",
                "--file",
                "some.iso,hello.iso",
                "--file",
                "other.iso",
                "--file",
                "random.txt",
            ],
        )

        # deleted units are collected
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-filerepo",
                "checksums": {"sha256": "a" * 64},
                "signing_key": None,
                "filename": "hello.iso",
            },
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-filerepo",
                "checksums": {"sha256": "b" * 64},
                "signing_key": None,
                "filename": "some.iso",
            },
        ]

        # verify whether files were deleted on Pulp
        client = task_instance.pulp_client

        # get the repo where the files were deleted
        repos = list(
            client.search_repository(Criteria.with_id("some-filerepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        unit_ids = []
        for f in files1:
            unit_ids.append(f.unit_id)
        criteria = Criteria.with_field("unit_id", Matcher.in_(unit_ids))

        # deleted files are not in the repo
        files = list(repo.search_content(criteria).result())
        assert len(files) == 0

        # same files exist on Pulp as orphans
        files_search = list(client.search_content(criteria).result())
        assert len(files_search) == 2
def test_bad_sum():
    """Can't have a FileUnit with an invalid checksum string."""
    with pytest.raises(ValueError) as error:
        FileUnit(path="hello.txt", sha256sum="fake-sum", size=30)

    # Validation should complain specifically about the checksum.
    assert "Not a valid SHA256" in str(error.value)