def adjust_maintenance_report(self, report):
    """Remove entries from maintenance report by repo ids or repo url regex or both"""
    ids_in_report = [entry.repo_id for entry in report.entries]
    to_remove = []

    if self.args.repo_url_regex:
        # Look up every repo currently present in the report and collect
        # those whose relative URL matches the requested regex.
        repos_in_report = self.pulp_client.search_repository(
            Criteria.with_id(ids_in_report)).result()
        to_remove.extend(
            repo.id
            for repo in repos_in_report
            if repo.relative_url
            and re.search(self.args.repo_url_regex, repo.relative_url)
        )

    if self.args.repo_ids:
        for candidate_id in self.args.repo_ids:
            if candidate_id in ids_in_report:
                to_remove.append(candidate_id)
            else:
                # Requested id isn't in the report - nothing to remove.
                LOG.warning("Repository %s is not in maintenance mode", candidate_id)

    if to_remove:
        LOG.info(
            "Following repositories will be removed from maintenance mode:"
        )
        for repo_id in to_remove:
            LOG.info(" - %s", repo_id)
        report = report.remove(to_remove, owner=self.args.owner)

    return report
def check_repos(self):
    """Return the repo objects to publish, failing if any requested repo
    is missing or nothing is left to publish."""
    self._sanitize_repo_ids_args()

    # Apply the filters to the requested ids first.
    requested_ids = self._filter_repos(self.args.repo_ids)

    found = []
    found_ids = []
    for repo in self.pulp_client.search_repository(Criteria.with_id(requested_ids)):
        found.append(repo)
        found_ids.append(repo.id)

    # Bail out if user requested repos which don't exist,
    # or if there are no repos returned to publish.
    missing = sorted(set(requested_ids) - set(found_ids))
    if missing:
        self.fail("Requested repo(s) don't exist: %s", ", ".join(missing))
    if not found:
        self.fail("No repo(s) found to publish")

    return found
def run_with_client(self, client):
    # By the time this runs, every item already exists with the desired
    # state in the desired repos; what remains is publishing the affected
    # repos.
    #
    # This is also a synchronization point: publishing a repo makes content
    # available, and there may be dependencies between the bits of content
    # we've handled, so *all* of them should be in the correct repos before
    # publish of *any* repo starts, to increase the chance that all of them
    # land at once.
    #
    # TODO: once exodus is live, consider refactoring this to not be a
    # synchronization point (or make it optional?) as the above motivation
    # goes away - the CDN origin supports near-atomic update.
    repo_ids = set()
    needs_cdn_published = set()
    items = []

    for item in self.iter_input():
        repo_ids.update(item.publish_pulp_repos)

        # Any unit which supports cdn_published but hasn't had it set yet
        # should have it set once the publish completes.
        unit = item.pulp_unit
        if hasattr(unit, "cdn_published") and unit.cdn_published is None:
            needs_cdn_published.add(unit)

        items.append(item)

    # Locate all the repos for publish.
    repo_fs = client.search_repository(Criteria.with_id(sorted(repo_ids)))

    # Start publishing them, including cache flushes, then wait for all
    # publishes to finish.
    publish_fs = self.publish_with_cache_flush(repo_fs, needs_cdn_published, client)
    for publish_f in publish_fs:
        publish_f.result()

    # At this stage we consider all items to be fully "pushed".
    pushed_items = [
        attr.evolve(
            item,
            pushsource_item=attr.evolve(item.pushsource_item, state="PUSHED"),
        )
        for item in items
    ]
    self.update_push_items(pushed_items)

    # Mark as done for accurate progress logs.
    # We don't keep track of exactly which items got published through each
    # repo, so this simply shows everything moving from in-progress to done
    # at once.
    for _ in pushed_items:
        self.in_queue.task_done()

    # Nothing more happens to the push items, so tell the collector that
    # we're finished.
    self.update_push_items([self.FINISHED])
def get_advisories(self, advisory_ids):
    """Return the erratum units matching the given advisory ids.

    Fails the task if no matching advisories are found at all.
    """
    # Fixed: the original had a duplicated assignment
    # ("criteria = criteria = ...").
    criteria = Criteria.and_(
        Criteria.with_unit_type(ErratumUnit),
        Criteria.with_id(advisory_ids)
    )
    adv_f = self.search_content(criteria)
    # list() rather than a copying comprehension.
    advisories = list(adv_f.result())
    if not advisories:
        self.fail("Advisory(ies) not found: %s", ", ".join(sorted(advisory_ids)))
    return advisories
def get_repository(self, repository_id):
    """Return a Future resolving to the repository with the given id.

    Raises TypeError if repository_id is not a string; returns a failed
    Future with PulpException if no such repository exists.
    """
    if not isinstance(repository_id, six.string_types):
        # Fixed: the error message previously interpolated the `id`
        # builtin rather than the offending argument.
        raise TypeError("Invalid argument: id=%s" % repository_id)
    data = self.search_repository(
        Criteria.with_id(repository_id)).result().data
    if len(data) != 1:
        return f_return_error(
            PulpException("Repository id=%s not found" % repository_id))
    return f_return(data[0])
def test_can_search_id():
    """Can search for a repo by ID with fake client."""
    controller = FakeController()
    repos = [Repository(id="repo1"), Repository(id="repo2")]
    for repo in repos:
        controller.insert_repository(repo)

    crit = Criteria.with_id("repo1")
    results = controller.client.search_repository(crit).data

    # Only the matching repo is returned.
    assert results == [repos[0]]
def _ensure_repos_exist(self, repo_ids):
    """Check that the given repositories exist on the Pulp server.

    Ids of repositories which don't exist are dropped from the result,
    with a warning logged for each.
    """
    found_repos = self.pulp_client.search_repository(
        Criteria.with_id(repo_ids)
    ).result()
    found_ids = [repo.id for repo in found_repos]

    absent_ids = set(repo_ids) - set(found_ids)
    if absent_ids:
        LOG.warning("Didn't find following repositories:")
        for repo_id in absent_ids:
            LOG.warning(" - %s", repo_id)

    return sorted(found_ids)
def test_can_search_ids():
    """Can search for a repo by list of IDs with fake client."""
    controller = FakeController()
    repos = [Repository(id="repo1"), Repository(id="repo2"), Repository(id="repo3")]
    for repo in repos:
        controller.insert_repository(repo)

    # "other-id" doesn't exist and should simply not match.
    crit = Criteria.with_id(["repo1", "repo3", "other-id"])
    results = controller.client.search_repository(crit).data

    assert sorted(results) == [repos[0], repos[2]]
def test_search_or():
    """OR criteria match repos satisfying either branch."""
    controller = FakeController()
    repos = [Repository(id="repo1"), Repository(id="repo2"), Repository(id="repo3")]
    for repo in repos:
        controller.insert_repository(repo)

    crit = Criteria.or_(
        Criteria.with_id("repo3"),
        Criteria.with_field("id", Matcher.equals("repo1")),
    )
    results = controller.client.search_repository(crit).data

    assert sorted(results) == [repos[0], repos[2]]
def test_search_and():
    """AND criteria match only repos satisfying both branches."""
    controller = FakeController()
    repo1 = Repository(id="repo1")
    repo2 = Repository(id="repo2", created=datetime.datetime.utcnow())
    repo3 = Repository(id="repo3", created=datetime.datetime.utcnow())
    for repo in (repo1, repo2, repo3):
        controller.insert_repository(repo)

    # Only repo2 has both a created date and the requested id.
    crit = Criteria.and_(
        Criteria.with_field("notes.created", Criteria.exists),
        Criteria.with_id("repo2"),
    )
    results = controller.client.search_repository(crit).data

    assert sorted(results) == [repo2]
def get_repos(self):
    # Returns all repos to be operated on by this task.
    #
    # The repos are loaded eagerly so we fail early if the user passed
    # any nonexistent repo.
    requested_ids = self.args.repo

    repos = []
    found_ids = []
    search = self.pulp_client.search_repository(Criteria.with_id(requested_ids))
    for repo in search.result():
        repos.append(repo)
        found_ids.append(repo.id)

    # Bail out if user requested repos which don't exist.
    missing = sorted(set(requested_ids) - set(found_ids))
    if missing:
        self.fail("Requested repo(s) don't exist: %s", ", ".join(missing))

    # Bail out if we'd be processing any container image repos.
    # We don't support this now because:
    #
    # - recording push items isn't implemented yet and it's not clear
    #   how to implement it (as we traditionally used docker-image-*.tar.gz
    #   filenames from brew as push item filename, but those aren't available
    #   in pulp metadata)
    #
    # - no known use-case for clearing them
    #
    container_repo_ids = sorted(
        repo.id for repo in repos if isinstance(repo, ContainerImageRepository)
    )
    if container_repo_ids:
        self.fail(
            "Container image repo(s) provided, not supported: %s"
            % ", ".join(sorted(container_repo_ids))
        )

    return repos
def test_delete_rpms(command_tester, fake_collector, monkeypatch):
    """Deleting RPMs from repos succeeds"""

    def make_rpm(name, version, arch, filename, unit_id):
        # All test RPMs share the same release, checksums and signing key.
        return RpmUnit(
            name=name,
            version=version,
            release="1.test8",
            arch=arch,
            filename=filename,
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            unit_id=unit_id,
        )

    def deleted_item(filename, dest):
        # Expected push item shape recorded by the collector on deletion.
        return {
            "origin": "pulp",
            "src": None,
            "dest": dest,
            "signing_key": None,
            "filename": filename,
            "state": "DELETED",
            "build": None,
            "checksums": {"sha256": "a" * 64},
        }

    repo1 = YumRepository(
        id="some-yumrepo", relative_url="some/publish/url", mutable_urls=["repomd.xml"]
    )
    repo2 = YumRepository(
        id="other-yumrepo",
        relative_url="other/publish/url",
        mutable_urls=["repomd.xml"],
    )

    files1 = [
        make_rpm("bash", "1.23", "x86_64", "bash-1.23-1.test8_x86_64.rpm", "file1_rpm1"),
        make_rpm("dash", "2.25", "x86_64", "dash-2.25-1.test8_x86_64.rpm", "file1_rpm2"),
        ModulemdUnit(
            name="mymod",
            stream="s1",
            version=123,
            context="a1c2",
            arch="s390x",
            unit_id="file1_mod1",
        ),
    ]
    files2 = [
        make_rpm("crash", "3.30", "s390x", "crash-3.30-1.test8_s390x.rpm", "file2_rpm1")
    ]
    files3 = [
        make_rpm("rash", "1.30", "noarch", "rash-1.30-1.test8_noarch.rpm", "file3_rpm1")
    ]
    undeleted = [
        make_rpm(
            "exist", "1.34", "noarch", "exist-1.34-1.test8_noarch.rpm", "undeleted_rpm1"
        )
    ]

    # files3 lives in both repos; "exist" stays behind in repo1.
    files1.extend(files3)
    files1.extend(undeleted)
    files2.extend(files3)

    with FakeDeletePackages() as task_instance:
        controller = task_instance.pulp_client_controller
        controller.insert_repository(repo1)
        controller.insert_repository(repo2)
        controller.insert_units(repo1, files1)
        controller.insert_units(repo2, files2)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-yumrepo,other-yumrepo",
                "--repo",
                "some-other-repo",
                "--file",
                "bash-1.23-1.test8_x86_64.rpm",
                "--file",
                "dash-2.25-1.test8_x86_64.rpm,crash-3.30-1.test8_s390x.rpm",
                "--file",
                "trash-1.0-1.test8_noarch.rpm,rash-1.30-1.test8_noarch.rpm",
                "--signing-key",
                "aabbcc",
            ],
        )

        # It should record that it removed these push items:
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            deleted_item("bash-1.23-1.test8.x86_64.rpm", "some-yumrepo"),
            deleted_item("crash-3.30-1.test8.s390x.rpm", "other-yumrepo"),
            deleted_item("dash-2.25-1.test8.x86_64.rpm", "some-yumrepo"),
            deleted_item("rash-1.30-1.test8.noarch.rpm", "other-yumrepo"),
            deleted_item("rash-1.30-1.test8.noarch.rpm", "some-yumrepo"),
        ]

        # Verify the files really were deleted on Pulp.
        client = task_instance.pulp_client

        # Get the repos where the files were deleted.
        repos = sorted(
            client.search_repository(
                Criteria.with_id(["some-yumrepo", "other-yumrepo"])
            ).result(),
            key=lambda r: r.id,
        )
        assert len(repos) == 2
        r2, r1 = repos
        assert r1.id == repo1.id
        assert r2.id == repo2.id

        # Criteria matching the unit_ids of each file set.
        criteria1 = Criteria.with_field(
            "unit_id", Matcher.in_([f.unit_id for f in files1])
        )
        criteria2 = Criteria.with_field(
            "unit_id", Matcher.in_([f.unit_id for f in files2])
        )

        # Everything from files1 is gone from repo1 except the undeleted
        # rpm and the module.
        result1 = sorted(
            r1.search_content(criteria1).result(), key=lambda v: v.unit_id
        )
        assert len(result1) == 2
        # modulemd in files1
        assert result1[0].unit_id == files1[2].unit_id
        # undeleted file
        assert result1[1].unit_id == undeleted[0].unit_id

        # files are not in repo2
        result2 = list(r2.search_content(criteria1).result())
        assert len(result2) == 0

        # All the files still exist on Pulp (as orphans).
        files_search = list(
            client.search_content(Criteria.or_(criteria1, criteria2)).result()
        )
        assert len(files_search) == 6
def test_delete_unsigned_rpms(command_tester, fake_collector, monkeypatch):
    """Deleting unsigned RPMs from repos succeeds"""
    repo = YumRepository(
        id="some-yumrepo", relative_url="some/publish/url", mutable_urls=["repomd.xml"]
    )
    files = [
        RpmUnit(
            name="signed",
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename="signed-1.23-1.test8_x86_64.rpm",
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            unit_id="signed_rpm",
        ),
        RpmUnit(
            name="unsigned",
            version="2.25",
            release="1.test8",
            arch="x86_64",
            filename="unsigned-2.25-1.test8_x86_64.rpm",
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key=None,
            unit_id="unsigned_rpm",
        ),
    ]
    with FakeDeletePackages() as task_instance:
        task_instance.pulp_client_controller.insert_repository(repo)
        task_instance.pulp_client_controller.insert_units(repo, files)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-yumrepo",
                "--file",
                "unsigned-2.25-1.test8_x86_64.rpm,signed-1.23-1.test8_x86_64.rpm",
                "--allow-unsigned",
            ],
        )

        # It should record that it removed these push items:
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            {
                "origin": "pulp",
                "src": None,
                "dest": "some-yumrepo",
                "signing_key": None,
                "filename": "unsigned-2.25-1.test8.x86_64.rpm",
                "state": "DELETED",
                "build": None,
                "checksums": {"sha256": "a" * 64},
            }
        ]

        # verify whether files were deleted on Pulp
        client = task_instance.pulp_client

        # get the repo where the files were deleted
        repos = list(
            client.search_repository(Criteria.with_id("some-yumrepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        # criteria with the unit_ids
        unit_ids = []
        for f in files:
            unit_ids.append(f.unit_id)
        criteria = Criteria.with_field("unit_id", Matcher.in_(unit_ids))

        # unsigned RPM is deleted, only signed RPM left in the repo
        result_files = list(repo.search_content(criteria).result())
        assert len(result_files) == 1
        # Fixed: assert on the search result rather than the input fixture
        # (the original checked files[0].filename, which is trivially true).
        assert result_files[0].filename == "signed-1.23-1.test8_x86_64.rpm"
def test_delete_modules(command_tester, fake_collector, monkeypatch):
    """Deleting modules and their artifacts from repos succeeds"""
    repo = YumRepository(
        id="some-yumrepo", relative_url="some/publish/url", mutable_urls=["repomd.xml"]
    )

    def make_rpm(name, filename, unit_id):
        # Both artifact RPMs differ only in name/filename/unit_id.
        return RpmUnit(
            name=name,
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename=filename,
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            provides=[],
            requires=[],
            unit_id=unit_id,
        )

    files = [
        make_rpm("bash", "bash-1.23-1.test8_x86_64.rpm", "rpm1"),
        make_rpm("dash", "dash-1.23-1.test8_x86_64.rpm", "rpm2"),
        ModulemdUnit(
            name="mymod",
            stream="s1",
            version=123,
            context="a1c2",
            arch="s390x",
            artifacts=["bash-0:1.23-1.test8_x86_64", "dash-0:1.23-1.test8_x86_64"],
            unit_id="module1",
        ),
    ]

    with FakeDeletePackages() as task_instance:
        controller = task_instance.pulp_client_controller
        controller.insert_repository(repo)
        controller.insert_units(repo, files)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-yumrepo",
                "--file",
                "mymod:s1:123:a1c2:s390x",
                "--signing-key",
                "aabbcc",
            ],
        )

        # The module and both of its artifact RPMs are recorded as deleted.
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-yumrepo",
                "checksums": {"sha256": "a" * 64},
                "signing_key": None,
                "filename": "bash-1.23-1.test8.x86_64.rpm",
            },
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-yumrepo",
                "checksums": {"sha256": "a" * 64},
                "signing_key": None,
                "filename": "dash-1.23-1.test8.x86_64.rpm",
            },
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-yumrepo",
                "checksums": None,
                "signing_key": None,
                "filename": "mymod:s1:123:a1c2:s390x",
            },
        ]

        # verify whether files were deleted on Pulp
        client = task_instance.pulp_client

        # get the repo where the files were deleted
        repos = list(
            client.search_repository(Criteria.with_id("some-yumrepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        # criteria with the unit_ids
        criteria = Criteria.with_field(
            "unit_id", Matcher.in_([f.unit_id for f in files])
        )

        # deleted files are not in the repo
        remaining = list(repo.search_content(criteria).result())
        assert len(remaining) == 0

        # same files exist on Pulp as orphans
        orphans = list(client.search_content(criteria).result())
        assert len(orphans) == 3
def test_delete_files(command_tester, fake_collector, monkeypatch):
    """Deleting files from repos succeeds"""
    repo1 = FileRepository(
        id="some-filerepo",
        eng_product_id=123,
        relative_url="some/publish/url",
        mutable_urls=["mutable1", "mutable2"],
    )
    repo2 = FileRepository(
        id="other-filerepo",
        eng_product_id=123,
        relative_url="other/publish/url",
        mutable_urls=["mutable1", "mutable2"],
    )

    files1 = [
        FileUnit(path="hello.iso", size=123, sha256sum="a" * 64, unit_id="files1_f1"),
        FileUnit(path="some.iso", size=454435, sha256sum="b" * 64, unit_id="files1_f2"),
    ]
    files2 = [
        FileUnit(path="other.iso", size=123, sha256sum="a" * 64, unit_id="files2_f1")
    ]

    with FakeDeletePackages() as task_instance:
        controller = task_instance.pulp_client_controller
        controller.insert_repository(repo1)
        controller.insert_repository(repo2)
        controller.insert_units(repo1, files1)
        controller.insert_units(repo2, files2)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-filerepo",
                "--file",
                "some.iso,hello.iso",
                "--file",
                "other.iso",
                "--file",
                "random.txt",
            ],
        )

        # Deleted units are collected; only repo1's files were requested
        # against an existing repo, so only those show up.
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-filerepo",
                "checksums": {"sha256": "a" * 64},
                "signing_key": None,
                "filename": "hello.iso",
            },
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-filerepo",
                "checksums": {"sha256": "b" * 64},
                "signing_key": None,
                "filename": "some.iso",
            },
        ]

        # verify whether files were deleted on Pulp
        client = task_instance.pulp_client

        # get the repo where the files were deleted
        repos = list(
            client.search_repository(Criteria.with_id("some-filerepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        criteria = Criteria.with_field(
            "unit_id", Matcher.in_([f.unit_id for f in files1])
        )

        # deleted files are not in the repo
        remaining = list(repo.search_content(criteria).result())
        assert len(remaining) == 0

        # same files exist on Pulp as orphans
        orphans = list(client.search_content(criteria).result())
        assert len(orphans) == 2
def get_erratum_from_pulp(self, advisory_id):
    """Search Pulp for erratum content matching the given advisory id."""
    criteria = Criteria.and_(
        Criteria.with_unit_type(ErratumUnit),
        Criteria.with_id(advisory_id),
    )
    return self.pulp_client.search_content(criteria=criteria).result()
def test_delete_advisory(command_tester, fake_collector, monkeypatch):
    """Deletion of packages and modules in advisories from provided repos succeeds"""
    repo1 = YumRepository(
        id="some-yumrepo",
        relative_url="some/publish/url",
        mutable_urls=["repomd.xml"],
    )
    repo2 = YumRepository(
        id="some-other-repo",
        relative_url="other/publish/url",
        mutable_urls=["repomd.xml"],
    )

    def make_erratum_package(name, filename):
        return ErratumPackage(
            name=name,
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename=filename,
            sha256sum="a" * 64,
            md5sum="b" * 32,
        )

    # One module-only collection plus one RPM collection.
    pkglist = [
        ErratumPackageCollection(
            name="colection-0",
            packages=None,
            short="",
            module=ErratumModule(
                name="mymod", stream="s1", version="123", context="a1c2", arch="s390x"
            ),
        ),
        ErratumPackageCollection(
            name="collection-1",
            packages=[
                make_erratum_package("bash", "bash-1.23-1.test8_x86_64.rpm"),
                make_erratum_package("dash", "dash-1.23-1.test8_x86_64.rpm"),
            ],
            short="",
            module=None,
        ),
    ]

    def make_rpm(name, filename, unit_id):
        return RpmUnit(
            name=name,
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename=filename,
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            unit_id=unit_id,
        )

    files1 = [
        make_rpm("bash", "bash-1.23-1.test8_x86_64.rpm", "files1_rpm1"),
        make_rpm("dash", "dash-1.23-1.test8_x86_64.rpm", "files1_rpm2"),
        make_rpm(
            "crash",
            "crash-1.23-1.test8.module+el8.0.0+3049+59fd2bba.x86_64.rpm",
            "files1_rpm3",
        ),
        ModulemdUnit(
            name="mymod",
            stream="s1",
            version=123,
            context="a1c2",
            arch="s390x",
            artifacts=["crash-0:1.23-1.test8.module+el8.0.0+3049+59fd2bba.x86_64"],
            unit_id="files1_mod1",
        ),
        ErratumUnit(
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
            id="RHSA-1111:22",
            summary="Dummy erratum",
            content_type_id="erratum",
            repository_memberships=["some-yumrepo"],
            pkglist=pkglist,
        ),
    ]
    files2 = [
        ErratumUnit(
            unit_id="x4e73262-e239-44ac-629f-6fbed82c07cd",
            id="RHBA-1001:22",
            summary="Other erratum",
            content_type_id="erratum",
            repository_memberships=["some-other-repo"],
            pkglist=[],
        ),
    ]

    def deleted_item(filename, checksums):
        return {
            "origin": "pulp",
            "src": None,
            "dest": "some-yumrepo",
            "signing_key": None,
            "filename": filename,
            "state": "DELETED",
            "build": None,
            "checksums": checksums,
        }

    with FakeDeleteAdvisory() as task_instance:
        controller = task_instance.pulp_client_controller
        controller.insert_repository(repo1)
        controller.insert_repository(repo2)
        controller.insert_units(repo1, files1)
        controller.insert_units(repo2, files2)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-yumrepo,other-yumrepo",
                "--advisory",
                "RHSA-1111:22",
                "--advisory",
                "RHBA-1001:22",
            ],
        )

        # RPMs and the module referenced by the advisory are recorded
        # as deleted push items.
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            deleted_item("bash-1.23-1.test8.x86_64.rpm", {"sha256": "a" * 64}),
            deleted_item("crash-1.23-1.test8.x86_64.rpm", {"sha256": "a" * 64}),
            deleted_item("dash-1.23-1.test8.x86_64.rpm", {"sha256": "a" * 64}),
            deleted_item("mymod:s1:123:a1c2:s390x", None),
        ]

        # verify whether the rpms and modules were deleted from the repo on Pulp
        client = task_instance.pulp_client

        # effectively only some-yumrepo(repo1) was modified
        repos = list(
            client.search_repository(Criteria.with_id("some-yumrepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        # Removed units: RPMs from the erratum package list, the module
        # from the erratum package list, and the package in that module.
        unit_ids = ["files1_rpm1", "files1_rpm2", "files1_mod1", "files1_rpm3"]
        criteria = Criteria.with_field("unit_id", Matcher.in_(unit_ids))

        # deleted files are not in the repo
        remaining = list(repo.search_content(criteria).result())
        assert len(remaining) == 0

        # same files exist on Pulp as orphans
        orphans = list(client.search_content(criteria).result())
        assert len(orphans) == 4
def get_repo_from_pulp(self, repo_id):
    """Return the repositories matching repo_id as a list (0 or 1 elements)."""
    crit = Criteria.with_id(repo_id)
    resp = self.pulp_client.search_repository(criteria=crit)
    # list() instead of a copying comprehension (the original used
    # "[r for r in resp.result()]").
    return list(resp.result())
def search_repo(self, repo_ids):
    """Search Pulp for repositories with the given ids; blocks until done."""
    criteria = Criteria.with_id(repo_ids)
    return self.pulp_client.search_repository(criteria).result()
def test_delete_advisory_no_repos_provided(command_tester, fake_collector, monkeypatch):
    """Deletion of packages succeeds in all the repos when the same advisory
    is present in multiple repos and repos are not provided in the request"""
    repo1 = YumRepository(
        id="some-yumrepo",
        relative_url="some/publish/url",
        mutable_urls=["repomd.xml"],
    )
    repo2 = YumRepository(
        id="other-yumrepo",
        relative_url="other/publish/url",
        mutable_urls=["repomd.xml"],
    )

    def make_erratum_package(name, filename):
        return ErratumPackage(
            name=name,
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename=filename,
            sha256sum="a" * 64,
            md5sum="b" * 32,
        )

    pkglist = [
        ErratumPackageCollection(
            name="collection-1",
            packages=[
                make_erratum_package("bash", "bash-1.23-1.test8_x86_64.rpm"),
                make_erratum_package("dash", "dash-1.23-1.test8_x86_64.rpm"),
            ],
            short="",
            module=None,
        ),
    ]

    def make_rpm(name, filename, unit_id):
        return RpmUnit(
            name=name,
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename=filename,
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            unit_id=unit_id,
        )

    files = [
        make_rpm("bash", "bash-1.23-1.test8_x86_64.rpm", "files1_rpm1"),
        make_rpm("dash", "dash-1.23-1.test8_x86_64.rpm", "files1_rpm2"),
        make_rpm(
            "crash",
            "crash-1.23-1.test8.module+el8.0.0+3049+59fd2bba.x86_64.rpm",
            "files1_rpm3",
        ),
        ErratumUnit(
            unit_id="x4e73262-e239-44ac-629f-6fbed82c07cd",
            id="RHBA-1001:22",
            summary="Other erratum",
            content_type_id="erratum",
            # The advisory is a member of both repos.
            repository_memberships=["some-yumrepo", "other-yumrepo"],
            pkglist=pkglist,
        ),
    ]

    def deleted_item(filename, dest):
        return {
            "build": None,
            "checksums": {"sha256": "a" * 64},
            "dest": dest,
            "filename": filename,
            "origin": "pulp",
            "signing_key": None,
            "src": None,
            "state": "DELETED",
        }

    with FakeDeleteAdvisory() as task_instance:
        controller = task_instance.pulp_client_controller
        controller.insert_repository(repo1)
        controller.insert_repository(repo2)
        controller.insert_units(repo1, files)
        controller.insert_units(repo2, files)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output. Note: no --repo is passed.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--advisory",
                "RHBA-1001:22",
            ],
        )

        # Both advisory packages are deleted from both member repos.
        assert sorted(
            fake_collector.items, key=lambda pi: (pi["filename"], pi["dest"])
        ) == [
            deleted_item("bash-1.23-1.test8.x86_64.rpm", "other-yumrepo"),
            deleted_item("bash-1.23-1.test8.x86_64.rpm", "some-yumrepo"),
            deleted_item("dash-1.23-1.test8.x86_64.rpm", "other-yumrepo"),
            deleted_item("dash-1.23-1.test8.x86_64.rpm", "some-yumrepo"),
        ]

        # verify whether the rpms were deleted from the repo on Pulp
        client = task_instance.pulp_client

        # get all the repos
        repos = list(
            client.search_repository(Criteria.with_id("some-yumrepo")).result()
        )
        assert len(repos) == 1
        repo1 = repos[0]

        repos = list(
            client.search_repository(Criteria.with_id("other-yumrepo")).result()
        )
        assert len(repos) == 1
        repo2 = repos[0]

        # RPMs from the erratum package list.
        unit_ids = ["files1_rpm1", "files1_rpm2"]
        criteria = Criteria.with_field("unit_id", Matcher.in_(unit_ids))

        # deleted packages from the advisory are not in either repo
        remaining1 = list(repo1.search_content(criteria).result())
        assert len(remaining1) == 0
        remaining2 = list(repo2.search_content(criteria).result())
        assert len(remaining2) == 0

        # same files exist on Pulp as orphans
        orphans = list(client.search_content(criteria).result())
        assert len(orphans) == 2