def test_field_less_than_criteria():
    """with_field with less_than is translated as expected for date and non-date types"""
    when = datetime.datetime(2019, 8, 27, 0, 0, 0)

    numeric_crit = Criteria.with_field("num_field", Matcher.less_than(5))
    date_crit = Criteria.with_field("date_field", Matcher.less_than(when))

    # Plain values map straight to $lt; datetimes are wrapped in a $date document.
    assert filters_for_criteria(numeric_crit) == {"num_field": {"$lt": 5}}
    assert filters_for_criteria(date_crit) == {
        "date_field": {"$lt": {"$date": "2019-08-27T00:00:00Z"}}
    }
def clean_all_rpm_content(self):
    """Delete old units from the all-rpm-content repo.

    Units whose cdn_published date is older than the configured
    arc_threshold (in days) are removed.
    """
    LOG.info("Start old all-rpm-content deletion")
    threshold_days = self.args.arc_threshold

    # Units published on the CDN before this cutoff are eligible for removal.
    cutoff = datetime.utcnow() - timedelta(days=threshold_days)
    removal_criteria = Criteria.and_(
        Criteria.with_unit_type(RpmUnit),
        Criteria.with_field("cdn_published", Matcher.less_than(cutoff)),
    )

    clean_repos = list(
        self.pulp_client.search_repository(
            Criteria.with_field("id", "all-rpm-content")
        )
    )
    if not clean_repos:
        LOG.info("No repos found for cleaning.")
        return

    arc_repo = clean_repos[0]
    removal_tasks = list(arc_repo.remove_content(criteria=removal_criteria))

    deleted_units = []
    for task in removal_tasks:
        if task.repo_id == "all-rpm-content":
            for unit in task.units:
                LOG.info("Old all-rpm-content deleted: %s", unit.name)
                deleted_units.append(unit)

    if not deleted_units:
        LOG.info("No all-rpm-content found older than %s", threshold_days)
def run(self):
    """Garbage-collect temporary publish repos older than the gc threshold."""
    LOG.debug("Garbage collection begins")

    # Only temp repos which record a creation date are candidates.
    gc_criteria = Criteria.and_(
        Criteria.with_field("notes.created", Matcher.exists()),
        Criteria.with_field("notes.pub_temp_repo", True),
    )
    repos = self.pulp_client.search_repository(gc_criteria).result()
    LOG.debug("repos fetched")

    threshold_days = self.args.gc_threshold
    delete_tasks = []
    for repo in repos.as_iter():
        age = datetime.utcnow() - repo.created
        if age > timedelta(days=threshold_days):
            LOG.info("Deleting %s (created on %s)", repo.id, repo.created)
            delete_tasks.append(repo.delete())

    if not delete_tasks:
        LOG.info("No repo(s) found older than %s day(s)", threshold_days)

    # Surface any errors reported by the deletion tasks.
    for task in delete_tasks:
        outcome = task.result()[0]
        if outcome.error_details or outcome.error_summary:
            LOG.error(outcome.error_details or outcome.error_summary)

    LOG.info("Temporary repo(s) deletion completed")
def test_stringify_complex_criteria():
    """A nested and/or criteria renders to the expected readable string."""
    crit = Criteria.and_(
        Criteria.with_field("must-exist", Matcher.exists()),
        Criteria.with_field("foo", Matcher.equals("bar")),
        Criteria.true(),
        Criteria.or_(
            Criteria.with_field("foo", Matcher.regex("quux")),
            Criteria.with_field("other", Matcher.in_(["x", "y", "z"])),
            Criteria.with_field("num", Matcher.less_than(9000)),
        ),
        Criteria.with_unit_type(FileUnit),
    )
    expected = (
        "((must-exist EXISTS) AND foo=='bar' AND TRUE "
        "AND (foo=~/quux/ OR (other IN ['x', 'y', 'z']) OR num<9000) "
        "AND (content_type_id IN ['iso']))"
    )
    assert str(crit) == expected
def test_search_distributor_with_relative_url():
    """Regex search on relative_url returns every matching distributor."""
    controller = FakeController()

    yum_dist = Distributor(
        id="yum_distributor",
        type_id="yum_distributor",
        repo_id="repo1",
        relative_url="relative/path",
    )
    rsync_dist = Distributor(
        id="cdn_distributor",
        type_id="rpm_rsync_distributor",
        repo_id="repo1",
        relative_url="relative/path",
    )
    other_dist = Distributor(
        id="yum_distributor",
        type_id="yum_distributor",
        repo_id="repo2",
        relative_url="another/path",
    )
    controller.insert_repository(
        Repository(id="repo1", distributors=(yum_dist, rsync_dist))
    )
    controller.insert_repository(
        Repository(id="repo2", distributors=(other_dist,))
    )

    crit = Criteria.with_field("relative_url", Matcher.regex("relative/path"))
    found = controller.client.search_distributor(crit).result().data

    # Only repo1's distributors match; sorting puts cdn_distributor first.
    assert sorted(found) == [rsync_dist, yum_dist]
def test_search_mapped_field_less_than():
    """less_than works on fields subject to Python<=>Pulp conversion."""
    controller = FakeController()

    earlier = Distributor(
        id="yum_distributor",
        type_id="yum_distributor",
        repo_id="repo1",
        last_publish=datetime.datetime(2019, 8, 23, 2, 5, 0, tzinfo=None),
    )
    later = Distributor(
        id="cdn_distributor",
        type_id="rpm_rsync_distributor",
        repo_id="repo1",
        last_publish=datetime.datetime(2019, 8, 27, 2, 5, 0, tzinfo=None),
    )
    controller.insert_repository(
        Repository(id="repo1", distributors=(earlier, later))
    )

    cutoff = datetime.datetime(2019, 8, 24, 0, 0, 0)
    crit = Criteria.with_field("last_publish", Matcher.less_than(cutoff))
    found = controller.client.search_distributor(crit).result().data

    # Only the distributor published before the cutoff matches.
    assert found == [earlier]
def test_search_content_unsupported_operator(populated_repo):
    """search_content using unsupported operators on content_type_id raises"""
    crit = Criteria.with_field("content_type_id", Matcher.regex("foobar"))
    with pytest.raises(ValueError) as exc_info:
        populated_repo.search_content(crit)
    assert "unsupported expression for content_type_id" in str(exc_info.value)
def _filtered_repo_distributors(self):
    """Search distributors, narrowed by the optional CLI filter arguments."""
    published_before = self.args.published_before
    url_regex = self.args.repo_url_regex

    # Start from a match-all criteria, then AND in each active filter.
    clauses = [Criteria.true()]
    if published_before:
        clauses.append(
            Criteria.with_field("last_publish", Matcher.less_than(published_before))
        )
    if url_regex:
        clauses.append(
            Criteria.with_field("relative_url", Matcher.regex(url_regex.pattern))
        )

    return self.pulp_client.search_distributor(Criteria.and_(*clauses))
def test_type():
    """type is mapped correctly"""
    crit = Criteria.with_field("type", Matcher.regex("foobar"))
    # The model field "type" is stored under a Pulp note.
    expected = {"notes._repo-type": {"$regex": "foobar"}}
    assert filters_for_criteria(crit, Repository) == expected
def test_field_regex_criteria():
    """with_field with regex is translated to a mongo fragment as expected."""
    crit = Criteria.with_field("some.field", Matcher.regex("abc"))
    assert filters_for_criteria(crit) == {"some.field": {"$regex": "abc"}}
def test_complex_type_ids(client):
    """content searches raise if using criteria with unsupported operators on content_type_id"""
    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    crit = Criteria.with_field("content_type_id", Matcher.regex("foobar"))
    with pytest.raises(ValueError) as exc_info:
        repo.search_content(crit)

    assert "unsupported expression for content_type_id" in str(exc_info.value)
def test_dict_matcher_value():
    """criteria using a dict as matcher value"""
    when = datetime.datetime(2019, 9, 4, 0, 0, 0)
    crit = Criteria.with_field("created", Matcher.less_than({"created_date": when}))

    # Datetimes become $date documents even when nested inside a dict value.
    assert filters_for_criteria(crit) == {
        "created": {"$lt": {"created_date": {"$date": "2019-09-04T00:00:00Z"}}}
    }
def test_remove_fail_without_type_id(fast_poller, client):
    """Remove fails when a criteria is provided without unit type"""
    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    # No Criteria.with_unit_type in the AND => removal must be rejected.
    crit = Criteria.and_(
        Criteria.with_field("filename", "some.rpm"),
        Criteria.with_field("signing_key", Matcher.in_(["key123"])),
    )
    with pytest.raises(ValueError):
        repo.remove_content(criteria=crit)
def test_copy_with_criteria(fast_poller, requests_mocker, client):
    """Copy with criteria succeeds, and serializes criteria correctly."""
    src = Repository(id="src-repo")
    dest = Repository(id="dest-repo")
    # Attach the client directly, bypassing normal construction.
    src.__dict__["_client"] = client
    dest.__dict__["_client"] = client

    # Mock the associate endpoint to spawn two tasks...
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/dest-repo/actions/associate/",
        [{"json": {"spawned_tasks": [{"task_id": "task1"}, {"task_id": "task2"}]}}],
    )
    # ...and the task search endpoint to report both as done.
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            {
                "json": [
                    {"task_id": "task1", "state": "finished"},
                    {"task_id": "task2", "state": "skipped"},
                ]
            }
        ],
    )

    crit = Criteria.and_(
        Criteria.with_unit_type(RpmUnit),
        Criteria.with_field("name", Matcher.in_(["bash", "glibc"])),
    )

    # Copy should succeed, and return the tasks (in this case with no matches)
    assert sorted(client.copy_content(src, dest, crit), key=lambda t: t.id) == [
        Task(id="task1", completed=True, succeeded=True),
        Task(id="task2", completed=True, succeeded=True),
    ]

    hist = requests_mocker.request_history

    # First request should have been the associate.
    assert (
        hist[0].url
        == "https://pulp.example.com/pulp/api/v2/repositories/dest-repo/actions/associate/"
    )

    # It should have encoded our criteria object as needed by the Pulp API.
    assert hist[0].json() == {
        "criteria": {
            "filters": {"unit": {"name": {"$in": ["bash", "glibc"]}}},
            "type_ids": ["rpm", "srpm"],
        },
        "source_repo_id": "src-repo",
    }
def _rpm_criteria(self, filename, signing_keys=None, sha256sum=None):
    """Build search criteria for an RPM by filename.

    Optionally narrowed by signing keys or by sha256 checksum; signing
    keys take precedence when both are given.
    """
    by_filename = Criteria.with_field("filename", filename)
    if signing_keys:
        return Criteria.and_(
            by_filename,
            Criteria.with_field("signing_key", Matcher.in_(signing_keys)),
        )
    if sha256sum:
        return Criteria.and_(
            by_filename,
            Criteria.with_field("sha256sum", sha256sum),
        )
    return by_filename
def test_can_search_id_exists():
    """Can search for a repo using exists operator with fake client."""
    controller = FakeController()
    repos = [Repository(id="repo1"), Repository(id="repo2")]
    for repo in repos:
        controller.insert_repository(repo)

    crit = Criteria.with_field("id", Matcher.exists())
    found = controller.client.search_repository(crit).data

    # Every repo has an id, so all are returned.
    assert sorted(found) == repos
def test_search_created_exists():
    """exists on notes.created matches only repos with a created timestamp."""
    controller = FakeController()
    no_date = Repository(id="repo1")
    dated1 = Repository(id="repo2", created=datetime.datetime.utcnow())
    dated2 = Repository(id="repo3", created=datetime.datetime.utcnow())
    for repo in (no_date, dated1, dated2):
        controller.insert_repository(repo)

    crit = Criteria.with_field("notes.created", Matcher.exists())
    found = controller.client.search_repository(crit).data

    assert sorted(found) == [dated1, dated2]
def test_search_mapped_field_in():
    """Can do 'in' search with fields subject to Python<=>Pulp conversion."""
    controller = FakeController()
    repos = [
        Repository(id="repo1", eng_product_id=888),
        Repository(id="repo2", eng_product_id=123),
        Repository(id="repo3", eng_product_id=456),
    ]
    for repo in repos:
        controller.insert_repository(repo)

    crit = Criteria.with_field("eng_product_id", Matcher.in_([123, 456]))
    found = controller.client.search_repository(crit).data

    # repo1's product id (888) is not in the searched set.
    assert sorted(found) == [repos[1], repos[2]]
def _get_rpms_criteria(self):
    """Build OR criteria matching the whitelisted (non-source) packages."""
    criteria_values = []
    for package_pattern in self._ubi_config.packages.whitelist:
        # skip src packages, they are searched separately
        if package_pattern.arch == "src":
            continue
        # "*" or no arch means "any arch" -> match on field existence.
        if package_pattern.arch in ("*", None):
            arch = PulpLibMatcher.exists()
        else:
            arch = package_pattern.arch
        criteria_values.append((package_pattern.name, arch))

    return self._create_or_criteria(("name", "arch"), criteria_values)
def test_search_mapped_field_regex():
    """Can do regex search with fields subject to Python<=>Pulp conversion."""
    controller = FakeController()
    matching1 = Repository(id="repo1", type="foobar")
    matching2 = Repository(id="repo2", type="foobaz")
    non_matching = Repository(id="repo3", type="quux")
    for repo in (matching1, matching2, non_matching):
        controller.insert_repository(repo)

    crit = Criteria.with_field("type", Matcher.regex("fooba[rz]"))
    found = controller.client.search_repository(crit).data

    assert sorted(found) == [matching1, matching2]
def test_search_or():
    """or_ criteria matches repos satisfying either sub-criteria."""
    controller = FakeController()
    repo1 = Repository(id="repo1")
    repo2 = Repository(id="repo2")
    repo3 = Repository(id="repo3")
    for repo in (repo1, repo2, repo3):
        controller.insert_repository(repo)

    crit = Criteria.or_(
        Criteria.with_id("repo3"),
        Criteria.with_field("id", Matcher.equals("repo1")),
    )
    found = controller.client.search_repository(crit).data

    # repo2 satisfies neither branch and is excluded.
    assert sorted(found) == [repo1, repo3]
def test_can_search_distributors_with_relative_url(client, requests_mocker):
    """Distributor search by relative_url regex parses the API response."""
    # Mock Pulp's distributor search endpoint; note the second distributor
    # has no repo_id in its response document.
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/distributors/search/",
        json=[
            {
                "id": "yum_distributor",
                "distributor_type_id": "yum_distributor",
                "repo_id": "test_rpm",
                "config": {
                    "relative_url": "relative/path"
                },
            },
            {
                "id": "cdn_distributor",
                "distributor_type_id": "rpm_rsync_distributor",
                "config": {
                    "relative_url": "relative/path"
                },
            },
        ],
    )

    crit = Criteria.with_field("relative_url", Matcher.regex("relative/path"))
    distributors_f = client.search_distributor(crit)
    distributors = [dist for dist in distributors_f.result()]

    # distributor objects are returned
    assert sorted(distributors) == [
        Distributor(
            id="cdn_distributor",
            type_id="rpm_rsync_distributor",
            relative_url="relative/path",
        ),
        Distributor(
            id="yum_distributor",
            type_id="yum_distributor",
            repo_id="test_rpm",
            relative_url="relative/path",
        ),
    ]

    # api is called once
    assert requests_mocker.call_count == 1
def test_search_with_model_field():
    """search repos using model field 'created' in criteria.

    Returns only those repos which have the field and whose value is
    not None.
    """
    controller = FakeController()
    missing = Repository(id="repo1")
    present = Repository(id="repo2", created=datetime.datetime.utcnow())
    explicit_none = Repository(id="repo3", created=None)
    for repo in (missing, present, explicit_none):
        controller.insert_repository(repo)

    crit = Criteria.with_field("created", Matcher.exists())
    found = controller.client.search_repository(crit).result().data

    # An explicit created=None counts as absent.
    assert sorted(found) == [present]
def test_search_created_regex():
    """Can search using regular expressions."""
    controller = FakeController()
    june11 = Repository(
        id="repo1", created=datetime.datetime(2019, 6, 11, 14, 47, 0, tzinfo=None)
    )
    march1 = Repository(
        id="repo2", created=datetime.datetime(2019, 3, 1, 1, 1, 0, tzinfo=None)
    )
    june1 = Repository(
        id="repo3", created=datetime.datetime(2019, 6, 1, 1, 1, 0, tzinfo=None)
    )
    undated = Repository(id="repo4")
    for repo in (june11, march1, june1, undated):
        controller.insert_repository(repo)

    # "19-06" matches the serialized form of the June 2019 timestamps only.
    crit = Criteria.with_field("notes.created", Matcher.regex("19-06"))
    found = controller.client.search_repository(crit)

    assert sorted(found) == [june11, june1]
def adjust_maintenance_report(self, report):
    """Add repos selected via --repo-ids / --repo-url-regex to the report.

    Returns the (possibly updated) maintenance report.
    """
    to_add = []

    if self.args.repo_ids:
        # Explicitly named repos: verify they exist before adding.
        to_add.extend(self._ensure_repos_exist(self.args.repo_ids))

    if self.args.repo_url_regex:
        # search distributors with relative_url, get the repo id from distributors
        crit = Criteria.with_field(
            "relative_url", Matcher.regex(self.args.repo_url_regex.pattern)
        )
        dists = self.pulp_client.search_distributor(crit).result()
        to_add.extend({dist.repo_id for dist in dists})

    if to_add:
        LOG.info("Setting following repos to maintenance mode:")
        for repo_id in to_add:
            LOG.info(" - %s", repo_id)
        report = report.add(to_add, owner=self.args.owner, message=self.args.message)

    return report
def test_delete_unsigned_rpms(command_tester, fake_collector, monkeypatch):
    """Deleting unsigned RPMs from repos succeeds"""
    repo = YumRepository(
        id="some-yumrepo", relative_url="some/publish/url", mutable_urls=["repomd.xml"]
    )
    files = [
        RpmUnit(
            name="signed",
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename="signed-1.23-1.test8_x86_64.rpm",
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            unit_id="signed_rpm",
        ),
        # Unsigned unit: only deletable because --allow-unsigned is passed.
        RpmUnit(
            name="unsigned",
            version="2.25",
            release="1.test8",
            arch="x86_64",
            filename="unsigned-2.25-1.test8_x86_64.rpm",
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key=None,
            unit_id="unsigned_rpm",
        ),
    ]
    with FakeDeletePackages() as task_instance:
        task_instance.pulp_client_controller.insert_repository(repo)
        task_instance.pulp_client_controller.insert_units(repo, files)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-yumrepo",
                "--file",
                "unsigned-2.25-1.test8_x86_64.rpm,signed-1.23-1.test8_x86_64.rpm",
                "--allow-unsigned",
            ],
        )

        # It should record that it removed these push items:
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            {
                "origin": "pulp",
                "src": None,
                "dest": "some-yumrepo",
                "signing_key": None,
                "filename": "unsigned-2.25-1.test8.x86_64.rpm",
                "state": "DELETED",
                "build": None,
                "checksums": {"sha256": "a" * 64},
            }
        ]

        # verify whether files were deleted on Pulp
        client = task_instance.pulp_client

        # get the repo where the files were deleted
        repos = list(
            client.search_repository(Criteria.with_id("some-yumrepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        # criteria with the unit_ids
        unit_ids = [f.unit_id for f in files]
        criteria = Criteria.with_field("unit_id", Matcher.in_(unit_ids))

        # unsigned RPM is deleted, only signed RPM left in the repo
        result_files = list(repo.search_content(criteria).result())
        assert len(result_files) == 1
        # BUGFIX: previously asserted on files[0] (the input fixture, which is
        # trivially true) — the surviving unit in the repo is what must be
        # the signed RPM.
        assert result_files[0].filename == "signed-1.23-1.test8_x86_64.rpm"
def test_remove_with_criteria(fast_poller, requests_mocker, client):
    """Remove succeeds when given a criteria/filter for removal"""
    repo = Repository(id="some-repo")
    repo.__dict__["_client"] = client

    # Two canned unassociate responses: one per remove_content call below.
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/unassociate/",
        [
            {
                "json": {
                    "spawned_tasks": [{
                        "task_id": "task1"
                    }]
                }
            },
            {
                "json": {
                    "spawned_tasks": [{
                        "task_id": "task2"
                    }]
                }
            },
        ],
    )
    requests_mocker.post(
        "https://pulp.example.com/pulp/api/v2/tasks/search/",
        [
            {
                "json": [{
                    "task_id": "task1",
                    "state": "finished"
                }]
            },
            {
                "json": [{
                    "task_id": "task2",
                    "state": "finished"
                }]
            },
        ],
    )

    criteria = Criteria.and_(
        Criteria.with_unit_type(RpmUnit),
        Criteria.with_field("filename", "some.rpm"),
        Criteria.with_field("signing_key", Matcher.in_(["key123"])),
    )

    assert repo.remove_content(criteria=criteria).result() == [
        Task(id="task1", completed=True, succeeded=True)
    ]

    # It should have passed the criteria to Pulp
    req = requests_mocker.request_history
    assert (
        req[0].url
        == "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/unassociate/"
    )
    assert req[0].json() == {
        "criteria": {
            "filters": {
                "unit": {
                    "$and": [
                        {
                            "filename": {
                                "$eq": "some.rpm"
                            }
                        },
                        {
                            "signing_key": {
                                "$in": ["key123"]
                            }
                        },
                    ]
                }
            },
            "type_ids": ["rpm", "srpm"],
        }
    }

    # Providing both criteria and type_ids
    assert repo.remove_content(criteria=criteria, type_ids=["type1", "type2"]).result() == [
        Task(id="task2", completed=True, succeeded=True)
    ]

    # It should have passed only the criteria to Pulp and ignore type_ids as kwarg
    # NOTE(review): the assertions below re-check req[0] — the request made by
    # the FIRST remove_content call — not the request issued by the second call
    # just above. Presumably they were meant to inspect the later unassociate
    # request; confirm the intended index before changing.
    req = requests_mocker.request_history
    assert (
        req[0].url
        == "https://pulp.example.com/pulp/api/v2/repositories/some-repo/actions/unassociate/"
    )
    assert req[0].json() == {
        "criteria": {
            "filters": {
                "unit": {
                    "$and": [
                        {
                            "filename": {
                                "$eq": "some.rpm"
                            }
                        },
                        {
                            "signing_key": {
                                "$in": ["key123"]
                            }
                        },
                    ]
                }
            },
            "type_ids": ["rpm", "srpm"],
        }
    }
def test_delete_modules(command_tester, fake_collector, monkeypatch):
    """Deleting modules and its artifacts from repos succeeds"""
    repo = YumRepository(
        id="some-yumrepo", relative_url="some/publish/url", mutable_urls=["repomd.xml"]
    )
    files = [
        RpmUnit(
            name="bash",
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename="bash-1.23-1.test8_x86_64.rpm",
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            provides=[],
            requires=[],
            unit_id="rpm1",
        ),
        RpmUnit(
            name="dash",
            version="1.23",
            release="1.test8",
            arch="x86_64",
            filename="dash-1.23-1.test8_x86_64.rpm",
            sha256sum="a" * 64,
            md5sum="b" * 32,
            signing_key="aabbcc",
            provides=[],
            requires=[],
            unit_id="rpm2",
        ),
        # The module lists both RPMs above as artifacts; deleting the module
        # is expected to delete the artifact RPMs as well (asserted below).
        ModulemdUnit(
            name="mymod",
            stream="s1",
            version=123,
            context="a1c2",
            arch="s390x",
            artifacts=["bash-0:1.23-1.test8_x86_64", "dash-0:1.23-1.test8_x86_64"],
            unit_id="module1",
        ),
    ]
    with FakeDeletePackages() as task_instance:
        task_instance.pulp_client_controller.insert_repository(repo)
        task_instance.pulp_client_controller.insert_units(repo, files)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-yumrepo",
                "--file",
                "mymod:s1:123:a1c2:s390x",
                "--signing-key",
                "aabbcc",
            ],
        )

        # Module and both of its artifact RPMs are recorded as deleted.
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-yumrepo",
                "checksums": {"sha256": "a" * 64},
                "signing_key": None,
                "filename": "bash-1.23-1.test8.x86_64.rpm",
            },
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-yumrepo",
                "checksums": {"sha256": "a" * 64},
                "signing_key": None,
                "filename": "dash-1.23-1.test8.x86_64.rpm",
            },
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-yumrepo",
                "checksums": None,
                "signing_key": None,
                "filename": "mymod:s1:123:a1c2:s390x",
            },
        ]

        # verify whether files were deleted on Pulp
        client = task_instance.pulp_client

        # get the repo where the files were deleted
        repos = list(
            client.search_repository(Criteria.with_id("some-yumrepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        # criteria with the unit_ids
        unit_ids = []
        for f in files:
            unit_ids.append(f.unit_id)
        criteria = Criteria.with_field("unit_id", Matcher.in_(unit_ids))

        # deleted files are not in the repo
        files = list(repo.search_content(criteria).result())
        assert len(files) == 0

        # same files exist on Pulp as orphans
        files_search = list(client.search_content(criteria).result())
        assert len(files_search) == 3
def test_delete_files(command_tester, fake_collector, monkeypatch):
    """Deleting files from repos succeeds"""
    repo1 = FileRepository(
        id="some-filerepo",
        eng_product_id=123,
        relative_url="some/publish/url",
        mutable_urls=["mutable1", "mutable2"],
    )
    repo2 = FileRepository(
        id="other-filerepo",
        eng_product_id=123,
        relative_url="other/publish/url",
        mutable_urls=["mutable1", "mutable2"],
    )
    files1 = [
        FileUnit(path="hello.iso", size=123, sha256sum="a" * 64, unit_id="files1_f1"),
        FileUnit(path="some.iso", size=454435, sha256sum="b" * 64, unit_id="files1_f2"),
    ]
    files2 = [
        FileUnit(path="other.iso", size=123, sha256sum="a" * 64, unit_id="files2_f1")
    ]
    with FakeDeletePackages() as task_instance:
        task_instance.pulp_client_controller.insert_repository(repo1)
        task_instance.pulp_client_controller.insert_repository(repo2)
        task_instance.pulp_client_controller.insert_units(repo1, files1)
        task_instance.pulp_client_controller.insert_units(repo2, files2)

        # Let's try setting the cache flush root via env.
        monkeypatch.setenv("FASTPURGE_ROOT_URL", "https://cdn.example2.com/")

        # It should run with expected output.
        command_tester.test(
            task_instance.main,
            [
                "test-delete",
                "--pulp-url",
                "https://pulp.example.com/",
                "--fastpurge-host",
                "fakehost-xxx.example.net",
                "--fastpurge-client-secret",
                "abcdef",
                "--fastpurge-client-token",
                "efg",
                "--fastpurge-access-token",
                "tok",
                "--repo",
                "some-filerepo",
                "--file",
                "some.iso,hello.iso",
                "--file",
                "other.iso",
                "--file",
                "random.txt",
            ],
        )

        # deleted units are collected
        # (only some-filerepo was targeted, so other.iso / random.txt
        # are not expected here)
        assert sorted(fake_collector.items, key=lambda pi: pi["filename"]) == [
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-filerepo",
                "checksums": {"sha256": "a" * 64},
                "signing_key": None,
                "filename": "hello.iso",
            },
            {
                "origin": "pulp",
                "src": None,
                "state": "DELETED",
                "build": None,
                "dest": "some-filerepo",
                "checksums": {"sha256": "b" * 64},
                "signing_key": None,
                "filename": "some.iso",
            },
        ]

        # verify whether files were deleted on Pulp
        client = task_instance.pulp_client

        # get the repo where the files were deleted
        repos = list(
            client.search_repository(Criteria.with_id("some-filerepo")).result()
        )
        assert len(repos) == 1
        repo = repos[0]

        unit_ids = []
        for f in files1:
            unit_ids.append(f.unit_id)
        criteria = Criteria.with_field("unit_id", Matcher.in_(unit_ids))

        # deleted files are not in the repo
        files = list(repo.search_content(criteria).result())
        assert len(files) == 0

        # same files exist on Pulp as orphans
        files_search = list(client.search_content(criteria).result())
        assert len(files_search) == 2
def test_copy_content_with_criteria(controller):
    """copy_content can filter copied units by field values"""
    src = YumRepository(id="src-repo")
    dest = YumRepository(id="dest-repo")
    controller.insert_repository(src)
    controller.insert_repository(dest)

    src_units = [
        RpmUnit(name="bash", version="4.0", release="1", arch="x86_64"),
        RpmUnit(name="bash", version="4.0", release="2", arch="x86_64"),
        RpmUnit(name="bash", version="4.1", release="3", arch="x86_64"),
        RpmUnit(name="glibc", version="5.0", release="1", arch="x86_64"),
    ]
    controller.insert_units(src, src_units)

    client = controller.client

    # Repos are initially detached, re-fetch them via client
    src = client.get_repository(src.id).result()
    dest = client.get_repository(dest.id).result()

    # This is what we want to copy...
    crit = Criteria.and_(
        Criteria.with_field("name", "bash"),
        Criteria.with_field("release", Matcher.in_(["1", "3"])),
    )

    # Copy should succeed
    copy_tasks = list(
        client.copy_content(
            src, dest, crit, options=CopyOptions(require_signed_rpms=False)
        )
    )

    # It should have copied only those units matching the criteria
    units = sum([t.units for t in copy_tasks], [])
    assert sorted(units, key=repr) == [
        RpmUnit(name="bash", version="4.0", release="1", arch="x86_64", epoch="0"),
        RpmUnit(name="bash", version="4.1", release="3", arch="x86_64", epoch="0"),
    ]

    # The copy should also impact subsequent content searches.
    # NOTE(review): the hard-coded unit_ids below are presumably stable
    # because the fake controller generates them deterministically —
    # confirm against the controller fixture if this ever flakes.
    dest_units = list(dest.search_content())
    assert sorted(dest_units, key=repr) == [
        RpmUnit(
            unit_id="e3e70682-c209-4cac-629f-6fbed82c07cd",
            name="bash",
            version="4.0",
            release="1",
            arch="x86_64",
            epoch="0",
            repository_memberships=["src-repo", "dest-repo"],
        ),
        RpmUnit(
            unit_id="d4713d60-c8a7-0639-eb11-67b367a9c378",
            name="bash",
            version="4.1",
            release="3",
            arch="x86_64",
            epoch="0",
            repository_memberships=["src-repo", "dest-repo"],
        ),
    ]