def test_search_paginates():
    controller = FakeController()

    repos = []
    for i in range(0, 1000):
        repo = Repository(id="repo-%s" % i)
        repos.append(repo)
        controller.insert_repository(repo)

    client = controller.client
    crit = Criteria.true()
    page = client.search_repository(crit)

    found_repos = list(page)

    page_count = 1
    while page.next:
        page_count += 1
        page = page.next.result()

    # There should have been several pages (it is not defined exactly
    # what page size the fake client uses, but it should be relatively
    # small to enforce that clients think about pagination)
    assert page_count >= 10

    # All repos should have been found
    assert sorted(found_repos) == sorted(repos)
def _get_current_content(self):
    """
    Gather current content of output repos
    """
    criteria = [Criteria.true()]
    current_modulemds = f_proxy(
        self._executor.submit(
            Matcher.search_modulemds, criteria, [self.repos.out_repos.rpm]
        )
    )
    current_modulemd_defaults = f_proxy(
        self._executor.submit(
            Matcher.search_modulemd_defaults, criteria, [self.repos.out_repos.rpm]
        )
    )
    current_rpms = f_proxy(
        self._executor.submit(
            Matcher.search_rpms, criteria, [self.repos.out_repos.rpm]
        )
    )
    current_srpms = f_proxy(
        self._executor.submit(
            Matcher.search_srpms, criteria, [self.repos.out_repos.source]
        )
    )

    if self.repos.out_repos.debug.result():
        current_debug_rpms = f_proxy(
            self._executor.submit(
                Matcher.search_rpms, criteria, [self.repos.out_repos.debug]
            )
        )
    else:
        current_debug_rpms = f_proxy(f_return([]))

    current_content = RepoContent(
        current_rpms,
        current_srpms,
        current_debug_rpms,
        current_modulemds,
        current_modulemd_defaults,
    )
    return current_content
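# A minimal sketch of the f_proxy pattern used above (assumed behavior, based
# on how the proxies are consumed in this code, not a definitive description
# of more_executors): the returned object is still a future (it has .result()),
# while attribute access is delegated to the resolved value, so the proxy can
# mostly be passed around as if it were the value itself.
from more_executors.futures import f_proxy, f_return

repo = f_proxy(f_return(Repository(id="example-repo")))
assert repo.id == "example-repo"            # attribute access delegated to the Repository
assert repo.result().id == "example-repo"   # still usable as a plain future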
def search_content(self, criteria=None):
    self._ensure_alive()

    criteria = criteria or Criteria.true()
    out = []

    # Pass the criteria through the code used by the real client to build
    # up the Pulp query. We don't actually *use* the resulting query since
    # we're not accessing a real Pulp server. The point is to ensure the
    # same validation and error behavior as used by the real client also
    # applies to the fake.
    prepared_search = search_for_criteria(criteria, Unit)

    available_type_ids = set(self._type_ids)
    missing_type_ids = set(prepared_search.type_ids or []) - available_type_ids
    if missing_type_ids:
        return f_return_error(
            PulpException(
                "following type ids are not supported on the server: %s"
                % ",".join(missing_type_ids)
            )
        )

    for unit in self._all_units:
        if (
            prepared_search.type_ids
            and unit.content_type_id not in prepared_search.type_ids
        ):
            continue
        if match_object(criteria, unit):
            out.append(unit)

    # callers should not make any assumption about the order of returned
    # values. Encourage that by returning output in unpredictable order
    random.shuffle(out)

    return self._prepare_pages(out)
def _search_repo_units(self, repo_id, criteria):
    criteria = criteria or Criteria.true()

    # Pass the criteria through the same handling as used by the real client
    # for serialization, to ensure we reject criteria also rejected by real client
    # and also accumulate unit_fields.
    prepared_search = search_for_criteria(criteria, Unit)

    repo_f = self.get_repository(repo_id)
    if repo_f.exception():
        return repo_f

    with self._state.lock:
        repo_units = self._state.repo_units(repo_id)

    out = []
    try:
        for unit in repo_units:
            if match_object(criteria, unit):
                unit = units.with_filtered_fields(unit, prepared_search.unit_fields)
                out.append(unit)
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(out)
    return self._prepare_pages(out)
def _get_pkgs_from_all_modules(self):
    # search for modulemds in all input repos
    # and extract filenames only
    def extract_modular_filenames():
        modular_rpm_filenames = set()
        for module in modules:
            modular_rpm_filenames |= set(module.artifacts_filenames)

        return modular_rpm_filenames

    modules = self.search_modulemds([Criteria.true()], self._input_repos.rpm)
    return self._executor.submit(extract_modular_filenames)
def search_task(self, criteria=None):
    self._ensure_alive()

    tasks = []
    criteria = criteria or Criteria.true()

    # Run the criteria through the same serialization path as the real
    # client, purely for validation; the result itself is unused by the fake.
    search_for_criteria(criteria)

    try:
        for task in self._tasks:
            if match_object(criteria, task):
                tasks.append(task)
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(tasks)
    return self._prepare_pages(tasks)
def test_stringify_complex_criteria():
    crit = Criteria.and_(
        Criteria.with_field("must-exist", Matcher.exists()),
        Criteria.with_field("foo", Matcher.equals("bar")),
        Criteria.true(),
        Criteria.or_(
            Criteria.with_field("foo", Matcher.regex("quux")),
            Criteria.with_field("other", Matcher.in_(["x", "y", "z"])),
            Criteria.with_field("num", Matcher.less_than(9000)),
        ),
        Criteria.with_unit_type(FileUnit),
    )

    assert (
        str(crit) == "((must-exist EXISTS) AND foo=='bar' AND TRUE "
        "AND (foo=~/quux/ OR (other IN ['x', 'y', 'z']) OR num<9000) "
        "AND (content_type_id IN ['iso']))"
    )
def _filtered_repo_distributors(self):
    published_before = self.args.published_before
    url_regex = self.args.repo_url_regex

    # define the criteria on available filters
    crit = [Criteria.true()]
    if published_before:
        crit.append(
            Criteria.with_field("last_publish", Matcher.less_than(published_before))
        )
    if url_regex:
        crit.append(
            Criteria.with_field("relative_url", Matcher.regex(url_regex.pattern))
        )

    crit = Criteria.and_(*crit)
    return self.pulp_client.search_distributor(crit)
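# For illustration only (hypothetical values, not part of the original code):
# when both published_before and repo_url_regex are set, the criteria composed
# above is equivalent to the following.
import re
from datetime import datetime

example_crit = Criteria.and_(
    Criteria.true(),
    Criteria.with_field("last_publish", Matcher.less_than(datetime(2021, 1, 1))),
    Criteria.with_field("relative_url", Matcher.regex(re.compile("/some/repo/").pattern)),
)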
def search_distributor(self, criteria=None):
    criteria = criteria or Criteria.true()
    distributors = []

    # Run the criteria through the real client's filter builder, purely for
    # validation; the resulting query itself is unused by the fake.
    filters_for_criteria(criteria, Distributor)

    try:
        for repo in self._repositories:
            for distributor in repo.distributors:
                if match_object(criteria, distributor):
                    distributors.append(attr.evolve(distributor, repo_id=repo.id))
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(distributors)
    return self._prepare_pages(distributors)
def test_search_distributor():
    controller = FakeController()

    dist1 = Distributor(
        id="yum_distributor", type_id="yum_distributor", repo_id="repo1"
    )
    dist2 = Distributor(
        id="cdn_distributor", type_id="rpm_rsync_distributor", repo_id="repo1"
    )
    repo1 = Repository(id="repo1", distributors=(dist1, dist2))

    controller.insert_repository(repo1)

    client = controller.client
    crit = Criteria.true()
    found = client.search_distributor(crit).result().data

    # sorted by id, so "cdn_distributor" comes before "yum_distributor"
    assert sorted(found) == [dist2, dist1]
def _do_unassociate(self, repo_id, criteria=None):
    repo_f = self.get_repository(repo_id)
    if repo_f.exception():
        return repo_f

    with self._state.lock:
        current = self._state.repo_unit_keys.get(repo_id, set())
        units_with_key = [
            {"key": key, "unit": self._state.units_by_key[key]} for key in current
        ]

        removed_units = set()
        kept_keys = set()

        criteria = criteria or Criteria.true()
        # validate the criteria here, just as the real client would
        pulp_search = search_for_criteria(
            criteria, type_hint=Unit, unit_type_accum=None
        )

        # raise an error if criteria with filters doesn't include type_ids
        if pulp_search.filters and not pulp_search.type_ids:
            raise ValueError(
                "Criteria to remove_content must specify at least one unit type!"
            )

        for unit_with_key in units_with_key:
            unit = unit_with_key["unit"]
            if match_object(criteria, unit):
                removed_units.add(unit)
            else:
                kept_keys.add(unit_with_key["key"])

        self._state.repo_unit_keys[repo_id] = kept_keys

        task = Task(
            id=self._state.next_task_id(),
            repo_id=repo_id,
            completed=True,
            succeeded=True,
            units=units.with_key_only(removed_units),
        )

    return f_return([task])
def search_repository(self, criteria=None):
    criteria = criteria or Criteria.true()
    repos = []

    # Pass the criteria through the code used by the real client to build
    # up the Pulp query. We don't actually *use* the resulting query since
    # we're not accessing a real Pulp server. The point is to ensure the
    # same validation and error behavior as used by the real client also
    # applies to the fake.
    filters_for_criteria(criteria, Repository)

    try:
        for repo in self._repositories:
            if match_object(criteria, repo):
                repos.append(self._attach(repo))
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    # callers should not make any assumption about the order of returned
    # values. Encourage that by returning output in unpredictable order
    random.shuffle(repos)

    # Split it into pages
    page_data = []
    current_page_data = []
    while repos:
        next_elem = repos.pop()
        current_page_data.append(next_elem)
        if len(current_page_data) == self._PAGE_SIZE and repos:
            page_data.append(current_page_data)
            current_page_data = []
    page_data.append(current_page_data)

    page = Page()
    next_page = None
    for batch in reversed(page_data):
        page = Page(data=batch, next=next_page)
        next_page = f_return(page)

    return f_return(page)
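# A minimal sketch of a _prepare_pages helper (hypothetical; the helper called
# by the other fake-client snippets is not shown here and its real
# implementation may differ). It simply factors out the page-splitting and
# Page-chaining logic inlined in search_repository above, which the refactored
# search_repository below relies on.
def _prepare_pages(self, resource_list):
    # Split the shuffled results into chunks of at most _PAGE_SIZE elements.
    page_data = []
    current_page_data = []
    while resource_list:
        current_page_data.append(resource_list.pop())
        if len(current_page_data) == self._PAGE_SIZE and resource_list:
            page_data.append(current_page_data)
            current_page_data = []
    page_data.append(current_page_data)

    # Chain the pages from last to first so that each Page's "next" is a
    # resolved future of the following Page, matching how callers walk
    # page.next in the pagination test above.
    page = Page()
    next_page = None
    for batch in reversed(page_data):
        page = Page(data=batch, next=next_page)
        next_page = f_return(page)
    return f_return(page)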
def search_repository(self, criteria=None):
    criteria = criteria or Criteria.true()
    repos = []

    # Pass the criteria through the code used by the real client to build
    # up the Pulp query. We don't actually *use* the resulting query since
    # we're not accessing a real Pulp server. The point is to ensure the
    # same validation and error behavior as used by the real client also
    # applies to the fake.
    filters_for_criteria(criteria, Repository)

    try:
        for repo in self._repositories:
            if match_object(criteria, repo):
                repos.append(self._attach_repo(repo))
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    # callers should not make any assumption about the order of returned
    # values. Encourage that by returning output in unpredictable order
    random.shuffle(repos)

    return self._prepare_pages(repos)
def _search_repo_units(self, repo_id, criteria):
    criteria = criteria or Criteria.true()

    # Pass the criteria through the same handling as used by the real client
    # for serialization, to ensure we reject criteria also rejected by real client.
    # We don't actually use the result, this is only for validation.
    search_for_criteria(criteria, Unit)

    repo_f = self.get_repository(repo_id)
    if repo_f.exception():
        return repo_f

    repo_units = self._repo_units(repo_id)

    out = []
    try:
        for unit in repo_units:
            if match_object(criteria, unit):
                out.append(unit)
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(out)
    return self._prepare_pages(out)
def test_null_criteria():
    """Searching for None or Criteria.true() translates to empty filters."""
    assert filters_for_criteria(None) == {}
    assert filters_for_criteria(Criteria.true()) == {}