def search_repository(self, criteria=None):
    """Search for repositories matching the given criteria.

    Arguments:
        criteria (Criteria):
            A criteria object used for this search; if None, all
            repositories match.

    Returns:
        Future[Page]:
            A future resolving to the first page of found repositories,
            in no particular order. Resolves to an error if the criteria
            are invalid.
    """
    criteria = criteria or Criteria.true()
    repos = []

    # Pass the criteria through the code used by the real client to build
    # up the Pulp query. We don't actually *use* the resulting query since
    # we're not accessing a real Pulp server. The point is to ensure the
    # same validation and error behavior as used by the real client also
    # applies to the fake.
    filters_for_criteria(criteria, Repository)

    try:
        for repo in self._repositories:
            if match_object(criteria, repo):
                repos.append(self._attach(repo))
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    # callers should not make any assumption about the order of returned
    # values. Encourage that by returning output in unpredictable order
    random.shuffle(repos)

    # Delegate pagination to the shared helper rather than duplicating
    # the page-splitting logic here; this also keeps the future wrapping
    # (f_proxy) consistent with other search methods.
    return self._prepare_pages(repos)
def _prepare_pages(self, resource_list):
    """Build a linked chain of Page objects from *resource_list*.

    Arguments:
        resource_list (list):
            Objects to paginate. The list is consumed (emptied in place)
            by this call, and its elements appear in the resulting pages
            in reverse of their original order.

    Returns:
        Future[Page]:
            A proxied future resolving to the first page; each page's
            ``next`` is a proxied future of the following page (or None
            on the last page).
    """
    # Take elements back-to-front, matching the pop()-based consumption,
    # then empty the caller's list to preserve that side effect.
    items = resource_list[::-1]
    del resource_list[:]

    size = self._PAGE_SIZE
    # Fixed-size slices; the final slice holds any remainder. An empty
    # input still yields a single page with no data.
    batches = [items[i : i + size] for i in range(0, len(items), size)] or [[]]

    next_page = None
    for batch in reversed(batches):
        page = Page(data=batch, next=next_page)
        next_page = f_proxy(f_return(page))

    return f_proxy(f_return(page))