def update_content(self, unit):
    """Update the mutable (user metadata) fields of an existing unit.

    Returns a unitless future on success, or a failed future
    (PulpException) if no matching unit exists.

    Raises ValueError if the given unit has no unit_id.
    """
    self._ensure_alive()

    if not unit.unit_id:
        raise ValueError("unit_id missing on call to update_content()")

    # Locate the stored unit matching both content type and id.
    existing_unit = None
    for candidate in self._all_units:
        same_unit = (
            candidate.content_type_id == unit.content_type_id
            and candidate.unit_id == unit.unit_id
        )
        if same_unit:
            existing_unit = candidate
            break

    if existing_unit is None:
        return f_return_error(
            PulpException("unit not found: %s" % unit.unit_id)
        )

    # Copy across only the user-editable fields from the caller's unit.
    changes = {
        fld.name: getattr(unit, fld.name)
        for fld in unit._usermeta_fields()
    }
    updated_unit = attr.evolve(existing_unit, **changes)

    unit_key = units.make_unit_key(updated_unit)
    self._units_by_key[unit_key] = updated_unit

    return f_return()
def _search_repo_units(self, repo_id, criteria):
    """Search units in a single repo; returns a future of paginated results."""
    criteria = criteria or Criteria.true()

    # Run the criteria through the real client's serialization path so
    # criteria rejected by the real client are rejected here too, and
    # so unit_fields are accumulated.
    prepared_search = search_for_criteria(criteria, Unit)

    repo_f = self.get_repository(repo_id)
    if repo_f.exception():
        # Repo lookup failed (e.g. no such repo): propagate as-is.
        return repo_f

    # NOTE(review): the lock scope here is assumed to cover only the
    # read of repo units from shared state — confirm against original.
    with self._state.lock:
        repo_units = self._state.repo_units(repo_id)

    matched = []
    try:
        for candidate in repo_units:
            if match_object(criteria, candidate):
                matched.append(
                    units.with_filtered_fields(
                        candidate, prepared_search.unit_fields
                    )
                )
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(matched)
    return self._prepare_pages(matched)
def update_repository(self, repository):
    """Update the mutable note fields of an existing repository.

    Returns a unitless future on success, or a failed future
    (PulpException) if no repository with that id exists.
    """
    self._ensure_alive()

    with self._state.lock:
        # Find the stored repo with a matching id, if any.
        existing_repo = next(
            (
                candidate
                for candidate in self._state.repositories
                if candidate.id == repository.id
            ),
            None,
        )
        if existing_repo is None:
            return f_return_error(
                PulpException("repository not found: %s" % repository.id)
            )

        # Apply only the mutable note fields from the caller's object.
        changes = {
            fld.name: getattr(repository, fld.name)
            for fld in existing_repo._mutable_note_fields()
        }
        updated_repo = attr.evolve(existing_repo, **changes)

        # Replace the old entry with the updated one.
        others = [
            repo
            for repo in self._state.repositories
            if repo.id != updated_repo.id
        ]
        self._state.repositories = others + [updated_repo]

    return f_return()
def test_no_retries_http_404_errors():
    """Retry policy does not retry on HTTP 404 responses."""
    policy = PulpRetryPolicy()

    not_found = requests.Response()
    not_found.status_code = 404
    failed = f_return_error(requests.HTTPError(response=not_found))

    assert not policy.should_retry(0, failed)
def search_content(self, criteria=None):
    """Search across all units on the fake server.

    Returns a future of paginated search results, or a failed future
    if the criteria reference unsupported content type ids.
    """
    self._ensure_alive()
    criteria = criteria or Criteria.true()

    # Run the criteria through the real client's query builder. The
    # resulting query is unused (there's no real Pulp server here);
    # the point is to apply the same validation and error behavior as
    # the real client.
    prepared_search = search_for_criteria(criteria, Unit)

    # Reject any requested type ids this fake server doesn't support.
    requested = set(prepared_search.type_ids or [])
    missing_type_ids = requested - set(self._type_ids)
    if missing_type_ids:
        return f_return_error(
            PulpException(
                "following type ids are not supported on the server: %s"
                % ",".join(missing_type_ids)
            )
        )

    matched = []
    for unit in self._all_units:
        wrong_type = (
            prepared_search.type_ids
            and unit.content_type_id not in prepared_search.type_ids
        )
        if wrong_type:
            continue
        if match_object(criteria, unit):
            matched.append(unit)

    # Callers should not make any assumption about the order of
    # returned values. Encourage that by shuffling the output.
    random.shuffle(matched)
    return self._prepare_pages(matched)
def test_retries_http_errors():
    """Retry policy will retry on HTTP-level errors."""
    policy = PulpRetryPolicy()

    server_error = requests.Response()
    server_error.status_code = 500
    failed = f_return_error(requests.HTTPError(response=server_error))

    assert policy.should_retry(0, failed)
def get_repository(self, repository_id):
    """Look up a single repository by id.

    Returns a future resolving to the repository, or a failed future
    (PulpException) if no repository with that id exists.

    Raises TypeError if repository_id is not a string.
    """
    if not isinstance(repository_id, six.string_types):
        # Fix: the message previously formatted the *builtin* `id`
        # function rather than the offending argument.
        raise TypeError("Invalid argument: id=%s" % repository_id)

    data = self.search_repository(
        Criteria.with_id(repository_id)).result().data
    if len(data) != 1:
        return f_return_error(
            PulpException("Repository id=%s not found" % repository_id))

    return f_return(data[0])
def search_task(self, criteria=None):
    """Search tasks on the fake server; returns a future of paginated results."""
    self._ensure_alive()
    criteria = criteria or Criteria.true()

    # Validation only: criteria rejected by the real client are
    # rejected here as well.
    search_for_criteria(criteria)

    matched = []
    try:
        for task in self._tasks:
            if match_object(criteria, task):
                matched.append(task)
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(matched)
    return self._prepare_pages(matched)
def search_distributor(self, criteria=None):
    """Search distributors across all repos; returns a future of paginated results."""
    criteria = criteria or Criteria.true()

    # Validation only: reject criteria the real client would reject.
    filters_for_criteria(criteria, Distributor)

    matched = []
    try:
        for repo in self._repositories:
            # Matching distributors are copied with their owning
            # repo's id attached.
            matched.extend(
                attr.evolve(dist, repo_id=repo.id)
                for dist in repo.distributors
                if match_object(criteria, dist)
            )
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(matched)
    return self._prepare_pages(matched)
def search_repository(self, criteria=None):
    """Search repositories; returns a future of paginated results."""
    criteria = criteria or Criteria.true()

    # Run the criteria through the real client's query builder. The
    # resulting query is unused (no real Pulp server is involved);
    # this only enforces the same validation and error behavior as
    # the real client.
    filters_for_criteria(criteria, Repository)

    found = []
    try:
        for repo in self._repositories:
            if match_object(criteria, repo):
                found.append(self._attach(repo))
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    # Callers should not make any assumption about the order of
    # returned values. Encourage that by shuffling the output.
    random.shuffle(found)

    # Split results into fixed-size pages (consumed from the tail of
    # the shuffled list, hence the reverse). There is always at least
    # one page, even if it is empty.
    found.reverse()
    size = self._PAGE_SIZE
    page_data = [
        found[i : i + size] for i in range(0, len(found), size)
    ] or [[]]

    # Link the pages together, last page first, each page's `next`
    # being a future of the following page.
    page = Page()
    next_page = None
    for batch in reversed(page_data):
        page = Page(data=batch, next=next_page)
        next_page = f_return(page)

    return f_return(page)
def search_repository(self, criteria=None):
    """Search repositories; returns a future of paginated results."""
    criteria = criteria or Criteria.true()

    # Run the criteria through the real client's query builder purely
    # for validation; the resulting query itself is unused since no
    # real Pulp server is involved.
    filters_for_criteria(criteria, Repository)

    try:
        matched = [
            self._attach_repo(repo)
            for repo in self._repositories
            if match_object(criteria, repo)
        ]
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    # No ordering guarantee: return results in unpredictable order.
    random.shuffle(matched)
    return self._prepare_pages(matched)
def test_always_returns_future():
    """Collector interface returns futures regardless of backend return type."""
    return_value = None

    class TestCollector(object):
        # Each method just echoes back whatever return_value currently
        # holds (read via closure, so the test can change it below).
        def update_push_items(self, items):
            return return_value

        def attach_file(self, filename, content):
            return return_value

        def append_file(self, filename, content):
            return return_value

    Collector.register_backend("test", TestCollector)
    collector = Collector.get("test")

    def outcomes():
        # Invoke every interface method once and gather the results.
        return (
            collector.update_push_items([]),
            collector.attach_file("somefile", ""),
            collector.append_file("somefile", ""),
        )

    # A successful future from the backend (of any value) becomes an
    # empty future at the interface.
    return_value = f_return("abc")
    assert all(f.result() is None for f in outcomes())

    # A failed future from the backend propagates its error.
    error = RuntimeError("oops")
    return_value = f_return_error(error)
    assert all(f.exception() is error for f in outcomes())

    # A non-future from the backend becomes an empty future.
    return_value = "abc"
    assert all(f.result() is None for f in outcomes())
def _search_repo_units(self, repo_id, criteria):
    """Search units within one repo; returns a future of paginated results."""
    criteria = criteria or Criteria.true()

    # Serialize the criteria exactly as the real client would, purely
    # for validation; the result is unused.
    search_for_criteria(criteria, Unit)

    repo_f = self.get_repository(repo_id)
    if repo_f.exception():
        # Repo lookup failed (e.g. no such repo): propagate as-is.
        return repo_f

    try:
        matched = [
            unit
            for unit in self._repo_units(repo_id)
            if match_object(criteria, unit)
        ]
    except Exception as ex:  # pylint: disable=broad-except
        return f_return_error(ex)

    random.shuffle(matched)
    return self._prepare_pages(matched)
def as_future(x):
    """Wrap a value into a future: CANCELLED marker -> cancelled future,
    exception -> failed future, anything else -> successful future."""
    if x is CANCELLED:
        return f_return_cancelled()
    if isinstance(x, Exception):
        return f_return_error(x)
    return f_return(x)
def test_retries_by_default():
    """Retry policy will retry on generic exception types."""
    policy = PulpRetryPolicy()
    failed = f_return_error(RuntimeError("oops!"))
    assert policy.should_retry(0, failed)
def test_sequence_error():
    """f_sequence propagates an error from any of its input futures."""
    error = ValueError("simulated error")
    futures = [f_return("a"), f_return_error(error), f_return("c")]
    assert f_sequence(futures).exception() is error
def test_f_return_error():
    """f_return_error yields a future holding exactly the given exception."""
    exception = RuntimeError("simulated error")
    future = f_return_error(exception)
    assert future.exception() is exception
def div10(x):
    """Return a future of 10/x; any error (e.g. ZeroDivisionError when
    x == 0) is captured as a failed future rather than raised."""
    try:
        quotient = 10 / x
        return f_return(quotient)
    except Exception as ex:
        return f_return_error(ex)
def test_retries_http_errors_no_response():
    """Retry policy will retry on requests exception types with response=None."""
    policy = PulpRetryPolicy()
    failed = f_return_error(requests.HTTPError(response=None))
    assert policy.should_retry(0, failed)