def test_convert_query_params():
    """Test that query parameter dicts are properly converted."""
    lv = convert_query_params({'answer': 42, 'hup': [2, 3, 4], 'goody': 'twoshoes'})
    assert isinstance(lv, list)
    assert len(lv) == 5
    assert ('answer', 42) in lv
    assert ('hup', 2) in lv
    assert ('hup', 3) in lv
    assert ('hup', 4) in lv
    assert ('goody', 'twoshoes') in lv

    lv = convert_query_params({})
    assert isinstance(lv, list)
    assert len(lv) == 0
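# convert_query_params() itself is not part of this excerpt; it is assumed to be
# imported from the package under test. The sketch below is only a stand-in that
# is consistent with the assertions above, so the excerpt can run on its own --
# it is an assumption, not the library's actual implementation.
def convert_query_params(qd):
    """Flatten a query dict into a list of (key, value) tuples.

    List values are expanded into one (key, element) tuple per element.
    """
    params = []
    for key, value in qd.items():
        if isinstance(value, list):
            # each element of a list value becomes its own (key, element) pair
            params.extend((key, element) for element in value)
        else:
            params.append((key, value))
    return params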
def _count(self):
    # return the cached total if a previous call already asked the server
    if self._count_valid:
        return self._total_results

    args = {}
    args = self.prepare_query(args)
    query_args = convert_query_params(args)

    self._total_results = int(self._cb.get_object(self._doc_class.urlobject,
                                                  query_parameters=query_args)
                              .get("totalResults", 0))
    self._count_valid = True
    return self._total_results
def _search(self, start=0, rows=0):
    # iterate over total result set, in batches of self._batch_size at a time
    # defaults to 100 results each call
    args = {}
    if start != 0:
        args['start'] = start
    args['rows'] = self._batch_size

    current = start
    numrows = 0

    args = self.prepare_query(args)
    still_querying = True

    while still_querying:
        query_args = convert_query_params(args)
        result = self._cb.get_object(self._doc_class.urlobject, query_parameters=query_args)

        self._total_results = result.get("totalResults", 0)
        self._count_valid = True

        results = result.get('results', [])

        if results is None:
            log.debug("Results are None")
            if current >= 100000:
                log.info("Max result size exceeded. Truncated to 100k.")
            break

        for item in results:
            yield item
            current += 1
            numrows += 1

            # stop early once the caller's requested row count has been served
            if rows and numrows == rows:
                still_querying = False
                break

        # as of 6/2017, the indexing on the Cb Endpoint Standard backend is still 1-based
        args['start'] = current + 1

        if current >= self._total_results:
            break

        if not results:
            log.debug("server reported total_results overestimated the number of results "
                      "for this query by {0}".format(self._total_results - current))
            log.debug("resetting total_results for this query to {0}".format(current))
            self._total_results = current
            break
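# ---------------------------------------------------------------------------
# Hypothetical usage sketch (not part of the library). _search() is a generator
# that pages through the result set self._batch_size records at a time, and
# _count() caches the reported total. The stub connection and query classes
# below are illustrative assumptions standing in for the real objects, and the
# sketch assumes convert_query_params() is defined or imported earlier in this
# excerpt (for example, the stand-in above).
# ---------------------------------------------------------------------------
import logging
from types import SimpleNamespace

log = logging.getLogger(__name__)  # _search() expects a module-level logger


class _FakeConnection:
    """Pretends to be the backend: serves 1-based pages out of a fixed dataset."""

    def __init__(self, total):
        self._items = [{"id": i} for i in range(1, total + 1)]

    def get_object(self, url, query_parameters=None):
        params = dict(query_parameters or [])
        page_start = int(params.get("start", 1))      # backend indexing is 1-based
        page_rows = int(params.get("rows", 100))
        page = self._items[page_start - 1:page_start - 1 + page_rows]
        return {"totalResults": len(self._items), "results": page}


class _FakeQuery:
    """Minimal stand-in exposing only the attributes _search()/_count() rely on."""

    _doc_class = SimpleNamespace(urlobject="/example/urlobject")  # hypothetical endpoint
    _batch_size = 2

    def __init__(self, cb):
        self._cb = cb
        self._count_valid = False
        self._total_results = 0

    def prepare_query(self, args):
        return args                                   # no extra criteria in this sketch

    # borrow the functions defined above so the real paging logic is exercised
    _count = _count
    _search = _search


if __name__ == "__main__":
    q = _FakeQuery(_FakeConnection(total=5))
    print([item["id"] for item in q._search()])       # [1, 2, 3, 4, 5], fetched two at a time
    print(q._count())                                  # 5, served from the cached count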