def _count(self):
    """Retrieve and cache the total number of results for this query.

    Issues a zero-limit request so the server reports ``totalResults``
    without sending back any documents.
    """
    params = self.prepare_query({'limit': 0})
    response = self._cb.get_object(self._doc_class.urlobject,
                                   query_parameters=convert_query_params(params))
    self._total_results = int(response.get("totalResults", 0))
    self._count_valid = True
    return self._total_results
def results(self):
    """Return the sorted list of result objects, querying the server on first access.

    The server is contacted only once; subsequent calls return the cached
    ``self._results`` list unchanged.
    """
    if self._full_init:
        return self._results
    raw = self._cb.get_object(self._urlobject,
                              query_parameters=convert_query_params(self._query))
    if raw:
        self._results = [self._doc_class.new_object(self._cb, entry, full_doc=True)
                         for entry in raw]
    else:
        self._results = []
    self._results = self._sort(self._results)
    self._full_init = True
    return self._results
def results(self):
    """Lazily fetch, hydrate, sort, and cache the query results.

    On the first call the raw documents are pulled from the server and
    converted into model objects; later calls just return the cache.
    """
    if not self._full_init:
        query_params = convert_query_params(self._query)
        raw = self._cb.get_object(self._urlobject, query_parameters=query_params)
        hydrated = ([self._doc_class.new_object(self._cb, doc, full_doc=True) for doc in raw]
                    if raw else [])
        self._results = hydrated
        self._results = self._sort(self._results)
        self._full_init = True
    return self._results
def _search(self, start=0, rows=0):
    """Generator that pages through the full result set for this query.

    :param start: offset of the first result to request (omitted from the
        request when 0, letting the server use its default)
    :param rows: maximum number of results to yield; 0 means no limit

    Yields raw result dictionaries one at a time, requesting
    ``self._batch_size`` records per round trip.
    """
    # iterate over total result set, 1000 at a time
    args = {}
    if start != 0:
        args['start'] = start
    args['rows'] = self._batch_size
    current = start   # running index of the next result position
    numrows = 0       # number of results yielded to the caller so far
    args = self.prepare_query(args)
    still_querying = True
    while still_querying:
        query_args = convert_query_params(args)
        result = self._cb.get_object(self._doc_class.urlobject, query_parameters=query_args)
        # every page also reports the total, so refresh the cached count
        self._total_results = result.get("totalResults", 0)
        self._count_valid = True
        results = result.get('results', [])
        if results is None:
            # NOTE(review): a None payload ends iteration here; presumably
            # the backend returns None past its hard result cap -- confirm
            log.debug("Results are None")
            if current >= 100000:
                log.info("Max result size exceeded. Truncated to 100k.")
            break
        for item in results:
            yield item
            current += 1
            numrows += 1
            if rows and numrows == rows:
                # caller's requested row budget exhausted -- stop paging
                still_querying = False
                break
        args[
            'start'] = current + 1  # as of 6/2017, the indexing on the Cb Defense backend is still 1-based
        if current >= self._total_results:
            break
        if not results:
            # empty page before reaching totalResults: the server's count
            # was an overestimate, so trust what was actually received
            log.debug(
                "server reported total_results overestimated the number of results for this query by {0}"
                .format(self._total_results - current))
            log.debug(
                "resetting total_results for this query to {0}".format(
                    current))
            self._total_results = current
            break
def _search(self, start=0, rows=0):
    """Generator that pages through the full result set for this query.

    :param start: offset of the first result to request (omitted from the
        request when 0, letting the server use its default)
    :param rows: maximum number of results to yield; 0 means no limit

    Yields raw result dictionaries one at a time, requesting
    ``self._batch_size`` records per round trip.
    """
    # iterate over total result set, 1000 at a time
    args = {}
    if start != 0:
        args['start'] = start
    args['rows'] = self._batch_size
    current = start   # running index of the next result position
    numrows = 0       # number of results yielded to the caller so far
    args = self.prepare_query(args)
    still_querying = True
    while still_querying:
        query_args = convert_query_params(args)
        result = self._cb.get_object(self._doc_class.urlobject, query_parameters=query_args)
        # every page also reports the total, so refresh the cached count
        self._total_results = result.get("totalResults", 0)
        self._count_valid = True
        results = result.get('results', [])
        if results is None:
            # NOTE(review): a None payload ends iteration here; presumably
            # the backend returns None past its hard result cap -- confirm
            log.debug("Results are None")
            if current >= 100000:
                log.info("Max result size exceeded. Truncated to 100k.")
            break
        for item in results:
            yield item
            current += 1
            numrows += 1
            if rows and numrows == rows:
                # caller's requested row budget exhausted -- stop paging
                still_querying = False
                break
        args['start'] = current + 1  # as of 6/2017, the indexing on the Cb Defense backend is still 1-based
        if current >= self._total_results:
            break
        if not results:
            # empty page before reaching totalResults: the server's count
            # was an overestimate, so trust what was actually received
            log.debug("server reported total_results overestimated the number of results for this query by {0}"
                      .format(self._total_results - current))
            log.debug("resetting total_results for this query to {0}".format(current))
            self._total_results = current
            break