def __init__(self, partition_key_target_range, client, collection_link, query,
             document_producer_comp):
    ''' Constructor '''
    # TODO: is it fine that we build the options dict here instead of inheriting it?
    self._options = {}
    self._partition_key_target_range = partition_key_target_range
    self._doc_producer_comp = document_producer_comp
    self._client = client
    self._buffer = deque()

    self._is_finished = False
    self._has_started = False
    self._cur_item = None

    # initiate execution context
    path = base.GetPathFromLink(collection_link, 'docs')
    collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)

    def fetch_fn(options):
        return self._client.QueryFeed(path, collection_id, query, options,
                                      partition_key_target_range['id'])

    self._ex_context = _DefaultQueryExecutionContext(client, self._options, fetch_fn)
def __init__(self, partition_key_target_range, client, collection_link, query,
             document_producer_comp, options):
    ''' Constructor '''
    self._options = options
    self._partition_key_target_range = partition_key_target_range
    self._doc_producer_comp = document_producer_comp
    self._client = client
    self._buffer = deque()

    self._is_finished = False
    self._has_started = False
    self._cur_item = None

    # initiate execution context
    path = base.GetPathFromLink(collection_link, 'docs')
    collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)

    def fetch_fn(options):
        return self._client.QueryFeed(path, collection_id, query, options,
                                      partition_key_target_range['id'])

    self._ex_context = _DefaultQueryExecutionContext(
        client, self._options, fetch_fn)
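# In both constructor variants above, fetch_fn is a closure: it captures the feed
# path, the collection id, the query, and the target partition key range's id, so
# the inner execution context only has to supply the per-call options dict.
# A minimal, self-contained sketch of that closure pattern follows;
# "make_partition_fetch_fn" and FakeClient are hypothetical stand-ins for
# illustration, not SDK code.
def make_partition_fetch_fn(client, path, collection_id, query, partition_key_range_id):
    def fetch_fn(options):
        # everything except the options is fixed at creation time
        return client.QueryFeed(path, collection_id, query, options, partition_key_range_id)
    return fetch_fn


class FakeClient(object):
    """Records the arguments it was called with instead of hitting a backend."""

    def QueryFeed(self, path, collection_id, query, options, partition_key_range_id):
        return [{'path': path,
                 'range': partition_key_range_id,
                 'maxItemCount': options.get('maxItemCount')}]


fetch_fn = make_partition_fetch_fn(FakeClient(), 'dbs/db/colls/coll/docs', 'coll',
                                   'SELECT * FROM c', '0')
print(fetch_fn({'maxItemCount': 10}))
# [{'path': 'dbs/db/colls/coll/docs', 'range': '0', 'maxItemCount': 10}]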
def __init__(self, client, resource_link, query, options, fetch_function):
    ''' Constructor '''
    super(_ProxyQueryExecutionContext, self).__init__(client, options)

    self._execution_context = _DefaultQueryExecutionContext(
        client, options, fetch_function)
    self._resource_link = resource_link
    self._query = query
    self._fetch_function = fetch_function
def _test_default_execution_context(self, options, query, expected_number_of_results):
    page_size = options['maxItemCount']
    collection_link = self.GetDocumentCollectionLink(self.created_db, self.created_collection)
    path = base.GetPathFromLink(collection_link, 'docs')
    collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)

    def fetch_fn(options):
        return self.client.QueryFeed(path, collection_id, query, options)

    ######################################
    # test next() behavior
    ######################################
    ex = base_execution_context._DefaultQueryExecutionContext(self.client, options, fetch_fn)
    it = iter(ex)

    def invokeNext():
        return next(it)

    results = {}
    # validate that repeated invocations of next() produce the expected results
    for _ in range(expected_number_of_results):
        item = invokeNext()
        results[item['id']] = item

    self.assertEqual(len(results), expected_number_of_results)

    # after the result set is exhausted, invoking next() must raise StopIteration
    self.assertRaises(StopIteration, invokeNext)

    ######################################
    # test fetch_next_block() behavior
    ######################################
    ex = base_execution_context._DefaultQueryExecutionContext(self.client, options, fetch_fn)

    results = {}
    cnt = 0
    while True:
        fetched_res = ex.fetch_next_block()
        fetched_size = len(fetched_res)

        for item in fetched_res:
            results[item['id']] = item
        cnt += fetched_size

        if cnt < expected_number_of_results:
            # the backend may not necessarily return exactly page_size results
            self.assertEqual(fetched_size, page_size, "page size")
        elif cnt == expected_number_of_results:
            self.assertTrue(fetched_size <= page_size, "last page size")
            break
        else:
            # cnt > expected_number_of_results
            self.fail("more results than expected")

    # validate the number of collected results
    self.assertEqual(len(results), expected_number_of_results)

    # no more results will be returned
    self.assertEqual(ex.fetch_next_block(), [])
def _test_default_execution_context(self, options, query, expected_number_of_results):
    page_size = options['maxItemCount']
    collection_link = self.GetDocumentCollectionLink(
        self.created_db, self.created_collection)
    path = base.GetPathFromLink(collection_link, 'docs')
    collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)

    def fetch_fn(options):
        return self.client.client_connection.QueryFeed(
            path, collection_id, query, options)

    ######################################
    # test next() behavior
    ######################################
    ex = base_execution_context._DefaultQueryExecutionContext(
        self.client.client_connection, options, fetch_fn)
    it = iter(ex)

    def invokeNext():
        return next(it)

    results = {}
    # validate that repeated invocations of next() produce the expected results
    for _ in range(expected_number_of_results):
        item = invokeNext()
        results[item['id']] = item

    self.assertEqual(len(results), expected_number_of_results)

    # after the result set is exhausted, invoking next() must raise StopIteration
    self.assertRaises(StopIteration, invokeNext)

    ######################################
    # test fetch_next_block() behavior
    ######################################
    ex = base_execution_context._DefaultQueryExecutionContext(
        self.client.client_connection, options, fetch_fn)

    results = {}
    cnt = 0
    while True:
        fetched_res = ex.fetch_next_block()
        fetched_size = len(fetched_res)

        for item in fetched_res:
            results[item['id']] = item
        cnt += fetched_size

        if cnt < expected_number_of_results:
            # the backend may not necessarily return exactly page_size results
            self.assertEqual(fetched_size, page_size, "page size")
        elif cnt == expected_number_of_results:
            self.assertTrue(fetched_size <= page_size, "last page size")
            break
        else:
            # cnt > expected_number_of_results
            self.fail("more results than expected")

    # validate the number of collected results
    self.assertEqual(len(results), expected_number_of_results)

    # no more results will be returned
    self.assertEqual(ex.fetch_next_block(), [])
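# The tests above exercise the two consumption styles an execution context
# offers: item-at-a-time iteration (next() until StopIteration) and
# block-at-a-time paging (fetch_next_block() until it returns an empty list).
# Below is a minimal, self-contained sketch of that dual interface;
# "BufferedContext" is a hypothetical stand-in, not the SDK's
# _DefaultQueryExecutionContext.
from collections import deque


class BufferedContext(object):
    """Drains pages from a fetch function and exposes both interfaces."""

    def __init__(self, fetch_function, options):
        self._fetch_function = fetch_function
        self._options = options
        self._buffer = deque()
        self._finished = False

    def fetch_next_block(self):
        # serve buffered items first, otherwise pull the next page
        if self._buffer:
            block = list(self._buffer)
            self._buffer.clear()
            return block
        if self._finished:
            return []
        block = self._fetch_function(self._options)
        if not block:
            self._finished = True
        return block

    def __iter__(self):
        return self

    def __next__(self):
        if not self._buffer:
            self._buffer.extend(self.fetch_next_block())
        if not self._buffer:
            raise StopIteration
        return self._buffer.popleft()


# usage: a toy fetch function serving two pages
pages = deque([[{'id': '1'}, {'id': '2'}], [{'id': '3'}]])

def fetch_fn(options):
    return pages.popleft() if pages else []

ctx = BufferedContext(fetch_fn, {'maxItemCount': 2})
assert [item['id'] for item in ctx] == ['1', '2', '3']
assert ctx.fetch_next_block() == []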