def check_get_splits(self, query, num_splits, num_entities, batch_size):
  """A helper method to test the query_splitter get_splits method.

  Args:
    query: the query to be split
    num_splits: number of splits
    num_entities: number of scatter entities contained in the fake datastore.
    batch_size: the number of entities returned by fake datastore in one req.
  """
  entities = fake_datastore.create_entities(num_entities)
  mock_datastore = MagicMock()
  # Assign a fake run_query method as a side_effect to the mock.
  mock_datastore.run_query.side_effect = \
      fake_datastore.create_run_query(entities, batch_size)

  split_queries = query_splitter.get_splits(mock_datastore, query, num_splits)

  # If the requested num_splits is greater than num_entities, the best the
  # splitter can do is one entity per split.
  expected_num_splits = min(num_splits, num_entities + 1)
  self.assertEqual(len(split_queries), expected_num_splits)

  expected_requests = QuerySplitterTest.create_scatter_requests(
      query, num_splits, batch_size, num_entities)

  expected_calls = []
  for req in expected_requests:
    expected_calls.append(call(req))

  self.assertEqual(expected_calls, mock_datastore.run_query.call_args_list)
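# Hypothetical usage sketch (not part of the original file): a test case that
# drives check_get_splits. The query construction assumes the Cloud Datastore
# v1 query proto (query_pb2.Query with a repeated `kind` field); the exact
# import path for query_pb2 depends on the datastore client version in use,
# and the kind name below is made up for illustration.
def test_get_splits_with_two_splits(self):
  query = query_pb2.Query()
  query.kind.add().name = 'dummy-kind'  # hypothetical kind name
  # 97 scatter entities, returned by the fake datastore in batches of 9,
  # split into 2 sub-queries.
  self.check_get_splits(query, num_splits=2, num_entities=97, batch_size=9)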
def check_get_splits(self, query, num_splits, num_entities, batch_size):
  """A helper method to test the query_splitter get_splits method.

  Args:
    query: the query to be split
    num_splits: number of splits
    num_entities: number of scatter entities contained in the fake datastore.
    batch_size: the number of entities returned by fake datastore in one req.
  """
  # Test for random long ids, string ids, and a mix of both.
  id_or_name = [True, False, None]
  for id_type in id_or_name:
    if id_type is None:
      entities = fake_datastore.create_entities(num_entities, False)
      entities.extend(fake_datastore.create_entities(num_entities, True))
      num_entities *= 2
    else:
      entities = fake_datastore.create_entities(num_entities, id_type)

    mock_datastore = MagicMock()
    # Assign a fake run_query method as a side_effect to the mock.
    mock_datastore.run_query.side_effect = \
        fake_datastore.create_run_query(entities, batch_size)

    split_queries = self.query_splitter.get_splits(
        mock_datastore, query, num_splits)

    # If the requested num_splits is greater than num_entities, the best the
    # splitter can do is one entity per split.
    expected_num_splits = min(num_splits, num_entities + 1)
    self.assertEqual(len(split_queries), expected_num_splits)

    expected_requests = self.create_scatter_requests(
        query, num_splits, batch_size, num_entities)

    expected_calls = []
    for req in expected_requests:
      expected_calls.append(call(req))

    self.assertEqual(expected_calls, mock_datastore.run_query.call_args_list)
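# A minimal sketch (an assumption, not the original implementation) of the
# create_scatter_requests helper referenced above. It rebuilds the sequence of
# RunQueryRequest protos the splitter is expected to issue while paging through
# the scatter query: the fake datastore hands back batch_size entities per call,
# so the expected limit shrinks and the cursor advances by batch_size each time.
# KEYS_PER_SPLIT and _create_scatter_query are assumed to exist in the
# query_splitter module under test, datastore_pb2 is assumed to be the
# Datastore v1 request proto module, and the cursor convention (UTF-8 encoded
# offset) is an assumption about the fake datastore.
@staticmethod
def create_scatter_requests(query, num_splits, batch_size, num_entities):
  requests = []
  # The splitter asks for (num_splits - 1) * KEYS_PER_SPLIT scatter keys.
  count = (num_splits - 1) * query_splitter.KEYS_PER_SPLIT
  scatter_query = query_splitter._create_scatter_query(query, count)
  start_cursor = b''
  i = 0
  while i < count and i < num_entities:
    request = datastore_pb2.RunQueryRequest()
    request.query.CopyFrom(scatter_query)
    request.query.start_cursor = start_cursor
    request.query.limit.value = count - i
    requests.append(request)
    i += batch_size
    start_cursor = str(i).encode('utf-8')
  return requests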
def check_query_iterator(self, num_entities, batch_size, query):
  """A helper method to test the QueryIterator.

  Args:
    num_entities: number of entities contained in the fake datastore.
    batch_size: the number of entities returned by fake datastore in one req.
    query: the query to be executed
  """
  entities = fake_datastore.create_entities(num_entities)
  self._mock_datastore.run_query.side_effect = \
      fake_datastore.create_run_query(entities, batch_size)
  query_iterator = helper.QueryIterator(
      "project", None, self._query, self._mock_datastore)

  i = 0
  for entity in query_iterator:
    self.assertEqual(entity, entities[i].entity)
    i += 1

  limit = query.limit.value if query.HasField('limit') else sys.maxsize
  self.assertEqual(i, min(num_entities, limit))
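# Hypothetical usage sketch (assumed, not from the original file): test methods
# driving check_query_iterator with and without a limit. Note that the helper
# builds the QueryIterator from self._query, so callers are expected to pass
# that same query object (as prepared in setUp); the limit is read from the
# passed-in query to compute the expected number of yielded entities.
def test_query_iterator(self):
  self.check_query_iterator(num_entities=100, batch_size=10, query=self._query)

def test_query_iterator_with_limit(self):
  self._query.limit.value = 60  # only 60 of the 100 entities should be yielded
  self.check_query_iterator(num_entities=100, batch_size=10, query=self._query)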