def test_run_query(self, mock_create_stub):
    """Verify run_query builds the expected RunQueryRequest and returns the stub's response."""
    # Mock gRPC layer
    grpc_stub = mock.Mock()
    mock_create_stub.return_value = grpc_stub

    client = datastore_client.DatastoreClient()

    # Mock request
    project_id = 'projectId-1969970175'
    partition_id = entity_pb2.PartitionId()
    # BUG FIX: ``run_query`` requires ``read_options`` as a third
    # positional argument; the call previously omitted it, which would
    # raise TypeError before the stub was ever exercised.
    read_options = datastore_pb2.ReadOptions()

    # Mock response
    expected_response = datastore_pb2.RunQueryResponse()
    grpc_stub.RunQuery.return_value = expected_response

    response = client.run_query(project_id, partition_id, read_options)
    self.assertEqual(expected_response, response)

    # Exactly one RPC, called as (request, timeout) plus metadata kwarg.
    grpc_stub.RunQuery.assert_called_once()
    args, kwargs = grpc_stub.RunQuery.call_args
    self.assertEqual(len(args), 2)
    self.assertEqual(len(kwargs), 1)
    self.assertIn('metadata', kwargs)
    actual_request = args[0]

    expected_request = datastore_pb2.RunQueryRequest(
        project_id=project_id,
        partition_id=partition_id,
        read_options=read_options)
    self.assertEqual(expected_request, actual_request)
def make_request(project, namespace, query):
    """Build a Cloud Datastore ``RunQueryRequest`` for the given query.

    The request is scoped to the partition identified by ``project`` and
    ``namespace``.
    """
    request = datastore_pb2.RunQueryRequest()
    request.partition_id.CopyFrom(make_partition(project, namespace))
    request.query.CopyFrom(query)
    return request
def test_run_query_w_namespace_nonempty_result(self):
    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.proto.datastore.v1 import entity_pb2
    from google.cloud.proto.datastore.v1 import query_pb2

    project = 'PROJECT'
    kind = 'Kind'
    namespace = 'NS'
    entity = entity_pb2.Entity()
    q_pb = self._make_query_pb(kind)

    # Canned response: a single FULL entity result, nothing left to page.
    rsp_pb = datastore_pb2.RunQueryResponse()
    rsp_pb.batch.entity_results.add(entity=entity)
    rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
    rsp_pb.batch.more_results = query_pb2.QueryResultBatch.NO_MORE_RESULTS

    # Mock the HTTP transport and the owning client.
    http = Http({'status': '200'}, rsp_pb.SerializeToString())
    client = mock.Mock(
        _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])

    # Issue the namespaced query.
    conn = self._make_one(client)
    response = conn.run_query(project, q_pb, namespace=namespace)

    # The canned response is passed through untouched.
    self.assertEqual(response, rsp_pb)

    # The outgoing request carried the namespace and the query protobuf.
    called_with = http._called_with
    uri = _build_expected_url(conn.api_base_url, project, 'runQuery')
    _verify_protobuf_call(self, called_with, uri)
    request = datastore_pb2.RunQueryRequest()
    request.ParseFromString(called_with['body'])
    self.assertEqual(request.partition_id.namespace_id, namespace)
    self.assertEqual(request.query, q_pb)
def run_query(self, project_id, partition_id, read_options, query=None,
              gql_query=None, options=None):
    """Queries for entities.

    Example:
      >>> from google.cloud.gapic.datastore.v1 import datastore_client
      >>> from google.cloud.proto.datastore.v1 import datastore_pb2
      >>> from google.cloud.proto.datastore.v1 import entity_pb2
      >>> api = datastore_client.DatastoreClient()
      >>> project_id = ''
      >>> partition_id = entity_pb2.PartitionId()
      >>> read_options = datastore_pb2.ReadOptions()
      >>> response = api.run_query(project_id, partition_id, read_options)

    Args:
      project_id (string): The ID of the project against which to make the
        request.
      partition_id (:class:`google.cloud.proto.datastore.v1.entity_pb2.PartitionId`):
        Entities are partitioned into subsets, identified by a partition
        ID. Queries are scoped to a single partition. This partition ID is
        normalized with the standard default context partition ID.
      read_options (:class:`google.cloud.proto.datastore.v1.datastore_pb2.ReadOptions`):
        The options for this query.
      query (:class:`google.cloud.proto.datastore.v1.query_pb2.Query`):
        The query to run.
      gql_query (:class:`google.cloud.proto.datastore.v1.query_pb2.GqlQuery`):
        The GQL query to run.
      options (:class:`google.gax.CallOptions`): Overrides the default
        settings for this call, e.g, timeout, retries etc.

    Returns:
      A :class:`google.cloud.proto.datastore.v1.datastore_pb2.RunQueryResponse`
      instance.

    Raises:
      :exc:`google.gax.errors.GaxError` if the RPC is aborted.
      :exc:`ValueError` if the parameters are invalid.
    """
    # ``query`` and ``gql_query`` are mutually exclusive; reject calls
    # that provide both.
    oneof.check_oneof(query=query, gql_query=gql_query)

    request = datastore_pb2.RunQueryRequest(
        project_id=project_id,
        partition_id=partition_id,
        read_options=read_options,
        query=query,
        gql_query=gql_query)
    return self._run_query(request, options)
def run_query(self, project, query_pb, namespace=None, eventual=False,
              transaction_id=None):
    """Run a query on the Cloud Datastore.

    Maps the ``DatastoreService.RunQuery`` protobuf RPC: sends a
    ``runQuery`` request for the given Query protobuf and returns the
    matching entity protobufs. Callers normally use
    :meth:`google.cloud.datastore.query.Query.fetch` instead of invoking
    this directly; :class:`google.cloud.datastore.query.Query` delegates
    here under the hood.

    :type project: str
    :param project: The project over which to run the query.

    :type query_pb: :class:`.query_pb2.Query`
    :param query_pb: The Protobuf representing the query to run.

    :type namespace: str
    :param namespace: The namespace over which to run the query.

    :type eventual: bool
    :param eventual: If False (the default), request ``STRONG`` read
                     consistency. If True, request ``EVENTUAL`` read
                     consistency.

    :type transaction_id: str
    :param transaction_id: If passed, make the request in the scope of
                           the given transaction. Incompatible with
                           ``eventual==True``.

    :rtype: :class:`.datastore_pb2.RunQueryResponse`
    :returns: The protobuf response from a ``runQuery`` request.
    """
    request_pb = _datastore_pb2.RunQueryRequest()
    _set_read_options(request_pb, eventual, transaction_id)
    # An empty namespace means the default partition; only set it when given.
    if namespace:
        request_pb.partition_id.namespace_id = namespace
    request_pb.query.CopyFrom(query_pb)
    return self._datastore_api.run_query(project, request_pb)
def test_run_query_w_namespace_nonempty_result(self):
    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.proto.datastore.v1 import entity_pb2
    from google.cloud.proto.datastore.v1 import query_pb2

    project = 'PROJECT'
    kind = 'Kind'
    namespace = 'NS'
    query_pb = self._make_query_pb(kind)
    partition_id = entity_pb2.PartitionId(
        project_id=project, namespace_id=namespace)
    read_options = datastore_pb2.ReadOptions()

    # Canned response: a single FULL entity result, nothing left to page.
    rsp_pb = datastore_pb2.RunQueryResponse(
        batch=query_pb2.QueryResultBatch(
            entity_result_type=query_pb2.EntityResult.FULL,
            entity_results=[
                query_pb2.EntityResult(entity=entity_pb2.Entity()),
            ],
            more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
        ))

    # Mock the HTTP session and the owning client.
    http = _make_requests_session(
        [_make_response(content=rsp_pb.SerializeToString())])
    client = mock.Mock(
        _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])

    # Issue the namespaced query.
    ds_api = self._make_one(client)
    response = ds_api.run_query(
        project, partition_id, read_options, query=query_pb)

    # Response comes back untouched; the outgoing request carried the
    # partition ID and the query protobuf.
    self.assertEqual(response, rsp_pb)
    uri = _build_expected_url(client._base_url, project, 'runQuery')
    request = _verify_protobuf_call(
        http, uri, datastore_pb2.RunQueryRequest())
    self.assertEqual(request.partition_id, partition_id)
    self.assertEqual(request.query, query_pb)
def run_query(self, project, partition_id, read_options, query=None,
              gql_query=None):
    """Perform a ``runQuery`` request.

    :type project: str
    :param project: The project to connect to. This is usually your
                    project name in the cloud console.

    :type partition_id: :class:`.entity_pb2.PartitionId`
    :param partition_id: Partition ID corresponding to an optional
                         namespace and project ID.

    :type read_options: :class:`.datastore_pb2.ReadOptions`
    :param read_options: The options for this query, containing either the
                         transaction for the read or ``STRONG`` /
                         ``EVENTUAL`` read consistency.

    :type query: :class:`.query_pb2.Query`
    :param query: (Optional) The query protobuf to run. At most one of
                  ``query`` and ``gql_query`` can be specified.

    :type gql_query: :class:`.query_pb2.GqlQuery`
    :param gql_query: (Optional) The GQL query to run. At most one of
                      ``query`` and ``gql_query`` can be specified.

    :rtype: :class:`.datastore_pb2.RunQueryResponse`
    :returns: The returned protobuf response object.
    """
    request_pb = _datastore_pb2.RunQueryRequest(
        project_id=project,
        partition_id=partition_id,
        read_options=read_options,
        query=query,
        gql_query=gql_query,
    )
    # Delegate the HTTP round trip and response parsing to the shared RPC
    # helper.
    return _rpc(self.client._http, project, 'runQuery',
                self.client._base_url, request_pb,
                _datastore_pb2.RunQueryResponse)
def test_run_query_wo_eventual_w_transaction(self):
    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.proto.datastore.v1 import entity_pb2
    from google.cloud.proto.datastore.v1 import query_pb2

    project = 'PROJECT'
    kind = 'Nonesuch'
    cursor = b'\x00'
    transaction = b'TRANSACTION'
    query_pb = self._make_query_pb(kind)
    partition_id = entity_pb2.PartitionId(project_id=project)
    read_options = datastore_pb2.ReadOptions(transaction=transaction)

    # Canned empty response carrying only an end cursor.
    rsp_pb = datastore_pb2.RunQueryResponse(
        batch=query_pb2.QueryResultBatch(
            entity_result_type=query_pb2.EntityResult.FULL,
            end_cursor=cursor,
            more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
        ))

    # Mock the HTTP transport and the owning client.
    http = Http({'status': '200'}, rsp_pb.SerializeToString())
    client = mock.Mock(
        _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])

    # Issue the query inside the transaction.
    ds_api = self._make_one(client)
    response = ds_api.run_query(
        project, partition_id, read_options, query=query_pb)

    # Verify response pass-through and the serialized request fields.
    self.assertEqual(response, rsp_pb)
    uri = _build_expected_url(client._base_url, project, 'runQuery')
    called_with = http._called_with
    _verify_protobuf_call(self, called_with, uri)
    request = datastore_pb2.RunQueryRequest()
    request.ParseFromString(called_with['body'])
    self.assertEqual(request.partition_id, partition_id)
    self.assertEqual(request.query, query_pb)
    self.assertEqual(request.read_options, read_options)
def test_run_query_wo_eventual_w_transaction(self):
    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.proto.datastore.v1 import query_pb2

    project = 'PROJECT'
    kind = 'Nonesuch'
    cursor = b'\x00'
    transaction = b'TRANSACTION'
    q_pb = self._make_query_pb(kind)

    # Canned empty response carrying only an end cursor.
    rsp_pb = datastore_pb2.RunQueryResponse()
    rsp_pb.batch.end_cursor = cursor
    rsp_pb.batch.more_results = query_pb2.QueryResultBatch.NO_MORE_RESULTS
    rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL

    # Mock the HTTP transport and the owning client.
    http = Http({'status': '200'}, rsp_pb.SerializeToString())
    client = mock.Mock(_http=http, spec=['_http'])

    # Issue the query inside the transaction.
    conn = self._make_one(client)
    response = conn.run_query(project, q_pb, transaction_id=transaction)

    # Verify response pass-through and the serialized request fields.
    self.assertEqual(response, rsp_pb)
    uri = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        project + ':runQuery',
    ])
    called_with = http._called_with
    self._verify_protobuf_call(called_with, uri, conn)
    request = datastore_pb2.RunQueryRequest()
    request.ParseFromString(called_with['body'])
    # No namespace was requested, and a transaction (not a consistency
    # level) scopes the read.
    self.assertEqual(request.partition_id.namespace_id, '')
    self.assertEqual(request.query, q_pb)
    self.assertEqual(
        request.read_options.read_consistency,
        datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED)
    self.assertEqual(request.read_options.transaction, transaction)
def create_scatter_requests(query, num_splits, batch_size, num_entities):
    """Creates the list of scatter requests expected from the query splitter.

    The returned list is used to verify that the query splitter made the
    same number of requests, in the same order, to Datastore.
    """
    expected_requests = []
    # The splitter fetches (num_splits - 1) * KEYS_PER_SPLIT scatter keys.
    total_keys = (num_splits - 1) * query_splitter.KEYS_PER_SPLIT
    scatter_query = query_splitter._create_scatter_query(query, total_keys)

    cursor = ''
    fetched = 0
    while fetched < total_keys and fetched < num_entities:
        req = datastore_pb2.RunQueryRequest()
        req.query.CopyFrom(scatter_query)
        # Each batch resumes at the previous cursor with a reduced limit.
        req.query.start_cursor = cursor
        req.query.limit.value = total_keys - fetched
        expected_requests.append(req)
        fetched += batch_size
        cursor = str(fetched)
    return expected_requests
def _get_scatter_keys(datastore, query, num_splits, partition):
    """Gets a list of split keys given a desired number of splits.

    This list will contain multiple split keys for each split. Only a single
    split key will be chosen as the split point, however providing multiple
    keys allows for more uniform sharding.

    Args:
      datastore: the client to datastore containing the data.
      query: the user query.
      num_splits: the number of desired splits.
      partition: the partition to run the query in.

    Returns:
      A list of scatter keys returned by Datastore.
    """
    # Local import so the fix does not depend on module-level imports.
    from functools import cmp_to_key

    scatter_point_query = _create_scatter_query(query, num_splits)
    key_splits = []
    while True:
        req = datastore_pb2.RunQueryRequest()
        if partition:
            req.partition_id.CopyFrom(partition)
        req.query.CopyFrom(scatter_point_query)

        resp = datastore.run_query(req)
        for entity_result in resp.batch.entity_results:
            key_splits.append(entity_result.entity.key)

        if resp.batch.more_results != query_pb2.QueryResultBatch.NOT_FINISHED:
            break
        # Resume where the previous batch stopped and shrink the remaining
        # limit accordingly.
        scatter_point_query.start_cursor = resp.batch.end_cursor
        scatter_point_query.limit.value -= len(resp.batch.entity_results)

    # BUG FIX: list.sort() takes no positional comparison function on
    # Python 3; wrap the cmp-style comparator with cmp_to_key (which also
    # works on Python 2.7).
    key_splits.sort(key=cmp_to_key(helper.key_comparator))
    return key_splits
def test_run_query_w_eventual_no_transaction(self):
    from google.cloud.proto.datastore.v1 import datastore_pb2
    from google.cloud.proto.datastore.v1 import query_pb2

    project = 'PROJECT'
    kind = 'Nonesuch'
    cursor = b'\x00'
    q_pb = self._make_query_pb(kind)

    # Canned empty response carrying only an end cursor.
    rsp_pb = datastore_pb2.RunQueryResponse()
    rsp_pb.batch.end_cursor = cursor
    rsp_pb.batch.more_results = query_pb2.QueryResultBatch.NO_MORE_RESULTS
    rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL

    # Mock the HTTP transport and the owning client.
    http = Http({'status': '200'}, rsp_pb.SerializeToString())
    client = mock.Mock(
        _http=http, _base_url='test.invalid', spec=['_http', '_base_url'])

    # Issue the query with eventual consistency and no transaction.
    conn = self._make_one(client)
    response = conn.run_query(project, q_pb, eventual=True)

    # Verify response pass-through and the serialized request fields.
    self.assertEqual(response, rsp_pb)
    uri = _build_expected_url(conn.api_base_url, project, 'runQuery')
    called_with = http._called_with
    _verify_protobuf_call(self, called_with, uri)
    request = datastore_pb2.RunQueryRequest()
    request.ParseFromString(called_with['body'])
    # No namespace was requested; EVENTUAL consistency replaces any
    # transaction scope.
    self.assertEqual(request.partition_id.namespace_id, '')
    self.assertEqual(request.query, q_pb)
    self.assertEqual(request.read_options.read_consistency,
                     datastore_pb2.ReadOptions.EVENTUAL)
    self.assertEqual(request.read_options.transaction, b'')