def _read(stub):
    """Probe to test Read and StreamingRead grpc call from Spanner stub.

    Args:
        stub: An object of SpannerStub.

    Raises:
        ValueError: An error occurred when read result is not as expected.
    """
    _read_tracer = initialize_tracer()
    with _read_tracer.span(name='_read'):
        session = None
        try:
            # Create session
            with _read_tracer.span(name='stub.CreateSession'):
                session = stub.CreateSession(
                    spanner_pb2.CreateSessionRequest(database=_DATABASE))

            # Probing Read call
            with _read_tracer.span(name='stub.Read'):
                result_set = stub.Read(
                    spanner_pb2.ReadRequest(
                        session=session.name,
                        table='users',
                        columns=['username', 'firstname', 'lastname'],
                        key_set=keys_pb2.KeySet(all=True)))
            if result_set is None:
                raise ValueError('result_set is None')
            if len(result_set.rows) != 1:
                raise ValueError('incorrect result_set rows %d' %
                                 len(result_set.rows))
            if result_set.rows[0].values[0].string_value != _TEST_USERNAME:
                raise ValueError('incorrect sql result %s' %
                                 result_set.rows[0].values[0].string_value)

            # Probing StreamingRead call
            with _read_tracer.span(name='stub.StreamingRead'):
                partial_result_set = stub.StreamingRead(
                    spanner_pb2.ReadRequest(
                        session=session.name,
                        table='users',
                        columns=['username', 'firstname', 'lastname'],
                        key_set=keys_pb2.KeySet(all=True)))
            if partial_result_set is None:
                raise ValueError('streaming_result_set is None')
            with _read_tracer.span(name='partial_result_set.next'):
                # BUG FIX: use the builtin next() instead of the
                # Python-2-only .next() method so the probe also runs on
                # Python 3, where iterators expose only __next__.
                first_result = next(partial_result_set)
            if first_result.values[0].string_value != _TEST_USERNAME:
                raise ValueError('incorrect streaming sql first result %s' %
                                 first_result.values[0].string_value)
        finally:
            # Always delete the session, even when a probe step raised.
            if session is not None:
                with _read_tracer.span(name='stub.DeleteSession'):
                    stub.DeleteSession(
                        spanner_pb2.DeleteSessionRequest(name=session.name))
def test_streaming_read(self):
    """streaming_read yields the stubbed PartialResultSet and issues one ReadRequest."""
    # Canned response the stub channel will stream back.
    expected_response = result_set_pb2.PartialResultSet(
        chunked_value=True, resume_token=b'103')

    # Mock the API response
    channel = ChannelStub(responses=[iter([expected_response])])
    client = spanner_v1.SpannerClient(channel=channel)

    # Setup Request
    session = client.session_path('[PROJECT]', '[INSTANCE]', '[DATABASE]',
                                  '[SESSION]')
    table = 'table110115790'
    columns = []
    key_set = {}

    streamed = client.streaming_read(session, table, columns, key_set)
    received = list(streamed)
    assert len(received) == 1
    assert expected_response == received[0]

    # Exactly one RPC was made, carrying the expected request proto.
    assert len(channel.requests) == 1
    actual_request = channel.requests[0][1]
    assert actual_request == spanner_pb2.ReadRequest(
        session=session, table=table, columns=columns, key_set=key_set)
def test_read(self):
    """read returns the stubbed ResultSet and issues exactly one ReadRequest."""
    # Canned (empty) response from the stub channel.
    expected_response = result_set_pb2.ResultSet()

    # Mock the API response
    channel = ChannelStub(responses=[expected_response])
    client = spanner_v1.SpannerClient(channel=channel)

    # Setup Request
    session = client.session_path('[PROJECT]', '[INSTANCE]', '[DATABASE]',
                                  '[SESSION]')
    table = 'table110115790'
    columns = []
    key_set = {}

    response = client.read(session, table, columns, key_set)
    assert expected_response == response

    # Exactly one RPC was made, carrying the expected request proto.
    assert len(channel.requests) == 1
    actual_request = channel.requests[0][1]
    assert actual_request == spanner_pb2.ReadRequest(
        session=session, table=table, columns=columns, key_set=key_set)
def test_read(self, mock_create_stub):
    """client.read forwards a ReadRequest to the mocked gRPC stub's Read."""
    # Mock gRPC layer
    grpc_stub = mock.Mock()
    mock_create_stub.return_value = grpc_stub
    client = spanner_v1.SpannerClient()

    # Request parameters.
    session = client.session_path('[PROJECT]', '[INSTANCE]', '[DATABASE]',
                                  '[SESSION]')
    table = 'table110115790'
    columns = []
    key_set = {}

    # Canned response.
    expected_response = result_set_pb2.ResultSet()
    grpc_stub.Read.return_value = expected_response

    response = client.read(session, table, columns, key_set)
    self.assertEqual(expected_response, response)

    # The stub was invoked exactly once with two positional args (the
    # request first) and a single 'metadata' keyword.
    grpc_stub.Read.assert_called_once()
    args, kwargs = grpc_stub.Read.call_args
    self.assertEqual(len(args), 2)
    self.assertEqual(len(kwargs), 1)
    self.assertIn('metadata', kwargs)
    self.assertEqual(
        spanner_pb2.ReadRequest(
            session=session, table=table, columns=columns, key_set=key_set),
        args[0])
def test_read(self):
    """read returns the stubbed ResultSet and issues a single ReadRequest."""
    # Canned (empty) response from the stub channel.
    expected_response = result_set_pb2.ResultSet()

    # Route the client's channel creation to our stub channel.
    channel = ChannelStub(responses=[expected_response])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = spanner_v1.SpannerClient()

    # Setup Request
    session = client.session_path("[PROJECT]", "[INSTANCE]", "[DATABASE]",
                                  "[SESSION]")
    table = "table110115790"
    columns = []
    key_set = {}

    response = client.read(session, table, columns, key_set)
    assert response == expected_response

    # Exactly one RPC was made, carrying the expected request proto.
    assert len(channel.requests) == 1
    expected_request = spanner_pb2.ReadRequest(
        session=session, table=table, columns=columns, key_set=key_set)
    assert channel.requests[0][1] == expected_request
def test_streaming_read(self, mock_create_stub):
    """streaming_read yields the stubbed PartialResultSet via StreamingRead."""
    # Mock gRPC layer
    grpc_stub = mock.Mock()
    mock_create_stub.return_value = grpc_stub
    client = spanner_v1.SpannerClient()

    # Request parameters.
    session = client.session_path('[PROJECT]', '[INSTANCE]', '[DATABASE]',
                                  '[SESSION]')
    table = 'table110115790'
    columns = []
    key_set = {}

    # Canned streaming response.
    expected_response = result_set_pb2.PartialResultSet(
        chunked_value=True, resume_token=b'103')
    grpc_stub.StreamingRead.return_value = iter([expected_response])

    response = client.streaming_read(session, table, columns, key_set)
    received = list(response)
    self.assertEqual(1, len(received))
    self.assertEqual(expected_response, received[0])

    # The stub was invoked exactly once with two positional args (the
    # request first) and a single 'metadata' keyword.
    grpc_stub.StreamingRead.assert_called_once()
    args, kwargs = grpc_stub.StreamingRead.call_args
    self.assertEqual(len(args), 2)
    self.assertEqual(len(kwargs), 1)
    self.assertIn('metadata', kwargs)
    self.assertEqual(
        spanner_pb2.ReadRequest(
            session=session, table=table, columns=columns, key_set=key_set),
        args[0])
def test_streaming_read(self):
    """streaming_read yields the stubbed PartialResultSet and issues one ReadRequest."""
    # Canned response the stub channel will stream back.
    expected_response = result_set_pb2.PartialResultSet(
        chunked_value=True, resume_token=b"103")

    # Route the client's channel creation to our stub channel.
    channel = ChannelStub(responses=[iter([expected_response])])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = spanner_v1.SpannerClient()

    # Setup Request
    session = client.session_path("[PROJECT]", "[INSTANCE]", "[DATABASE]",
                                  "[SESSION]")
    table = "table110115790"
    columns = []
    key_set = {}

    streamed = client.streaming_read(session, table, columns, key_set)
    received = list(streamed)
    assert len(received) == 1
    assert expected_response == received[0]

    # Exactly one RPC was made, carrying the expected request proto.
    assert len(channel.requests) == 1
    expected_request = spanner_pb2.ReadRequest(
        session=session, table=table, columns=columns, key_set=key_set)
    assert channel.requests[0][1] == expected_request
def streaming_read(self,
                   session,
                   table,
                   columns,
                   key_set,
                   transaction=None,
                   index=None,
                   limit=None,
                   resume_token=None,
                   options=None):
    """
    Like ``Read``, except returns the result set as a stream.

    Unlike ``Read``, there is no limit on the size of the returned result
    set. However, no individual row in the result set can exceed 100 MiB,
    and no column value can exceed 10 MiB.

    Example:
        >>> from google.cloud import spanner_v1
        >>>
        >>> client = spanner_v1.SpannerClient()
        >>>
        >>> session = client.session_path('[PROJECT]', '[INSTANCE]', '[DATABASE]', '[SESSION]')
        >>> table = ''
        >>> columns = []
        >>> key_set = {}
        >>>
        >>> for element in client.streaming_read(session, table, columns, key_set):
        ...     # process element
        ...     pass

    Args:
        session (str): Required. The session in which the read should be
            performed.
        table (str): Required. The name of the table in the database to be
            read.
        columns (list[str]): The columns of ``table`` to be returned for
            each row matching this request.
        key_set (Union[dict, ~google.cloud.spanner_v1.types.KeySet]):
            Required. ``key_set`` identifies the rows to be yielded.
            ``key_set`` names the primary keys of the rows in ``table`` to
            be yielded, unless ``index`` is present. If ``index`` is
            present, then ``key_set`` instead names index keys in
            ``index``. Rows are yielded in table primary key order (if
            ``index`` is empty) or index key order (if ``index`` is
            non-empty). It is not an error for the ``key_set`` to name
            rows that do not exist in the database; Read yields nothing
            for nonexistent rows. If a dict is provided, it must be of the
            same form as the protobuf message
            :class:`~google.cloud.spanner_v1.types.KeySet`
        transaction (Union[dict, ~google.cloud.spanner_v1.types.TransactionSelector]):
            The transaction to use. If none is provided, the default is a
            temporary read-only transaction with strong concurrency. If a
            dict is provided, it must be of the same form as the protobuf
            message
            :class:`~google.cloud.spanner_v1.types.TransactionSelector`
        index (str): If non-empty, the name of an index on ``table``. This
            index is used instead of the table primary key when
            interpreting ``key_set`` and sorting result rows. See
            ``key_set`` for further information.
        limit (long): If greater than zero, only the first ``limit`` rows
            are yielded. If ``limit`` is zero, the default is no limit.
        resume_token (bytes): If this request is resuming a previously
            interrupted read, ``resume_token`` should be copied from the
            last ``PartialResultSet`` yielded before the interruption.
            Doing this enables the new read to resume where the last read
            left off. The rest of the request parameters must exactly
            match the request that yielded this token.
        options (~google.gax.CallOptions): Overrides the default settings
            for this call, e.g, timeout, retries etc.

    Returns:
        Iterable[~google.cloud.spanner_v1.types.PartialResultSet].

    Raises:
        :exc:`google.gax.errors.GaxError` if the RPC is aborted.
        :exc:`ValueError` if the parameters are invalid.
    """
    # Assemble the request proto and hand it to the streaming transport.
    read_request = spanner_pb2.ReadRequest(
        session=session,
        table=table,
        columns=columns,
        key_set=key_set,
        transaction=transaction,
        index=index,
        limit=limit,
        resume_token=resume_token)
    return self._streaming_read(read_request, options)
def read(self,
         session,
         table,
         columns,
         key_set,
         transaction=None,
         index=None,
         limit=None,
         resume_token=None,
         options=None):
    """
    Reads rows from the database using key lookups and scans, as a simple
    key/value style alternative to ``ExecuteSql``.

    This method cannot be used to return a result set larger than 10 MiB;
    if the read matches more data than that, the read fails with a
    ``FAILED_PRECONDITION`` error.

    Reads inside read-write transactions might return ``ABORTED``. If this
    occurs, the application should restart the transaction from the
    beginning. See ``Transaction`` for more details.

    Larger result sets can be yielded in streaming fashion by calling
    ``StreamingRead`` instead.

    Example:
        >>> from google.cloud import spanner_v1
        >>>
        >>> client = spanner_v1.SpannerClient()
        >>>
        >>> session = client.session_path('[PROJECT]', '[INSTANCE]', '[DATABASE]', '[SESSION]')
        >>> table = ''
        >>> columns = []
        >>> key_set = {}
        >>>
        >>> response = client.read(session, table, columns, key_set)

    Args:
        session (str): Required. The session in which the read should be
            performed.
        table (str): Required. The name of the table in the database to be
            read.
        columns (list[str]): The columns of ``table`` to be returned for
            each row matching this request.
        key_set (Union[dict, ~google.cloud.spanner_v1.types.KeySet]):
            Required. ``key_set`` identifies the rows to be yielded.
            ``key_set`` names the primary keys of the rows in ``table`` to
            be yielded, unless ``index`` is present. If ``index`` is
            present, then ``key_set`` instead names index keys in
            ``index``. Rows are yielded in table primary key order (if
            ``index`` is empty) or index key order (if ``index`` is
            non-empty). It is not an error for the ``key_set`` to name
            rows that do not exist in the database; Read yields nothing
            for nonexistent rows. If a dict is provided, it must be of the
            same form as the protobuf message
            :class:`~google.cloud.spanner_v1.types.KeySet`
        transaction (Union[dict, ~google.cloud.spanner_v1.types.TransactionSelector]):
            The transaction to use. If none is provided, the default is a
            temporary read-only transaction with strong concurrency. If a
            dict is provided, it must be of the same form as the protobuf
            message
            :class:`~google.cloud.spanner_v1.types.TransactionSelector`
        index (str): If non-empty, the name of an index on ``table``. This
            index is used instead of the table primary key when
            interpreting ``key_set`` and sorting result rows. See
            ``key_set`` for further information.
        limit (long): If greater than zero, only the first ``limit`` rows
            are yielded. If ``limit`` is zero, the default is no limit.
        resume_token (bytes): If this request is resuming a previously
            interrupted read, ``resume_token`` should be copied from the
            last ``PartialResultSet`` yielded before the interruption.
            Doing this enables the new read to resume where the last read
            left off. The rest of the request parameters must exactly
            match the request that yielded this token.
        options (~google.gax.CallOptions): Overrides the default settings
            for this call, e.g, timeout, retries etc.

    Returns:
        A :class:`~google.cloud.spanner_v1.types.ResultSet` instance.

    Raises:
        :exc:`google.gax.errors.GaxError` if the RPC is aborted.
        :exc:`ValueError` if the parameters are invalid.
    """
    # Assemble the request proto and hand it to the unary transport.
    read_request = spanner_pb2.ReadRequest(
        session=session,
        table=table,
        columns=columns,
        key_set=key_set,
        transaction=transaction,
        index=index,
        limit=limit,
        resume_token=resume_token)
    return self._read(read_request, options)