def test_read_rows(self):
    # Setup Expected Response
    expected_response = {}
    expected_response = storage_pb2.ReadRowsResponse(**expected_response)

    # Mock the API response
    channel = ChannelStub(responses=[iter([expected_response])])
    patch = mock.patch("google.api_core.grpc_helpers.create_channel")
    with patch as create_channel:
        create_channel.return_value = channel
        client = big_query_storage_client.BigQueryStorageClient()

    # Setup Request
    read_position = {}

    response = client.read_rows(read_position)
    resources = list(response)
    assert len(resources) == 1
    assert expected_response == resources[0]

    assert len(channel.requests) == 1
    expected_request = storage_pb2.ReadRowsRequest(read_position=read_position)
    actual_request = channel.requests[0][1]
    assert expected_request == actual_request
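ChannelStub here is a local test helper, not a public API; it stands in for a grpc.Channel, replaying canned responses and recording every request so the test can assert on them. A minimal sketch of that idea follows, covering only the server-streaming path that read_rows uses; the _CallableStub name, the method signatures, and the exact bookkeeping are illustrative assumptions, not the real helper.

class _CallableStub(object):
    """Hypothetical callable handed out by ChannelStub: records the request,
    then returns the next canned response."""

    def __init__(self, method, channel):
        self._method = method
        self._channel = channel

    def __call__(self, request, timeout=None, metadata=None, credentials=None):
        # Store (method, request) so tests can inspect channel.requests[0][1].
        self._channel.requests.append((self._method, request))
        return self._channel.responses.pop(0)


class ChannelStub(object):
    """Hypothetical grpc.Channel stand-in: canned responses in, recorded
    requests out (only the streaming path needed by read_rows is sketched)."""

    def __init__(self, responses=()):
        self.responses = list(responses)
        self.requests = []

    def unary_stream(self, method, request_serializer=None,
                     response_deserializer=None):
        return _CallableStub(method, self)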
def ReadRows(self, request, context):  # pylint: disable=unused-argument
    """ReadRows"""
    print("called ReadRows on a fake server")
    response = storage_pb2.ReadRowsResponse()
    stream_index = self._streams.index(request.read_position.stream.name)
    if 0 <= stream_index < len(self._avro_serialized_rows_per_stream):
        response.avro_rows.serialized_binary_rows = \
            self._avro_serialized_rows_per_stream[stream_index]
        response.avro_rows.row_count = \
            self._avro_serialized_rows_count_per_stream[stream_index]
    yield response
def ReadRows(self, request, context):  # pylint: disable=unused-argument
    """ReadRows"""
    print("called ReadRows on a fake server: %s" % str(request))
    response = storage_pb2.ReadRowsResponse()
    stream_index = self._streams.index(request.read_position.stream.name)
    if 0 <= stream_index < len(self._rows_per_stream):
        rows = self._rows_per_stream[stream_index][request.read_position.offset:]
        serialized_rows = FakeBigQueryServer.serialize_to_avro(
            rows, self._avro_schema
        )
        response.avro_rows.serialized_binary_rows = serialized_rows
        response.avro_rows.row_count = len(rows)
    yield response
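The FakeBigQueryServer.serialize_to_avro helper is not shown above. A minimal sketch of how it could be implemented with fastavro follows, assuming self._avro_schema is a dict-form Avro schema and each row is a plain dict; the real helper may differ. The key point is the wire format: serialized_binary_rows is a concatenation of schemaless Avro records, with no file-container header.

import io

import fastavro


@staticmethod
def serialize_to_avro(rows, avro_schema):
    """Serialize rows as concatenated schemaless Avro records, matching the
    format expected in ReadRowsResponse.avro_rows.serialized_binary_rows."""
    buffer = io.BytesIO()
    parsed_schema = fastavro.parse_schema(avro_schema)
    for row in rows:
        # Append each row as a bare record; readers decode them back to back
        # using the same schema.
        fastavro.schemaless_writer(buffer, parsed_schema, row)
    return buffer.getvalue()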