def _execute_sql(stub, metrics):
    """Probes the ExecuteSql and ExecuteStreamingSql calls of a Spanner stub.

    Args:
        stub: An object of SpannerStub.
        metrics: A dict mapping metric names to latency values in milliseconds.

    Raises:
        ValueError: If the SQL result is not as expected.
    """
    session = None
    try:
        session = stub.CreateSession(
            spanner_pb2.CreateSessionRequest(database=_DATABASE))

        # Probing ExecuteSql call.
        start = time.time()
        result_set = stub.ExecuteSql(
            spanner_pb2.ExecuteSqlRequest(session=session.name,
                                          sql='select * FROM users'))
        latency = (time.time() - start) * 1000
        metrics['execute_sql_latency_ms'] = latency
        if result_set is None:
            raise ValueError('result_set is None')
        if len(result_set.rows) != 1:
            raise ValueError('incorrect result_set rows %d' %
                             len(result_set.rows))
        if result_set.rows[0].values[0].string_value != _TEST_USERNAME:
            raise ValueError('incorrect sql result %s' %
                             result_set.rows[0].values[0].string_value)

        # Probing ExecuteStreamingSql call.
        partial_result_set = stub.ExecuteStreamingSql(
            spanner_pb2.ExecuteSqlRequest(session=session.name,
                                          sql='select * FROM users'))
        if partial_result_set is None:
            raise ValueError('streaming_result_set is None')
        start = time.time()
        # Use the next() builtin so this works on both Python 2 and 3.
        first_result = next(partial_result_set)
        latency = (time.time() - start) * 1000
        metrics['execute_streaming_sql_latency_ms'] = latency
        if first_result.values[0].string_value != _TEST_USERNAME:
            raise ValueError('incorrect streaming sql first result %s' %
                             first_result.values[0].string_value)
    finally:
        if session is not None:
            stub.DeleteSession(
                spanner_pb2.DeleteSessionRequest(name=session.name))
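# A minimal driver sketch for the prober above, not part of the original code.
# It assumes the generated spanner_pb2_grpc module is importable and that the
# channel carries suitable call credentials; the OAuth setup and the endpoint
# below are illustrative assumptions only.
import grpc


def _probe_spanner(target='spanner.googleapis.com:443'):
    # Build a TLS channel and a Spanner stub, run the probe, then print the
    # collected latency metrics.
    channel = grpc.secure_channel(target, grpc.ssl_channel_credentials())
    stub = spanner_pb2_grpc.SpannerStub(channel)
    metrics = {}
    try:
        _execute_sql(stub, metrics)
    finally:
        channel.close()
    for name, value in metrics.items():
        print('{}: {:.2f} ms'.format(name, value))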
def execute_streaming_sql():
    rendezvous = stub.ExecuteStreamingSql(
        spanner_pb2.ExecuteSqlRequest(
            session=session.name,
            sql='select * from {}'.format(_LARGE_TABLE)))
    futures.append(rendezvous)
def test_execute_sql():
    channel = _create_channel()
    stub = _create_stub(channel)
    session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))

    # Warm up.
    for _ in range(_NUM_WARM_UP_CALLS):
        stub.ExecuteSql(
            spanner_pb2.ExecuteSqlRequest(
                session=session.name,
                sql='select data from {}'.format(_TABLE)))

    def execute_sql(result):
        # session = stub.CreateSession(
        #     spanner_pb2.CreateSessionRequest(database=_DATABASE))
        for _ in range(_NUM_OF_RPC):
            start = timeit.default_timer()
            stub.ExecuteSql(
                spanner_pb2.ExecuteSqlRequest(
                    session=session.name,
                    sql='select data from {}'.format(_TABLE)))
            dur = timeit.default_timer() - start
            print('single call latency: {} ms'.format(dur * 1000))
            result.append(dur)
        # stub.DeleteSession(
        #     spanner_pb2.DeleteSessionRequest(name=session.name))

    print('Executing blocking unary-unary call.')
    _run_test(channel, execute_sql)
    stub.DeleteSession(spanner_pb2.DeleteSessionRequest(name=session.name))
def test_execute_sql_async(stub):
    session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))
    response_future = stub.ExecuteSql.future(
        spanner_pb2.ExecuteSqlRequest(session=session.name, sql=_TEST_SQL))
    response_future.result()
    stub.DeleteSession(spanner_pb2.DeleteSessionRequest(name=session.name))
def test_execute_sql_future(self):
    stub = spanner_pb2_grpc.SpannerStub(self.channel)
    session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))
    self.assertEqual(1, len(self.channel._channel_refs))
    self.assertEqual(1, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[0]._active_stream_ref)
    self.assertIsNotNone(session)

    rendezvous = stub.ExecuteSql.future(
        spanner_pb2.ExecuteSqlRequest(session=session.name, sql=_TEST_SQL))
    self.assertEqual(1, len(self.channel._channel_refs))
    self.assertEqual(1, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(1, self.channel._channel_refs[0]._active_stream_ref)

    result_set = rendezvous.result()
    self.assertEqual(1, len(self.channel._channel_refs))
    self.assertEqual(1, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[0]._active_stream_ref)
    self.assertIsNotNone(result_set)
    self.assertEqual(1, len(result_set.rows))
    self.assertEqual(_TEST_COLUMN_DATA,
                     result_set.rows[0].values[0].string_value)

    stub.DeleteSession(spanner_pb2.DeleteSessionRequest(name=session.name))
    self.assertEqual(1, len(self.channel._channel_refs))
    self.assertEqual(0, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[0]._active_stream_ref)
def test_execute_streaming_sql(stub):
    session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))
    rendezvous = stub.ExecuteStreamingSql(
        spanner_pb2.ExecuteSqlRequest(session=session.name, sql=_TEST_SQL))
    for _ in rendezvous:
        pass
    stub.DeleteSession(spanner_pb2.DeleteSessionRequest(name=session.name))
def execute_sql(result):
    # session = stub.CreateSession(
    #     spanner_pb2.CreateSessionRequest(database=_DATABASE))
    for _ in range(_NUM_OF_RPC):
        start = timeit.default_timer()
        stub.ExecuteSql(
            spanner_pb2.ExecuteSqlRequest(
                session=session.name,
                sql='select data from {}'.format(_TABLE)))
        dur = timeit.default_timer() - start
        print('single call latency: {} ms'.format(dur * 1000))
        result.append(dur)
def execute_sql_async(result):
    # session = stub.CreateSession(
    #     spanner_pb2.CreateSessionRequest(database=_DATABASE))
    for _ in range(_NUM_OF_RPC):
        start = timeit.default_timer()
        resp_future = stub.ExecuteSql.future(
            spanner_pb2.ExecuteSqlRequest(session=session.name,
                                          sql='select data from storage'),
            _TIMEOUT)

        def callback(resp, start_copy=start):
            # Use the same clock as the start timestamp; mixing time.time()
            # with timeit.default_timer() would yield meaningless durations.
            dur = timeit.default_timer() - start_copy
            result.append(dur)

        resp_future.add_done_callback(callback)
def execute_streaming_sql(result):
    for _ in range(_NUM_OF_RPC):
        start = timeit.default_timer()
        rendezvous = stub.ExecuteStreamingSql(
            spanner_pb2.ExecuteSqlRequest(
                session=session.name,
                sql='select data from {}'.format(_TABLE)))

        def callback(resp, start_copy=start):
            # Use the same clock as the start timestamp; mixing time.time()
            # with timeit.default_timer() would yield meaningless durations.
            dur = timeit.default_timer() - start_copy
            result.append(dur)

        rendezvous.add_done_callback(callback)
        # Drain the stream so the RPC completes and the callback fires.
        for _ in rendezvous:
            pass
def test_execute_streaming_sql():
    channel = _create_channel()
    stub = _create_stub(channel)
    # _prepare_test_data(stub)
    session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))

    # Warm up.
    print('Begin warm up calls.')
    for _ in range(_NUM_WARM_UP_CALLS):
        rendezvous = stub.ExecuteStreamingSql(
            spanner_pb2.ExecuteSqlRequest(
                session=session.name,
                sql='select data from {}'.format(_TABLE)))
        for _ in rendezvous:
            pass
    print('Warm up finished.')

    def execute_streaming_sql(result):
        for _ in range(_NUM_OF_RPC):
            start = timeit.default_timer()
            rendezvous = stub.ExecuteStreamingSql(
                spanner_pb2.ExecuteSqlRequest(
                    session=session.name,
                    sql='select data from {}'.format(_TABLE)))

            def callback(resp, start_copy=start):
                # Use the same clock as the start timestamp; mixing time.time()
                # with timeit.default_timer() would yield meaningless durations.
                dur = timeit.default_timer() - start_copy
                result.append(dur)

            rendezvous.add_done_callback(callback)
            # Drain the stream so the RPC completes and the callback fires.
            for _ in rendezvous:
                pass

    print('Executing unary-streaming call.')
    _run_test(channel, execute_streaming_sql)
    stub.DeleteSession(spanner_pb2.DeleteSessionRequest(name=session.name))
def test_execute_sql_async():
    channel = _create_channel()
    stub = _create_stub(channel)
    session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))

    # Warm up.
    for _ in range(_NUM_WARM_UP_CALLS):
        resp_future = stub.ExecuteSql.future(
            spanner_pb2.ExecuteSqlRequest(session=session.name,
                                          sql='select data from storage'))
        resp_future.result()

    def execute_sql_async(result):
        # session = stub.CreateSession(
        #     spanner_pb2.CreateSessionRequest(database=_DATABASE))
        for _ in range(_NUM_OF_RPC):
            start = timeit.default_timer()
            resp_future = stub.ExecuteSql.future(
                spanner_pb2.ExecuteSqlRequest(session=session.name,
                                              sql='select data from storage'),
                _TIMEOUT)

            def callback(resp, start_copy=start):
                # Use the same clock as the start timestamp; mixing time.time()
                # with timeit.default_timer() would yield meaningless durations.
                dur = timeit.default_timer() - start_copy
                result.append(dur)

            resp_future.add_done_callback(callback)
        # stub.DeleteSession(
        #     spanner_pb2.DeleteSessionRequest(name=session.name))

    print('Executing async unary-unary call.')
    _run_test(channel, execute_sql_async)
    stub.DeleteSession(spanner_pb2.DeleteSessionRequest(name=session.name))
def test_execute_sql(stub):
    session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))
    stub.ExecuteSql(
        spanner_pb2.ExecuteSqlRequest(session=session.name, sql=_TEST_SQL))
    stub.DeleteSession(spanner_pb2.DeleteSessionRequest(name=session.name))
def test_concurrent_streams_watermark(self):
    stub = spanner_pb2_grpc.SpannerStub(self.channel)
    watermark = 2
    self.channel._max_concurrent_streams_low_watermark = watermark
    self.assertEqual(self.channel._max_concurrent_streams_low_watermark,
                     watermark)
    session_list = []
    rendezvous_list = []

    # While active streams stay below the concurrent-streams watermark,
    # gRPC calls should reuse the same channel.
    for i in range(watermark):
        session = stub.CreateSession(
            spanner_pb2.CreateSessionRequest(database=_DATABASE))
        self.assertEqual(1, len(self.channel._channel_refs))
        self.assertEqual(i + 1, self.channel._channel_refs[0]._affinity_ref)
        self.assertEqual(i, self.channel._channel_refs[0]._active_stream_ref)
        self.assertIsNotNone(session)
        session_list.append(session)

        rendezvous = stub.ExecuteStreamingSql(
            spanner_pb2.ExecuteSqlRequest(session=session.name,
                                          sql=_TEST_SQL))
        self.assertEqual(1, len(self.channel._channel_refs))
        self.assertEqual(i + 1, self.channel._channel_refs[0]._affinity_ref)
        self.assertEqual(i + 1,
                         self.channel._channel_refs[0]._active_stream_ref)
        rendezvous_list.append(rendezvous)

    # Once active streams reach the concurrent-streams watermark,
    # the channel pool creates a new channel.
    another_session = stub.CreateSession(
        spanner_pb2.CreateSessionRequest(database=_DATABASE))
    self.assertEqual(2, len(self.channel._channel_refs))
    self.assertEqual(2, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(2, self.channel._channel_refs[0]._active_stream_ref)
    self.assertEqual(1, self.channel._channel_refs[1]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[1]._active_stream_ref)
    self.assertIsNotNone(another_session)
    session_list.append(another_session)

    another_rendezvous = stub.ExecuteStreamingSql(
        spanner_pb2.ExecuteSqlRequest(session=another_session.name,
                                      sql=_TEST_SQL))
    self.assertEqual(2, len(self.channel._channel_refs))
    self.assertEqual(2, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(2, self.channel._channel_refs[0]._active_stream_ref)
    self.assertEqual(1, self.channel._channel_refs[1]._affinity_ref)
    self.assertEqual(1, self.channel._channel_refs[1]._active_stream_ref)
    rendezvous_list.append(another_rendezvous)

    # Iterate through the rendezvous list to drain active streams.
    for rendezvous in rendezvous_list:
        for _ in rendezvous:
            continue

    # After draining, previously created channels remain in the pool.
    self.assertEqual(2, len(self.channel._channel_refs))
    self.assertEqual(2, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[0]._active_stream_ref)
    self.assertEqual(1, self.channel._channel_refs[1]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[1]._active_stream_ref)

    # Delete all sessions to clean up affinity.
    for session in session_list:
        stub.DeleteSession(
            spanner_pb2.DeleteSessionRequest(name=session.name))
    self.assertEqual(2, len(self.channel._channel_refs))
    self.assertEqual(0, self.channel._channel_refs[0]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[0]._active_stream_ref)
    self.assertEqual(0, self.channel._channel_refs[1]._affinity_ref)
    self.assertEqual(0, self.channel._channel_refs[1]._active_stream_ref)