def send_query_info():
    """Stream a slow SELECT, then cancel it after a short delay."""
    slow_query = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT number, sleep(0.2) FROM numbers(10) SETTINGS max_block_size=2"
    )
    yield slow_query
    # Give the server time to start producing blocks before cancelling.
    time.sleep(0.5)
    yield clickhouse_grpc_pb2.QueryInfo(cancel=True)
def send_query_info():
    """Stream an INSERT in two data chunks, then send a cancel message."""
    # First message carries the query itself plus the initial chunk.
    yield clickhouse_grpc_pb2.QueryInfo(
        query="INSERT INTO t FORMAT TabSeparated",
        input_data=b"1\n2\n3\n",
        next_query_info=True,
    )
    # Follow-up chunk; next_query_info signals that more messages follow.
    yield clickhouse_grpc_pb2.QueryInfo(input_data=b"4\n5\n6\n", next_query_info=True)
    yield clickhouse_grpc_pb2.QueryInfo(cancel=True)
def send_query_info():
    """Stream a compressed INSERT: the query with the first chunk, then two more chunks."""
    # d1..d3 are data chunks captured from the enclosing scope; the first
    # message declares them lz4-compressed via compression_type.
    yield clickhouse_grpc_pb2.QueryInfo(
        query="INSERT INTO t VALUES",
        input_data=d1,
        compression_type="lz4",
        next_query_info=True,
    )
    yield clickhouse_grpc_pb2.QueryInfo(input_data=d2, next_query_info=True)
    # Final chunk: no next_query_info, so the stream of inputs ends here.
    yield clickhouse_grpc_pb2.QueryInfo(input_data=d3)
def test_compressed_totals_and_extremes():
    """Check that totals and extremes fields come back lz4-compressed."""
    query("CREATE TABLE t (x UInt8, y UInt8) ENGINE = Memory")
    query("INSERT INTO t VALUES (1, 2), (2, 4), (3, 2), (3, 3), (3, 4)")
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)

    # WITH TOTALS row arrives in result.totals, compressed with lz4.
    totals_info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT sum(x), y FROM t GROUP BY y WITH TOTALS",
        output_compression_type="lz4",
    )
    totals_result = stub.ExecuteQuery(totals_info)
    assert lz4.frame.decompress(totals_result.totals) == b"12\t0\n"

    # With the "extremes" setting, min/max rows arrive in result.extremes.
    extremes_info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT x, y FROM t",
        settings={"extremes": "1"},
        output_compression_type="lz4",
    )
    extremes_result = stub.ExecuteQuery(extremes_info)
    assert lz4.frame.decompress(extremes_result.extremes) == b"1\t2\n3\t4\n"
def send_query_info():
    """Stream a SELECT over an external table whose data arrives in two parts."""
    # First message: the query plus the external table's schema and initial rows.
    first_part = clickhouse_grpc_pb2.ExternalTable(
        name='exts', columns=columns, data=b'1\tAlex\n2\tBen\n3\tCarl\n'
    )
    yield clickhouse_grpc_pb2.QueryInfo(
        query="SELECT * FROM exts ORDER BY UserID",
        external_tables=[first_part],
        next_query_info=True,
    )
    # Second message appends more rows to the same external table (no columns
    # needed — the schema was sent in the first message).
    second_part = clickhouse_grpc_pb2.ExternalTable(
        name='exts', data=b'4\tDaniel\n5\tEthan\n'
    )
    yield clickhouse_grpc_pb2.QueryInfo(external_tables=[second_part])
def test_compressed_external_table():
    """Query two external tables compressed with different codecs and check the union."""
    # NOTE(review): another definition with this exact test name appears later
    # in the file; at import time the later one shadows this — confirm intent.
    columns = [
        clickhouse_grpc_pb2.NameAndType(name="UserID", type="UInt64"),
        clickhouse_grpc_pb2.NameAndType(name="UserName", type="String"),
    ]
    lz4_payload = lz4.frame.compress(b"1\tAlex\n2\tBen\n3\tCarl\n")
    gzip_payload = gzip.compress(b"4,Daniel\n5,Ethan\n")
    ext1 = clickhouse_grpc_pb2.ExternalTable(
        name="ext1",
        columns=columns,
        data=lz4_payload,
        format="TabSeparated",
        compression_type="lz4",
    )
    ext2 = clickhouse_grpc_pb2.ExternalTable(
        name="ext2",
        columns=columns,
        data=gzip_payload,
        format="CSV",
        compression_type="gzip",
    )
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    query_info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT * FROM (SELECT * FROM ext1 UNION ALL SELECT * FROM ext2) ORDER BY UserID",
        external_tables=[ext1, ext2],
    )
    result = stub.ExecuteQuery(query_info)
    expected = b"1\tAlex\n2\tBen\n3\tCarl\n4\tDaniel\n5\tEthan\n"
    assert result.output == expected
def query(query_text, channel):
    """Execute *query_text* over *channel*; raise on a server-side exception,
    otherwise return the decoded query output."""
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(channel)
    result = stub.ExecuteQuery(clickhouse_grpc_pb2.QueryInfo(query=query_text))
    # Surface server-reported errors as Python exceptions.
    if result and result.HasField("exception"):
        raise Exception(result.exception.display_text)
    return result.output.decode(DEFAULT_ENCODING)
def send_query_info():
    """Yield the initial QueryInfo, then stream the remaining input chunks.

    Reads and consumes the enclosing scope's ``input_data`` list; string
    chunks are encoded with DEFAULT_ENCODING before being sent.
    """
    yield query_info()
    while input_data:
        chunk = input_data.pop(0)
        # isinstance (rather than `type(...) is str`) also accepts str
        # subclasses, matching normal Python type-check idiom.
        if isinstance(chunk, str):
            chunk = chunk.encode(DEFAULT_ENCODING)
        # next_query_info stays True while more chunks remain to be sent.
        yield clickhouse_grpc_pb2.QueryInfo(
            input_data=chunk, next_query_info=bool(input_data)
        )
def test_result_compression():
    """Run a large SELECT with gRPC result compression enabled."""
    compression = clickhouse_grpc_pb2.Compression(
        algorithm=clickhouse_grpc_pb2.CompressionAlgorithm.GZIP,
        level=clickhouse_grpc_pb2.CompressionLevel.COMPRESSION_HIGH,
    )
    query_info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT 0 FROM numbers(1000000)",
        result_compression=compression,
    )
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    result = stub.ExecuteQuery(query_info)
    # The client sees plain output — no manual decompression is needed here.
    assert result.output == b"0\n" * 1000000
def test_compressed_external_table():
    """Feed two external tables compressed with different codecs and verify the union."""
    columns = [
        clickhouse_grpc_pb2.NameAndType(name='UserID', type='UInt64'),
        clickhouse_grpc_pb2.NameAndType(name='UserName', type='String'),
    ]
    tab_separated = lz4.frame.compress(b'1\tAlex\n2\tBen\n3\tCarl\n')
    csv_rows = gzip.compress(b'4,Daniel\n5,Ethan\n')
    tables = [
        clickhouse_grpc_pb2.ExternalTable(
            name='ext1',
            columns=columns,
            data=tab_separated,
            format='TabSeparated',
            compression_type="lz4",
        ),
        clickhouse_grpc_pb2.ExternalTable(
            name='ext2',
            columns=columns,
            data=csv_rows,
            format='CSV',
            compression_type="gzip",
        ),
    ]
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    query_info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT * FROM (SELECT * FROM ext1 UNION ALL SELECT * FROM ext2) ORDER BY UserID",
        external_tables=tables,
    )
    result = stub.ExecuteQuery(query_info)
    # Rows from both tables, merged and ordered by UserID.
    assert result.output == (
        b"1\tAlex\n" b"2\tBen\n" b"3\tCarl\n" b"4\tDaniel\n" b"5\tEthan\n"
    )
def query_info():
    """Build the initial QueryInfo message, consuming the first input chunk (if any).

    Pops from the enclosing scope's ``input_data`` list; the remaining
    parameters (query_text, settings, ...) are also read from that scope.
    """
    first_chunk = input_data.pop(0) if input_data else b''
    # isinstance (rather than `type(...) is str`) also accepts str
    # subclasses, matching normal Python type-check idiom.
    if isinstance(first_chunk, str):
        first_chunk = first_chunk.encode(DEFAULT_ENCODING)
    return clickhouse_grpc_pb2.QueryInfo(
        query=query_text,
        settings=settings,
        input_data=first_chunk,
        input_data_delimiter=input_data_delimiter,
        output_format=output_format,
        external_tables=external_tables,
        user_name=user_name,
        password=password,
        query_id=query_id,
        session_id=session_id,
        # Signal that more QueryInfo messages follow while chunks remain.
        next_query_info=bool(input_data),
    )
def test_compressed_output_gzip():
    """Request gzip-compressed output at level 6 and decompress it client-side."""
    info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT 0 FROM numbers(1000)",
        compression_type="gzip",
        compression_level=6,
    )
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    result = stub.ExecuteQuery(info)
    assert gzip.decompress(result.output) == b"0\n" * 1000
def test_transport_compression():
    """Run a large SELECT with transport-level gzip compression enabled."""
    info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT 0 FROM numbers(1000000)",
        transport_compression_type='gzip',
        transport_compression_level=3,
    )
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    result = stub.ExecuteQuery(info)
    # The client sees plain output — no manual decompression is needed here.
    assert result.output == b"0\n" * 1000000
def test_compressed_output_streaming():
    """Decompress a streamed lz4 output chunk by chunk with a single context."""
    info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT 0 FROM numbers(100000)", output_compression_type="lz4"
    )
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    # One decompression context across all chunks, so frames that span
    # message boundaries decode correctly.
    ctx = lz4.frame.create_decompression_context()
    chunks = []
    for result in stub.ExecuteQueryWithStreamOutput(info):
        decoded, _, _ = lz4.frame.decompress_chunk(ctx, result.output)
        chunks.append(decoded)
    assert b"".join(chunks) == b"0\n" * 100000
def test_opentelemetry_context_propagation():
    """Send W3C traceparent/tracestate via gRPC metadata and verify the span log."""
    trace_id = "80c190b5-9dc1-4eae-82b9-6c261438c817"
    parent_span_id = 123
    trace_state = "some custom state"

    # Build the W3C traceparent header: version-traceid-spanid-flags.
    trace_id_hex = trace_id.replace("-", "")
    parent_span_id_hex = f'{parent_span_id:0>16X}'
    metadata = [
        ("traceparent", f"00-{trace_id_hex}-{parent_span_id_hex}-01"),
        ("tracestate", trace_state),
    ]

    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    result = stub.ExecuteQuery(
        clickhouse_grpc_pb2.QueryInfo(query="SELECT 1"), metadata=metadata
    )
    assert result.output == b"1\n"

    # Flush so the span log rows are visible before we query them.
    node.query("SYSTEM FLUSH LOGS")
    logged = node.query(
        f"SELECT attribute['db.statement'], attribute['clickhouse.tracestate'] FROM system.opentelemetry_span_log "
        f"WHERE trace_id='{trace_id}' AND parent_span_id={parent_span_id}"
    )
    assert logged == "SELECT 1\tsome custom state\n"
def send_query_info():
    """Yield the initial QueryInfo, then stream the remaining raw input chunks.

    Consumes the enclosing scope's ``input_data`` list as-is (no encoding).
    """
    yield query_info()
    while input_data:
        chunk = input_data.pop(0)
        # next_query_info is True until the last chunk has been sent.
        yield clickhouse_grpc_pb2.QueryInfo(
            input_data=chunk, next_query_info=bool(input_data)
        )
def test_compressed_output():
    """Request lz4-compressed output and verify it decompresses to the expected rows."""
    info = clickhouse_grpc_pb2.QueryInfo(
        query="SELECT 0 FROM numbers(1000)", output_compression_type="lz4"
    )
    stub = clickhouse_grpc_pb2_grpc.ClickHouseStub(main_channel)
    result = stub.ExecuteQuery(info)
    assert lz4.frame.decompress(result.output) == b"0\n" * 1000