def Enum(self, connection, nullable=False):
    """Verify support for Enum data type."""
    with Scenario("utf8", flags=TE, description="UTF-8 encoding"):
        key0 = b'\xe5\x8d\xb0'.decode('utf-8')
        key1 = b'\xe5\x88\xb7'.decode('utf-8')
        check_datatype(connection,
                       f"Enum('{key0}' = 1, '{key1}' = 2)",
                       [key0, key1],
                       expected={
                           "all": f"[('{key0}', ), ('{key1}', )]",
                           key0: f"[('{key0}', )]",
                           key1: f"[('{key1}', )]"
                       },
                       encoding="utf-8",
                       quote=True,
                       nullable=nullable)

    with Scenario("ascii", flags=TE, description="ASCII encoding"):
        check_datatype(connection,
                       "Enum('hello' = 1, 'world' = 2)",
                       ["hello", "world"],
                       expected={
                           "all": "[('hello', ), ('world', )]",
                           "hello": "[('hello', )]",
                           "world": "[('world', )]"
                       },
                       encoding="ascii",
                       quote=True,
                       nullable=nullable)
def funcvalues(self, nullable=False):
    """Check clickhouse-odbc driver support for parameterized
    queries with functions and values using pyodbc connector.
    """
    with Logs() as logs, PyODBCConnection(logs=logs) as connection:
        args = {"connection": connection}

        Scenario("isNull", run=isNull, args=args, flags=TE)
        Scenario("Null", run=Null, args=args, flags=TE)
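# The isNull and Null scenario bodies are defined elsewhere in this module.
# For illustration only, a parameterized isNull check over raw pyodbc could
# look like the sketch below (assumed, not the module's actual scenario code;
# the DSN name mirrors the suite default).
import pyodbc

def isNull_sketch(dsn="ClickHouse DSN (ANSI)"):
    connection = pyodbc.connect(f"DSN={dsn}")
    cursor = connection.cursor()
    # '?' is the ODBC parameter placeholder; pyodbc binds the Python None
    # as SQL NULL, so isNull() is expected to return 1.
    cursor.execute("SELECT isNull(?)", [None])
    row = cursor.fetchone()
    assert row[0] == 1, f"expected isNull(NULL) = 1, got {row[0]}"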
def parameterized(self):
    """Test suite for clickhouse-odbc support of parameterized queries.
    """
    dsn = os.getenv("DSN", "ClickHouse DSN (ANSI)")

    with Feature(f"{dsn}", flags=TE):
        Scenario(run=load("parameterized.sanity", test="sanity"), flags=TE)
        Feature(run=load("parameterized.datatypes", test="datatypes"), flags=TE)
        Feature(run=load("parameterized.datatypes", test="nullable"), flags=TE)
        Feature(run=load("parameterized.funcvalues", test="funcvalues"), flags=TE)
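# load() resolves a test by dotted module path and attribute name. A minimal
# sketch of what such a helper could look like (an assumption; the framework
# may implement the lookup differently):
import importlib

def load_sketch(module_name, test):
    # Import e.g. "parameterized.datatypes" and return its "datatypes" test.
    module = importlib.import_module(module_name)
    return getattr(module, test)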
def FixedString(self, connection, nullable=False):
    """Verify support for FixedString data type."""
    with Scenario("utf8", flags=TE, description="UTF-8 encoding"):
        values = [
            "", "hello",
            (b'\xe5\x8d\xb0\xe5\x88\xb7\xe5\x8e\x82\xe6\x8b\xbf\xe8\xb5\xb7'
             ).decode("utf-8")
        ]
        expected = {
            "all":
                f"[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', ), "
                f"('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', ), ('{values[2]}\\x00', )]",
            values[0]:
                "[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
            values[1]:
                "[('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
            values[2]: f"[('{values[2]}\\x00', )]"
        }
        check_datatype(connection,
                       "FixedString(16)",
                       values=values,
                       expected=expected,
                       encoding="utf-8",
                       quote=True,
                       nullable=nullable)

    with Scenario("ascii", flags=TE, description="ASCII encoding."):
        values = ["", "hello", "ABCDEFGHIJKLMN"]
        expected = {
            "all":
                "[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', ), "
                "('ABCDEFGHIJKLMN\\x00\\x00', ), ('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
            values[0]:
                "[('\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
            values[1]:
                "[('hello\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00', )]",
            values[2]: "[('ABCDEFGHIJKLMN\\x00\\x00', )]"
        }
        check_datatype(connection,
                       "FixedString(16)",
                       values=values,
                       expected=expected,
                       encoding="ascii",
                       quote=True,
                       nullable=nullable)
    with Then('insert data x3'):
        clickhouse.query(
            chi['metadata']['name'],
            'INSERT INTO default.zk_repl SELECT number*3 FROM numbers(1000)',
            pod="chi-test-cluster-for-zk-default-0-0-0")
        assert clickhouse.query(
            chi['metadata']['name'],
            'SELECT count() FROM default.zk_repl',
            pod="chi-test-cluster-for-zk-default-0-1-0"
        ) == '3000', "Invalid rows after 3x1000 inserts"

    clickhouse.drop_table_on_cluster(chi, 'all-sharded', 'default.zk_repl')


if main():
    with Module("main"):
        clickhouse_operator_spec, chi = util.install_clickhouse_and_zookeeper(
            chi_file='configs/test-cluster-for-zookeeper.yaml',
            chi_template_file='templates/tpl-clickhouse-latest.yaml',
            chi_name='test-cluster-for-zk',
        )
        util.wait_clickhouse_cluster_ready(chi)

        all_tests = [test_zookeeper_rescale]
        for t in all_tests:
            if callable(t):
                Scenario(test=t)()
            else:
                Scenario(test=t[0], args=t[1])()
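# clickhouse.query() above is a repo helper that runs SQL inside the named
# pod. A minimal sketch of such a helper, assuming it shells out through
# `kubectl exec` to clickhouse-client (the real implementation lives in the
# repo's clickhouse module and may differ):
import subprocess

def query_sketch(chi_name, sql, pod, namespace="test"):
    # chi_name is kept for signature parity; the real helper may use it to
    # derive the namespace or connection settings.
    cmd = [
        "kubectl", "exec", "-n", namespace, pod, "--",
        "clickhouse-client", "--query", sql,
    ]
    # Return trimmed stdout, e.g. '3000' for the count() query above.
    return subprocess.check_output(cmd).decode().strip()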
"pod_volumes": { "/var/lib/clickhouse", "/var/log/clickhouse-server", }, }) @TestScenario @Name("Persistent volume mapping via podTemplate") def test_examples02_2(): kubectl.create_and_check( config="../docs/chi-examples/03-persistent-volume-02-pod-template.yaml", check={ "pod_count": 1, "pod_image": "yandex/clickhouse-server:19.3.7", "pod_volumes": { "/var/lib/clickhouse", "/var/log/clickhouse-server", }, }) if main(): with Module("examples"): examples = [ test_examples01_1, test_examples01_2, test_examples02_1, test_examples02_2 ] for t in examples: Scenario(test=t)
"statefulset": 2, "pod": 2, "service": 3, }, "do_not_delete": True, }) with And("Check not empty /metrics"): check_monitoring_metrics(operator_namespace, operator_pod, expect_result={ '# HELP chi_clickhouse_metric_VersionInteger': True, '# TYPE chi_clickhouse_metric_VersionInteger gauge': True, 'chi_clickhouse_metric_VersionInteger{chi="test-017-multi-version",hostname="chi-test-017-multi-version-default-0-0': True, 'chi_clickhouse_metric_VersionInteger{chi="test-017-multi-version",hostname="chi-test-017-multi-version-default-1-0': True, }) with Then("check empty /metrics after delete namespace"): kubectl.delete_ns(kubectl.namespace) check_monitoring_metrics(operator_namespace, operator_pod, expect_result={ 'chi_clickhouse_metric_VersionInteger': False, }) if main(): with Module("metrics_exporter", flags=TE): test_cases = [ test_metrics_exporter_setup, test_metrics_exporter_reboot, test_metrics_exporter_with_multiple_clickhouse_version, ] for t in test_cases: Scenario(test=t, flags=TE)()
def datatypes(self, nullable=False):
    """Check clickhouse-odbc driver support for parameterized
    queries with various data types using pyodbc connector.
    """
    with Logs() as logs, PyODBCConnection(logs=logs) as connection:
        args = {"connection": connection, "nullable": nullable}

        Scenario("Sanity check", run=sanity_check, args={"connection": connection})
        Scenario("Int8", run=Int8, args=args, flags=TE)
        Scenario("Int16", run=Int16, args=args, flags=TE)
        Scenario("Int32", run=Int32, args=args, flags=TE)
        Scenario("Int64", run=Int64, args=args, flags=TE)
        Scenario("UInt8", run=UInt8, args=args, flags=TE)
        Scenario("UInt16", run=UInt16, args=args, flags=TE)
        Scenario("UInt32", run=UInt32, args=args, flags=TE)
        Scenario("UInt64", run=UInt64, args=args, flags=TE)
        Scenario("Float32", run=Float32, args=args, flags=TE)
        Scenario("Float64", run=Float64, args=args, flags=TE)
        Scenario("Decimal32", run=Decimal32, args=args, flags=TE)
        Scenario("Decimal64", run=Decimal64, args=args, flags=TE)
        Scenario("Decimal128", run=Decimal128, args=args, flags=TE)
        Scenario("String", run=String, args=args, flags=TE)
        Scenario("FixedString", run=FixedString, args=args, flags=TE)
        Scenario("Date", run=Date, args=args, flags=TE)
        Scenario("DateTime", run=DateTime, args=args, flags=TE)
        Scenario("Enum", run=Enum, args=args, flags=TE)
        Scenario("UUID", run=UUID, args=args, flags=TE)
        Scenario("IPv4", run=IPv4, args=args, flags=TE)
        Scenario("IPv6", run=IPv6, args=args, flags=TE)
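# Each datatype scenario above wraps check_datatype() with type-specific
# values. Patterned on the Enum/FixedString/String scenarios in this module,
# an Int8 scenario could plausibly look like the sketch below (the values and
# expected strings are illustrative assumptions, not the real test data):
def Int8_sketch(self, connection, nullable=False):
    """Verify support for Int8 data type (sketch)."""
    values = [-128, 0, 127]
    expected = {
        "all": "[(-128, ), (0, ), (127, )]",
        values[0]: "[(-128, )]",
        values[1]: "[(0, )]",
        values[2]: "[(127, )]",
    }
    check_datatype(connection, "Int8", values=values, expected=expected,
                   nullable=nullable)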
def String(self, connection, nullable=False):
    """Verify support for String data type."""
    with Scenario("empty",
                  description="Check empty string.",
                  flags=TE,
                  requirements=[
                      RQ_SRS_003_ParameterizedQueries_DataType_Select_String_Empty("1.0")
                  ]):
        with Scenario("utf-8", flags=TE, description="UTF-8 encoding"):
            values = ["", b''.decode("utf-8")]
            expected = {
                "all": f"[('{values[0]}', ), ('{values[1]}', )]",
                values[0]: f"[('{values[0]}', ), ('{values[1]}', )]",
                values[1]: f"[('{values[0]}', ), ('{values[1]}', )]"
            }
            check_datatype(connection,
                           "String",
                           values=values,
                           expected=expected,
                           encoding="utf-8",
                           quote=True,
                           nullable=nullable)

        with Scenario("ascii", flags=TE, description="ASCII encoding."):
            values = ["", b''.decode("ascii")]
            expected = {
                "all": f"[('{values[0]}', ), ('{values[1]}', )]",
                values[0]: f"[('{values[0]}', ), ('{values[1]}', )]",
                values[1]: f"[('{values[0]}', ), ('{values[1]}', )]"
            }
            check_datatype(connection,
                           "String",
                           values=values,
                           expected=expected,
                           encoding="ascii",
                           quote=True,
                           nullable=nullable)

    with Scenario("utf8",
                  flags=TE,
                  requirements=[
                      RQ_SRS_003_ParameterizedQueries_DataType_Select_String_UTF8("1.0")
                  ],
                  description="Check UTF-8 encoding."):
        values = [
            "hello",
            (b'\xe5\x8d\xb0\xe5\x88\xb7\xe5\x8e\x82\xe6\x8b\xbf\xe8\xb5\xb7'
             b'\xe4\xb8\x80\xe4\xb8\xaa\xe6\xa0\xb7\xe6\x9d\xbf\xe9\x97\xb4'
             b'\xef\xbc\x8c\xe7\x84\xb6\xe5\x90\x8e\xe5\xb0\x86\xe5\x85\xb6'
             b'\xe6\x89\x93\xe6\x8b\xbc\xe6\x88\x90\xe6\xa0\xb7\xe6\x9c\xac'
             b'\xe3\x80\x82 \xe5\xae\x83\xe4\xb8\x8d\xe4\xbb\x85\xe7\x94\x9f'
             b'\xe5\xad\x98\xe4\xba\x86\xe4\xba\x94\xe4\xb8\xaa\xe4\xb8\x96'
             b'\xe7\xba\xaa\xef\xbc\x8c\xe8\x80\x8c\xe4\xb8\x94\xe5\x9c\xa8'
             b'\xe7\x94\xb5\xe5\xad\x90\xe6\x8e\x92\xe7\x89\x88\xe6\x96\xb9'
             b'\xe9\x9d\xa2\xe4\xb9\x9f\xe5\x8f\x96\xe5\xbe\x97\xe4\xba\x86'
             b'\xe9\xa3\x9e\xe8\xb7\x83\xef\xbc\x8c\xe4\xbd\x86\xe5\x9f\xba'
             b'\xe6\x9c\xac\xe4\xb8\x8a\xe6\xb2\xa1\xe6\x9c\x89\xe6\x94\xb9'
             b'\xe5\x8f\x98\xe3\x80\x82 \xe5\xae\x83\xe5\x9c\xa81960\xe5\xb9'
             b'\xb4\xe4\xbb\xa3\xe9\x9a\x8f\xe7\x9d\x80Letraset\xe5\xba\x8a'
             b'\xe5\x8d\x95\xe7\x9a\x84\xe5\x8f\x91\xe5\xb8\x83\xe8\x80\x8c'
             b'\xe6\x99\xae\xe5\x8f\x8a\xef\xbc\x8c\xe5\x85\xb6\xe4\xb8\xad'
             b'\xe5\x8c\x85\xe5\x90\xabLerem Ipsum\xe6\xae\xb5\xe8\x90\xbd'
             b'\xe7\xad\x89').decode("utf-8")
        ]
        expected = {
            "all": f"[('{values[0]}', ), ('{values[1]}', )]",
            values[0]: f"[('{values[0]}', )]",
            values[1]: f"[('{values[1]}', )]"
        }
        check_datatype(connection,
                       "String",
                       values=values,
                       expected=expected,
                       encoding="utf-8",
                       quote=True,
                       nullable=nullable)

    with Scenario("ascii",
                  flags=TE,
                  requirements=[
                      RQ_SRS_003_ParameterizedQueries_DataType_Select_String_ASCII("1.0")
                  ],
                  description="Check ASCII encoding."):
        values = [
            "hello",
            r' !"#$%%&()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[]^_`abcdefghijklmnopqrstuvwxyz{|}~'
        ]
        expected = {
            "all": f"[('{values[1]}', ), ('{values[0]}', )]",
            values[0]: f"[('{values[0]}', )]",
            values[1]: f"[('{values[1]}', )]"
        }
        check_datatype(connection,
                       "String",
                       values=values,
                       expected=expected,
                       encoding="ascii",
                       quote=True,
                       nullable=nullable)

    with Scenario("binary",
                  flags=TE,
                  requirements=[
                      RQ_SRS_003_ParameterizedQueries_DataType_Select_String_Binary("1.0")
                  ],
                  description="Check binary data."):
        values = ["\x00\x01\x02\x03\x00\x00\xFF"]
        expected = {
            "all": f"[('{values[0]}', )]",
            values[0]: f"[('{values[0]}', )]",
        }
        check_datatype(connection,
                       "String",
                       values=values,
                       expected=expected,
                       encoding="ascii",
                       quote=False,
                       nullable=nullable)