def regression(self, local, clickhouse_binary_path, parallel=None, stress=None):
    """ClickHouse LDAP integration regression module.
    """
    args = {"local": local, "clickhouse_binary_path": clickhouse_binary_path}

    if stress is not None:
        self.context.stress = stress

    with Pool(3) as pool:
        try:
            Feature(test=load("ldap.authentication.regression", "regression"),
                parallel=True, executor=pool)(**args)
            Feature(test=load("ldap.external_user_directory.regression", "regression"),
                parallel=True, executor=pool)(**args)
            Feature(test=load("ldap.role_mapping.regression", "regression"),
                parallel=True, executor=pool)(**args)
        finally:
            join()

def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
    """ClickHouse regression.
    """
    top().terminating = False
    args = {
        "local": local,
        "clickhouse_binary_path": clickhouse_binary_path,
        "stress": stress,
        "parallel": parallel
    }

    self.context.stress = stress
    self.context.parallel = parallel

    tasks = []
    with Pool(8) as pool:
        try:
            run_scenario(pool, tasks, Feature(test=load("example.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("map_type.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("window_functions.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("datetime64_extended_range.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("kerberos.regression", "regression")), args)
            #run_scenario(pool, tasks, Feature(test=load("extended_precision_data_types.regression", "regression")), args)
        finally:
            join(tasks)

def regression(self, local, clickhouse_binary_path, parallel=None, stress=None):
    """ClickHouse LDAP integration regression module.
    """
    top().terminating = False
    args = {"local": local, "clickhouse_binary_path": clickhouse_binary_path}

    if stress is not None:
        self.context.stress = stress
    if parallel is not None:
        self.context.parallel = parallel

    tasks = []
    with Pool(3) as pool:
        try:
            run_scenario(pool, tasks, Feature(test=load("ldap.authentication.regression", "regression")), args)
            run_scenario(pool, tasks, Feature(test=load("ldap.external_user_directory.regression", "regression")), args)
            run_scenario(pool, tasks, Feature(test=load("ldap.role_mapping.regression", "regression")), args)
        finally:
            join(tasks)

def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
    """ClickHouse AES encryption functions regression module.
    """
    top().terminating = False
    nodes = {
        "clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
    }

    if stress is not None:
        self.context.stress = stress
    if parallel is not None:
        self.context.parallel = parallel

    with Cluster(local, clickhouse_binary_path, nodes=nodes,
            docker_compose_project_dir=os.path.join(current_dir(), "aes_encryption_env")) as cluster:
        self.context.cluster = cluster

        tasks = []
        with Pool(5) as pool:
            try:
                run_scenario(pool, tasks, Feature(test=load("aes_encryption.tests.encrypt", "feature"), flags=TE))
                run_scenario(pool, tasks, Feature(test=load("aes_encryption.tests.decrypt", "feature"), flags=TE))
                run_scenario(pool, tasks, Feature(test=load("aes_encryption.tests.encrypt_mysql", "feature"), flags=TE))
                run_scenario(pool, tasks, Feature(test=load("aes_encryption.tests.decrypt_mysql", "feature"), flags=TE))
                run_scenario(pool, tasks, Feature(test=load("aes_encryption.tests.compatibility.feature", "feature"), flags=TE))
            finally:
                join(tasks)

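# ------------------------------------------------------------------------------------
# A minimal sketch, assuming concurrent.futures-style Pool semantics, of the
# run_scenario()/join(tasks) helper pair that the regression modules above call.
# This is NOT the project's actual helper (the real one lives in the test suite's
# shared helpers module); it only illustrates the pattern: run the scenario inline
# when parallel execution is disabled, otherwise submit it to the pool and collect
# the returned future in `tasks` so that join(tasks) can wait on all of them.
# ------------------------------------------------------------------------------------
from testflows.core import current, top


def run_scenario(pool, tasks, scenario, kwargs=None):
    """Run `scenario` either inline or in the given pool (sketch)."""
    if kwargs is None:
        kwargs = {}

    def _scenario_wrapper(**kwargs):
        # skip scenarios that are still queued once the top-level test is terminating
        if top().terminating:
            return
        return scenario(**kwargs)

    if current().context.parallel:
        tasks.append(pool.submit(_scenario_wrapper, **kwargs))
    else:
        scenario(**kwargs)


def join(tasks, timeout=None):
    """Wait for all submitted scenarios to complete, re-raising any failures (sketch)."""
    for task in tasks:
        task.result(timeout=timeout)
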
def parallel_login(self, server=None, user_count=10, timeout=300):
    """Check that parallel login of valid and invalid users works for local users
    defined using RBAC and for LDAP users authenticated using multiple LDAP external
    user directories while the server is restarted in the middle of the parallel
    login attempts. After the server is restarted, make sure that parallel logins
    work as expected.
    """
    servers = {
        "openldap1": {
            "host": "openldap1",
            "port": "389",
            "enable_tls": "no",
            "auth_dn_prefix": "cn=",
            "auth_dn_suffix": ",ou=users,dc=company,dc=com"
        },
        "openldap2": {
            "host": "openldap2",
            "port": "636",
            "enable_tls": "yes",
            "auth_dn_prefix": "cn=",
            "auth_dn_suffix": ",ou=users,dc=company,dc=com",
            "tls_require_cert": "never",
        }
    }

    with Given("I have two LDAP servers"):
        entries = [
            (["openldap1"], []),
            (["openldap2"], [])
        ]

    with And("I define a group of users to be created on each LDAP server"):
        user_groups = {
            "openldap1_users": [
                {"cn": f"openldap1_parallel_user{i}", "userpassword": randomword(20)}
                for i in range(user_count)
            ],
            "openldap2_users": [
                {"cn": f"openldap2_parallel_user{i}", "userpassword": randomword(20)}
                for i in range(user_count)
            ],
            "local_users": [
                {"cn": f"local_parallel_user{i}", "userpassword": randomword(20)}
                for i in range(user_count)
            ]
        }

    @TestStep(When)
    @Name("I login as {username} and execute query")
    def login_and_execute_query_during_restart(self, username, password, exitcode, message, steps=True, timeout=60):
        """Execute a query and ignore the exit code and message, because during a
        restart they vary with the state of the restarted container and the
        ClickHouse server. There are too many cases and the complete list is not
        fully known, so trying to enumerate all possible cases produces random failures.
        """
        r = self.context.cluster.command(None,
            f"{self.context.cluster.docker_compose} exec {self.context.node.name} " +
            f"clickhouse client -q \"SELECT 1\" --user {username} --password {password}",
            steps=steps, timeout=timeout)

        return r

    @TestStep(When)
    @Name("I login as {username} and execute query")
    def login_and_execute_query(self, username, password, exitcode=None, message=None, steps=True, timeout=60):
        self.context.node.query("SELECT 1",
            settings=[("user", username), ("password", password)],
            exitcode=exitcode or 0, message=message,
            steps=steps, timeout=timeout)

    def login_with_valid_username_and_password(users, i, iterations=10, during_restart=False):
        """Login with valid username and password.
        """
        query = login_and_execute_query
        if during_restart:
            query = login_and_execute_query_during_restart

        with When(f"valid users try to login #{i}"):
            for i in range(iterations):
                random_user = users[random.randint(0, len(users) - 1)]
                query(username=random_user["cn"], password=random_user["userpassword"],
                    exitcode=0, message="1", steps=False)

    def login_with_valid_username_and_invalid_password(users, i, iterations=10, during_restart=False):
        """Login with valid username and invalid password.
        """
        query = login_and_execute_query
        if during_restart:
            query = login_and_execute_query_during_restart

        with When(f"users try to login with valid username and invalid password #{i}"):
            for i in range(iterations):
                random_user = users[random.randint(0, len(users) - 1)]
                query(username=random_user["cn"],
                    password=(random_user["userpassword"] + randomword(1)),
                    exitcode=4,
                    message=f"DB::Exception: {random_user['cn']}: Authentication failed: password is incorrect or there is no user with such name",
                    steps=False)

    def login_with_invalid_username_and_valid_password(users, i, iterations=10, during_restart=False):
        """Login with invalid username and valid password.
        """
        query = login_and_execute_query
        if during_restart:
            query = login_and_execute_query_during_restart

        with When(f"users try to login with invalid username and valid password #{i}"):
            for i in range(iterations):
                random_user = dict(users[random.randint(0, len(users) - 1)])
                random_user["cn"] += randomword(1)
                query(username=random_user["cn"],
                    password=random_user["userpassword"],
                    exitcode=4,
                    message=f"DB::Exception: {random_user['cn']}: Authentication failed: password is incorrect or there is no user with such name",
                    steps=False)

    with And("I have a list of checks that I want to run for each user group"):
        checks = [
            login_with_valid_username_and_password,
            login_with_valid_username_and_invalid_password,
            login_with_invalid_username_and_valid_password
        ]

    with And("I create config file to define LDAP external user directory for each LDAP server"):
        config = create_entries_ldap_external_user_directory_config_content(entries)

    with ldap_servers(servers):
        with ldap_external_user_directory(server=None, roles=None, restart=True, config=config):
            with ldap_users(*user_groups["openldap1_users"], node=self.context.cluster.node("openldap1")):
                with ldap_users(*user_groups["openldap2_users"], node=self.context.cluster.node("openldap2")):
                    with rbac_users(*user_groups["local_users"]):
                        tasks = []
                        with Pool(4) as pool:
                            try:
                                with When("I restart the server during parallel login of users in each group"):
                                    for users in user_groups.values():
                                        for check in checks:
                                            # check arguments are passed positionally to pool.submit
                                            tasks.append(pool.submit(check, users, 0, 25, True))
                                    tasks.append(pool.submit(restart))
                            finally:
                                with Then("logins during restart should work"):
                                    for task in tasks:
                                        task.result(timeout=timeout)

                        tasks = []
                        with Pool(4) as pool:
                            try:
                                with When("I perform parallel login of users in each group after restart"):
                                    for users in user_groups.values():
                                        for check in checks:
                                            tasks.append(pool.submit(check, users, 0, 10, False))
                            finally:
                                with Then("logins after restart should work"):
                                    for task in tasks:
                                        task.result(timeout=timeout)

def parallel_login(self, server, user_count=10, timeout=300, rbac=False):
    """Check that login of valid and invalid LDAP authenticated users works in parallel.
    """
    self.context.ldap_node = self.context.cluster.node(server)
    user = None

    users = [
        {"cn": f"parallel_user{i}", "userpassword": randomword(20)}
        for i in range(user_count)
    ]

    with ldap_users(*users):
        with ldap_authenticated_users(*[{"username": user["cn"], "server": server} for user in users], rbac=rbac):

            def login_with_valid_username_and_password(users, i, iterations=10):
                with When(f"valid users try to login #{i}"):
                    for i in range(iterations):
                        random_user = users[random.randint(0, len(users) - 1)]
                        login_and_execute_query(username=random_user["cn"],
                            password=random_user["userpassword"], steps=False)

            def login_with_valid_username_and_invalid_password(users, i, iterations=10):
                with When(f"users try to login with valid username and invalid password #{i}"):
                    for i in range(iterations):
                        random_user = users[random.randint(0, len(users) - 1)]
                        login_and_execute_query(username=random_user["cn"],
                            password=(random_user["userpassword"] + randomword(1)),
                            exitcode=4,
                            message=f"DB::Exception: {random_user['cn']}: Authentication failed: password is incorrect or there is no user with such name",
                            steps=False)

            def login_with_invalid_username_and_valid_password(users, i, iterations=10):
                with When(f"users try to login with invalid username and valid password #{i}"):
                    for i in range(iterations):
                        random_user = dict(users[random.randint(0, len(users) - 1)])
                        random_user["cn"] += randomword(1)
                        login_and_execute_query(username=random_user["cn"],
                            password=random_user["userpassword"],
                            exitcode=4,
                            message=f"DB::Exception: {random_user['cn']}: Authentication failed: password is incorrect or there is no user with such name",
                            steps=False)

            with When("I login in parallel"):
                tasks = []
                with Pool(4) as pool:
                    try:
                        for i in range(5):
                            # check arguments are passed positionally to pool.submit
                            tasks.append(pool.submit(login_with_valid_username_and_password, users, i, 50))
                            tasks.append(pool.submit(login_with_valid_username_and_invalid_password, users, i, 50))
                            tasks.append(pool.submit(login_with_invalid_username_and_valid_password, users, i, 50))
                    finally:
                        with Then("it should work"):
                            for task in tasks:
                                task.result(timeout=timeout)