def test_rest_api_authorization_cancel_request(self):
    """Cancel an async CBAS request while authenticated under several RBAC
    roles, and check that the status code of the cancel (DELETE) call
    matches the expectation recorded for each role.

    Failures are collected per-role and reported in one final assert so a
    single bad role does not hide the others.
    """
    had_failure = False
    self._setupForTest()
    # ro_admin (expected 401) is intentionally left out of this matrix.
    expected_status_by_role = [
        ("cluster_admin", 200),
        ("admin", 200),
        ("analytics_manager[*]", 200),
        ("analytics_reader", 200),
    ]
    for role_name, expected_status in expected_status_by_role:
        rbac_utils(self.master)._create_user_and_grant_role(
            "testuser", role_name)
        self.sleep(5)
        client_context_id = "abcd1234"
        # Fire a deliberately slow query asynchronously so there is a
        # running request to cancel.
        statement = "select sleep(count(*),5000) from {0} where mutated=0;".format(
            self.cbas_dataset_name)
        status, metrics, errors, results, handle = \
            self.cbas_util.execute_statement_on_cbas_util(
                statement, mode="async",
                client_context_id=client_context_id)
        # The DELETE's HTTP status is what this test is actually about.
        status = self.cbas_util.delete_request(
            client_context_id, username="******")
        if str(status) != str(expected_status):
            self.log.info(
                "Error cancelling request as user with {0} role. Response = {1}"
                .format(role_name, status))
            had_failure = True
        else:
            self.log.info(
                "Cancelling request as user with {0} role worked as expected"
                .format(role_name))
        rbac_utils(self.master)._drop_user("testuser")
    self.assertFalse(
        had_failure,
        "Authentication errors with some APIs. Check the test log above.")
def test_audit_logs_with_filtered_user_list(self):
    """Verify the audit "filtered user" whitelist:

    1. While ``cbas_admin`` is whitelisted, a service-config change made
       by that user must NOT generate an audit event.
    2. After the whitelist is cleared, the same change MUST generate one.
    """
    def _update_log_level():
        # The audited action: a CBAS service-config PUT issued as the
        # (possibly whitelisted) user.
        self.log.info("Update service configuration service parameter: logLevel")
        service_configuration_map = {"logLevel": "TRACE"}
        status, _, _ = self.cbas_util.update_service_parameter_configuration_on_cbas(
            service_configuration_map, username="******")
        self.assertTrue(status, msg="Incorrect status for service configuration PUT request")

    self.log.info("Create a user with role cluster admin")
    rbac_util = rbac_utils(self.master)
    rbac_util._create_user_and_grant_role("cbas_admin", "cluster_admin")

    self.log.info("Read configuration audit ids")
    self.audit_id = self.input.param("audit_id")

    self.log.info("Disabled audit logs for user")
    audit_obj = audit(host=self.master)
    audit_obj.setWhiteListUsers("cbas_admin/local")

    _update_log_level()

    self.log.info("Verify audit logs are not generated as cbas_admin is whitelisted")
    server_audit_obj = audit(eventID=self.audit_id, host=self.cbas_node)
    self.assertFalse(server_audit_obj.check_if_audit_event_generated(),
                     msg="Audit event must not be generated")

    # Typo fix: message previously said "whitelabel".
    self.log.info("Remove whitelisted user")
    audit_obj.setWhiteListUsers()

    _update_log_level()

    # Fixed copy-pasted log message: after clearing the whitelist the
    # event MUST be generated, matching the assertTrue below (the old
    # message claimed the opposite).
    self.log.info("Verify audit logs are generated as the whitelist is now empty")
    server_audit_obj = audit(eventID=self.audit_id, host=self.cbas_node)
    self.assertTrue(server_audit_obj.check_if_audit_event_generated(),
                    msg="Audit event must be generated")
def test_error_response_user_permissions(self):
    """Run the configured error-producing query as a user holding only the
    analytics_reader role, then validate the error message and code in the
    response against the expected values from ``self.error_response``."""
    self.log.info("Create a user with analytics reader role")
    rbac_utils(self.master)._create_user_and_grant_role(
        "reader_admin", "analytics_reader")
    query = self.error_response["query"]
    status, _, errors, _, _ = self.cbas_util.execute_statement_on_cbas_util(
        query, username="******", password="******")
    self.validate_error_response(
        status, errors,
        self.error_response["msg"], self.error_response["code"])
def test_cbbackupmgr_backup_bucket_only_if_user_has_bucket_permission(self):
    """Backup/restore Analytics metadata with cbbackupmgr as a user that
    only has data_backup permission on the beer-sample bucket, then verify
    that only the dataverses tied to that bucket are restored.

    Flow: seed data + datasets/indexes -> backup as scoped user -> wipe all
    CBAS entities -> restore as the same user -> validate bucket states,
    link connections and per-dataverse metadata counts.
    """
    self.log.info('Load documents in KV, create dataverse, datasets, index and validate')
    self.create_ds_index_and_validate_count()
    self.log.info('Create user with backup permission on beer-sample')
    # Credential is redacted in this source; the role below scopes the
    # user's backup rights to the beer-sample bucket only.
    user = '******'
    self.rbac_util = rbac_utils(self.master)
    self.rbac_util._create_user_and_grant_role(user, 'data_backup[beer-sample]')
    self.sleep(2)
    self.log.info('Backup Analytics metadata using cbbackupmgr')
    shell = RemoteMachineShellConnection(self.master)
    o = shell.create_backup(self.master, username=user)
    # create_backup returns the command's output lines; success is detected
    # by substring match on the joined output.
    self.assertTrue('Backup successfully completed' in ''.join(o), msg='Backup was unsuccessful')
    self.log.info('Drop all analytics data - Dataverses, Datasets, Indexes')
    self.cleanup_cbas()
    self.log.info('Restore Analytics metadata using cbbackupmgr')
    shell = RemoteMachineShellConnection(self.master)
    o = shell.restore_backup(self.cbas_node, username=user)
    self.assertTrue('Restore completed successfully' in ''.join(o), msg='Restore was unsuccessful')
    self.log.info('Verify bucket state')
    self.build_bucket_status_map()
    # Restored buckets come back disconnected; dataverse_2 (presumably tied
    # to a bucket the user could not back up) must not be restored at all.
    self.assertEqual(self.dataverse_bucket_map[self.dataverse][self.beer_sample_bucket], 'disconnected')
    self.assertEqual(self.dataverse_bucket_map[self.dataverse_1][self.beer_sample_bucket], 'disconnected')
    self.assertFalse(self.dataverse_2 in self.dataverse_bucket_map)
    self.log.info('Connect to Local link')
    self.cbas_util.connect_link()
    self.log.info('Connect to Local link on dataverse')
    self.cbas_util.connect_link(link_name=self.dataverse_1 + '.Local')
    # Restored dataverses carry their dataset and index; the excluded one
    # must have zero of each.
    self.log.info('Validate metadata for %s dataverse' % self.dataverse)
    self.validate_metadata(self.dataverse, self.dataset, self.index_name, dataverse_count=1, dataset_count=1, index_count=1)
    self.log.info('Validate metadata for %s dataverse' % self.dataverse_1)
    self.validate_metadata(self.dataverse_1, self.dataset_1, self.index_name_1, dataverse_count=1, dataset_count=1, index_count=1)
    self.log.info('Validate metadata for %s dataverse' %
                  self.dataverse_2)
    self.validate_metadata(self.dataverse_2, self.dataset_2, self.index_name_2, dataverse_count=0, dataset_count=0, index_count=0)
def setUp(self):
    """Force ``default_bucket=False`` (unless the test input already set
    it), run the base-class setup, then attach an RBAC helper bound to the
    master node."""
    self.input = TestInputSingleton.input
    params = self.input.test_params
    # Only inject the default when the caller did not choose one.
    if "default_bucket" not in params:
        params.update({"default_bucket": False})
    super(CBASRBACTests, self).setUp()
    self.rbac_util = rbac_utils(self.master)
def setUp(self):
    """x509 test fixture: reset certificate state, read cert-related test
    input parameters, optionally create an ro_admin RBAC user, generate
    node/client certificates, and (optionally) install them cluster-wide.
    """
    super(x509tests, self).setUp()
    self._reset_original()
    self.ip_address = self.getLocalIPAddress()
    # NOTE(review): the line below clobbers the value fetched above with a
    # hard-coded IP — looks like leftover debug/environment pinning; confirm
    # whether getLocalIPAddress() should win here.
    self.ip_address = '172.16.1.174'
    self.root_ca_path = x509main.CACERTFILEPATH + x509main.CACERTFILE
    # Client cert/key filenames are derived from the client IP address.
    self.client_cert_pem = x509main.CACERTFILEPATH + self.ip_address + ".pem"
    self.client_cert_key = x509main.CACERTFILEPATH + self.ip_address + ".key"
    SSLtype = "openssl"
    encryption_type = self.input.param('encryption_type', "")
    key_length = self.input.param("key_length", 1024)
    # Input parameters for state, path, delimiters and prefixes
    # (colon-separated lists; "delimeter"/"prefixs" spellings match the
    # test-input parameter names and must not be corrected here).
    self.client_cert_state = self.input.param("client_cert_state", "disable")
    self.paths = self.input.param(
        'paths', "subject.cn:san.dnsname:san.uri").split(":")
    self.prefixs = self.input.param('prefixs', 'www.cb-:us.:www.').split(":")
    self.delimeters = self.input.param('delimeter', '.:.:.').split(":")
    self.setup_once = self.input.param("setup_once", False)
    self.dns = self.input.param('dns', None)
    self.uri = self.input.param('uri', None)
    # Deep copy so cert generation can work on a server list that is
    # independent of self.servers.
    copy_servers = copy.deepcopy(self.servers)
    self.rbac_user = self.input.param('rbac_user', None)
    if self.rbac_user:
        self.rbac_util = rbac_utils(self.master)
        self.rbac_util._create_user_and_grant_role("ro_admin", "ro_admin")
    # Generate cert and pass on the client ip for cert generation;
    # alternate names are only used when dns/uri inputs are supplied.
    if (self.dns is not None) or (self.uri is not None):
        x509main(self.master)._generate_cert(copy_servers, type=SSLtype,
                                             encryption=encryption_type,
                                             key_length=key_length,
                                             client_ip=self.ip_address,
                                             alt_names='non_default',
                                             dns=self.dns, uri=self.uri)
    else:
        x509main(self.master)._generate_cert(copy_servers, type=SSLtype,
                                             encryption=encryption_type,
                                             key_length=key_length,
                                             client_ip=self.ip_address)
    self.log.info(
        " Path is {0} - Prefixs - {1} -- Delimeters - {2}".format(
            self.paths, self.prefixs, self.delimeters))
    if (self.setup_once):
        # One-shot cluster setup: push cert config to master, then enable
        # SSL on every node.
        x509main(self.master).setup_master(self.client_cert_state,
                                           self.paths, self.prefixs,
                                           self.delimeters)
        x509main().setup_cluster_nodes_ssl(self.servers)
    # reset the servers to ipv6 if there were ipv6 (disabled code below)
    ''' for server in self.servers: if server.ip.count(':') > 0: # raw ipv6? enclose in square brackets server.ip = '[' + server.ip + ']' '''
    self.log.info(" list of server {0}".format(self.servers))
    self.log.info(" list of server {0}".format(copy_servers))