def test_drop_dataverse_with_udf_and_dependent_entities(self):
    """Drop a dataverse containing an entity (dataset/synonym) that a UDF
    references, and verify the configured outcome.

    Test params:
        body_type: entity type referenced by the UDF body
            ("dataset" or "synonym").
        dependent_entity_dv: whether the dependent entity lives in the
            "same" or a different dataverse than the UDF.
        validate_error / expected_error: expected outcome of the drop.
    """
    self.log.info("Test started")
    self.setup_for_test()
    self.log.debug("Setup complete.")
    udf_obj = self.create_udf_object(
        2, self.input.param('body_type', "dataset"),
        self.input.param('dependent_entity_dv', "same"), True, False)
    self.log.debug("Udf objects created")
    if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name,
            dataverse=udf_obj.dataverse_name, or_replace=False,
            parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False, use_statement=False,
            validate_error_msg=False, expected_error=None, timeout=300,
            analytics_timeout=300):
        self.fail("Error while creating Analytics UDF")
    # Pick the dataverse of the UDF's first dependency. Fail fast when the
    # UDF unexpectedly has no dependency at all -- the original code would
    # hit an UnboundLocalError on `dataverse_to_be_dropped` in that case.
    if udf_obj.dataset_dependencies:
        dataverse_to_be_dropped = CBASHelper.format_name(
            udf_obj.dataset_dependencies[0][0])
    elif udf_obj.synonym_dependencies:
        dataverse_to_be_dropped = CBASHelper.format_name(
            udf_obj.synonym_dependencies[0][0])
    else:
        self.fail("UDF has no dataset or synonym dependencies, nothing "
                  "to drop")
    if not self.cbas_util.drop_dataverse(
            self.cluster, dataverse_name=dataverse_to_be_dropped,
            validate_error_msg=self.input.param('validate_error', False),
            expected_error=self.input.param('expected_error', None),
            timeout=300, analytics_timeout=300, delete_dataverse_obj=True,
            disconnect_local_link=True):
        self.fail("Successfully dropped dataverse being used by a UDF")
    self.log.info("Test Finished")
def __init__(self, name="cbas_ds", dataverse_name="Default",
             link_name=None, dataset_source="internal",
             dataset_properties=None, bucket=None, scope=None,
             collection=None, enabled_from_KV=False, num_of_items=0):
    """
    :param name str, name of the dataset
    :param dataverse_name str, dataverse where the dataset is present.
    :param link_name str, name of the link to which dataset is
        associated, required if dataset is being created on remote or
        external source.
    :param dataset_source str, determines whether the dataset is created
        on couchbase buckets or external data source. Valid values are
        internal or external.
    :param dataset_properties dict, valid only for dataset with
        dataset_source as external
    :param bucket bucket_obj KV bucket on which dataset is based.
    :param scope str KV scope on which dataset is based. If only bucket
        name is specified, then default scope is selected.
    :param collection str KV collection on which dataset is based. If
        only bucket name is specified, then default collection in
        default scope is selected.
    :param enabled_from_KV bool, specify whether the dataset was created
        by enabling analytics from KV.
    :param num_of_items int, expected number of items in dataset.
    """
    # Fixes vs. original: `dataset_properties` defaulted to a shared
    # mutable dict (classic Python pitfall); and the default
    # dataverse_name was the misspelled "Dafault" -- corrected to
    # "Default" to match the other entity classes in this file.
    if dataset_properties is None:
        dataset_properties = {}
    self.name = CBASHelper.format_name(name)
    self.dataverse_name = CBASHelper.format_name(dataverse_name)
    self.full_name = CBASHelper.format_name(self.dataverse_name,
                                            self.name)
    self.link_name = CBASHelper.format_name(link_name)
    self.dataset_source = dataset_source
    self.indexes = dict()
    if self.dataset_source == "internal":
        # Internal datasets ignore dataset_properties entirely.
        self.dataset_properties = {}
        self.enabled_from_KV = enabled_from_KV
        self.kv_bucket = bucket
        self.kv_scope = scope
        self.kv_collection = collection
        # Full KV name uses collection-level cardinality only when a
        # collection was given; otherwise just the bucket name.
        if self.kv_collection:
            self.full_kv_entity_name = \
                self.get_fully_qualified_kv_entity_name(cardinality=3)
        else:
            self.full_kv_entity_name = \
                self.get_fully_qualified_kv_entity_name(cardinality=1)
        self.num_of_items = num_of_items
    elif self.dataset_source == "external":
        # External datasets carry no KV backing at all.
        self.dataset_properties = dataset_properties
        self.enabled_from_KV = False
        self.kv_bucket = None
        self.kv_scope = None
        self.kv_collection = None
        self.full_kv_entity_name = None
        self.num_of_items = 0
def get_fully_qualified_kv_entity_name(self, cardinality=1):
    """Return the formatted KV entity name for this dataset.

    :param cardinality int, 1 -> bucket, 2 -> bucket.scope,
        3 -> bucket.scope.collection
    :raises ValueError: if cardinality is not 1, 2 or 3. The original
        implementation silently returned None in that case, which hid
        caller bugs.
    """
    if cardinality == 1:
        return CBASHelper.format_name(self.kv_bucket.name)
    elif cardinality == 2:
        return CBASHelper.format_name(self.kv_bucket.name,
                                      self.kv_scope.name)
    elif cardinality == 3:
        return CBASHelper.format_name(self.kv_bucket.name,
                                      self.kv_scope.name,
                                      self.kv_collection.name)
    else:
        raise ValueError(
            "cardinality must be 1, 2 or 3, got %r" % (cardinality,))
def test_analytics_synonym_events(self):
    """Verify system events for synonym create/drop, including a
    dangling synonym (one whose target does not exist)."""
    ds = self.cbas_util.create_dataset_obj(
        self.cluster, self.bucket_util, dataset_cardinality=3,
        bucket_cardinality=3, enabled_from_KV=False, no_of_objs=1)[0]
    if not self.cbas_util.create_dataset(
            self.cluster, ds.name, ds.full_kv_entity_name,
            dataverse_name=ds.dataverse_name,
            analytics_collection=random.choice(["True", "False"])):
        self.fail("Error while creating analytics collection")
    cc_ip = self.cluster.cbas_cc_node.ip
    dv_meta = CBASHelper.metadata_format(ds.dataverse_name)

    # Synonym backed by a real dataset.
    first_synonym = CBASHelper.format_name(
        self.cbas_util.generate_name(name_cardinality=1))
    if not self.cbas_util.create_analytics_synonym(
            self.cluster,
            CBASHelper.format_name(ds.dataverse_name, first_synonym),
            ds.full_name):
        self.fail("Error while creating Synonym")
    self.log.info("Adding event for synonym_created event")
    self.system_events.add_event(AnalyticsEvents.synonym_created(
        cc_ip, dv_meta, CBASHelper.metadata_format(first_synonym),
        dv_meta, CBASHelper.metadata_format(ds.name)))

    # Synonym whose target does not exist.
    second_synonym = CBASHelper.format_name(
        self.cbas_util.generate_name(name_cardinality=1))
    self.log.info("Creating dangling Synonym")
    if not self.cbas_util.create_analytics_synonym(
            self.cluster,
            CBASHelper.format_name(ds.dataverse_name, second_synonym),
            "dangling"):
        self.fail("Error while creating Synonym")
    self.log.info("Adding event for synonym_created event for dangling "
                  "synonym")
    self.system_events.add_event(AnalyticsEvents.synonym_created(
        cc_ip, dv_meta, CBASHelper.metadata_format(second_synonym),
        dv_meta, CBASHelper.metadata_format("dangling")))

    # Drop both synonyms and record the matching dropped events.
    for synonym in (first_synonym, second_synonym):
        if not self.cbas_util.drop_analytics_synonym(
                self.cluster,
                CBASHelper.format_name(ds.dataverse_name, synonym)):
            self.fail("Error while dropping synonym")
        self.log.info("Adding event for synonym_dropped events")
        self.system_events.add_event(AnalyticsEvents.synonym_dropped(
            cc_ip, dv_meta, CBASHelper.metadata_format(synonym)))
def test_analytics_index_events(self):
    """Verify system events fired on analytics index creation and
    removal."""
    ds = self.cbas_util.create_dataset_obj(
        self.cluster, self.bucket_util, dataset_cardinality=3,
        bucket_cardinality=3, enabled_from_KV=False, no_of_objs=1)[0]
    if not self.cbas_util.create_dataset(
            self.cluster, ds.name, ds.full_kv_entity_name,
            dataverse_name=ds.dataverse_name,
            analytics_collection=random.choice(["True", "False"])):
        self.fail("Error while creating analytics collection")
    idx_name = CBASHelper.format_name(
        self.cbas_util.generate_name(name_cardinality=1))
    # Create the index, randomly exercising both DDL syntaxes.
    if not self.cbas_util.create_cbas_index(
            self.cluster, idx_name, ["age:bigint"], ds.full_name,
            analytics_index=random.choice(["True", "False"])):
        self.fail("Error while creating analytics index")
    self.log.info("Adding event for index_created events")
    self.system_events.add_event(AnalyticsEvents.index_created(
        self.cluster.cbas_cc_node.ip,
        CBASHelper.metadata_format(ds.dataverse_name),
        CBASHelper.metadata_format(idx_name),
        CBASHelper.metadata_format(ds.name)))
    # Drop it again and record the matching event.
    if not self.cbas_util.drop_cbas_index(
            self.cluster, idx_name, ds.full_name,
            analytics_index=random.choice(["True", "False"])):
        self.fail("Error while dropping analytics index")
    self.log.info("Adding event for index_dropped events")
    self.system_events.add_event(AnalyticsEvents.index_dropped(
        self.cluster.cbas_cc_node.ip,
        CBASHelper.metadata_format(ds.dataverse_name),
        CBASHelper.metadata_format(idx_name),
        CBASHelper.metadata_format(ds.name)))
def test_create_dataset_with_udf_in_where_clause(self):
    """A UDF call in the WHERE clause of CREATE DATASET DDL must be
    rejected with an "Illegal use of user-defined function" error."""
    self.log.info("Test started")
    self.setup_for_test()
    self.log.debug("Setup for test completed")
    udf_obj = self.create_udf_object(2, "expression", "same", True)
    self.log.debug("Udf objects created")
    if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name,
            dataverse=udf_obj.dataverse_name, or_replace=False,
            parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False, use_statement=False,
            validate_error_msg=False, expected_error=None, timeout=300,
            analytics_timeout=300):
        self.fail("Error while creating Analytics UDF")
    # The dataset creation itself is expected to fail with the given
    # error; create_dataset returns True only when the expected error
    # was observed.
    if not self.cbas_util.create_dataset(
            self.cluster,
            dataset_name=CBASHelper.format_name(
                self.cbas_util.generate_name()),
            kv_entity=(self.cbas_util.list_all_dataset_objs()[
                0]).full_kv_entity_name,
            dataverse_name=udf_obj.dataverse_name,
            where_clause="age > {0}({1})".format(
                udf_obj.full_name, ",".join(udf_obj.parameters)),
            validate_error_msg=True,
            expected_error="Illegal use of user-defined function "
                           "{0}".format(
                CBASHelper.unformat_name(CBASHelper.metadata_format(
                    udf_obj.dataverse_name), udf_obj.name)),
            timeout=300, analytics_timeout=300,
            analytics_collection=False):
        # Typo fix vs. original: "successfull" -> "successful".
        self.fail("Dataset creation was successful while using user "
                  "defined function in where clause of the DDL")
    self.log.info("Test Finished")
def test_drop_dataset_while_it_is_being_used_by_UDF(self):
    """Dropping a dataset referenced by a UDF must fail with
    error code 24142."""
    self.log.info("Test started")
    self.setup_for_test()
    self.log.debug("Setup for test completed")
    udf_obj = self.create_udf_object(
        2, "dataset", self.input.param('dependent_entity_dv', "same"),
        True)
    self.log.debug("Udf objects created")
    created = self.cbas_util.create_udf(
        self.cluster, name=udf_obj.name,
        dataverse=udf_obj.dataverse_name, or_replace=False,
        parameters=udf_obj.parameters, body=udf_obj.body,
        if_not_exists=False, query_context=False, use_statement=False,
        validate_error_msg=False, expected_error=None, timeout=300,
        analytics_timeout=300)
    if not created:
        self.fail("Error while creating Analytics UDF")
    self.log.debug("Udf created")
    # First dataset dependency is the collection the UDF body reads.
    target_dataset = CBASHelper.format_name(
        *udf_obj.dataset_dependencies[0])
    drop_rejected = self.cbas_util.drop_dataset(
        self.cluster, dataset_name=target_dataset,
        validate_error_msg=True,
        expected_error="Cannot drop analytics collection",
        expected_error_code=24142, timeout=300, analytics_timeout=300)
    if not drop_rejected:
        self.fail("Successfully dropped dataset being used by a UDF")
    self.log.info("Test Finished")
def __init__(self, name, dataset_name, dataverse_name,
             indexed_fields=None):
    """
    :param name str, name of the index
    :param dataset_name str, dataset/analytics_collection on which the
        index is created
    :param dataverse_name str, dataverse where the dataset is present.
    :param indexed_fields str, fields the index covers, in the format
        "field_name_1:field_type_1-field_name_2:field_type_2"
    """
    self.name = CBASHelper.format_name(name)
    self.dataset_name = CBASHelper.format_name(dataset_name)
    self.dataverse_name = CBASHelper.format_name(dataverse_name)
    self.full_dataset_name = CBASHelper.format_name(
        self.dataverse_name, self.dataset_name)
    # Flipped to True by callers that use the ANALYTICS INDEX syntax.
    self.analytics_index = False
    # "-"-separated spec string -> list of "field:type" entries.
    self.indexed_fields = (
        indexed_fields.split("-") if indexed_fields else [])
def __init__(self, name, dataverse_name, parameters, body,
             referenced_entities):
    """
    :param name str, name of the user defined function
    :param dataverse_name str, dataverse where the UDF is present.
    :param parameters list, parameters used while creating the UDF;
        a single "..." marks a variadic function.
    :param body str, function body
    :param referenced_entities list, datasets, synonyms or UDFs that
        the function body references
    """
    self.name = CBASHelper.format_name(name)
    self.dataverse_name = CBASHelper.format_name(dataverse_name)
    self.parameters = parameters
    # Variadic UDFs are recorded with arity -1.
    self.arity = (-1 if parameters and parameters[0] == "..."
                  else len(parameters))
    self.body = body
    self.dataset_dependencies = []
    self.udf_dependencies = []
    self.synonym_dependencies = []
    # Bucket each referenced entity by type; dependencies are stored
    # with unformatted (raw) dataverse and entity names.
    for entity in referenced_entities:
        entry = [CBASHelper.unformat_name(entity.dataverse_name),
                 CBASHelper.unformat_name(entity.name)]
        if isinstance(entity, (Dataset, CBAS_Collection)):
            self.dataset_dependencies.append(entry)
        elif isinstance(entity, Synonym):
            self.synonym_dependencies.append(entry)
        elif isinstance(entity, CBAS_UDF):
            # UDF dependencies additionally carry the callee's arity.
            self.udf_dependencies.append(entry + [entity.arity])
    self.full_name = CBASHelper.format_name(self.dataverse_name,
                                            self.name)
def __init__(self, name=None, dataverse_name="Default", properties=None):
    """
    :param name str, name of the link, not needed in case of a link of
        type Local
    :param dataverse_name str, dataverse where the link is present.
    :param properties dict, contains all the properties required to
        create a link.
        Common for both AWS and couchbase link:
        <Required> name : name of the link to be created.
        <Required> scope : name of the dataverse under which the link
            has to be created.
        <Required> type : s3/couchbase
        For links to external couchbase cluster:
        <Required> hostname : The hostname of the link
        <Optional> username : The username for host authentication.
            Required if encryption is set to "none" or "half".
            Optional if encryption is set to "full".
        <Optional> password : The password for host authentication.
            Required if encryption is set to "none" or "half".
            Optional if encryption is set to "full".
        <Required> encryption : The link secure connection type
            ('none', 'full' or 'half')
        <Optional> certificate : The root certificate of target cluster
            for authentication. Required only if encryption is set to
            "full"
        <Optional> clientCertificate : The user certificate for
            authentication. Required only if encryption is set to
            "full" and username and password is not used.
        <Optional> clientKey : The client key for user authentication.
            Required only if encryption is set to "full" and username
            and password is not used.
        For links to AWS S3:
        <Required> accessKeyId : The access key of the link
        <Required> secretAccessKey : The secret key of the link
        <Required> region : The region of the link
        <Optional> serviceEndpoint : The service endpoint of the link.
        Note - please use the exact key names as provided above in link
        properties dict.
    """
    self.name = CBASHelper.format_name(name)
    self.dataverse_name = CBASHelper.format_name(dataverse_name)
    # BUG FIX: the original signature defaulted `properties={}` and then
    # mutated it below ("name"/"dataverse" keys), so every Link built
    # with the default shared -- and polluted -- one single dict.
    # Default to None and build a fresh dict per instance instead.
    self.properties = {} if properties is None else properties
    self.properties["name"] = CBASHelper.unformat_name(self.name)
    self.properties["dataverse"] = CBASHelper.unformat_name(
        self.dataverse_name)
    # "type" is a required key; a KeyError here means the caller passed
    # an incomplete properties dict (same behavior as the original).
    self.link_type = self.properties["type"].lower()
    self.full_name = CBASHelper.format_name(self.dataverse_name,
                                            self.name)
def test_restart_kv_server_impact_on_bucket(self):
    """After rebooting the KV node, dataset ingestion must converge to
    the bucket's N1QL document count."""
    self.log.info('Restart couchbase')
    shell = RemoteMachineShellConnection(self.cluster.master)
    shell.reboot_server_and_wait_for_cb_run(self.cluster_util,
                                            self.cluster.master)
    dataset = self.cbas_util.list_all_dataset_objs()[0]
    self.log.info('Validate document count')
    # Ground truth: N1QL count on the backing KV bucket.
    bucket_name = CBASHelper.format_name(dataset.kv_bucket.name)
    query_result = self.cluster.rest.query_tool(
        'select count(*) from %s' % bucket_name)
    count_n1ql = query_result["results"][0]["$1"]
    if not self.cbas_util.wait_for_ingestion_complete(
            self.cluster, dataset.full_name, count_n1ql, timeout=300):
        self.fail("No. of items in CBAS dataset do not match "
                  "that in the KV bucket")
def test_analytics_scope_events(self):
    """Verify system events fired when an analytics scope (dataverse)
    is created and dropped."""
    dv_name = CBASHelper.format_name(
        self.cbas_util.generate_name(name_cardinality=2))
    # Randomly exercise both ANALYTICS SCOPE and DATAVERSE syntaxes.
    if not self.cbas_util.create_dataverse(
            self.cluster, dv_name,
            analytics_scope=random.choice(["True", "False"])):
        self.fail("Error while creating dataverse")
    self.log.info("Adding event for scope_created event")
    self.system_events.add_event(AnalyticsEvents.scope_created(
        self.cluster.cbas_cc_node.ip,
        CBASHelper.metadata_format(dv_name)))
    if not self.cbas_util.drop_dataverse(
            self.cluster, dv_name,
            analytics_scope=random.choice(["True", "False"])):
        self.fail("Error while dropping dataverse")
    self.log.info("Adding event for scope_dropped event")
    self.system_events.add_event(AnalyticsEvents.scope_dropped(
        self.cluster.cbas_cc_node.ip,
        CBASHelper.metadata_format(dv_name)))
def __init__(self, name, cbas_entity_name, cbas_entity_dataverse,
             dataverse_name="Default", synonym_on_synonym=False):
    """
    :param name str, name of the synonym
    :param cbas_entity_name str, CBAS entity the synonym points at; can
        be a Dataset/CBAS_Collection/Synonym name.
    :param cbas_entity_dataverse str, dataverse where that entity lives.
    :param dataverse_name str, dataverse where the synonym itself lives.
    :param synonym_on_synonym bool, True if this synonym targets
        another synonym.
    """
    fmt = CBASHelper.format_name
    self.name = fmt(name)
    self.cbas_entity_name = fmt(cbas_entity_name)
    self.cbas_entity_dataverse = fmt(cbas_entity_dataverse)
    self.dataverse_name = fmt(dataverse_name)
    # Fully qualified names for the synonym and for its target.
    self.full_name = fmt(self.dataverse_name, self.name)
    self.cbas_entity_full_name = fmt(self.cbas_entity_dataverse,
                                     self.cbas_entity_name)
    self.synonym_on_synonym = synonym_on_synonym
def reset_full_name(self):
    """Recompute full_name after dataverse_name or name changed."""
    name_parts = (self.dataverse_name, self.name)
    self.full_name = CBASHelper.format_name(*name_parts)
def __init__(self, name="Default"):
    """Container for analytics entities that live in one dataverse."""
    self.name = CBASHelper.format_name(name)
    # Per-type registries of entities belonging to this dataverse.
    self.links = {}
    self.datasets = {}
    self.synonyms = {}
    self.udfs = {}
def test_drop_analytics_udf(self):
    """Drop an analytics UDF under various test-param driven conditions
    and verify the outcome.

    Test params:
        num_create_params / body_type: shape of the UDF to create.
        second_udf: additionally create a UDF that depends on the first.
        no_params / invalid_name / invalid_dataverse / change_params:
            mutate the drop target so the DROP statement misses or uses
            a different signature.
        if_exists / validate_error / expected_error: expected outcome of
            the DROP statement.
    """
    self.log.info("Test started")
    self.setup_for_test()
    self.log.debug("Setup for test completed")
    udf_obj = self.create_udf_object(
        self.input.param('num_create_params', 0),
        self.input.param('body_type', "expression"), "same", True)
    self.log.debug("Udf objects created")
    if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name,
            dataverse=udf_obj.dataverse_name, or_replace=False,
            parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False, use_statement=False,
            validate_error_msg=False, expected_error=None, timeout=300,
            analytics_timeout=300):
        self.fail("Error while creating Analytics UDF")
    if self.input.param('second_udf', False):
        # Create UDF using another UDF
        udf_obj_2 = self.create_udf_object(
            self.input.param('num_test_udf_params', 0), "udf",
            self.input.param('dependent_entity_dv', "same"),
            self.input.param('use_full_name', True))
        if not self.cbas_util.create_udf(
                self.cluster, name=udf_obj_2.name,
                dataverse=udf_obj_2.dataverse_name, or_replace=False,
                parameters=udf_obj_2.parameters, body=udf_obj_2.body,
                if_not_exists=False, query_context=False,
                use_statement=False, validate_error_msg=False,
                expected_error=None, timeout=300, analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")
    # The mutations below make the DROP statement target something other
    # than the UDF that was actually created.
    if self.input.param('no_params', False):
        udf_obj.parameters = None
    if self.input.param('invalid_name', False):
        udf_obj.name = "invalid"
    if self.input.param('invalid_dataverse', False):
        udf_obj.dataverse_name = "invalid"
    if isinstance(self.input.param('change_params', None), int):
        if self.input.param('change_params', None) == -1:
            # -1 -> drop with a variadic signature.
            udf_obj.parameters = ["..."]
        else:
            # N >= 0 -> drop with N freshly generated parameter names.
            udf_obj.parameters = []
            for i in range(0, self.input.param('change_params', None)):
                udf_obj.parameters.append(
                    CBASHelper.format_name(
                        self.cbas_util.generate_name()))
    if not self.cbas_util.drop_udf(
            self.cluster, name=udf_obj.name,
            dataverse=udf_obj.dataverse_name,
            parameters=udf_obj.parameters,
            if_exists=self.input.param('if_exists', False),
            use_statement=self.input.param('use_statement', False),
            query_context=self.input.param('query_context', False),
            validate_error_msg=self.input.param('validate_error', False),
            expected_error=self.input.param('expected_error', None),
            timeout=300, analytics_timeout=300):
        self.fail("Failed to drop Analytics UDF")
    # When the drop was expected to succeed, the Metadata entry must be
    # gone; validate_udf_in_metadata returning True means it lingers.
    if not (self.input.param('validate_error', False) or
            self.input.param('if_exists', False)):
        if self.cbas_util.validate_udf_in_metadata(
                self.cluster, udf_name=udf_obj.name,
                udf_dataverse_name=udf_obj.dataverse_name,
                parameters=udf_obj.parameters, body=udf_obj.body,
                dataset_dependencies=udf_obj.dataset_dependencies,
                udf_dependencies=udf_obj.udf_dependencies):
            self.fail("Metadata entry for UDF is still present even "
                      "after dropping the UDF")
    self.log.info("Test Finished")
def test_create_analytics_udf(self):
    """Create an analytics UDF under various test-param driven
    conditions, validate its Metadata entry, and verify executing it
    returns the expected result.

    Test params:
        num_create_params / body_type / dependent_entity_dv /
            use_full_name: shape of the UDF to create.
        func_name / no_dataverse / dataverse_name / no_body /
            invalid_ds / custom_params: mutate the UDF definition before
            the CREATE statement runs.
        query_context / use_statement: how the dataverse is supplied.
        validate_error / expected_error / validate_execute_error:
            expected outcomes of CREATE and of execution.
        num_execute_params: number of arguments to call the UDF with;
            -1 means one per declared parameter.
    """
    self.log.info("Test started")
    self.setup_for_test()
    udf_obj = self.create_udf_object(
        self.input.param('num_create_params', 0),
        self.input.param('body_type', "expression"),
        self.input.param('dependent_entity_dv', "same"),
        self.input.param('use_full_name', True),
    )
    # Optional mutations of the UDF definition before CREATE.
    if self.input.param('func_name', None):
        udf_obj.name = self.input.param('func_name')
        udf_obj.full_name = udf_obj.dataverse_name + "." + udf_obj.name
    if self.input.param('no_dataverse', False):
        udf_obj.dataverse_name = None
    if self.input.param('dataverse_name', None):
        udf_obj.dataverse_name = self.input.param('dataverse_name')
    if self.input.param('num_create_params', 0) == -2:
        udf_obj.parameters = None
    if self.input.param('no_body', False):
        udf_obj.body = None
    if self.input.param('invalid_ds', False):
        udf_obj.body = "select count(*) from invalid"
    if self.input.param('custom_params', None):
        if self.input.param('custom_params') == "empty_string":
            udf_obj.parameters = ["", ""]
        elif self.input.param('custom_params') == "mix_param_1":
            udf_obj.parameters = ["a", "b", "..."]
        elif self.input.param('custom_params') == "mix_param_2":
            udf_obj.parameters = ["...", "a", "b"]
        elif self.input.param('custom_params') == "int_param":
            udf_obj.parameters = ["1", "2"]
        elif self.input.param('custom_params') == "bool_param":
            udf_obj.parameters = ["True", "False"]
    if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name,
            dataverse=udf_obj.dataverse_name, or_replace=False,
            parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False,
            query_context=self.input.param('query_context', False),
            use_statement=self.input.param('use_statement', False),
            validate_error_msg=self.input.param('validate_error', False),
            expected_error=self.input.param(
                'expected_error', "").format(udf_obj.dataverse_name),
            timeout=300, analytics_timeout=300):
        self.fail("Error while creating Analytics UDF")
    # Metadata and execution checks apply only when CREATE was expected
    # to succeed.
    if not self.input.param('validate_error', False):
        if self.input.param('no_dataverse', False):
            # Without an explicit dataverse the UDF lands in "Default".
            udf_obj.dataverse_name = "Default"
            udf_obj.reset_full_name()
        if not self.cbas_util.validate_udf_in_metadata(
                self.cluster, udf_name=udf_obj.name,
                udf_dataverse_name=udf_obj.dataverse_name,
                parameters=udf_obj.parameters, body=udf_obj.body,
                dataset_dependencies=udf_obj.dataset_dependencies,
                udf_dependencies=udf_obj.udf_dependencies,
                synonym_dependencies=udf_obj.synonym_dependencies):
            self.fail("Error while validating Function in Metadata")
        # Build the call arguments: 1..N integers, N either explicit or
        # matching the declared parameter count.
        if self.input.param('num_execute_params', -1) == -1:
            num_execute_params = len(udf_obj.parameters)
        else:
            num_execute_params = self.input.param('num_execute_params')
        execute_params = [i for i in range(1, num_execute_params + 1)]
        if not execute_params:
            # Zero-argument UDFs: a dataset/synonym-backed body is
            # presumably a count over its dependencies; a plain
            # expression body evaluates to 1 -- TODO confirm against
            # create_udf_object's generated bodies.
            expected_result = 0
            if udf_obj.dataset_dependencies:
                for dependency in udf_obj.dataset_dependencies:
                    obj = self.cbas_util.get_dataset_obj(
                        self.cluster,
                        CBASHelper.format_name(dependency[1]),
                        CBASHelper.format_name(dependency[0]))
                    expected_result += obj.num_of_items
            elif udf_obj.synonym_dependencies:
                for dependency in udf_obj.synonym_dependencies:
                    obj = self.cbas_util.get_dataset_obj_for_synonym(
                        self.cluster,
                        synonym_name=CBASHelper.format_name(
                            dependency[1]),
                        synonym_dataverse=CBASHelper.format_name(
                            dependency[0]))
                    expected_result += obj.num_of_items
            else:
                expected_result = 1
        else:
            # With arguments, the generated UDF sums its inputs.
            expected_result = sum(execute_params)
        if not self.cbas_util.verify_function_execution_result(
                self.cluster, func_name=udf_obj.full_name,
                func_parameters=execute_params,
                expected_result=expected_result,
                validate_error_msg=self.input.param(
                    'validate_execute_error', False),
                expected_error=self.input.param('expected_error', None)):
            self.fail("Failed while verifying function execution result")
    self.log.info("Test Finished")