def test_create_dataset_with_udf_in_where_clause(self):
    """Negative test: a dataset DDL must reject a UDF in its WHERE clause.

    Creates a 2-parameter expression UDF, then attempts to create a
    dataset whose where_clause calls that UDF and expects the server to
    reject it with "Illegal use of user-defined function ...".
    """
    self.log.info("Test started")
    self.setup_for_test()
    self.log.debug("Setup for test completed")
    udf_obj = self.create_udf_object(2, "expression", "same", True)
    self.log.debug("Udf objects created")
    if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name,
            dataverse=udf_obj.dataverse_name, or_replace=False,
            parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False,
            use_statement=False, validate_error_msg=False,
            expected_error=None, timeout=300, analytics_timeout=300):
        self.fail("Error while creating Analytics UDF")
    # validate_error_msg=True: dataset creation is expected to FAIL with
    # the given error; create_dataset returns True when the expected
    # error was seen.
    if not self.cbas_util.create_dataset(
            self.cluster,
            dataset_name=CBASHelper.format_name(
                self.cbas_util.generate_name()),
            kv_entity=(self.cbas_util.list_all_dataset_objs()[
                0]).full_kv_entity_name,
            dataverse_name=udf_obj.dataverse_name,
            where_clause="age > {0}({1})".format(
                udf_obj.full_name, ",".join(udf_obj.parameters)),
            validate_error_msg=True,
            expected_error="Illegal use of user-defined function "
                           "{0}".format(CBASHelper.unformat_name(
                               CBASHelper.metadata_format(
                                   udf_obj.dataverse_name),
                               udf_obj.name)),
            timeout=300, analytics_timeout=300,
            analytics_collection=False):
        # Typo fix: "successfull" -> "successful" in the failure message.
        self.fail("Dataset creation was successful while using user "
                  "defined function in where clause of the DDL")
    self.log.info("Test Finished")
def test_analytics_collection_attach_dettach_events(self):
    """Verify collection detach/attach system events are raised.

    Drops the backing KV collection of an analytics collection
    (expecting a detach event and the dataset to drain to 0 docs),
    then recreates the KV collection (expecting an attach event).
    """
    dataset_obj = self.cbas_util.create_dataset_obj(
        self.cluster, self.bucket_util, dataset_cardinality=3,
        bucket_cardinality=3, enabled_from_KV=False, no_of_objs=1,
        exclude_collection=["_default"])[0]
    # Bug fix: was random.choice(["True", "False"]) -- both non-empty
    # strings are truthy, so the analytics_collection=False path was
    # never exercised. Pass real booleans, consistent with the other
    # tests in this file.
    if not self.cbas_util.create_dataset(
            self.cluster, dataset_obj.name,
            dataset_obj.full_kv_entity_name,
            dataverse_name=dataset_obj.dataverse_name,
            analytics_collection=random.choice([True, False])):
        self.fail("Error while creating analytics collection")
    self.log.info("Dropping collection {0}".format(
        dataset_obj.full_kv_entity_name))
    self.bucket_util.drop_collection(
        self.cluster.master, dataset_obj.kv_bucket,
        scope_name=dataset_obj.kv_scope.name,
        collection_name=dataset_obj.kv_collection.name, session=None)
    # After the KV collection is dropped the dataset should drain to
    # 0 documents.
    if not self.cbas_util.wait_for_ingestion_complete(
            self.cluster, dataset_obj.full_name, 0, timeout=300):
        self.fail("Data is present in the dataset when it should not")
    self.log.info("Adding event for collection_detach events")
    self.system_events.add_event(AnalyticsEvents.collection_detached(
        self.cluster.cbas_cc_node.ip,
        CBASHelper.metadata_format(dataset_obj.dataverse_name),
        CBASHelper.unformat_name(dataset_obj.name)))
    self.log.info("Creating collection {0}".format(
        dataset_obj.full_kv_entity_name))
    self.bucket_util.create_collection(
        self.cluster.master, dataset_obj.kv_bucket,
        scope_name=dataset_obj.kv_scope.name,
        collection_spec=dataset_obj.kv_collection.get_dict_object(),
        session=None)
    # The recreated collection is empty, so the expected count is
    # still 0.
    if not self.cbas_util.wait_for_ingestion_complete(
            self.cluster, dataset_obj.full_name, 0, timeout=300):
        self.fail("Data ingestion failed.")
    self.sleep(3, "Waiting for event to be generated")
    self.log.info("Adding event for collection_attach events")
    self.system_events.add_event(AnalyticsEvents.collection_attached(
        self.cluster.cbas_cc_node.ip,
        CBASHelper.metadata_format(dataset_obj.dataverse_name),
        CBASHelper.unformat_name(dataset_obj.name)))
def __init__(self, name=None, dataverse_name="Default", properties=None):
    """
    :param name str, name of the link, not needed in case of a link of
    type Local
    :param dataverse_name str, dataverse where the link is present.
    :param properties: dict, contains all the properties required to
    create a link.
    Common for both AWS and couchbase link.
    <Required> name : name of the link to be created.
    <Required> scope : name of the dataverse under which the link has
    to be created.
    <Required> type : s3/couchbase

    For links to external couchbase cluster.
    <Required> hostname : The hostname of the link
    <Optional> username : The username for host authentication.
    Required if encryption is set to "none" or "half. Optional if
    encryption is set to "full".
    <Optional> password : The password for host authentication.
    Required if encryption is set to "none" or "half. Optional if
    encryption is set to "full".
    <Required> encryption : The link secure connection type
    ('none', 'full' or 'half')
    <Optional> certificate : The root certificate of target cluster
    for authentication. Required only if encryption is set to "full"
    <Optional> clientCertificate : The user certificate for
    authentication. Required only if encryption is set to "full" and
    username and password is not used.
    <Optional> clientKey : The client key for user authentication.
    Required only if encryption is set to "full" and username and
    password is not used.

    For links to AWS S3
    <Required> accessKeyId : The access key of the link
    <Required> secretAccessKey : The secret key of the link
    <Required> region : The region of the link
    <Optional> serviceEndpoint : The service endpoint of the link.

    Note - please use the exact key names as provided above in link
    properties dict.
    """
    self.name = CBASHelper.format_name(name)
    self.dataverse_name = CBASHelper.format_name(dataverse_name)
    # Bug fix: the default used to be a mutable default argument
    # (properties={}). Since this method mutates the dict below, all
    # instances created with the default shared a single dict. Use a
    # None sentinel and create a fresh dict per call instead.
    self.properties = {} if properties is None else properties
    self.properties["name"] = CBASHelper.unformat_name(self.name)
    self.properties["dataverse"] = CBASHelper.unformat_name(
        self.dataverse_name)
    # "type" is a required key in properties (see docstring); a missing
    # key raises KeyError here, same as before.
    self.link_type = self.properties["type"].lower()
    self.full_name = CBASHelper.format_name(self.dataverse_name,
                                            self.name)
def __init__(self, name, dataverse_name, parameters, body,
             referenced_entities):
    """
    :param name str, name of the User defined fucntion
    :param dataverse str, name of the dataverse where the UDF is
    present.
    :param parameters list parameters used while creating the UDF.
    :param body str function body
    :param referenced_entities list list of datasets or UDF referenced
    in the function body
    """
    self.name = CBASHelper.format_name(name)
    self.dataverse_name = CBASHelper.format_name(dataverse_name)
    self.parameters = parameters
    # A leading "..." parameter marks a variadic UDF; arity -1 encodes
    # "any number of arguments".
    if parameters and parameters[0] == "...":
        self.arity = -1
    else:
        self.arity = len(parameters)
    self.body = body
    self.dataset_dependencies = list()
    self.udf_dependencies = list()
    self.synonym_dependencies = list()
    # Sort each referenced entity into the matching dependency bucket,
    # recording [dataverse, name] (plus arity for UDF dependencies).
    for dependency in referenced_entities:
        unformatted = [
            CBASHelper.unformat_name(dependency.dataverse_name),
            CBASHelper.unformat_name(dependency.name)]
        if isinstance(dependency, (Dataset, CBAS_Collection)):
            self.dataset_dependencies.append(unformatted)
        elif isinstance(dependency, Synonym):
            self.synonym_dependencies.append(unformatted)
        elif isinstance(dependency, CBAS_UDF):
            self.udf_dependencies.append(
                unformatted + [dependency.arity])
    self.full_name = CBASHelper.format_name(self.dataverse_name,
                                            self.name)
def test_analytics_udf_system_event_logs(self):
    """Verify system events for UDF create, replace and drop.

    Creates one UDF per (arity, body type, dataverse) combination,
    replaces a random one of them, then drops them all, registering the
    expected system event after each operation.
    """
    self.log.info("Test started")
    self.setup_for_test()
    # (arity, body_type, dependent_entity_dataverse) combinations;
    # arity -1 means a variadic ("...") UDF.
    udf_types = [(0, "expression", "diff"), (2, "expression", "diff"),
                 (-1, "expression", "diff"), (0, "dataset", "diff"),
                 (2, "dataset", "diff"), (-1, "dataset", "diff"),
                 (0, "synonym", "diff"), (2, "synonym", "diff"),
                 (-1, "synonym", "diff"), (0, "udf", "diff"),
                 (2, "udf", "diff"), (-1, "udf", "diff")]
    udf_objs = list()
    for udf_type in udf_types:
        udf_obj = self.create_udf_object(udf_type[0], udf_type[1],
                                         udf_type[2])
        if not self.cbas_util.create_udf(
                self.cluster, name=udf_obj.name,
                dataverse=udf_obj.dataverse_name, or_replace=False,
                parameters=udf_obj.parameters, body=udf_obj.body,
                if_not_exists=False, query_context=False,
                use_statement=False, validate_error_msg=False,
                expected_error=None, timeout=300,
                analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")
        udf_objs.append(udf_obj)
        self.log.info(
            "Adding event for user_defined_function_created events")
        # Register the expected "created" event for later validation.
        self.system_events.add_event(
            AnalyticsEvents.user_defined_function_created(
                self.cluster.cbas_cc_node.ip,
                CBASHelper.metadata_format(udf_obj.dataverse_name),
                CBASHelper.unformat_name(udf_obj.name),
                udf_obj.arity))

    # Create UDF to test replace
    # Pick a random existing UDF and build a new object with the same
    # name/dataverse/parameters (so CREATE ... OR REPLACE hits it).
    idx = random.choice(range(len(udf_objs)))
    udf_type = udf_types[idx]
    test_udf_obj = self.create_udf_object(udf_type[0], udf_type[1],
                                          udf_type[2])
    test_udf_obj.name = udf_objs[idx].name
    test_udf_obj.dataverse_name = udf_objs[idx].dataverse_name
    test_udf_obj.parameters = udf_objs[idx].parameters
    test_udf_obj.reset_full_name()
    if not self.cbas_util.create_udf(
            self.cluster, name=test_udf_obj.name,
            dataverse=test_udf_obj.dataverse_name, or_replace=True,
            parameters=test_udf_obj.parameters, body=test_udf_obj.body,
            if_not_exists=False, query_context=False,
            use_statement=False, validate_error_msg=False,
            expected_error="", timeout=300, analytics_timeout=300):
        self.fail("Error while creating Analytics UDF")
    self.log.info("Adding event for user_defined_function_replaced events")
    self.system_events.add_event(
        AnalyticsEvents.user_defined_function_replaced(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(test_udf_obj.dataverse_name),
            CBASHelper.unformat_name(test_udf_obj.name),
            test_udf_obj.arity))
    self.sleep(
        2, "Waiting for user_defined_function_replaced events to "
           "be generated")

    self.log.info("Adding event for user_defined_function_dropped events")
    # Retry loop: repeatedly sweep over udf_objs by index until every
    # UDF has been dropped. Indices of successfully dropped UDFs are
    # collected in udf_deleted_successfully and skipped on later
    # passes. NOTE(review): if a drop never succeeds this loop does not
    # terminate (no retry cap) -- presumably acceptable here because
    # the test-level timeout bounds it; confirm against the runner.
    udf_deleted_successfully = list()
    i = 0
    while udf_objs:
        if (i < len(udf_objs)) and (i not in udf_deleted_successfully):
            udf_obj = udf_objs[i]
            if self.cbas_util.drop_udf(
                    self.cluster, name=udf_obj.name,
                    dataverse=udf_obj.dataverse_name,
                    parameters=udf_obj.parameters, if_exists=False,
                    use_statement=False, query_context=False,
                    validate_error_msg=False, expected_error=None,
                    timeout=300, analytics_timeout=300):
                udf_deleted_successfully.append(i)
                self.system_events.add_event(
                    AnalyticsEvents.user_defined_function_dropped(
                        self.cluster.cbas_cc_node.ip,
                        CBASHelper.metadata_format(
                            udf_obj.dataverse_name),
                        CBASHelper.unformat_name(udf_obj.name),
                        udf_obj.arity))
            i += 1
        elif len(udf_deleted_successfully) == len(udf_objs):
            # Every UDF dropped -> done.
            break
        elif i >= len(udf_objs):
            # End of a pass with drops still pending: start over.
            i = 0
        elif i in udf_deleted_successfully:
            # Already dropped on an earlier pass: skip.
            i += 1
    self.log.info("Test Finished")
def test_create_multiple_analytics_udfs(self):
    """Parameterised UDF-creation test driven by self.input params.

    Creates num_init_udf baseline UDFs, then creates one test UDF whose
    name/dataverse/parameters may deliberately collide with the first
    baseline UDF (per test params). Depending on validate_error, either
    expects a server error or validates metadata and executes the UDF.
    """
    self.log.info("Test started")
    self.setup_for_test()
    udf_objs = list()
    for i in range(0, self.input.param('num_init_udf', 1)):
        udf_obj = self.create_udf_object(
            self.input.param('num_create_params', 0), "expression",
            "same", True)
        if not self.cbas_util.create_udf(
                self.cluster, name=udf_obj.name,
                dataverse=udf_obj.dataverse_name, or_replace=False,
                parameters=udf_obj.parameters, body=udf_obj.body,
                if_not_exists=False, query_context=False,
                use_statement=False, validate_error_msg=False,
                expected_error=None, timeout=300,
                analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")
        udf_objs.append(udf_obj)

    # Create UDF test to test
    test_udf_obj = self.create_udf_object(
        self.input.param('num_test_udf_params', 0),
        self.input.param('body_type', "expression"),
        self.input.param('dependent_entity_dv', "same"),
        self.input.param('use_full_name', True))

    if self.input.param('test_udf_name', "diff") == "same":
        test_udf_obj.name = udf_objs[0].name
        test_udf_obj.reset_full_name()
    if self.input.param('test_udf_dv', "diff") == "same":
        test_udf_obj.dataverse_name = udf_objs[0].dataverse_name
        test_udf_obj.reset_full_name()
    else:
        # Bug fix: random.choice() needs a sequence; dict.values() is a
        # non-indexable view on Python 3 and raised TypeError here.
        # NOTE(review): loops forever if only one dataverse exists --
        # presumably setup_for_test creates several; confirm.
        while test_udf_obj.dataverse_name == udf_objs[0].dataverse_name:
            test_udf_obj.dataverse_name = random.choice(
                list(self.cbas_util.dataverses.values())).name
    if self.input.param('test_udf_param_name', "diff") == "same":
        test_udf_obj.parameters = udf_objs[0].parameters

    if not self.cbas_util.create_udf(
            self.cluster, name=test_udf_obj.name,
            dataverse=test_udf_obj.dataverse_name,
            or_replace=self.input.param('or_replace', False),
            parameters=test_udf_obj.parameters, body=test_udf_obj.body,
            if_not_exists=self.input.param('if_not_exists', False),
            query_context=False, use_statement=False,
            validate_error_msg=self.input.param(
                'validate_error', False),
            expected_error=self.input.param('expected_error', "").format(
                CBASHelper.unformat_name(
                    CBASHelper.metadata_format(
                        test_udf_obj.dataverse_name),
                    test_udf_obj.name)),
            timeout=300, analytics_timeout=300):
        self.fail("Error while creating Analytics UDF")

    if not self.input.param('validate_error', False):
        # With IF NOT EXISTS the original UDF must survive untouched;
        # otherwise the newly created test UDF is expected in metadata.
        if self.input.param('if_not_exists', False):
            object_to_validate = udf_objs[0]
        else:
            object_to_validate = test_udf_obj
        if not self.cbas_util.validate_udf_in_metadata(
                self.cluster, udf_name=object_to_validate.name,
                udf_dataverse_name=object_to_validate.dataverse_name,
                parameters=object_to_validate.parameters,
                body=object_to_validate.body,
                dataset_dependencies=object_to_validate.
                dataset_dependencies,
                udf_dependencies=object_to_validate.udf_dependencies):
            self.fail("Error while validating Function in Metadata")
        # Execute the UDF with 1..n and verify the (sum) result.
        if self.input.param('num_execute_params', -1) == -1:
            num_execute_params = len(test_udf_obj.parameters)
        else:
            num_execute_params = self.input.param('num_execute_params')
        execute_params = [i for i in range(1, num_execute_params + 1)]
        if not self.cbas_util.verify_function_execution_result(
                self.cluster, test_udf_obj.full_name, execute_params,
                sum(execute_params)):
            self.fail("Failed while verifying function execution result")
    self.log.info("Test Finished")