Example No. 1
    def test_drop_dataverse_with_udf_and_dependent_entities(self):
        self.log.info("Test started")
        self.setup_for_test()
        self.log.debug("Setup complete.")

        udf_obj = self.create_udf_object(
            2, self.input.param('body_type', "dataset"),
            self.input.param('dependent_entity_dv', "same"), True, False)
        self.log.debug("UDF object created")

        if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name, dataverse=udf_obj.dataverse_name,
            or_replace=False, parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False, use_statement=False,
            validate_error_msg=False, expected_error=None, timeout=300,
            analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        if udf_obj.dataset_dependencies:
            dataverse_to_be_dropped = CBASHelper.format_name(
                udf_obj.dataset_dependencies[0][0])
        elif udf_obj.synonym_dependencies:
            dataverse_to_be_dropped = CBASHelper.format_name(
                udf_obj.synonym_dependencies[0][0])

        if not self.cbas_util.drop_dataverse(
            self.cluster, dataverse_name=dataverse_to_be_dropped,
            validate_error_msg=self.input.param('validate_error', False),
            expected_error=self.input.param('expected_error', None),
            timeout=300, analytics_timeout=300, delete_dataverse_obj=True,
            disconnect_local_link=True):
            self.fail("Successfully dropped dataverse being used by a UDF")
        self.log.info("Test Finished")
Example No. 2
    def test_create_dataset_with_udf_in_where_clause(self):
        self.log.info("Test started")
        self.setup_for_test()
        self.log.debug("Setup for test completed")

        udf_obj = self.create_udf_object(2, "expression", "same", True)
        self.log.debug("UDF object created")

        if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name, dataverse=udf_obj.dataverse_name,
            or_replace=False, parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False, use_statement=False,
            validate_error_msg=False, expected_error=None,
            timeout=300, analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        if not self.cbas_util.create_dataset(
            self.cluster, dataset_name=CBASHelper.format_name(
                self.cbas_util.generate_name()),
            kv_entity=(self.cbas_util.list_all_dataset_objs()[0]).full_kv_entity_name,
            dataverse_name=udf_obj.dataverse_name,
            where_clause="age > {0}({1})".format(
                udf_obj.full_name, ",".join(udf_obj.parameters)),
            validate_error_msg=True,
            expected_error="Illegal use of user-defined function {0}".format(
                CBASHelper.unformat_name(CBASHelper.metadata_format(
                    udf_obj.dataverse_name), udf_obj.name)),
            timeout=300, analytics_timeout=300, analytics_collection=False):
            self.fail("Dataset creation was successful while using a user "
                      "defined function in the where clause of the DDL")
        self.log.info("Test Finished")
Example No. 3
    def __init__(self,
                 name="cbas_ds",
                 dataverse_name="Default",
                 link_name=None,
                 dataset_source="internal",
                 dataset_properties=None,
                 bucket=None,
                 scope=None,
                 collection=None,
                 enabled_from_KV=False,
                 num_of_items=0):
        """
        :param name str, name of the dataset
        :param dataverse_name str, dataverse where the dataset is present.
        :param link_name str, name of the link to which dataset is associated,
        required if dataset is being created on remote or external source.
        :param dataset_source str, determines whether the dataset is created on couchbase buckets or
        external data source. Valid values are internal or external.
        :param dataset_properties dict, valid only for dataset with dataset_source as external
        :param bucket bucket_obj, KV bucket on which the dataset is based.
        :param scope str, KV scope on which the dataset is based.
        If only the bucket name is specified, then the default scope is selected.
        :param collection str, KV collection on which the dataset is based.
        If only the bucket name is specified, then the default collection in the default scope is selected.
        :param enabled_from_KV bool, specify whether the dataset was created by enabling analytics from KV.
        :param num_of_items int, expected number of items in dataset.
        """
        self.name = CBASHelper.format_name(name)
        self.dataverse_name = CBASHelper.format_name(dataverse_name)
        self.full_name = CBASHelper.format_name(self.dataverse_name, self.name)
        self.link_name = CBASHelper.format_name(link_name)
        self.dataset_source = dataset_source
        self.indexes = dict()

        if self.dataset_source == "internal":
            self.dataset_properties = {}
            self.enabled_from_KV = enabled_from_KV
            self.kv_bucket = bucket
            self.kv_scope = scope
            self.kv_collection = collection
            if self.kv_collection:
                self.full_kv_entity_name = self.get_fully_qualified_kv_entity_name(
                    cardinality=3)
            else:
                self.full_kv_entity_name = self.get_fully_qualified_kv_entity_name(
                    cardinality=1)
            self.num_of_items = num_of_items

        elif self.dataset_source == "external":
            self.dataset_properties = dataset_properties or {}
            self.enabled_from_KV = False
            self.kv_bucket = None
            self.kv_scope = None
            self.kv_collection = None
            self.full_kv_entity_name = None
            self.num_of_items = 0
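A minimal usage sketch for the constructor above, assuming it belongs to the Dataset class referenced in Example No. 13, that the method in Example No. 4 is part of the same class, and that the bucket/scope/collection arguments only need a .name attribute; the stand-in objects and values below are illustrative only.

    from collections import namedtuple

    # Hypothetical stand-in for the framework's KV bucket/scope/collection objects;
    # only the .name attribute is used by get_fully_qualified_kv_entity_name.
    KVEntity = namedtuple("KVEntity", ["name"])

    dataset = Dataset(
        name="airline_ds",                 # analytics collection name
        dataverse_name="travel",           # dataverse holding the dataset
        dataset_source="internal",         # backed by a Couchbase KV collection
        bucket=KVEntity("travel-sample"),
        scope=KVEntity("inventory"),
        collection=KVEntity("airline"),
        num_of_items=10000)

    # Both names are built through CBASHelper.format_name in the constructor.
    print(dataset.full_name, dataset.full_kv_entity_name)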
Example No. 4
 def get_fully_qualified_kv_entity_name(self, cardinality=1):
     if cardinality == 1:
         return CBASHelper.format_name(self.kv_bucket.name)
     elif cardinality == 2:
         return CBASHelper.format_name(self.kv_bucket.name,
                                       self.kv_scope.name)
     elif cardinality == 3:
         return CBASHelper.format_name(self.kv_bucket.name,
                                       self.kv_scope.name,
                                       self.kv_collection.name)
Example No. 5
    def test_drop_dataset_while_it_is_being_used_by_UDF(self):
        self.log.info("Test started")
        self.setup_for_test()
        self.log.debug("Setup for test completed")

        udf_obj = self.create_udf_object(
            2, "dataset", self.input.param('dependent_entity_dv', "same"), True)

        self.log.debug("UDF object created")

        if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name, dataverse=udf_obj.dataverse_name,
            or_replace=False, parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False, use_statement=False,
            validate_error_msg=False, expected_error=None,
            timeout=300, analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        self.log.debug("Udf created")

        dataset_name = CBASHelper.format_name(*udf_obj.dataset_dependencies[0])
        if not self.cbas_util.drop_dataset(
            self.cluster, dataset_name=dataset_name, validate_error_msg=True,
            expected_error="Cannot drop analytics collection",
            expected_error_code=24142, timeout=300, analytics_timeout=300):
            self.fail("Successfully dropped dataset being used by a UDF")
        self.log.info("Test Finished")
Example No. 6
    def test_analytics_collection_attach_dettach_events(self):
        dataset_obj = self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=False,
            no_of_objs=1, exclude_collection=["_default"])[0]
        if not self.cbas_util.create_dataset(
                self.cluster, dataset_obj.name,
                dataset_obj.full_kv_entity_name,
                dataverse_name=dataset_obj.dataverse_name,
                analytics_collection=random.choice(["True", "False"])):
            self.fail("Error while creating analytics collection")

        self.log.info("Dropping collection {0}".format(
            dataset_obj.full_kv_entity_name))
        self.bucket_util.drop_collection(
            self.cluster.master, dataset_obj.kv_bucket,
            scope_name=dataset_obj.kv_scope.name,
            collection_name=dataset_obj.kv_collection.name, session=None)
        if not self.cbas_util.wait_for_ingestion_complete(
                self.cluster, dataset_obj.full_name, 0, timeout=300):
            self.fail("Data is present in the dataset when it should not be")
        self.log.info("Adding event for collection_detach events")
        self.system_events.add_event(AnalyticsEvents.collection_detached(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.unformat_name(dataset_obj.name)))

        self.log.info("Creating collection {0}".format(
            dataset_obj.full_kv_entity_name))
        self.bucket_util.create_collection(
            self.cluster.master, dataset_obj.kv_bucket,
            scope_name=dataset_obj.kv_scope.name,
            collection_spec=dataset_obj.kv_collection.get_dict_object(),
            session=None)
        if not self.cbas_util.wait_for_ingestion_complete(
                self.cluster, dataset_obj.full_name, 0, timeout=300):
            self.fail("Data ingestion failed.")

        self.sleep(3, "Waiting for event to be generated")

        self.log.info("Adding event for collection_attach events")
        self.system_events.add_event(AnalyticsEvents.collection_attached(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.unformat_name(dataset_obj.name)))
Example No. 7
    def test_analytics_index_events(self):
        dataset_obj = self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=False,
            no_of_objs=1)[0]
        if not self.cbas_util.create_dataset(
                self.cluster, dataset_obj.name, dataset_obj.full_kv_entity_name,
                dataverse_name=dataset_obj.dataverse_name,
                analytics_collection=random.choice(["True", "False"])):
            self.fail("Error while creating analytics collection")
        index_name = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=1))
        if not self.cbas_util.create_cbas_index(
                self.cluster, index_name, ["age:bigint"], dataset_obj.full_name,
                analytics_index=random.choice(["True", "False"])):
            self.fail("Error while creating analytics index")

        self.log.info("Adding event for index_created events")
        self.system_events.add_event(AnalyticsEvents.index_created(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(index_name),
            CBASHelper.metadata_format(dataset_obj.name)))

        if not self.cbas_util.drop_cbas_index(
                self.cluster, index_name, dataset_obj.full_name,
                analytics_index=random.choice(["True", "False"])):
            self.fail("Error while dropping analytics index")

        self.log.info("Adding event for index_dropped events")
        self.system_events.add_event(AnalyticsEvents.index_dropped(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(index_name),
            CBASHelper.metadata_format(dataset_obj.name)))
Example No. 8
 def test_analytics_collection_events(self):
     dataset_objs = self.cbas_util.create_dataset_obj(
         self.cluster, self.bucket_util, dataset_cardinality=3,
         bucket_cardinality=3, enabled_from_KV=False,
         no_of_objs=1)
     dataset_objs += self.cbas_util.create_dataset_obj(
         self.cluster, self.bucket_util, dataset_cardinality=3,
         bucket_cardinality=3, enabled_from_KV=True,
         no_of_objs=1)
     for dataset in dataset_objs:
         if dataset.enabled_from_KV:
             if not self.cbas_util.enable_analytics_from_KV(
                     self.cluster, dataset.full_kv_entity_name):
                 self.fail("Error while mapping KV collection to analytics")
             self.system_events.add_event(AnalyticsEvents.collection_mapped(
                 self.cluster.cbas_cc_node.ip, dataset.kv_bucket.name,
                 dataset.kv_scope.name, dataset.kv_collection.name))
             if not self.cbas_util.disable_analytics_from_KV(
                     self.cluster, dataset.full_kv_entity_name):
                 self.fail("Error while unmapping KV collection from "
                           "analytics")
         else:
             if not self.cbas_util.create_dataset(
                     self.cluster, dataset.name, dataset.full_kv_entity_name,
                     dataverse_name=dataset.dataverse_name,
                     analytics_collection=random.choice(["True", "False"])):
                 self.fail("Error while creating analytics collection")
             self.system_events.add_event(AnalyticsEvents.collection_created(
                 self.cluster.cbas_cc_node.ip,
                 CBASHelper.metadata_format(dataset.dataverse_name),
                 CBASHelper.metadata_format(dataset.name),
                 CBASHelper.metadata_format(dataset.dataverse_name),
                 "Local", dataset.kv_bucket.name, dataset.kv_scope.name,
                 dataset.kv_collection.name))
             if not self.cbas_util.drop_dataset(
                     self.cluster, dataset.full_name,
                     analytics_collection=random.choice(["True", "False"])):
                 self.fail("Error while dropping datasets")
         self.system_events.add_event(AnalyticsEvents.collection_dropped(
             self.cluster.cbas_cc_node.ip,
             CBASHelper.metadata_format(dataset.dataverse_name),
             CBASHelper.metadata_format(dataset.name)))
Example No. 9
 def __init__(self,
              name,
              dataset_name,
              dataverse_name,
              indexed_fields=None):
     """
     :param name str, name of the index
     :param dataset_name str, dataset/analytics_collection on which index is created
     :param dataverse_name str, name of the dataverse where the dataset is present.
      :param indexed_fields str, fields on which the index is created,
     format "field_name_1:field_type_1-field_name_2:field_type_2"
     """
     self.name = CBASHelper.format_name(name)
     self.dataset_name = CBASHelper.format_name(dataset_name)
     self.dataverse_name = CBASHelper.format_name(dataverse_name)
     self.full_dataset_name = CBASHelper.format_name(
         self.dataverse_name, self.dataset_name)
     self.analytics_index = False
     self.indexed_fields = []
     if indexed_fields:
         self.indexed_fields = indexed_fields.split("-")
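The indexed_fields string described in the docstring is just "name:type" pairs joined by "-"; a quick illustration of how the constructor splits it (the values are illustrative, matching the "age:bigint" field used in Example No. 7):

    # "name:type" pairs joined by "-" as described in the docstring above
    indexed_fields = "age:bigint-name:string"
    print(indexed_fields.split("-"))  # ['age:bigint', 'name:string']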
Example No. 10
    def test_analytics_scope_events(self):
        dataverse_name = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=2))
        if not self.cbas_util.create_dataverse(
                self.cluster, dataverse_name,
                analytics_scope=random.choice(["True", "False"])):
            self.fail("Error while creating dataverse")
        self.log.info(
            "Adding event for scope_created event")
        self.system_events.add_event(AnalyticsEvents.scope_created(
            self.cluster.cbas_cc_node.ip, CBASHelper.metadata_format(
                dataverse_name)))

        if not self.cbas_util.drop_dataverse(
                self.cluster, dataverse_name,
                analytics_scope=random.choice(["True", "False"])):
            self.fail("Error while dropping dataverse")
        self.log.info("Adding event for scope_dropped event")
        self.system_events.add_event(AnalyticsEvents.scope_dropped(
            self.cluster.cbas_cc_node.ip, CBASHelper.metadata_format(
                dataverse_name)))
Example No. 11
    def __init__(self, name=None, dataverse_name="Default", properties=None):
        """
        :param name str, name of the link, not needed in case of a link of type Local
        :param dataverse_name str, dataverse where the link is present.
        :param properties: dict, contains all the properties required to create a link.
        Common to both AWS and Couchbase links.
        <Required> name : name of the link to be created.
        <Required> scope : name of the dataverse under which the link has to be created.
        <Required> type : s3/couchbase

        For links to external couchbase cluster.
        <Required> hostname : The hostname of the link
        <Optional> username : The username for host authentication. Required if encryption is set to
        "none" or "half". Optional if encryption is set to "full".
        <Optional> password : The password for host authentication. Required if encryption is set to
        "none" or "half". Optional if encryption is set to "full".
        <Required> encryption : The link secure connection type ('none', 'full' or 'half')
        <Optional> certificate : The root certificate of target cluster for authentication.
        Required only if encryption is set to "full"
        <Optional> clientCertificate : The user certificate for authentication.
        Required only if encryption is set to "full" and username and password are not used.
        <Optional> clientKey : The client key for user authentication.
        Required only if encryption is set to "full" and username and password are not used.

        For links to AWS S3
        <Required> accessKeyId : The access key of the link
        <Required> secretAccessKey : The secret key of the link
        <Required> region : The region of the link
        <Optional> serviceEndpoint : The service endpoint of the link.
        Note - please use the exact key names as provided above in link properties dict.
        """
        self.name = CBASHelper.format_name(name)
        self.dataverse_name = CBASHelper.format_name(dataverse_name)
        self.properties = properties or {}
        self.properties["name"] = CBASHelper.unformat_name(self.name)
        self.properties["dataverse"] = CBASHelper.unformat_name(
            self.dataverse_name)
        self.link_type = self.properties["type"].lower()
        self.full_name = CBASHelper.format_name(self.dataverse_name, self.name)
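A hedged sketch of the properties dict for an AWS S3 link, using the key names listed in the docstring above; the class name Link and the credential values are assumptions, and the constructor fills in the "name" and "dataverse" keys itself.

    # Key names follow the docstring above; credential values are placeholders.
    s3_properties = {
        "type": "s3",
        "accessKeyId": "<access_key_id>",
        "secretAccessKey": "<secret_access_key>",
        "region": "us-west-1",
        # "serviceEndpoint": "<endpoint>",  # optional
    }

    # `Link` is a hypothetical name for the class whose constructor is shown above.
    s3_link = Link(name="aws_link", dataverse_name="Default",
                   properties=s3_properties)
    print(s3_link.full_name, s3_link.link_type)  # link_type becomes "s3"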
Example No. 12
    def test_restart_kv_server_impact_on_bucket(self):

        self.log.info('Restart couchbase')
        shell = RemoteMachineShellConnection(self.cluster.master)
        shell.reboot_server_and_wait_for_cb_run(self.cluster_util,
                                                self.cluster.master)

        dataset = self.cbas_util.list_all_dataset_objs()[0]
        self.log.info('Validate document count')
        count_n1ql = self.cluster.rest.query_tool(
            'select count(*) from %s' %
            CBASHelper.format_name(dataset.kv_bucket.name))["results"][0]["$1"]
        if not self.cbas_util.wait_for_ingestion_complete(
                self.cluster, dataset.full_name, count_n1ql, timeout=300):
            self.fail("No. of items in CBAS dataset does not match "
                      "that in the KV bucket")
Example No. 13
 def __init__(self, name, dataverse_name, parameters, body,
              referenced_entities):
     """
      :param name str, name of the user-defined function
      :param dataverse_name str, name of the dataverse where the UDF is
      present.
      :param parameters list, parameters used while creating the UDF.
      :param body str, function body
      :param referenced_entities list, list of datasets or UDFs referenced in
      the function body
     """
     self.name = CBASHelper.format_name(name)
     self.dataverse_name = CBASHelper.format_name(dataverse_name)
     self.parameters = parameters
     if parameters and parameters[0] == "...":
         self.arity = -1
     else:
         self.arity = len(parameters)
     self.body = body
     self.dataset_dependencies = list()
     self.udf_dependencies = list()
     self.synonym_dependencies = list()
     for entity in referenced_entities:
         if isinstance(entity, Dataset) or isinstance(
                 entity, CBAS_Collection):
             self.dataset_dependencies.append([
                 CBASHelper.unformat_name(entity.dataverse_name),
                 CBASHelper.unformat_name(entity.name)
             ])
         elif isinstance(entity, Synonym):
             self.synonym_dependencies.append([
                 CBASHelper.unformat_name(entity.dataverse_name),
                 CBASHelper.unformat_name(entity.name)
             ])
         elif isinstance(entity, CBAS_UDF):
             self.udf_dependencies.append([
                 CBASHelper.unformat_name(entity.dataverse_name),
                 CBASHelper.unformat_name(entity.name), entity.arity
             ])
     self.full_name = CBASHelper.format_name(self.dataverse_name, self.name)
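A small sketch of the CBAS_UDF constructor above for a variadic function with no referenced entities; the function body string is illustrative only.

    # A lone "..." parameter marks the UDF as variadic, so arity becomes -1
    # per the constructor logic above.
    udf = CBAS_UDF(name="echo_args",
                   dataverse_name="Default",
                   parameters=["..."],
                   body="1",                 # illustrative body only
                   referenced_entities=[])
    print(udf.full_name, udf.arity)          # arity == -1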
Example No. 14
 def __init__(self,
              name,
              cbas_entity_name,
              cbas_entity_dataverse,
              dataverse_name="Default",
              synonym_on_synonym=False):
     """
     :param name str, name of the synonym
     :param cbas_entity_name str, Cbas entity on which Synonym is based,
     can be Dataset/CBAS_collection/Synonym name.
     :param cbas_entity_dataverse str, dataverse name where the cbas_entity is present.
      :param dataverse_name str, dataverse where the synonym is present.
     :param synonym_on_synonym bool, True if synonym was created on another synonym.
     """
     self.name = CBASHelper.format_name(name)
     self.cbas_entity_name = CBASHelper.format_name(cbas_entity_name)
     self.cbas_entity_dataverse = CBASHelper.format_name(
         cbas_entity_dataverse)
     self.dataverse_name = CBASHelper.format_name(dataverse_name)
     self.full_name = CBASHelper.format_name(self.dataverse_name, self.name)
     self.cbas_entity_full_name = CBASHelper.format_name(
         self.cbas_entity_dataverse, self.cbas_entity_name)
     self.synonym_on_synonym = synonym_on_synonym
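A brief sketch of the Synonym constructor above, mapping a synonym in the Default dataverse onto a dataset that lives in another dataverse; the names are illustrative.

    syn = Synonym(name="airline_syn",
                  cbas_entity_name="airline_ds",
                  cbas_entity_dataverse="travel",
                  dataverse_name="Default")
    # The synonym and the entity it points at keep separate fully qualified names.
    print(syn.full_name, "->", syn.cbas_entity_full_name)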
Example No. 15
 def __init__(self, name="Default"):
     self.name = CBASHelper.format_name(name)
     self.links = dict()
     self.datasets = dict()
     self.synonyms = dict()
     self.udfs = dict()
Example No. 16
    def test_drop_analytics_udf(self):
        self.log.info("Test started")
        self.setup_for_test()
        self.log.debug("Setup for test completed")

        udf_obj = self.create_udf_object(
            self.input.param('num_create_params', 0),
            self.input.param('body_type', "expression"), "same", True)
        self.log.debug("UDF object created")

        if not self.cbas_util.create_udf(self.cluster,
                                         name=udf_obj.name,
                                         dataverse=udf_obj.dataverse_name,
                                         or_replace=False,
                                         parameters=udf_obj.parameters,
                                         body=udf_obj.body,
                                         if_not_exists=False,
                                         query_context=False,
                                         use_statement=False,
                                         validate_error_msg=False,
                                         expected_error=None,
                                         timeout=300,
                                         analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        if self.input.param('second_udf', False):
            # Create UDF using another UDF
            udf_obj_2 = self.create_udf_object(
                self.input.param('num_test_udf_params', 0), "udf",
                self.input.param('dependent_entity_dv', "same"),
                self.input.param('use_full_name', True))

            if not self.cbas_util.create_udf(
                    self.cluster,
                    name=udf_obj_2.name,
                    dataverse=udf_obj_2.dataverse_name,
                    or_replace=False,
                    parameters=udf_obj_2.parameters,
                    body=udf_obj_2.body,
                    if_not_exists=False,
                    query_context=False,
                    use_statement=False,
                    validate_error_msg=False,
                    expected_error=None,
                    timeout=300,
                    analytics_timeout=300):
                self.fail("Error while creating Analytics UDF")

        if self.input.param('no_params', False):
            udf_obj.parameters = None
        if self.input.param('invalid_name', False):
            udf_obj.name = "invalid"
        if self.input.param('invalid_dataverse', False):
            udf_obj.dataverse_name = "invalid"
        if isinstance(self.input.param('change_params', None), int):
            if self.input.param('change_params', None) == -1:
                udf_obj.parameters = ["..."]
            else:
                udf_obj.parameters = []
                for i in range(0, self.input.param('change_params', None)):
                    udf_obj.parameters.append(
                        CBASHelper.format_name(self.cbas_util.generate_name()))

        if not self.cbas_util.drop_udf(
                self.cluster,
                name=udf_obj.name,
                dataverse=udf_obj.dataverse_name,
                parameters=udf_obj.parameters,
                if_exists=self.input.param('if_exists', False),
                use_statement=self.input.param('use_statement', False),
                query_context=self.input.param('query_context', False),
                validate_error_msg=self.input.param('validate_error', False),
                expected_error=self.input.param('expected_error', None),
                timeout=300,
                analytics_timeout=300):
            self.fail("Failed to drop Analytics UDF")

        if not (self.input.param('validate_error', False)
                or self.input.param('if_exists', False)):
            if self.cbas_util.validate_udf_in_metadata(
                    self.cluster,
                    udf_name=udf_obj.name,
                    udf_dataverse_name=udf_obj.dataverse_name,
                    parameters=udf_obj.parameters,
                    body=udf_obj.body,
                    dataset_dependencies=udf_obj.dataset_dependencies,
                    udf_dependencies=udf_obj.udf_dependencies):
                self.fail("Metadata entry for UDF is still present even "
                          "after dropping the UDF")
        self.log.info("Test Finished")
Example No. 17
    def test_create_multiple_analytics_udfs(self):
        self.log.info("Test started")
        self.setup_for_test()
        udf_objs = list()

        for i in range(0, self.input.param('num_init_udf', 1)):
            udf_obj = self.create_udf_object(
                self.input.param('num_create_params', 0), "expression", "same",
                True)

            if not self.cbas_util.create_udf(self.cluster,
                                             name=udf_obj.name,
                                             dataverse=udf_obj.dataverse_name,
                                             or_replace=False,
                                             parameters=udf_obj.parameters,
                                             body=udf_obj.body,
                                             if_not_exists=False,
                                             query_context=False,
                                             use_statement=False,
                                             validate_error_msg=False,
                                             expected_error=None,
                                             timeout=300,
                                             analytics_timeout=300):
                self.fail("Error while creating Analytics UDF")
            udf_objs.append(udf_obj)

        # Create the UDF to test
        test_udf_obj = self.create_udf_object(
            self.input.param('num_test_udf_params', 0),
            self.input.param('body_type', "expression"),
            self.input.param('dependent_entity_dv', "same"),
            self.input.param('use_full_name', True))

        if self.input.param('test_udf_name', "diff") == "same":
            test_udf_obj.name = udf_objs[0].name
            test_udf_obj.reset_full_name()
        if self.input.param('test_udf_dv', "diff") == "same":
            test_udf_obj.dataverse_name = udf_objs[0].dataverse_name
            test_udf_obj.reset_full_name()
        else:
            while test_udf_obj.dataverse_name == udf_objs[0].dataverse_name:
                test_udf_obj.dataverse_name = random.choice(
                    self.cbas_util.dataverses.values()).name
        if self.input.param('test_udf_param_name', "diff") == "same":
            test_udf_obj.parameters = udf_objs[0].parameters

        if not self.cbas_util.create_udf(
                self.cluster,
                name=test_udf_obj.name,
                dataverse=test_udf_obj.dataverse_name,
                or_replace=self.input.param('or_replace', False),
                parameters=test_udf_obj.parameters,
                body=test_udf_obj.body,
                if_not_exists=self.input.param('if_not_exists', False),
                query_context=False,
                use_statement=False,
                validate_error_msg=self.input.param('validate_error', False),
                expected_error=self.input.param('expected_error', "").format(
                    CBASHelper.unformat_name(
                        CBASHelper.metadata_format(
                            test_udf_obj.dataverse_name), test_udf_obj.name)),
                timeout=300,
                analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        if not self.input.param('validate_error', False):
            if self.input.param('if_not_exists', False):
                object_to_validate = udf_objs[0]
            else:
                object_to_validate = test_udf_obj
            if not self.cbas_util.validate_udf_in_metadata(
                    self.cluster,
                    udf_name=object_to_validate.name,
                    udf_dataverse_name=object_to_validate.dataverse_name,
                    parameters=object_to_validate.parameters,
                    body=object_to_validate.body,
                    dataset_dependencies=object_to_validate.
                    dataset_dependencies,
                    udf_dependencies=object_to_validate.udf_dependencies):
                self.fail("Error while validating Function in Metadata")

            if self.input.param('num_execute_params', -1) == -1:
                num_execute_params = len(test_udf_obj.parameters)
            else:
                num_execute_params = self.input.param('num_execute_params')

            execute_params = [i for i in range(1, num_execute_params + 1)]
            if not self.cbas_util.verify_function_execution_result(
                    self.cluster, test_udf_obj.full_name, execute_params,
                    sum(execute_params)):
                self.fail("Failed while verifying function execution result")
        self.log.info("Test Finished")
Example No. 18
    def test_create_analytics_udf(self):
        self.log.info("Test started")
        self.setup_for_test()
        udf_obj = self.create_udf_object(
            self.input.param('num_create_params', 0),
            self.input.param('body_type', "expression"),
            self.input.param('dependent_entity_dv', "same"),
            self.input.param('use_full_name', True),
        )

        if self.input.param('func_name', None):
            udf_obj.name = self.input.param('func_name')
            udf_obj.full_name = udf_obj.dataverse_name + "." + udf_obj.name
        if self.input.param('no_dataverse', False):
            udf_obj.dataverse_name = None
        if self.input.param('dataverse_name', None):
            udf_obj.dataverse_name = self.input.param('dataverse_name')
        if self.input.param('num_create_params', 0) == -2:
            udf_obj.parameters = None
        if self.input.param('no_body', False):
            udf_obj.body = None
        if self.input.param('invalid_ds', False):
            udf_obj.body = "select count(*) from invalid"
        if self.input.param('custom_params', None):
            if self.input.param('custom_params') == "empty_string":
                udf_obj.parameters = ["", ""]
            elif self.input.param('custom_params') == "mix_param_1":
                udf_obj.parameters = ["a", "b", "..."]
            elif self.input.param('custom_params') == "mix_param_2":
                udf_obj.parameters = ["...", "a", "b"]
            elif self.input.param('custom_params') == "int_param":
                udf_obj.parameters = ["1", "2"]
            elif self.input.param('custom_params') == "bool_param":
                udf_obj.parameters = ["True", "False"]

        if not self.cbas_util.create_udf(
                self.cluster,
                name=udf_obj.name,
                dataverse=udf_obj.dataverse_name,
                or_replace=False,
                parameters=udf_obj.parameters,
                body=udf_obj.body,
                if_not_exists=False,
                query_context=self.input.param('query_context', False),
                use_statement=self.input.param('use_statement', False),
                validate_error_msg=self.input.param('validate_error', False),
                expected_error=self.input.param('expected_error', "").format(
                    udf_obj.dataverse_name),
                timeout=300,
                analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")
        if not self.input.param('validate_error', False):
            if self.input.param('no_dataverse', False):
                udf_obj.dataverse_name = "Default"
                udf_obj.reset_full_name()
            if not self.cbas_util.validate_udf_in_metadata(
                    self.cluster,
                    udf_name=udf_obj.name,
                    udf_dataverse_name=udf_obj.dataverse_name,
                    parameters=udf_obj.parameters,
                    body=udf_obj.body,
                    dataset_dependencies=udf_obj.dataset_dependencies,
                    udf_dependencies=udf_obj.udf_dependencies,
                    synonym_dependencies=udf_obj.synonym_dependencies):
                self.fail("Error while validating Function in Metadata")

            if self.input.param('num_execute_params', -1) == -1:
                num_execute_params = len(udf_obj.parameters)
            else:
                num_execute_params = self.input.param('num_execute_params')

            execute_params = [i for i in range(1, num_execute_params + 1)]
            if not execute_params:
                expected_result = 0
                if udf_obj.dataset_dependencies:
                    for dependency in udf_obj.dataset_dependencies:
                        obj = self.cbas_util.get_dataset_obj(
                            self.cluster,
                            CBASHelper.format_name(dependency[1]),
                            CBASHelper.format_name(dependency[0]))
                        expected_result += obj.num_of_items
                elif udf_obj.synonym_dependencies:
                    for dependency in udf_obj.synonym_dependencies:
                        obj = self.cbas_util.get_dataset_obj_for_synonym(
                            self.cluster,
                            synonym_name=CBASHelper.format_name(dependency[1]),
                            synonym_dataverse=CBASHelper.format_name(
                                dependency[0]))
                        expected_result += obj.num_of_items
                else:
                    expected_result = 1
            else:
                expected_result = sum(execute_params)
            if not self.cbas_util.verify_function_execution_result(
                    self.cluster,
                    func_name=udf_obj.full_name,
                    func_parameters=execute_params,
                    expected_result=expected_result,
                    validate_error_msg=self.input.param(
                        'validate_execute_error', False),
                    expected_error=self.input.param('expected_error', None)):
                self.fail("Failed while verifying function execution result")
        self.log.info("Test Finished")
Example No. 19
    def test_analytics_synonym_events(self):
        dataset_obj = self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=False,
            no_of_objs=1)[0]
        if not self.cbas_util.create_dataset(
                self.cluster, dataset_obj.name,
                dataset_obj.full_kv_entity_name,
                dataverse_name=dataset_obj.dataverse_name,
                analytics_collection=random.choice(["True", "False"])):
            self.fail("Error while creating analytics collection")

        syn_name_1 = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=1))
        if not self.cbas_util.create_analytics_synonym(
            self.cluster, CBASHelper.format_name(
                    dataset_obj.dataverse_name, syn_name_1), dataset_obj.full_name):
            self.fail("Error while creating Synonym")

        self.log.info("Adding event for synonym_created event")
        self.system_events.add_event(AnalyticsEvents.synonym_created(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(syn_name_1),
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(dataset_obj.name)))

        syn_name_2 = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=1))
        self.log.info("Creating dangling Synonym")
        if not self.cbas_util.create_analytics_synonym(
            self.cluster, CBASHelper.format_name(
                    dataset_obj.dataverse_name, syn_name_2), "dangling"):
            self.fail("Error while creating Synonym")
        self.log.info("Adding event for synonym_created event for dangling "
                      "synonym")
        self.system_events.add_event(AnalyticsEvents.synonym_created(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(syn_name_2),
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format("dangling")))

        for syn_name in [syn_name_1, syn_name_2]:
            if not self.cbas_util.drop_analytics_synonym(
                self.cluster, CBASHelper.format_name(
                    dataset_obj.dataverse_name, syn_name)):
                self.fail("Error while dropping synonym")

            self.log.info("Adding event for synonym_dropped events")
            self.system_events.add_event(AnalyticsEvents.synonym_dropped(
                self.cluster.cbas_cc_node.ip,
                CBASHelper.metadata_format(dataset_obj.dataverse_name),
                CBASHelper.metadata_format(syn_name)))
Example No. 20
 def reset_full_name(self):
     self.full_name = CBASHelper.format_name(self.dataverse_name, self.name)
Example No. 21
class CBASCGroup(CGroupBase):
    def setUp(self):
        super(CBASCGroup, self).setUp()
        self.cbas_helper = CBASHelper(self.servers[0])

    def tearDown(self):
        pass

    def get_cpu_count_from_cbas_diagnostics(self):
        response = self.cbas_helper.get_analytics_diagnostics(timeout=120)
        input_args = response["nodes"][0]["runtime"]["inputArguments"]
        for argument in input_args:
            if "ActiveProcessorCount" in argument:
                return int(argument.split("=")[1])
        return 0

    def get_actual_number_of_partitions(self):
        response = self.cbas_helper.get_analytics_diagnostics(timeout=120)
        partition_info = response["nodes"][0]["cc"]["partitions"]
        return len(partition_info)

    def get_expected_number_of_partitions(self, cpu_count, memory):
        max_partition_to_create = min(int(memory) // 1024, 16)
        actual_partitions_created = min(cpu_count, max_partition_to_create)
        return actual_partitions_created

    def get_cbas_memory_allocated(self):
        return self.service_and_memory_allocation["analytics"]

    def test_cbas_autopartioning(self):
        cpu_count = self.get_cpu_count_from_cbas_diagnostics()
        expected_number_of_partitions = self.get_expected_number_of_partitions(
            cpu_count, self.get_cbas_memory_allocated())
        actual_number_of_partitions = self.get_actual_number_of_partitions()
        self.log.info("Expected number of partitions : {0} Actual number of "
                      "partitions : {1}".format(expected_number_of_partitions,
                                                actual_number_of_partitions))
        if expected_number_of_partitions != actual_number_of_partitions:
            self.fail("Expected number of partitions is not equal to Actual "
                      "number of partitions")

    def test_cbas_multiple_disk_paths(self):
        expected_number_of_partitions = self.service_disk_paths["analytics"]
        actual_number_of_partitions = self.get_actual_number_of_partitions()
        self.log.info("Expected number of partitions : {0} Actual number of "
                      "partitions : {1}".format(expected_number_of_partitions,
                                                actual_number_of_partitions))
        if expected_number_of_partitions != actual_number_of_partitions:
            self.fail("Expected number of partitions is not equal to Actual "
                      "number of partitions")

    def test_effects_of_dynamic_updation_of_cpus_limit(self):
        dynamic_update_cpus = self.input.param("dynamic_update_cpus", 2)
        cpu_count = self.get_cpu_count_from_cbas_diagnostics()
        expected_number_of_partitions = self.get_expected_number_of_partitions(
            cpu_count, self.get_cbas_memory_allocated())
        actual_number_of_partitions = self.get_actual_number_of_partitions()
        self.log.info("Expected number of partitions : {0} Actual number of "
                      "partitions : {1}".format(expected_number_of_partitions,
                                                actual_number_of_partitions))
        if expected_number_of_partitions != actual_number_of_partitions:
            self.fail("Expected number of partitions is not equal to Actual "
                      "number of partitions before dynamically updating the "
                      "CPU limit")

        self.update_container_cpu_limit(cpus=dynamic_update_cpus)
        self.restart_server()

        actual_number_of_partitions = self.get_actual_number_of_partitions()
        self.log.info("Expected number of partitions : {0} Actual number of "
                      "partitions : {1}".format(expected_number_of_partitions,
                                                actual_number_of_partitions))
        if expected_number_of_partitions != actual_number_of_partitions:
            self.fail("Expected number of partitions is not equal to Actual "
                      "number of partitions after dynamically updating the "
                      "CPU limit")
Example No. 22
    def test_analytics_udf_system_event_logs(self):
        self.log.info("Test started")
        self.setup_for_test()
        udf_types = [(0, "expression", "diff"), (2, "expression", "diff"),
                     (-1, "expression", "diff"), (0, "dataset", "diff"),
                     (2, "dataset", "diff"), (-1, "dataset", "diff"),
                     (0, "synonym", "diff"), (2, "synonym", "diff"),
                     (-1, "synonym", "diff"), (0, "udf", "diff"),
                     (2, "udf", "diff"), (-1, "udf", "diff")]
        udf_objs = list()

        for udf_type in udf_types:
            udf_obj = self.create_udf_object(udf_type[0], udf_type[1],
                                             udf_type[2])
            if not self.cbas_util.create_udf(self.cluster,
                                             name=udf_obj.name,
                                             dataverse=udf_obj.dataverse_name,
                                             or_replace=False,
                                             parameters=udf_obj.parameters,
                                             body=udf_obj.body,
                                             if_not_exists=False,
                                             query_context=False,
                                             use_statement=False,
                                             validate_error_msg=False,
                                             expected_error=None,
                                             timeout=300,
                                             analytics_timeout=300):
                self.fail("Error while creating Analytics UDF")
            udf_objs.append(udf_obj)
            self.log.info(
                "Adding event for user_defined_function_created events")
            self.system_events.add_event(
                AnalyticsEvents.user_defined_function_created(
                    self.cluster.cbas_cc_node.ip,
                    CBASHelper.metadata_format(udf_obj.dataverse_name),
                    CBASHelper.unformat_name(udf_obj.name), udf_obj.arity))

        # Create UDF to test replace
        idx = random.choice(range(len(udf_objs)))
        udf_type = udf_types[idx]
        test_udf_obj = self.create_udf_object(udf_type[0], udf_type[1],
                                              udf_type[2])
        test_udf_obj.name = udf_objs[idx].name
        test_udf_obj.dataverse_name = udf_objs[idx].dataverse_name
        test_udf_obj.parameters = udf_objs[idx].parameters
        test_udf_obj.reset_full_name()

        if not self.cbas_util.create_udf(self.cluster,
                                         name=test_udf_obj.name,
                                         dataverse=test_udf_obj.dataverse_name,
                                         or_replace=True,
                                         parameters=test_udf_obj.parameters,
                                         body=test_udf_obj.body,
                                         if_not_exists=False,
                                         query_context=False,
                                         use_statement=False,
                                         validate_error_msg=False,
                                         expected_error="",
                                         timeout=300,
                                         analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        self.log.info("Adding event for user_defined_function_replaced events")
        self.system_events.add_event(
            AnalyticsEvents.user_defined_function_replaced(
                self.cluster.cbas_cc_node.ip,
                CBASHelper.metadata_format(test_udf_obj.dataverse_name),
                CBASHelper.unformat_name(test_udf_obj.name),
                test_udf_obj.arity))

        self.sleep(
            2, "Waiting for user_defined_function_replaced events to "
            "be generated")

        self.log.info("Adding event for user_defined_function_dropped events")
        udf_deleted_successfully = list()
        i = 0
        while udf_objs:
            if (i < len(udf_objs)) and (i not in udf_deleted_successfully):
                udf_obj = udf_objs[i]
                if self.cbas_util.drop_udf(self.cluster,
                                           name=udf_obj.name,
                                           dataverse=udf_obj.dataverse_name,
                                           parameters=udf_obj.parameters,
                                           if_exists=False,
                                           use_statement=False,
                                           query_context=False,
                                           validate_error_msg=False,
                                           expected_error=None,
                                           timeout=300,
                                           analytics_timeout=300):
                    udf_deleted_successfully.append(i)
                    self.system_events.add_event(
                        AnalyticsEvents.user_defined_function_dropped(
                            self.cluster.cbas_cc_node.ip,
                            CBASHelper.metadata_format(udf_obj.dataverse_name),
                            CBASHelper.unformat_name(udf_obj.name),
                            udf_obj.arity))
                i += 1
            elif len(udf_deleted_successfully) == len(udf_objs):
                break
            elif i >= len(udf_objs):
                i = 0
            elif i in udf_deleted_successfully:
                i += 1
        self.log.info("Test Finished")
Example No. 23
 def setUp(self):
     super(CBASCGroup, self).setUp()
     self.cbas_helper = CBASHelper(self.servers[0])