Example #1
    def test_analytics_synonym_events(self):
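        """
        Verify that synonym_created and synonym_dropped system events are
        raised for both a regular and a dangling analytics synonym.
        """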
        dataset_obj = self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=False,
            no_of_objs=1)[0]
        if not self.cbas_util.create_dataset(
                self.cluster, dataset_obj.name,
                dataset_obj.full_kv_entity_name,
                dataverse_name=dataset_obj.dataverse_name,
                analytics_collection=random.choice(["True", "False"])):
            self.fail("Error while creating analytics collection")

        syn_name_1 = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=1))
        if not self.cbas_util.create_analytics_synonym(
                self.cluster, CBASHelper.format_name(
                    dataset_obj.dataverse_name, syn_name_1),
                dataset_obj.full_name):
            self.fail("Error while creating Synonym")

        self.log.info("Adding event for synonym_created event")
        self.system_events.add_event(AnalyticsEvents.synonym_created(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(syn_name_1),
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(dataset_obj.name)))

        syn_name_2 = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=1))
        self.log.info("Creating dangling Synonym")
        if not self.cbas_util.create_analytics_synonym(
                self.cluster, CBASHelper.format_name(
                    dataset_obj.dataverse_name, syn_name_2), "dangling"):
            self.fail("Error while creating Synonym")
        self.log.info("Adding event for synonym_created event for dangling "
                      "synonym")
        self.system_events.add_event(AnalyticsEvents.synonym_created(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(syn_name_2),
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format("dangling")))

        for syn_name in [syn_name_1, syn_name_2]:
            if not self.cbas_util.drop_analytics_synonym(
                    self.cluster, CBASHelper.format_name(
                        dataset_obj.dataverse_name, syn_name)):
                self.fail("Error while dropping synonym")

            self.log.info("Adding event for synonym_dropped events")
            self.system_events.add_event(AnalyticsEvents.synonym_dropped(
                self.cluster.cbas_cc_node.ip,
                CBASHelper.metadata_format(dataset_obj.dataverse_name),
                CBASHelper.metadata_format(syn_name)))
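
Each example here follows the same pattern: perform an Analytics DDL operation, then register the system event the cluster is expected to log, so the framework can later compare the registered events against the server's event log. As context, a helper such as AnalyticsEvents.synonym_created presumably just builds an event descriptor; the sketch below is a hypothetical illustration of that shape (the field names loosely follow Couchbase's system event log schema, and the event_id is a placeholder, not the framework's real code):

# Hypothetical sketch only; the real AnalyticsEvents helpers live in the
# test framework. Field names mirror the generic Couchbase system event
# schema and the event_id below is a placeholder value.
def synonym_created(node_ip, dataverse, synonym,
                    target_dataverse, target_entity):
    return {
        "component": "analytics",
        "event_id": 0,  # placeholder, not a real analytics event code
        "severity": "info",
        "node": node_ip,
        "description": "Synonym created",
        "extra_attributes": {
            "dataverse": dataverse,
            "synonym": synonym,
            "target": "{0}.{1}".format(target_dataverse, target_entity)}}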
Example #2
    def test_create_dataset_with_udf_in_where_clause(self):
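        """
        Verify that dataset DDL fails with an "Illegal use of user-defined
        function" error when a UDF is referenced in the WHERE clause.
        """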
        self.log.info("Test started")
        self.setup_for_test()
        self.log.debug("Setup for test completed")

        udf_obj = self.create_udf_object(2, "expression", "same", True)
        self.log.debug("Udf objects created")

        if not self.cbas_util.create_udf(
            self.cluster, name=udf_obj.name, dataverse=udf_obj.dataverse_name,
            or_replace=False, parameters=udf_obj.parameters, body=udf_obj.body,
            if_not_exists=False, query_context=False, use_statement=False,
            validate_error_msg=False, expected_error=None,
            timeout=300, analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        if not self.cbas_util.create_dataset(
            self.cluster, dataset_name=CBASHelper.format_name(
                self.cbas_util.generate_name()),
            kv_entity=(self.cbas_util.list_all_dataset_objs()[0]).full_kv_entity_name,
            dataverse_name=udf_obj.dataverse_name,
            where_clause="age > {0}({1})".format(
                udf_obj.full_name, ",".join(udf_obj.parameters)),
            validate_error_msg=True,
            expected_error="Illegal use of user-defined function {0}".format(
                CBASHelper.unformat_name(CBASHelper.metadata_format(
                    udf_obj.dataverse_name), udf_obj.name)),
            timeout=300, analytics_timeout=300, analytics_collection=False):
            self.fail("Dataset creation was successfull while using user "
                      "defined function in where clause of the DDL")
        self.log.info("Test Finished")
Example #3
    def test_analytics_collection_attach_detach_events(self):
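        """
        Verify that collection_detached and collection_attached system
        events are raised when the backing KV collection is dropped and
        recreated.
        """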
        dataset_obj = self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=False,
            no_of_objs=1, exclude_collection=["_default"])[0]
        if not self.cbas_util.create_dataset(
                self.cluster, dataset_obj.name,
                dataset_obj.full_kv_entity_name,
                dataverse_name=dataset_obj.dataverse_name,
                analytics_collection=random.choice(["True", "False"])):
            self.fail("Error while creating analytics collection")

        self.log.info("Dropping collection {0}".format(
            dataset_obj.full_kv_entity_name))
        self.bucket_util.drop_collection(
            self.cluster.master, dataset_obj.kv_bucket,
            scope_name=dataset_obj.kv_scope.name,
            collection_name=dataset_obj.kv_collection.name, session=None)
        if not self.cbas_util.wait_for_ingestion_complete(
                self.cluster, dataset_obj.full_name, 0, timeout=300):
            self.fail("Data is present in the dataset when it should not")
        self.log.info("Adding event for collection_detach events")
        self.system_events.add_event(AnalyticsEvents.collection_detached(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.unformat_name(dataset_obj.name)))

        self.log.info("Creating collection {0}".format(
            dataset_obj.full_kv_entity_name))
        self.bucket_util.create_collection(
            self.cluster.master, dataset_obj.kv_bucket,
            scope_name=dataset_obj.kv_scope.name,
            collection_spec=dataset_obj.kv_collection.get_dict_object(),
            session=None)
        if not self.cbas_util.wait_for_ingestion_complete(
                self.cluster, dataset_obj.full_name, 0, timeout=300):
            self.fail("Data ingestion failed.")

        self.sleep(3, "Waiting for event to be generated")

        self.log.info("Adding event for collection_attach events")
        self.system_events.add_event(AnalyticsEvents.collection_attached(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.unformat_name(dataset_obj.name)))
Example #4
    def test_analytics_collection_events(self):
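        """
        Verify collection_mapped, collection_created and collection_dropped
        system events for datasets created both by mapping a KV collection
        and by explicit DDL.
        """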
        dataset_objs = self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=False,
            no_of_objs=1)
        dataset_objs += self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=True,
            no_of_objs=1)
        for dataset in dataset_objs:
            if dataset.enabled_from_KV:
                if not self.cbas_util.enable_analytics_from_KV(
                        self.cluster, dataset.full_kv_entity_name):
                    self.fail("Error while mapping KV collection to analytics")
                self.system_events.add_event(AnalyticsEvents.collection_mapped(
                    self.cluster.cbas_cc_node.ip, dataset.kv_bucket.name,
                    dataset.kv_scope.name, dataset.kv_collection.name))
                if not self.cbas_util.disable_analytics_from_KV(
                        self.cluster, dataset.full_kv_entity_name):
                    self.fail("Error while unmapping KV collection from "
                              "analytics")
            else:
                if not self.cbas_util.create_dataset(
                        self.cluster, dataset.name, dataset.full_kv_entity_name,
                        dataverse_name=dataset.dataverse_name,
                        analytics_collection=random.choice([True, False])):
                    self.fail("Error while creating analytics collection")
                self.system_events.add_event(AnalyticsEvents.collection_created(
                    self.cluster.cbas_cc_node.ip,
                    CBASHelper.metadata_format(dataset.dataverse_name),
                    CBASHelper.metadata_format(dataset.name),
                    CBASHelper.metadata_format(dataset.dataverse_name),
                    "Local", dataset.kv_bucket.name, dataset.kv_scope.name,
                    dataset.kv_collection.name))
                if not self.cbas_util.drop_dataset(
                        self.cluster, dataset.full_name,
                        analytics_collection=random.choice([True, False])):
                    self.fail("Error while dropping dataset")
            self.system_events.add_event(AnalyticsEvents.collection_dropped(
                self.cluster.cbas_cc_node.ip,
                CBASHelper.metadata_format(dataset.dataverse_name),
                CBASHelper.metadata_format(dataset.name)))
Example #5
    def test_analytics_scope_events(self):
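        """
        Verify that scope_created and scope_dropped system events are
        raised when an analytics dataverse/scope is created and dropped.
        """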
        dataverse_name = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=2))
        if not self.cbas_util.create_dataverse(
                self.cluster, dataverse_name,
                analytics_scope=random.choice(["True", "False"])):
            self.fail("Error while creating dataverse")
        self.log.info(
            "Adding event for scope_created event")
        self.system_events.add_event(AnalyticsEvents.scope_created(
            self.cluster.cbas_cc_node.ip, CBASHelper.metadata_format(
                dataverse_name)))

        if not self.cbas_util.drop_dataverse(
                self.cluster, dataverse_name,
                analytics_scope=random.choice(["True", "False"])):
            self.fail("Error while dropping dataverse")
        self.log.info("Adding event for scope_dropped event")
        self.system_events.add_event(AnalyticsEvents.scope_dropped(
            self.cluster.cbas_cc_node.ip, CBASHelper.metadata_format(
                dataverse_name)))
Example #6
    def test_analytics_index_events(self):
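        """
        Verify that index_created and index_dropped system events are
        raised when a secondary analytics index is created and dropped.
        """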
        dataset_obj = self.cbas_util.create_dataset_obj(
            self.cluster, self.bucket_util, dataset_cardinality=3,
            bucket_cardinality=3, enabled_from_KV=False,
            no_of_objs=1)[0]
        if not self.cbas_util.create_dataset(
                self.cluster, dataset_obj.name, dataset_obj.full_kv_entity_name,
                dataverse_name=dataset_obj.dataverse_name,
                analytics_collection=random.choice(["True", "False"])):
            self.fail("Error while creating analytics collection")
        index_name = CBASHelper.format_name(
            self.cbas_util.generate_name(name_cardinality=1))
        if not self.cbas_util.create_cbas_index(
                self.cluster, index_name, ["age:bigint"], dataset_obj.full_name,
                analytics_index=random.choice(["True", "False"])):
            self.fail("Error while creating analytics index")

        self.log.info("Adding event for index_created events")
        self.system_events.add_event(AnalyticsEvents.index_created(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(index_name),
            CBASHelper.metadata_format(dataset_obj.name)))

        if not self.cbas_util.drop_cbas_index(
                self.cluster, index_name, dataset_obj.full_name,
                analytics_index=random.choice(["True", "False"])):
            self.fail("Error while dropping analytics index")

        self.log.info("Adding event for index_dropped events")
        self.system_events.add_event(AnalyticsEvents.index_dropped(
            self.cluster.cbas_cc_node.ip,
            CBASHelper.metadata_format(dataset_obj.dataverse_name),
            CBASHelper.metadata_format(index_name),
            CBASHelper.metadata_format(dataset_obj.name)))
Example #7
    def test_analytics_udf_system_event_logs(self):
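        """
        Verify user_defined_function_created, user_defined_function_replaced
        and user_defined_function_dropped system events for UDFs of every
        supported arity and body type.
        """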
        self.log.info("Test started")
        self.setup_for_test()
        udf_types = [(0, "expression", "diff"), (2, "expression", "diff"),
                     (-1, "expression", "diff"), (0, "dataset", "diff"),
                     (2, "dataset", "diff"), (-1, "dataset", "diff"),
                     (0, "synonym", "diff"), (2, "synonym", "diff"),
                     (-1, "synonym", "diff"), (0, "udf", "diff"),
                     (2, "udf", "diff"), (-1, "udf", "diff")]
        udf_objs = list()

        for udf_type in udf_types:
            udf_obj = self.create_udf_object(udf_type[0], udf_type[1],
                                             udf_type[2])
            if not self.cbas_util.create_udf(self.cluster,
                                             name=udf_obj.name,
                                             dataverse=udf_obj.dataverse_name,
                                             or_replace=False,
                                             parameters=udf_obj.parameters,
                                             body=udf_obj.body,
                                             if_not_exists=False,
                                             query_context=False,
                                             use_statement=False,
                                             validate_error_msg=False,
                                             expected_error=None,
                                             timeout=300,
                                             analytics_timeout=300):
                self.fail("Error while creating Analytics UDF")
            udf_objs.append(udf_obj)
            self.log.info(
                "Adding event for user_defined_function_created events")
            self.system_events.add_event(
                AnalyticsEvents.user_defined_function_created(
                    self.cluster.cbas_cc_node.ip,
                    CBASHelper.metadata_format(udf_obj.dataverse_name),
                    CBASHelper.unformat_name(udf_obj.name), udf_obj.arity))

        # Create UDF to test replace
        idx = random.choice(range(len(udf_objs)))
        udf_type = udf_types[idx]
        test_udf_obj = self.create_udf_object(udf_type[0], udf_type[1],
                                              udf_type[2])
        test_udf_obj.name = udf_objs[idx].name
        test_udf_obj.dataverse_name = udf_objs[idx].dataverse_name
        test_udf_obj.parameters = udf_objs[idx].parameters
        test_udf_obj.reset_full_name()

        if not self.cbas_util.create_udf(self.cluster,
                                         name=test_udf_obj.name,
                                         dataverse=test_udf_obj.dataverse_name,
                                         or_replace=True,
                                         parameters=test_udf_obj.parameters,
                                         body=test_udf_obj.body,
                                         if_not_exists=False,
                                         query_context=False,
                                         use_statement=False,
                                         validate_error_msg=False,
                                         expected_error="",
                                         timeout=300,
                                         analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        self.log.info("Adding event for user_defined_function_replaced events")
        self.system_events.add_event(
            AnalyticsEvents.user_defined_function_replaced(
                self.cluster.cbas_cc_node.ip,
                CBASHelper.metadata_format(test_udf_obj.dataverse_name),
                CBASHelper.unformat_name(test_udf_obj.name),
                test_udf_obj.arity))

        self.sleep(
            2, "Waiting for user_defined_function_replaced events to "
            "be generated")

        self.log.info("Adding event for user_defined_function_dropped events")
        # Drop every UDF, retrying failed drops in rounds until all succeed.
        udfs_to_drop = list(udf_objs)
        while udfs_to_drop:
            failed_drops = list()
            for udf_obj in udfs_to_drop:
                if self.cbas_util.drop_udf(self.cluster,
                                           name=udf_obj.name,
                                           dataverse=udf_obj.dataverse_name,
                                           parameters=udf_obj.parameters,
                                           if_exists=False,
                                           use_statement=False,
                                           query_context=False,
                                           validate_error_msg=False,
                                           expected_error=None,
                                           timeout=300,
                                           analytics_timeout=300):
                    self.system_events.add_event(
                        AnalyticsEvents.user_defined_function_dropped(
                            self.cluster.cbas_cc_node.ip,
                            CBASHelper.metadata_format(udf_obj.dataverse_name),
                            CBASHelper.unformat_name(udf_obj.name),
                            udf_obj.arity))
                else:
                    failed_drops.append(udf_obj)
            udfs_to_drop = failed_drops
        self.log.info("Test Finished")
Example #8
    def test_create_multiple_analytics_udfs(self):
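        """
        Create an initial set of UDFs, then a test UDF whose name, dataverse
        and parameters are driven by test params, and validate either the
        expected DDL error or the function's metadata and execution result.
        """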
        self.log.info("Test started")
        self.setup_for_test()
        udf_objs = list()

        for i in range(0, self.input.param('num_init_udf', 1)):
            udf_obj = self.create_udf_object(
                self.input.param('num_create_params', 0), "expression", "same",
                True)

            if not self.cbas_util.create_udf(self.cluster,
                                             name=udf_obj.name,
                                             dataverse=udf_obj.dataverse_name,
                                             or_replace=False,
                                             parameters=udf_obj.parameters,
                                             body=udf_obj.body,
                                             if_not_exists=False,
                                             query_context=False,
                                             use_statement=False,
                                             validate_error_msg=False,
                                             expected_error=None,
                                             timeout=300,
                                             analytics_timeout=300):
                self.fail("Error while creating Analytics UDF")
            udf_objs.append(udf_obj)

        # Create the UDF under test
        test_udf_obj = self.create_udf_object(
            self.input.param('num_test_udf_params', 0),
            self.input.param('body_type', "expression"),
            self.input.param('dependent_entity_dv', "same"),
            self.input.param('use_full_name', True))

        if self.input.param('test_udf_name', "diff") == "same":
            test_udf_obj.name = udf_objs[0].name
            test_udf_obj.reset_full_name()
        if self.input.param('test_udf_dv', "diff") == "same":
            test_udf_obj.dataverse_name = udf_objs[0].dataverse_name
            test_udf_obj.reset_full_name()
        else:
            while test_udf_obj.dataverse_name == udf_objs[0].dataverse_name:
                test_udf_obj.dataverse_name = random.choice(
                    list(self.cbas_util.dataverses.values())).name
            test_udf_obj.reset_full_name()
        if self.input.param('test_udf_param_name', "diff") == "same":
            test_udf_obj.parameters = udf_objs[0].parameters

        if not self.cbas_util.create_udf(
                self.cluster,
                name=test_udf_obj.name,
                dataverse=test_udf_obj.dataverse_name,
                or_replace=self.input.param('or_replace', False),
                parameters=test_udf_obj.parameters,
                body=test_udf_obj.body,
                if_not_exists=self.input.param('if_not_exists', False),
                query_context=False,
                use_statement=False,
                validate_error_msg=self.input.param('validate_error', False),
                expected_error=self.input.param('expected_error', "").format(
                    CBASHelper.unformat_name(
                        CBASHelper.metadata_format(
                            test_udf_obj.dataverse_name), test_udf_obj.name)),
                timeout=300,
                analytics_timeout=300):
            self.fail("Error while creating Analytics UDF")

        if not self.input.param('validate_error', False):
            if self.input.param('if_not_exists', False):
                object_to_validate = udf_objs[0]
            else:
                object_to_validate = test_udf_obj
            if not self.cbas_util.validate_udf_in_metadata(
                    self.cluster,
                    udf_name=object_to_validate.name,
                    udf_dataverse_name=object_to_validate.dataverse_name,
                    parameters=object_to_validate.parameters,
                    body=object_to_validate.body,
                    dataset_dependencies=object_to_validate.
                    dataset_dependencies,
                    udf_dependencies=object_to_validate.udf_dependencies):
                self.fail("Error while validating Function in Metadata")

            if self.input.param('num_execute_params', -1) == -1:
                num_execute_params = len(test_udf_obj.parameters)
            else:
                num_execute_params = self.input.param('num_execute_params')

            execute_params = [i for i in range(1, num_execute_params + 1)]
            if not self.cbas_util.verify_function_execution_result(
                    self.cluster, test_udf_obj.full_name, execute_params,
                    sum(execute_params)):
                self.fail("Failed while verifying function execution result")
        self.log.info("Test Finished")