Ejemplo n.º 1
0
    def test_cbas_ingestion_with_documents_containing_multilingual_data(self):
        """Verify CBAS dataset ingestion of JSON documents whose values
        contain non-ASCII, multi-script text.

        Steps:
          1. Open an SDK client against the CB bucket.
          2. Load documents spanning several languages/scripts.
          3. Wait for ingestion and assert the dataset item count.
        """
        # Payloads cover Swedish, French, Chinese, Japanese and Arabic text.
        sample_texts = [
            'De flesta sagorna här är från Hans Hörner svenska översättning',
            'Il était une fois une maman cochon qui avait trois petits cochons',
            '森林里住着一只小兔子,它叫“丑丑”。它的眼睛红红的,像一对红宝石',
            '外治オヒル回条フ聞定ッ加官言岸ムモヱツ求碁込ヌトホヒ舞高メ旅位',
            'ان عدة الشهور عند الله اثنا عشر شهرا في',
        ]
        expected_count = len(sample_texts)

        self.log.info("Fetch test case arguments")
        self.fetch_test_case_arguments()

        self.log.info("Create reference to SDK client")
        sdk_client = SDKClient(scheme="couchbase",
                               hosts=[self.master.ip],
                               bucket=self.cb_bucket_name,
                               password=self.master.rest_password)

        self.log.info("Add multilingual documents to the default bucket")
        sdk_client.insert_custom_json_documents("custom-key-", sample_texts)

        self.log.info("Create connections, datasets and indexes")
        self.cbas_dataset_setup()

        self.log.info("Wait for ingestion to complete and verify count")
        self.cbas_util.wait_for_ingestion_complete([self.dataset_name],
                                                   expected_count)
        self.assertTrue(
            self.cbas_util.validate_cbas_dataset_items_count(
                self.dataset_name, expected_count))
Ejemplo n.º 2
0
    def test_ingestion_impact_for_documents_containing_xattr_meta_information(self):
        """Verify that inserting and then updating extended attributes
        (xattrs) on documents leaves the CBAS dataset item count unchanged.
        """
        self.log.info("Fetch test case arguments")
        self.fetch_test_case_arguments()

        self.log.info("Create reference to SDK client")
        sdk_client = SDKClient(scheme="couchbase",
                               hosts=[self.master.ip],
                               bucket="default",
                               password=self.master.rest_password)

        self.log.info("Insert custom data into default bucket")
        doc_count = self.num_of_documents
        doc_prefix = "id-"
        sdk_client.insert_custom_json_documents(
            doc_prefix, ['{"name":"value"}'] * doc_count)

        self.log.info("Create connections, datasets, indexes")
        self.cbas_dataset_setup()

        self.log.info("Wait for ingestion to complete and verify count")
        self.cbas_util.wait_for_ingestion_complete([self.dataset_name],
                                                   doc_count)
        self.assertTrue(
            self.cbas_util.validate_cbas_dataset_items_count(
                self.dataset_name, doc_count))

        self.log.info("Insert xattr attribute for all the documents and assert document count on dataset")
        for idx in range(doc_count):
            sdk_client.insert_xattr_attribute(
                document_id=doc_prefix + str(idx),
                path="a",
                value="{'xattr-value': 1}",
                xattr=True,
                create_parents=True)
        # Adding an xattr mutates metadata only; the dataset count must not move.
        self.assertTrue(
            self.cbas_util.validate_cbas_dataset_items_count(
                self.dataset_name, doc_count))

        self.log.info("Update xattr attribute and assert document count on dataset")
        for idx in range(doc_count):
            sdk_client.update_xattr_attribute(
                document_id=doc_prefix + str(idx),
                path="a",
                value="{'xattr-value': 11}",
                xattr=True,
                create_parents=True)
        self.assertTrue(
            self.cbas_util.validate_cbas_dataset_items_count(
                self.dataset_name, doc_count))
    def test_swap_rebalance_cb_cbas_together(self):
        """Swap-rebalance a KV node and a CBAS node simultaneously while
        running concurrent queries against the dataset.

        Steps:
          1. Set up bucket/dataset; add one spare KV node and one spare
             CBAS node without rebalancing.
          2. Remove the original master (KV) and CBAS nodes together,
             optionally waiting for the rebalance to complete
             (``wait_for_rebalance`` test input param, default True).
          3. Re-point utilities and the SDK client at the new master, load
             10% more documents, fire concurrent count queries during the
             rebalance, and finally verify the dataset item count.
        """
        self.log.info("Creates cbas buckets and dataset")
        wait_for_rebalance = self.input.param("wait_for_rebalance", True)
        dataset_count_query = "select count(*) from {0};".format(
            self.cbas_dataset_name)
        self.setup_for_test()

        self.log.info("Add KV node and don't rebalance")
        self.add_node(node=self.rebalanceServers[1], rebalance=False)

        self.log.info("Add cbas node and don't rebalance")
        self.add_node(node=self.rebalanceServers[3], rebalance=False)

        # Collect OTP handles of the outgoing nodes (original master at [0]
        # and original CBAS node at [2]) so both leave in one swap rebalance.
        otpnodes = []
        nodes = self.rest.node_statuses()
        for node in nodes:
            if node.ip == self.rebalanceServers[
                    0].ip or node.ip == self.rebalanceServers[2].ip:
                otpnodes.append(node)

        self.log.info("Remove master node")
        self.remove_node(otpnode=otpnodes,
                         wait_for_rebalance=wait_for_rebalance)
        self.master = self.rebalanceServers[1]

        self.log.info("Create instances pointing to new master nodes")
        c_utils = cbas_utils(self.rebalanceServers[1],
                             self.rebalanceServers[3])
        c_utils.createConn(self.cb_bucket_name)

        self.log.info("Create reference to SDK client")
        client = SDKClient(scheme="couchbase",
                           hosts=[self.rebalanceServers[1].ip],
                           bucket=self.cb_bucket_name,
                           password=self.rebalanceServers[1].rest_password)

        self.log.info("Add more document to default bucket")
        # Load an extra 10% of documents on top of the initial num_items.
        documents = ['{"name":"value"}'] * (self.num_items // 10)
        document_id_prefix = "custom-id-"
        client.insert_custom_json_documents(document_id_prefix, documents)

        self.log.info(
            "Run queries as rebalance is in progress : Rebalance state:%s" %
            self.rest._rebalance_progress_status())
        handles = c_utils._run_concurrent_queries(
            dataset_count_query,
            "immediate",
            2000,
            batch_size=self.concurrent_batch_size)

        self.log.info("Log concurrent query status")
        # BUGFIX: use the utils instance bound to the new post-swap topology;
        # the old self.cbas_util still points at the removed CBAS node.
        c_utils.log_concurrent_query_outcome(self.master, handles)

        # Expect the initial items plus the 10% loaded above; 0 = no tolerance.
        if not c_utils.validate_cbas_dataset_items_count(
                self.cbas_dataset_name,
                self.num_items + (self.num_items // 10), 0):
            self.fail(
                "No. of items in CBAS dataset do not match that in the CB bucket"
            )