Example #1: test_create_delete_collection_same_order
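All four snippets are methods of test classes in Couchbase's TAF (Test Automation Framework) suite, so their module-level imports are not shown. A plausible import block is sketched below; the module paths are assumptions inferred from the helper names, not confirmed by the snippets themselves:

# Assumed module paths for the helpers used in these snippets (TAF layout)
from bucket_utils.bucket_ready_functions import BucketUtils
from cb_tools.cbstats import Cbstats
from Cb_constants import CbServer
from couchbase_helper.documentgenerator import doc_generator
from remote.remote_util import RemoteMachineShellConnection
from sdk_client3 import SDKClient
from sdk_exceptions import SDKException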
    def test_create_delete_collection_same_order(self):
        # Create collections using the 'def_add_collection' crud spec,
        # looping until cbstats reports 1000 collections in the bucket
        shell_conn = RemoteMachineShellConnection(self.cluster.master)
        cb_stat = Cbstats(shell_conn)
        collection_count = 1
        while collection_count < 1000:
            doc_loading_spec = \
                self.bucket_util.get_crud_template_from_package(
                    "def_add_collection")
            self.bucket_util.run_scenario_from_spec(self.task,
                                                    self.cluster,
                                                    self.bucket_util.buckets,
                                                    doc_loading_spec,
                                                    mutation_num=0,
                                                    batch_size=self.batch_size)
            collection_count = cb_stat.get_collections(self.bucket)["count"]
        self.bucket_util.validate_docs_per_collections_all_buckets()

        # Delete collections
        while collection_count > 1:
            doc_loading_spec = \
                self.bucket_util.get_crud_template_from_package(
                    "def_drop_collection")
            self.bucket_util.run_scenario_from_spec(self.task,
                                                    self.cluster,
                                                    self.bucket_util.buckets,
                                                    doc_loading_spec,
                                                    mutation_num=0,
                                                    batch_size=self.batch_size)
            collection_count = cb_stat.get_collections(self.bucket)["count"]
        shell_conn.disconnect()

        # Validate doc count as per bucket collections
        self.bucket_util.validate_docs_per_collections_all_buckets()
        self.validate_test_failure()
Example #2: create_delete_collections
    def create_delete_collections(self):
        """
        1. Create a scope and collection(s) under it
        2. Validate '_default' collection values are intact
        3. Load documents into the created collection
        4. Validate documents are loaded in the new collection
        5. Delete the collection and validate the '_default' collection
           is unaffected
        """
        use_scope_name_for_collection = \
            self.input.param("use_scope_name_for_collection", False)
        scope_name = BucketUtils.get_random_name()
        collection_name = scope_name
        if not use_scope_name_for_collection:
            collection_name = BucketUtils.get_random_name()

        gen_add = doc_generator(self.key, 0, self.num_items)
        gen_set = doc_generator(self.key,
                                0,
                                self.num_items,
                                mutate=1,
                                mutation_type='SET')

        self.log.info("Creating scope::collection '%s::%s'" %
                      (scope_name, collection_name))
        self.bucket_util.create_scope(self.cluster.master, self.bucket,
                                      scope_name)
        self.bucket_util.create_collection(self.cluster.master, self.bucket,
                                           scope_name, collection_name)

        self.bucket_util.create_collection(self.cluster.master, self.bucket,
                                           scope_name, "my_collection_2")

        shell_conn = RemoteMachineShellConnection(self.cluster.master)
        cbstats = Cbstats(shell_conn)
        # Fetch collection stats to confirm the new collections are live
        # (the return value is not asserted on in this snippet)
        cbstats.get_collections(self.bucket_util.buckets[0])
        shell_conn.disconnect()

        self.log.info("Validating the documents in default collection")
        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
        self.bucket_util.verify_stats_all_buckets(self.num_items)

        self.log.info("Load documents into the created collection")
        sdk_client = SDKClient([self.cluster.master],
                               self.bucket,
                               scope=scope_name,
                               collection=collection_name,
                               compression_settings=self.sdk_compression)
        while gen_add.has_next():
            key, value = gen_add.next()
            result = sdk_client.crud("create",
                                     key,
                                     value,
                                     replicate_to=self.replicate_to,
                                     persist_to=self.persist_to,
                                     durability=self.durability_level,
                                     timeout=self.sdk_timeout)
            if result["status"] is False:
                self.log_failure("Doc create failed for collection: %s" %
                                 result)
                break
        sdk_client.close()
        self.validate_test_failure()
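        # Book-keeping: the new collection now holds num_items docs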
        self.bucket.scopes[scope_name] \
            .collections[collection_name] \
            .num_items += self.num_items

        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
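        # num_items * 2: docs in '_default' plus docs in the new collection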
        self.bucket_util.verify_stats_all_buckets(self.num_items * 2)

        task = self.task.async_load_gen_docs(self.cluster,
                                             self.bucket,
                                             gen_set,
                                             "update",
                                             0,
                                             batch_size=10,
                                             process_concurrency=8,
                                             replicate_to=self.replicate_to,
                                             persist_to=self.persist_to,
                                             durability=self.durability_level,
                                             compression=self.sdk_compression,
                                             timeout_secs=self.sdk_timeout,
                                             scope=scope_name,
                                             collection=collection_name)
        self.task_manager.get_task_result(task)

        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
        self.bucket_util.verify_stats_all_buckets(self.num_items * 2)
        self.validate_test_failure()
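The docstring's step 5 (delete the collection, then confirm '_default' is unaffected) is not shown above. A minimal sketch of that step, reusing only helpers that appear in these examples, might look like this (assuming the REST-based drop_collection helper also updates the test-side bucket object, as Example #3 suggests):

        # Sketch: drop the created collection, then re-validate that
        # '_default' still holds its original documents
        self.bucket_util.drop_collection(self.cluster.master, self.bucket,
                                         scope_name, collection_name)
        self.bucket_util._wait_for_stats_all_buckets()
        self.bucket_util.validate_docs_per_collections_all_buckets()
        self.validate_test_failure()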
Example #3: test_delete_default_collection
    def test_delete_default_collection(self):
        """
        Test to delete '_default' collection under '_default' scope.

        Params:
        client_type: Client used to drop the collection. Supports 'sdk / rest'
        load_data: Load docs into the default collection during the test.
                   Supports 'disabled / before_drop / during_drop'
        """
        task = None
        client_type = self.input.param("client_type", "sdk").lower()
        data_load = self.input.param("load_data", "disabled")
        load_gen = doc_generator('test_drop_default',
                                 0,
                                 self.num_items,
                                 mutate=0,
                                 target_vbucket=self.target_vbucket)

        if data_load in ["before_drop", "during_drop"]:
            self.log.info("Loading %s docs into '%s::%s' collection" %
                          (self.num_items, CbServer.default_scope,
                           CbServer.default_collection))
            task = self.task.async_load_gen_docs(
                self.cluster,
                self.bucket,
                load_gen,
                "create",
                self.maxttl,
                batch_size=10,
                process_concurrency=8,
                replicate_to=self.replicate_to,
                persist_to=self.persist_to,
                durability=self.durability_level,
                compression=self.sdk_compression,
                timeout_secs=self.sdk_timeout,
                scope=CbServer.default_scope,
                collection=CbServer.default_collection,
                suppress_error_table=True)
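            # Book-keeping: bump expected '_default' item count for validation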
            self.bucket.scopes[CbServer.default_scope] \
                .collections[CbServer.default_collection] \
                .num_items += self.num_items

        # Make sure doc loading completes before the collection drop
        if data_load == "before_drop":
            self.task_manager.get_task_result(task)
            if task.fail:
                self.log_failure("Doc loading failed for keys: %s" %
                                 task.fail.keys())

        # Data validation
        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
        self.bucket_util.validate_doc_count_as_per_collections(self.bucket)

        # Drop collection phase
        self.log.info("Deleting collection '%s::%s'" %
                      (CbServer.default_scope, CbServer.default_collection))
        if client_type == "sdk":
            client = SDKClient([self.cluster.master],
                               self.bucket,
                               compression_settings=self.sdk_compression)
            client.drop_collection(CbServer.default_scope,
                                   CbServer.default_collection)
            client.close()
            BucketUtils.mark_collection_as_dropped(self.bucket,
                                                   CbServer.default_scope,
                                                   CbServer.default_collection)
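            # Keep the test-side bucket object in sync after an SDK-side drop
            # (the REST drop_collection helper below appears to do this itself)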
        elif client_type == "rest":
            self.bucket_util.drop_collection(self.cluster.master, self.bucket,
                                             CbServer.default_scope,
                                             CbServer.default_collection)
        else:
            self.log_failure("Invalid client_type '%s'" % client_type)

        # Allow time for the collection drop to propagate across the cluster
        self.sleep(60)

        # Wait for doc_loading task to complete
        if data_load == "during_drop":
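            # Failures are expected here: the target collection may have been
            # dropped while this load task was still running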
            self.task_manager.get_task_result(task)
            if task.fail:
                self.log.info("Doc loading failed for keys: %s" % task.fail)

        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
        # Validate drop collection using cbstats
        for node in self.cluster_util.get_kv_nodes():
            shell_conn = RemoteMachineShellConnection(node)
            cbstats = Cbstats(shell_conn)
            c_data = cbstats.get_collections(self.bucket)
            expected_collection_count = \
                len(self.bucket_util.get_active_collections(
                    self.bucket,
                    CbServer.default_scope,
                    only_names=True))
            if c_data["count"] != expected_collection_count:
                self.log_failure(
                    "%s - Expected collection count is '%s'. "
                    "Actual: %s" %
                    (node.ip, expected_collection_count, c_data["count"]))
            if "_default" in c_data:
                self.log_failure("%s: _default collection exists in cbstats" %
                                 node.ip)

        # SDK connection to default(dropped) collection to validate failure
        try:
            client = SDKClient([self.cluster.master],
                               self.bucket,
                               scope=CbServer.default_scope,
                               collection=CbServer.default_collection,
                               compression_settings=self.sdk_compression)
            result = client.crud("create", "test_key-1", "TestValue")
            if result["status"] is True:
                self.log_failure("CRUD succeeded on deleted collection")
            elif SDKException.RetryReason.KV_COLLECTION_OUTDATED \
                    not in result["error"]:
                self.log_failure("Invalid error '%s'" % result["error"])
            client.close()
        except Exception as e:
            self.log.info(e)

        # Validate the bucket doc count is '0' after drop collection
        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
        self.bucket_util.validate_doc_count_as_per_collections(self.bucket)
        self.validate_test_failure()
Example #4: test_create_scopes
    def test_create_scopes(self):
        """
        1. Load data into '_default' collection (if required by test)
        2. Create scope(s) under the bucket
        3. Validate the scopes are created properly
        4. Validate '_default' collection is intact
        """
        num_scopes = self.input.param("num_scopes", 1)
        if self.action_phase == "before_default_load":
            BucketUtils.create_scopes(self.cluster, self.bucket, num_scopes)

        create_gen = doc_generator("scope_create_key",
                                   0,
                                   self.num_items,
                                   doc_size=self.doc_size,
                                   doc_type=self.doc_type,
                                   target_vbucket=self.target_vbucket,
                                   mutation_type="ADD",
                                   mutate=1,
                                   key_size=self.key_size)
        update_gen = doc_generator("scope_create_key",
                                   0,
                                   self.num_items,
                                   doc_size=self.doc_size,
                                   doc_type=self.doc_type,
                                   target_vbucket=self.target_vbucket,
                                   mutation_type="SET",
                                   mutate=2,
                                   key_size=self.key_size)
        self.log.info("Loading %d docs into '_default' collection" %
                      self.num_items)
        client = SDKClient([self.cluster.master],
                           self.bucket,
                           compression_settings=self.sdk_compression)
        while create_gen.has_next():
            key, val = create_gen.next()
            result = client.crud("create",
                                 key,
                                 val,
                                 exp=self.maxttl,
                                 durability=self.durability_level,
                                 timeout=self.sdk_timeout)
            if result["status"] is False:
                self.log_failure("Doc create failed for '_default' collection")
                break
        client.close()
        # Update num_items for default collection
        self.bucket.scopes[CbServer.default_scope] \
            .collections[CbServer.default_collection] \
            .num_items += self.num_items

        # Doc count validation
        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
        self.bucket_util.validate_doc_count_as_per_collections(self.bucket)

        # Perform update mutation
        task = self.task.async_load_gen_docs(
            self.cluster,
            self.bucket,
            update_gen,
            "update",
            self.maxttl,
            batch_size=10,
            process_concurrency=8,
            replicate_to=self.replicate_to,
            persist_to=self.persist_to,
            durability=self.durability_level,
            compression=self.sdk_compression,
            timeout_secs=self.sdk_timeout,
            scope=CbServer.default_scope,
            collection=CbServer.default_collection)

        # Create scope(s) while CRUDs are running in background
        if self.action_phase == "during_default_load":
            BucketUtils.create_scopes(self.cluster, self.bucket, num_scopes)

        # Validate the '_default' collection is still intact using cbstats
        for node in self.cluster_util.get_kv_nodes():
            shell_conn = RemoteMachineShellConnection(node)
            cbstats = Cbstats(shell_conn)
            c_data = cbstats.get_collections(self.bucket)
            if c_data["count"] != 1:
                self.log_failure("%s - Expected scope count is '1'."
                                 "Actual: %s" % (node.ip, c_data["count"]))
            if "_default" not in c_data:
                self.log_failure("%s: _default collection missing in cbstats" %
                                 node.ip)

        # Wait for doc_loading to complete
        self.task_manager.get_task_result(task)
        self.validate_test_failure()
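Note that step 3 of the docstring ('Validate the scopes are created properly') has no direct scope-side check above; the cbstats loop only verifies the collection count. A hedged sketch of such a check follows; get_active_scopes is an assumed helper, named by analogy with the get_active_collections call in Example #3:

    def validate_scope_count(self, num_scopes):
        # get_active_scopes() is assumed here, mirroring the
        # get_active_collections() helper used in Example #3
        scope_names = self.bucket_util.get_active_scopes(self.bucket,
                                                         only_names=True)
        expected_count = num_scopes + 1  # created scopes plus '_default'
        if len(scope_names) != expected_count:
            self.log_failure("Expected %s scopes. Actual: %s (%s)"
                             % (expected_count, len(scope_names), scope_names))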