Example #1
 def test_more_than_max_collections_multiple_scopes(self):
     # Try to create 10 scopes with 200 collections each, which exceeds
     # the number of collections allowed per bucket
     try:
         BucketUtils.create_scopes(self.cluster, self.bucket, 10, collection_count=200)
     except Exception as e:
         self.log.info("Creating more than max collections "
                       "failed as expected: %s" % e)
     else:
         self.fail("Creating more than max collections did not fail")
Example #2
 def test_more_than_max_collections_multiple_scopes(self):
     # create max collections across 10 scopes
     BucketUtils.create_scopes(self.cluster, self.bucket, 10, collection_count=120)
     try:
         # create one more collection under a new scope
         BucketUtils.create_scopes(self.cluster, self.bucket, 1, collection_count=1)
     except Exception as e:
         self.log.info("Creating more than max collections failed as expected")
     else:
         self.fail("Creating more than max collections did not fail")
Example #3
 def test_more_than_max_scopes(self):
     # Max scope count; the '_default' scope created during setup counts
     # towards this limit, hence only max_scopes - 1 new scopes here
     max_scopes = 1000
     BucketUtils.create_scopes(self.cluster, self.bucket, max_scopes - 1)
     try:
         # Any further scope creation should be rejected
         BucketUtils.create_scopes(self.cluster, self.bucket, 500)
     except Exception as e:
         self.log.info("Creating more than max scopes "
                       "failed as expected: %s" % e)
     else:
         self.fail("Creating more than max scopes did not fail")
Example #4
 def test_more_than_max_scopes(self):
     # Create the maximum allowed number of scopes
     scopes_dict = BucketUtils.create_scopes(self.cluster, self.bucket, self.MAX_SCOPES)
     actual_count = len(scopes_dict)
     if actual_count != self.MAX_SCOPES:
         self.fail("Failed to create max number of scopes: "
                   "expected %s, created %s"
                   % (self.MAX_SCOPES, actual_count))
     try:
         # Creating even one scope beyond the max allowed should fail
         BucketUtils.create_scopes(self.cluster, self.bucket, 1)
     except Exception as e:
         self.log.info("Creating more than max scopes "
                       "failed as expected: %s" % e)
     else:
         self.fail("Creating more than max scopes did not fail")
Example #5
    def test_create_scopes(self):
        """
        1. Load data into '_default' collection (if required by test)
        2. Create scope(s) under the bucket
        3. Validate the scopes are created properly
        4. Validate '_default' collection is intact
        """
        num_scopes = self.input.param("num_scopes", 1)
        if self.action_phase == "before_default_load":
            BucketUtils.create_scopes(self.cluster, self.bucket, num_scopes)

        create_gen = doc_generator("scope_create_key",
                                   0,
                                   self.num_items,
                                   doc_size=self.doc_size,
                                   doc_type=self.doc_type,
                                   target_vbucket=self.target_vbucket,
                                   mutation_type="ADD",
                                   mutate=1,
                                   key_size=self.key_size)
        update_gen = doc_generator("scope_create_key",
                                   0,
                                   self.num_items,
                                   doc_size=self.doc_size,
                                   doc_type=self.doc_type,
                                   target_vbucket=self.target_vbucket,
                                   mutation_type="SET",
                                   mutate=2,
                                   key_size=self.key_size)
        self.log.info("Loading %d docs into '_default' collection" %
                      self.num_items)
        client = SDKClient([self.cluster.master],
                           self.bucket,
                           compression_settings=self.sdk_compression)
        while create_gen.has_next():
            key, val = create_gen.next()
            result = client.crud("create",
                                 key,
                                 val,
                                 exp=self.maxttl,
                                 durability=self.durability_level,
                                 timeout=self.sdk_timeout)
            if result["status"] is False:
                self.log_failure("Doc create failed for '_default' collection")
                break
        client.close()
        # Update num_items for default collection
        self.bucket.scopes[CbServer.default_scope] \
            .collections[CbServer.default_collection] \
            .num_items += self.num_items

        # Doc count validation
        self.bucket_util._wait_for_stats_all_buckets()
        # Prints bucket stats after doc_ops
        self.bucket_util.print_bucket_stats()
        self.bucket_util.validate_doc_count_as_per_collections(self.bucket)

        # Perform update mutation
        task = self.task.async_load_gen_docs(
            self.cluster,
            self.bucket,
            update_gen,
            "update",
            self.maxttl,
            batch_size=10,
            process_concurrency=8,
            replicate_to=self.replicate_to,
            persist_to=self.persist_to,
            durability=self.durability_level,
            compression=self.sdk_compression,
            timeout_secs=self.sdk_timeout,
            scope=CbServer.default_scope,
            collection=CbServer.default_collection)

        # Create scope(s) while CRUDs are running in background
        if self.action_phase == "during_default_load":
            BucketUtils.create_scopes(self.cluster, self.bucket, num_scopes)

        # Validate via cbstats that only the '_default' collection exists
        for node in self.cluster_util.get_kv_nodes():
            shell_conn = RemoteMachineShellConnection(node)
            cbstats = Cbstats(shell_conn)
            c_data = cbstats.get_collections(self.bucket)
            if c_data["count"] != 1:
                self.log_failure("%s - Expected scope count is '1'."
                                 "Actual: %s" % (node.ip, c_data["count"]))
            if "_default" not in c_data:
                self.log_failure("%s: _default collection missing in cbstats" %
                                 node.ip)

        # Wait for doc_loading to complete
        self.task_manager.get_task_result(task)
        self.validate_test_failure()