Exemplo n.º 1
0
 def test_delete_nonexistant_collection(self):
     """Dropping a collection that was never created must fail.

     Attempts to drop the non-existent collection "sumedh" on the
     target bucket and fails the test if the drop unexpectedly
     succeeds.
     """
     try:
         BucketUtils.drop_collection(self.cluster.master, self.bucket, "sumedh")
     except Exception:
         # Expected path: the server rejects dropping an unknown collection.
         # (Broad catch kept deliberately; the client helper raises a
         # generic Exception on REST failure.)
         self.log.info("Non existant collection deletion failed as expected")
     else:
         self.fail("deletion of non existing collection did not fail")
Exemplo n.º 2
0
 def test_create_delete_recreate_collection(self):
     """Drop a random set of collections, recreate them, and validate.

     Picks up to 10 collections across up to 10 scopes in 1 bucket,
     drops each one, recreates every non-default collection from its
     cached collection object, then validates per-collection doc
     counts across all buckets.
     """
     collections = BucketUtils.get_random_collections(
         self.bucket_util.buckets, 10, 10, 1)
     # Delete each selected collection.
     # NOTE: was `iteritems()` (Python 2 only); `.items()` matches the
     # `.items()` calls already used below and works on Python 3.
     for bucket_name, scope_dict in collections.items():
         bucket = BucketUtils.get_bucket_obj(self.bucket_util.buckets,
                                             bucket_name)
         scope_dict = scope_dict["scopes"]
         for scope_name, collection_dict in scope_dict.items():
             collection_dict = collection_dict["collections"]
             for c_name, _ in collection_dict.items():
                 BucketUtils.drop_collection(self.cluster.master, bucket,
                                             scope_name, c_name)
     # Recreate the collections just dropped
     for bucket_name, scope_dict in collections.items():
         bucket = BucketUtils.get_bucket_obj(self.bucket_util.buckets,
                                             bucket_name)
         scope_dict = scope_dict["scopes"]
         for scope_name, collection_dict in scope_dict.items():
             collection_dict = collection_dict["collections"]
             for c_name, _ in collection_dict.items():
                 # Cannot create a _default collection
                 if c_name == CbServer.default_collection:
                     continue
                 col_obj = \
                     bucket.scopes[scope_name].collections[c_name]
                 BucketUtils.create_collection(self.cluster.master, bucket,
                                               scope_name,
                                               col_obj.get_dict_object())
     # Validate doc count as per bucket collections
     self.bucket_util.validate_docs_per_collections_all_buckets()
     self.validate_test_failure()
Exemplo n.º 3
0
    def test_load_default_collection(self):
        """Load docs into the _default collection and validate them.

        Optionally runs extra collection CRUD in parallel
        (perform_ops) and optionally drops the _default collection
        afterwards (delete_default_collection), then validates
        per-collection doc counts.
        """
        self.delete_default_collection = \
                    self.input.param("delete_default_collection", False)
        self.perform_ops = self.input.param("perform_ops", False)

        # Generator producing self.num_items create-docs for the
        # targeted vbuckets
        doc_gen = doc_generator('test_drop_default', 0, self.num_items,
                                mutate=0,
                                target_vbucket=self.target_vbucket)

        self.log.info("Loading %s docs into '%s::%s' collection" %
                      (self.num_items, CbServer.default_scope,
                       CbServer.default_collection))
        load_task = self.task.async_load_gen_docs(
            self.cluster, self.bucket, doc_gen, "create", self.maxttl,
            batch_size=10,
            process_concurrency=2,
            replicate_to=self.replicate_to,
            persist_to=self.persist_to,
            durability=self.durability_level,
            compression=self.sdk_compression,
            timeout_secs=self.sdk_timeout,
            scope=CbServer.default_scope,
            collection=CbServer.default_collection,
            suppress_error_table=True)

        # Optionally exercise collection ops while the load is running
        if self.perform_ops:
            crud_spec = \
                self.bucket_util.get_crud_template_from_package("initial_load")
            self.bucket_util.run_scenario_from_spec(self.task,
                                                    self.cluster,
                                                    self.bucket_util.buckets,
                                                    crud_spec,
                                                    mutation_num=0)
        self.task_manager.get_task_result(load_task)

        # Wait for stats to settle, then verify the loaded docs
        self.bucket_util._wait_for_stats_all_buckets()
        verify_task = self.task.async_validate_docs(self.cluster,
                                                    self.bucket,
                                                    doc_gen,
                                                    "create",
                                                    self.maxttl,
                                                    batch_size=10,
                                                    process_concurrency=2)
        self.task_manager.get_task_result(verify_task)

        # Optionally drop the _default collection from every bucket
        if self.delete_default_collection:
            for bucket in self.bucket_util.buckets:
                BucketUtils.drop_collection(self.cluster.master, bucket)

        # Validate doc count as per bucket collections
        self.bucket_util.validate_docs_per_collections_all_buckets()
        self.validate_test_failure()
Exemplo n.º 4
0
 def test_drop_collection_compaction(self):
     """Drop a random set of collections, then wait out compaction.

     Drops up to 10 collections across up to 10 scopes in 1 bucket,
     waits for compaction to finish on the last-processed bucket, and
     validates per-collection doc counts.
     """
     collections = BucketUtils.get_random_collections(
         self.bucket_util.buckets, 10, 10, 1)
     # Delete each selected collection.
     # NOTE: was `iteritems()` (Python 2 only); `.items()` works on
     # Python 3 and matches the `.items()` calls below.
     for self.bucket_name, scope_dict in collections.items():
         bucket = BucketUtils.get_bucket_obj(self.bucket_util.buckets,
                                             self.bucket_name)
         scope_dict = scope_dict["scopes"]
         for scope_name, collection_dict in scope_dict.items():
             collection_dict = collection_dict["collections"]
             # Collection data is unused here; only the name is needed
             for c_name, _ in collection_dict.items():
                 BucketUtils.drop_collection(self.cluster.master, bucket,
                                             scope_name, c_name)
     # Trigger compaction wait.
     # NOTE(review): self.bucket_name is the *last* bucket iterated
     # above, so compaction is only awaited on that one bucket —
     # confirm this is intentional when multiple buckets are selected.
     remote_client = RemoteMachineShellConnection(self.cluster.master)
     _ = remote_client.wait_till_compaction_end(
         RestConnection(self.cluster.master),
         self.bucket_name,
         timeout_in_seconds=(self.wait_timeout * 10))
     remote_client.disconnect()
     # Validate doc count as per bucket collections
     self.bucket_util.validate_docs_per_collections_all_buckets()
     self.validate_test_failure()