# Example no. 1 (scraped page header; original text: "Exemplo n.º 1", vote count 0)
 def setUp(self):
     """Read test tunables from the test input and set up shared state.

     Builds the per-test configuration attributes, a direct memcached
     client for the first bucket, the helper generators, and — when
     ``prepopulate_data`` is requested — synchronously preloads data.
     """
     super(SubdocAutoTestGenerator, self).setUp()
     # Simple (name, default) tunables pulled from the test input.
     # Order matches the original one-by-one reads.
     simple_params = [
         ("prepopulate_data", False),
         ("verify_data_without_paths", True),
         ("number_of_arrays", 1),
         ("verbose_func_usage", False),
         ("nesting_level", 0),
         ("mutation_operation_type", "any"),
         ("force_operation_type", None),
         ("run_data_verification", True),
         ("prepopulate_item_count", 10000),
         ("seed", 0),
         ("run_mutation_mode", "seq"),
     ]
     for param_name, default in simple_params:
         setattr(self, param_name, self.input.param(param_name, default))
     # Direct client against the first bucket, created before the
     # remaining tunables exactly as in the original ordering.
     self.client = self.direct_client(self.master, self.buckets[0])
     self.build_kv_store = self.input.param("build_kv_store", False)
     self.total_writer_threads = self.input.param("total_writer_threads", 10)
     self.number_of_documents = self.input.param("number_of_documents", 10)
     self.concurrent_threads = self.input.param("concurrent_threads", 10)
     # Helpers and bookkeeping containers.
     self.randomDataGenerator = RandomDataGenerator()
     self.subdoc_gen_helper = SubdocHelper()
     self.kv_store = {}
     self.load_thread_list = []
     if self.prepopulate_data:
         self.run_sync_data()
 def test_concurrent_mutations(self):
     """Generate one seeded nested JSON document and run concurrent
     mutation operations against it on the first bucket."""
     generator = RandomDataGenerator()
     generator.set_seed(self.seed)
     # Arguments evaluate left-to-right, so the base json is drawn
     # before the data set, matching the original call order.
     json_document = self.generate_nested(generator.random_json(),
                                          generator.random_json(),
                                          self.nesting_level)
     self.run_mutation_concurrent_operations(self.buckets[0],
                                             "test_concurrent_mutations",
                                             json_document)
 def push_document_info(self, number_of_documents, document_info_queue):
     """Generate ``number_of_documents`` document descriptors and put
     them on ``document_info_queue``.

     Each descriptor is a dict with a unique ``document_key``, a
     per-document ``seed``, and a nested ``json_document`` payload.

     Bug fix: the original created and re-seeded a fresh
     RandomDataGenerator with ``self.seed`` on every iteration, so
     every document received an identical seed and identical payload.
     The generator is now created and seeded once, so successive draws
     differ per document (while remaining reproducible for a fixed
     ``self.seed``).
     """
     randomDataGenerator = RandomDataGenerator()
     randomDataGenerator.set_seed(self.seed)
     for x in range(number_of_documents):
         document_info = {}
         # Key uses the shared generator's uuid plus the index, as before.
         document_info["document_key"] = self.randomDataGenerator.random_uuid() + "_key_" + str(x)
         document_info["seed"] = randomDataGenerator.random_int()
         base_json = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
         data_set = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
         json_document = self.generate_nested(base_json, data_set, self.nesting_level)
         document_info["json_document"] = json_document
         document_info_queue.put(document_info)
 def run_async_data(self):
     """Asynchronously prepopulate every bucket using writer processes.

     Builds one seeded nested JSON template, then (only when
     ``prepopulate_data`` is set) starts ``total_writer_threads``
     Process workers per bucket, each writing an equal share of
     ``prepopulate_item_count`` items. Started processes are tracked in
     ``self.load_thread_list`` so the caller can join them.

     Bug fix: item count per writer now uses floor division ``//`` —
     under Python 3 the original ``/`` produced a float count. Also
     removed a redundant second reset of ``load_thread_list``.
     """
     self.load_thread_list = []
     randomDataGenerator = RandomDataGenerator()
     randomDataGenerator.set_seed(self.seed)
     base_json = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
     data_set = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
     json_document = self.generate_nested(base_json, data_set, self.nesting_level)
     if self.prepopulate_data:
         # Each writer gets an integer share of the total item count.
         items_per_writer = self.prepopulate_item_count // self.total_writer_threads
         for bucket in self.buckets:
             for x in range(self.total_writer_threads):
                 client = VBucketAwareMemcached(RestConnection(self.master), bucket)
                 t = Process(target=self.run_populate_data_per_bucket,
                             args=(client, bucket, json_document,
                                   items_per_writer, x))
                 t.start()
                 self.load_thread_list.append(t)