def test_concurrent_mutations(self):
    """Build a seeded, nested JSON document and run concurrent mutation operations against it."""
    randomDataGenerator = RandomDataGenerator()
    randomDataGenerator.set_seed(self.seed)
    base_json = randomDataGenerator.random_json()
    data_set = randomDataGenerator.random_json()
    json_document = self.generate_nested(base_json, data_set, self.nesting_level)
    data_key = "test_concurrent_mutations"
    self.run_mutation_concurrent_operations(self.buckets[0], data_key, json_document)
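# For illustration only: one plausible shape of the generate_nested helper used
# above. It embeds copies of data_set inside base_json down to the requested
# nesting depth. This sketch is an assumption; the suite's actual
# generate_nested implementation may differ.
def generate_nested_example(base_json, data_set, nesting_level):
    import copy
    document = copy.deepcopy(base_json)
    current_level = document
    for depth in range(nesting_level):
        child = copy.deepcopy(data_set)
        current_level["child_" + str(depth)] = child
        current_level = child
    return document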
def push_document_info(self, number_of_documents, document_info_queue):
    """Generate per-document info (unique key, seed, nested JSON) and push each entry onto the shared queue."""
    for x in range(number_of_documents):
        document_info = {}
        randomDataGenerator = RandomDataGenerator()
        randomDataGenerator.set_seed(self.seed)
        document_info["document_key"] = randomDataGenerator.random_uuid() + "_key_" + str(x)
        document_info["seed"] = randomDataGenerator.random_int()
        base_json = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
        data_set = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
        json_document = self.generate_nested(base_json, data_set, self.nesting_level)
        document_info["json_document"] = json_document
        document_info_queue.put(document_info)
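# A minimal sketch (not part of the suite) of how a reader could drain the
# queue filled by push_document_info. The helper name consume_document_info is
# hypothetical; it assumes document_info_queue is a multiprocessing.Queue and
# that the caller knows how many entries were pushed.
def consume_document_info(document_info_queue, number_of_documents):
    documents = []
    for _ in range(number_of_documents):
        document_info = document_info_queue.get()
        documents.append((document_info["document_key"], document_info["json_document"]))
    return documents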
def run_async_data(self):
    """Optionally prepopulate every bucket with seeded nested JSON documents using parallel writer processes."""
    self.load_thread_list = []
    randomDataGenerator = RandomDataGenerator()
    randomDataGenerator.set_seed(self.seed)
    base_json = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
    data_set = randomDataGenerator.random_json(random_array_count=self.number_of_arrays)
    json_document = self.generate_nested(base_json, data_set, self.nesting_level)
    if self.prepopulate_data:
        self.load_thread_list = []
        for bucket in self.buckets:
            for x in range(self.total_writer_threads):
                client = VBucketAwareMemcached(RestConnection(self.master), bucket)
                # Each writer process loads its integer share of the prepopulation items.
                t = Process(target=self.run_populate_data_per_bucket,
                            args=(client, bucket, json_document,
                                  self.prepopulate_item_count // self.total_writer_threads, x))
                t.start()
                self.load_thread_list.append(t)
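# A minimal sketch (assumption, not existing suite code) showing how the writer
# processes started by run_async_data could be waited on before verification;
# the method name wait_for_async_data is hypothetical.
def wait_for_async_data(self):
    for t in self.load_thread_list:
        t.join()
    self.load_thread_list = []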