def test_mutation_operations(self):
    """Run concurrent mutation workers against every bucket and fail on worker errors.

    Flow: read test parameters, seed ``document_info_queue`` via a producer
    thread, fan out ``concurrent_threads`` worker processes per bucket,
    optionally start a background load, join all workers, signal any
    still-running load threads to stop, then fail the test if any worker
    pushed an error onto ``error_queue`` (dumping the errors to a re-run
    file first).
    """
    self.run_load_during_mutations = self.input.param("run_load_during_mutations", False)
    self.number_of_documents = self.input.param("number_of_documents", 10)
    self.number_of_operations = self.input.param("number_of_operations", 10)
    self.concurrent_threads = self.input.param("concurrent_threads", 10)
    error_queue = Queue.Queue()
    document_info_queue = Queue.Queue()
    thread_list = []
    # RUN INPUT FILE READ THREAD: producer fills document_info_queue before
    # any worker starts (start() + join() makes this effectively synchronous).
    document_push = threading.Thread(
        target=self.push_document_info,
        args=(self.number_of_documents, document_info_queue))
    document_push.start()
    document_push.join()
    self.sleep(2)
    # RUN WORKER THREADS
    # BUG FIX: the original inner loop ranged over
    # concurrent_threads * len(buckets) PER bucket, spawning a quadratic
    # number of workers; spawn concurrent_threads workers per bucket.
    for bucket in self.buckets:
        for _ in range(self.concurrent_threads):
            worker = Process(
                target=self.worker_operation_run,
                args=(document_info_queue, error_queue, bucket,
                      self.mutation_operation_type, self.force_operation_type))
            worker.start()
            thread_list.append(worker)
    if self.run_load_during_mutations:
        self.run_async_data()
    for worker in thread_list:
        worker.join()
    # Ask any still-running background load threads to stop.
    # BUG FIX: the original called t.is_alive() BEFORE checking t != None,
    # so the None guard could never take effect; check for None first.
    for t in self.load_thread_list:
        if t is not None and t.is_alive():
            t.signal = False
    # ERROR ANALYSIS
    if not error_queue.empty():
        # BUG FIX: `queue_size` was never assigned in the original (NameError
        # on the assert path); capture it before _dump_data drains the queue.
        queue_size = error_queue.qsize()
        # BUG FIX: doubled attribute chain
        # `self.randomDataGenerator.self.randomDataGenerator` collapsed to the
        # single attribute access the author clearly intended.
        filename = '/tmp/dump_failure_{0}.txt'.format(self.randomDataGenerator.random_uuid())
        # Dump Re-run file so the failing operations can be replayed.
        self._dump_data(filename, error_queue)
        self.assertTrue(queue_size == 0,
                        "error count {0}, see error dump {1}".format(queue_size, filename))