def test_stop_loading(self):
    """Load docs through parallel transactions and close the client and
    transaction abruptly mid-flight; then create a new transaction, sleep
    past the transaction-cleanup window, re-run the same creates and
    verify every doc landed.
    """
    self.num_txn = self.input.param("num_txn", 9)
    self.doc_gen(self.num_items)

    # Split docs into ~num_txn chunks. Floor-divide and clamp to >= 1 so a
    # small num_items never produces a zero chunk size (which would make
    # __chunks degenerate) and the size stays an int on Python 3 too.
    chunk_size = max(1, len(self.docs) // self.num_txn)
    docs = list(self.__chunks(self.docs, chunk_size))

    threads = []
    for doc in docs:
        threads.append(threading.Thread(
            target=self.__thread_to_transaction,
            args=(self.transaction, "create", doc, self.transaction_commit,
                  self.update_count, True, False)))
    for thread in threads:
        thread.start()

    # Abrupt teardown while the transaction threads are still racing --
    # this is the scenario under test, so the threads are deliberately
    # not joined first.
    self.client.cluster.shutdown()
    self.transaction.close()

    # Fresh SDK client + transaction against the same bucket
    self.client1 = VBucketAwareMemcached(
        RestConnection(self.cluster.master), self.def_bucket[0])
    self.create_Transaction(self.client1)
    # sleep for 60s past the timeout so transaction cleanup can happen
    self.sleep(self.transaction_timeout + 60)

    self.test_log.info("going to start the load")
    for doc in docs:
        exception = Transaction().RunTransaction(
            self.transaction, [self.client1.collection], doc, [], [],
            self.transaction_commit, self.sync, self.update_count)
        if exception:
            # give cleanup another minute before retorting via verify
            time.sleep(60)

    self.verify_doc(self.num_items, self.client1)
    self.client1.close()
def setUp(self):
    """Prepare the cluster, default bucket, SDK client, doc payloads and
    transaction config shared by the transaction basic-ops tests."""
    super(basic_ops, self).setUp()
    self.test_log = logging.getLogger("test")
    # NOTE(review): this assignment shadows unittest's TestCase.fail()
    # method on the instance -- confirm nothing calls self.fail() later.
    self.fail = self.input.param("fail", False)

    extra_nodes = [] if self.nodes_init == 1 \
        else self.cluster.servers[1:self.nodes_init]
    self.task.rebalance([self.cluster.master], extra_nodes, [])
    self.cluster.nodes_in_cluster.extend([self.cluster.master] + extra_nodes)
    self.bucket_util.add_rbac_user()

    if self.default_bucket:
        self.bucket_util.create_default_bucket(
            replica=self.num_replicas,
            compression_mode=self.compression_mode,
            ram_quota=100,
            bucket_type=self.bucket_type)
    # allow the freshly created bucket to settle before connecting
    time.sleep(10)

    self.def_bucket = self.bucket_util.get_all_buckets()
    self.client = VBucketAwareMemcached(
        RestConnection(self.cluster.master), self.def_bucket[0])
    self.__durability_level()
    self.operation = self.input.param("operation", "afterAtrPending")

    # Build the shared document payload and the key/doc lists for the load
    self.value = {'value': 'value1'}
    self.content = self.client.translate_to_json_object(self.value)
    self.keys = ["test_docs-" + str(i) for i in range(self.num_items)]
    self.docs = [Tuples.of(key, self.content) for key in self.keys]

    self.transaction_config = Transaction().createTransactionConfig(
        self.transaction_timeout, self.durability)
    self.log.info("==========Finished Basic_ops base setup========")
def async_validate_docs(self, cluster, bucket, generator, opt_type, exp=0,
                        flag=0, only_store_hash=True, batch_size=1,
                        pause_secs=1, timeout_secs=5, compression=True,
                        process_concurrency=4):
    """Schedule an asynchronous task that validates the documents produced
    by *generator* against *bucket* and return the task handle."""
    log.info("Validating documents")
    sdk_client = VBucketAwareMemcached(RestConnection(cluster.master), bucket)
    validator_task = jython_tasks.DocumentsValidatorTask(
        cluster, self.jython_task_manager, bucket, sdk_client, [generator],
        opt_type, exp, flag=flag, only_store_hash=only_store_hash,
        batch_size=batch_size, pause_secs=pause_secs,
        timeout_secs=timeout_secs, compression=compression,
        process_concurrency=process_concurrency)
    self.jython_task_manager.add_new_task(validator_task)
    return validator_task
def async_load_gen_docs_atomicity(self, cluster, buckets, generator, op_type,
                                  exp=0, flag=0, persist_to=0, replicate_to=0,
                                  only_store_hash=True, batch_size=1,
                                  pause_secs=1, timeout_secs=5,
                                  compression=True, process_concurrency=1,
                                  retries=5, update_count=1,
                                  transaction_timeout=5, commit=True,
                                  durability=0):
    """Schedule an asynchronous transactional (atomicity) doc-load across
    all *buckets* and return the task handle."""
    log.info("Loading documents ")
    collection_list = []
    client_list = []
    for bucket in buckets:
        # one SDK client per bucket; the task also receives the last one
        sdk_client = VBucketAwareMemcached(
            RestConnection(cluster.master), bucket)
        client_list.append(sdk_client)
        collection_list.append(sdk_client.collection)
    atomicity_task = jython_tasks.Atomicity(
        cluster, self.jython_task_manager, collection_list, sdk_client,
        client_list, [generator], op_type, exp, flag=flag,
        persist_to=persist_to, replicate_to=replicate_to,
        only_store_hash=only_store_hash, batch_size=batch_size,
        pause_secs=pause_secs, timeout_secs=timeout_secs,
        compression=compression, process_concurrency=process_concurrency,
        retries=retries, update_count=update_count,
        transaction_timeout=transaction_timeout, commit=commit,
        durability=durability)
    self.jython_task_manager.add_new_task(atomicity_task)
    return atomicity_task
def async_load_gen_docs_durable(self, cluster, bucket, generator, op_type,
                                exp=0, flag=0, persist_to=0, replicate_to=0,
                                only_store_hash=True, batch_size=1,
                                pause_secs=1, timeout_secs=5,
                                compression=True, process_concurrency=1,
                                retries=5, durability=""):
    """Schedule an asynchronous durable doc-load task against *bucket*,
    creating one SDK client per concurrency slice, and return the task."""
    self.log.debug("Loading documents to {}".format(bucket.name))
    clients = []
    gen_start = int(generator.start)
    gen_end = max(int(generator.end), 1)
    # Floor-divide so the step is an int under Python 3 as well; clamp to
    # >= 1 so range() never gets a zero step.
    gen_range = max(
        int((generator.end - generator.start) // process_concurrency), 1)
    for _ in range(gen_start, gen_end, gen_range):
        clients.append(
            VBucketAwareMemcached(RestConnection(cluster.master), bucket))
    # Majority for durability = floor((replicas + 1) / 2) + 1; use floor
    # division so this stays an int (true division yields a float on Py3).
    majority_value = (bucket.replicaNumber + 1) // 2 + 1
    _task = jython_tasks.Durability(
        cluster, self.jython_task_manager, bucket, clients, generator,
        op_type, exp, flag=flag, persist_to=persist_to,
        replicate_to=replicate_to, only_store_hash=only_store_hash,
        batch_size=batch_size, pause_secs=pause_secs,
        timeout_secs=timeout_secs, compression=compression,
        process_concurrency=process_concurrency, retries=retries,
        durability=durability, majority_value=majority_value)
    self.jython_task_manager.add_new_task(_task)
    return _task
def async_load_bucket_for_dgm(self, cluster, bucket, generator, opt_type,
                              active_resident_threshold, exp=0, flag=0,
                              only_store_hash=True, batch_size=1,
                              pause_secs=1, timeout_secs=5, compression=True,
                              process_concurrency=4):
    """Load *bucket* with docs until the requested DGM percentage is hit.

    Parameters:
      cluster - Cluster object
      bucket - Bucket object to load docs into
      generator - Document generator object
      opt_type - Operation type
      active_resident_threshold - Target DGM percentage

    Returns:
      the async task created for the DGM load
    """
    log.info("Loading doc into {0} until dgm is {1}%".format(
        bucket.name, active_resident_threshold))
    sdk_client = VBucketAwareMemcached(RestConnection(cluster.master), bucket)
    dgm_task = jython_tasks.LoadDocumentsForDgmTask(
        cluster, self.jython_task_manager, bucket, sdk_client, [generator],
        opt_type, exp, flag=flag, only_store_hash=only_store_hash,
        batch_size=batch_size, pause_secs=pause_secs,
        timeout_secs=timeout_secs, compression=compression,
        process_concurrency=process_concurrency,
        active_resident_threshold=active_resident_threshold)
    self.jython_task_manager.add_new_task(dgm_task)
    return dgm_task
def setUp(self):
    """Prepare the cluster, default bucket, SDK client and an initial
    transaction for the basic-ops tests."""
    super(basic_ops, self).setUp()
    self.test_log = logging.getLogger("test")
    # zero-pad the base key out to the configured key size
    self.key = 'test_docs'.rjust(self.key_size, '0')

    extra_nodes = [] if self.nodes_init == 1 \
        else self.cluster.servers[1:self.nodes_init]
    self.task.rebalance([self.cluster.master], extra_nodes, [])
    self.cluster.nodes_in_cluster.extend([self.cluster.master] + extra_nodes)
    self.bucket_util.add_rbac_user()

    if self.default_bucket:
        self.bucket_util.create_default_bucket(
            replica=self.num_replicas,
            compression_mode=self.compression_mode,
            ram_quota=100)
    # allow the freshly created bucket to settle before connecting
    time.sleep(10)

    self.def_bucket = self.bucket_util.get_all_buckets()
    self.client = VBucketAwareMemcached(
        RestConnection(self.cluster.master), self.def_bucket[0])
    self.__durability_level()
    self.create_Transaction()
    self._stop = threading.Event()
    self.log.info("==========Finished Basic_ops base setup========")
def basic_concurrency(self):
    """Start a transactional create in a background thread, then either
    crash the client mid-transaction and retry on a fresh client, or check
    that concurrent non-transactional ops on an in-flight doc behave as
    expected (insert fails, upsert and delete succeed).
    """
    self.crash = self.input.param("crash", False)
    self.doc_gen(self.num_items)

    # run the transaction in parallel
    thread = threading.Thread(
        target=self.__thread_to_transaction,
        args=(self.transaction, "create", self.docs,
              self.transaction_commit, self.update_count, True, False))
    thread.start()
    self.sleep(1)

    if self.crash:
        self.client.cluster.shutdown()
        self.transaction.close()
        # Use the test logger instead of a Python 2 `print` statement so
        # the module stays Python 3 compatible and logging is consistent
        # with the other tests in this file.
        self.test_log.info("going to create a new transaction")
        self.client1 = VBucketAwareMemcached(
            RestConnection(self.cluster.master), self.def_bucket[0])
        self.create_Transaction(self.client1)
        # wait out the transaction-cleanup window before retrying
        self.sleep(self.transaction_timeout + 60)
        exception = Transaction().RunTransaction(
            self.transaction, [self.client1.collection], self.docs, [], [],
            self.transaction_commit, self.sync, self.update_count)
        if exception:
            time.sleep(60)
        self.verify_doc(self.num_items, self.client1)
        self.client1.close()
    else:
        key = "test_docs-0"
        # insert must fail while the transaction holds the doc
        result = self.client.insert(key, "value")
        self.assertEqual(result["status"], False)
        # update should pass
        result = self.client.upsert(key, "value")
        self.assertEqual(result["status"], True)
        # delete should pass
        result = self.client.delete(key)
        self.assertEqual(result["status"], True)
    thread.join()
def async_load_gen_docs_durable(self, cluster, bucket, generator, op_type,
                                exp=0, flag=0, persist_to=0, replicate_to=0,
                                only_store_hash=True, batch_size=1,
                                pause_secs=1, timeout_secs=5,
                                compression=True, process_concurrency=1,
                                retries=5):
    """Schedule an asynchronous durable doc-load task against *bucket*
    and return the task handle."""
    log.info("Loading documents to {}".format(bucket.name))
    sdk_client = VBucketAwareMemcached(RestConnection(cluster.master), bucket)
    durability_task = jython_tasks.Durability(
        cluster, self.jython_task_manager, bucket, sdk_client, generator,
        op_type, exp, flag=flag, persist_to=persist_to,
        replicate_to=replicate_to, only_store_hash=only_store_hash,
        batch_size=batch_size, pause_secs=pause_secs,
        timeout_secs=timeout_secs, compression=compression,
        process_concurrency=process_concurrency, retries=retries)
    self.jython_task_manager.add_new_task(durability_task)
    return durability_task
def async_continuous_update_docs(self, cluster, bucket, generator, exp=0,
                                 flag=0, persist_to=0, replicate_to=0,
                                 only_store_hash=True, batch_size=1,
                                 pause_secs=1, timeout_secs=5,
                                 compression=True, process_concurrency=8,
                                 retries=5):
    """Schedule an asynchronous task that continuously updates the docs
    produced by *generator* in *bucket*; return the task handle."""
    log.info("Mutating documents to {}".format(bucket.name))
    sdk_client = VBucketAwareMemcached(RestConnection(cluster.master), bucket)
    update_task = jython_tasks.ContinuousDocUpdateTask(
        cluster, self.jython_task_manager, bucket, sdk_client, [generator],
        "update", exp, flag=flag, persist_to=persist_to,
        replicate_to=replicate_to, only_store_hash=only_store_hash,
        batch_size=batch_size, pause_secs=pause_secs,
        timeout_secs=timeout_secs, compression=compression,
        process_concurrency=process_concurrency, retries=retries)
    self.jython_task_manager.add_new_task(update_task)
    return update_task
def async_load_gen_docs(self, cluster, bucket, generator, op_type, exp=0,
                        flag=0, persist_to=0, replicate_to=0,
                        only_store_hash=True, batch_size=1, pause_secs=1,
                        timeout_secs=5, compression=True,
                        process_concurrency=8, retries=5,
                        active_resident_threshold=100, durability=""):
    """Schedule an asynchronous doc-load task against *bucket*.

    When active_resident_threshold is 100 a normal generator load task is
    created with one SDK client per concurrency slice; otherwise a DGM
    load task is created that loads until the resident ratio drops to the
    given threshold. Returns the created task.
    """
    log.info("Loading documents to {}".format(bucket.name))
    clients = []
    gen_start = int(generator.start)
    gen_end = max(int(generator.end), 1)
    # Floor-divide so the step is an int under Python 3 as well; clamp to
    # >= 1 so range() never gets a zero step.
    gen_range = max(
        int((generator.end - generator.start) // process_concurrency), 1)
    for _ in range(gen_start, gen_end, gen_range):
        client = VBucketAwareMemcached(RestConnection(cluster.master), bucket)
        clients.append(client)
    if active_resident_threshold == 100:
        _task = jython_tasks.LoadDocumentsGeneratorsTask(
            cluster, self.jython_task_manager, bucket, clients, [generator],
            op_type, exp, exp_unit="second", flag=flag,
            persist_to=persist_to, replicate_to=replicate_to,
            only_store_hash=only_store_hash, batch_size=batch_size,
            pause_secs=pause_secs, timeout_secs=timeout_secs,
            compression=compression,
            process_concurrency=process_concurrency, retries=retries,
            durability=durability)
    else:
        # NOTE(review): the DGM task receives only the last client built
        # above (single-client task, matching async_load_bucket_for_dgm)
        # -- confirm this is intentional rather than `clients`.
        _task = jython_tasks.LoadDocumentsForDgmTask(
            cluster, self.jython_task_manager, bucket, client, [generator],
            op_type, exp, flag=flag, persist_to=persist_to,
            replicate_to=replicate_to, only_store_hash=only_store_hash,
            batch_size=batch_size, pause_secs=pause_secs,
            timeout_secs=timeout_secs, compression=compression,
            process_concurrency=process_concurrency, retries=retries,
            active_resident_threshold=active_resident_threshold)
    self.jython_task_manager.add_new_task(_task)
    return _task