def erl_crash_during_querying(self):  # TESTED
    """Kill erlang on one random FTS node while an index exists, then
    verify the index partitions stay balanced and queries still succeed.
    """
    fts_index = self.create_index_generate_queries()
    victim = self._cb_cluster.get_random_fts_node()
    NodeHelper.kill_erlang(victim)
    # Balance check first, then re-run the generated queries post-crash.
    self.is_index_partitioned_balanced(fts_index)
    self.run_query_and_compare(fts_index)
def fts_crash_during_querying(self):  # TESTED
    """Kill the cbft process on a random FTS node between two query
    rounds; the node is marked for bypass so the second round can be
    routed around it.
    """
    fts_index = self.create_index_generate_queries()
    # Baseline query pass before the crash.
    self.run_query_and_compare(fts_index)
    victim = self._cb_cluster.get_random_fts_node()
    NodeHelper.kill_cbft_process(victim)
    self._cb_cluster.set_bypass_fts_node(victim)
    # Queries should still succeed with the crashed node bypassed.
    self.run_query_and_compare(fts_index)
def memc_crash_during_indexing_and_querying(self):
    """Kill memcached on a random FTS node while indexing is in flight,
    bypass that node, and verify queries still compare clean.
    """
    self.load_data()
    default_bucket = self._cb_cluster.get_bucket_by_name('default')
    fts_index = self.create_index(
        bucket=default_bucket,
        index_name="default_index")
    self.generate_random_queries(fts_index, self.num_queries,
                                 self.query_types)
    victim = self._cb_cluster.get_random_fts_node()
    NodeHelper.kill_memcached(victim)
    # NOTE(review): bypassing an fts node after a memcached kill looks
    # intentional (same node hosts both services) — confirm.
    self._cb_cluster.set_bypass_fts_node(victim)
    self.run_query_and_compare(fts_index)
def memc_crash_between_indexing_and_querying(self):
    """Finish indexing fully, kill memcached on a random FTS node, then
    confirm partition balance and that every index still returns the
    full expected hit count.
    """
    self.load_data()
    self.create_fts_indexes_all_buckets()
    self.wait_for_indexing_complete()
    self.validate_index_count(equal_bucket_doc_count=True)
    NodeHelper.kill_memcached(self._cb_cluster.get_random_fts_node())
    # First pass: structural check on every index.
    for fts_index in self._cb_cluster.get_indexes():
        self.is_index_partitioned_balanced(fts_index)
    # Second pass: functional check — full-corpus hit count.
    for fts_index in self._cb_cluster.get_indexes():
        hits, _, _, _ = fts_index.execute_query(
            query=self.query,
            expected_hits=self._num_items)
        self.log.info("SUCCESS! Hits: %s" % hits)
def fts_node_crash_during_index_building(self):
    """Kill the cbft process on a random node while index building is
    in progress; indexing must still complete with correct doc counts.
    """
    self.load_data()
    self.create_fts_indexes_all_buckets()
    # Give building a head start before crashing anything.
    self.sleep(10)
    self.log.info("Index building has begun...")
    for fts_index in self._cb_cluster.get_indexes():
        self.log.info("Index count for %s: %s"
                      % (fts_index.name, fts_index.get_indexed_doc_count()))
    NodeHelper.kill_cbft_process(self._cb_cluster.get_random_fts_node())
    for fts_index in self._cb_cluster.get_indexes():
        self.is_index_partitioned_balanced(fts_index)
    self.wait_for_indexing_complete()
    self.validate_index_count(equal_bucket_doc_count=True)
def fts_node_crash_between_indexing_and_querying(self):  # TESTED
    """Complete indexing, kill cbft on a random node, and immediately
    query every index expecting the full hit count.

    NOTE(review): this method is redefined later in the file (a second
    def with the same name that adds a 60s sleep), so this definition
    is shadowed and never runs — rename or remove one of the two.
    """
    self.load_data()
    self.create_fts_indexes_all_buckets()
    self.sleep(10)
    self.log.info("Index building has begun...")
    for fts_index in self._cb_cluster.get_indexes():
        self.is_index_partitioned_balanced(fts_index)
    self.wait_for_indexing_complete()
    self.validate_index_count(equal_bucket_doc_count=True)
    NodeHelper.kill_cbft_process(self._cb_cluster.get_random_fts_node())
    for fts_index in self._cb_cluster.get_indexes():
        hits, _, _, _ = fts_index.execute_query(
            query=self.query,
            expected_hits=self._num_items)
        self.log.info("SUCCESS! Hits: %s" % hits)
def fts_node_crash_between_indexing_and_querying(self):  # TESTED
    """Complete indexing, kill cbft on a random node, wait 60s for the
    process to restart/recover, then query every index expecting the
    full hit count.

    NOTE(review): this shadows an earlier definition of the same name
    (identical flow minus the 60s sleep) — rename or remove one.
    """
    self.load_data()
    self.create_fts_indexes_all_buckets()
    self.sleep(10)
    self.log.info("Index building has begun...")
    for fts_index in self._cb_cluster.get_indexes():
        self.is_index_partitioned_balanced(fts_index)
    self.wait_for_indexing_complete()
    self.validate_index_count(equal_bucket_doc_count=True)
    NodeHelper.kill_cbft_process(self._cb_cluster.get_random_fts_node())
    # Allow time for the killed process to come back before querying.
    self.sleep(60)
    for fts_index in self._cb_cluster.get_indexes():
        hits, _, _, _ = fts_index.execute_query(
            query=self.query,
            expected_hits=self._num_items)
        self.log.info("SUCCESS! Hits: %s" % hits)
def fts_node_down_with_replicas_during_querying(self):
    """Stop couchbase on one FTS node, tolerate the (expected) query
    failure while it is down, then restart it, wait for warmup, and
    verify queries succeed again.
    """
    fts_index = self.create_index_generate_queries()
    victim = self._cb_cluster.get_random_fts_node()
    NodeHelper.stop_couchbase(victim)
    try:
        hits, _, _, _ = fts_index.execute_query(
            query=self.query,
            expected_hits=self._num_items)
    except Exception as e:
        # Query failure is the expected outcome with the node down;
        # broad catch is deliberate here — log and continue.
        self.log.info("Expected exception : %s" % e)
    NodeHelper.start_couchbase(victim)
    NodeHelper.wait_warmup_completed([victim])
    self.run_query_and_compare(fts_index)