Example #1
# NOTE: the imports below are assumptions based on the Couchbase testrunner
# layout; SimpleDataSet, MultidimDataSet and RunQueriesThread come from the
# spatial view test helpers in the same repository (exact paths may differ).
import time
import unittest
from threading import Thread

import logger
from couchbase_helper.cluster import Cluster
from membase.helper.spatial_helper import SpatialHelper
from remote.remote_util import RemoteMachineShellConnection
class SpatialQueryTests(unittest.TestCase):
    def setUp(self):
        self.log = logger.Logger.get_logger()
        self.helper = SpatialHelper(self, "default")
        self.helper.setup_cluster()
        self.cluster = Cluster()
        self.servers = self.helper.servers

    def tearDown(self):
        self.helper.cleanup_cluster()

    def test_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make bounding box queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._query_test_init(data_set)

    def test_simple_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make range queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make limit queries on a multidimensional "
                      "dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make skip (and limit) queries on a "
                      "multidimensional dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make range queries on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init(data_set)

    def test_multidim_dataset_range_and_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Make range queries with limits on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_and_limit_queries()
        self._query_test_init(data_set)

# Rebalance In
    def test_rebalance_in_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and bounding box queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_simple_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and range queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_multidim_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and limit queries on a multidimensional "
                      "dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_multidim_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and skip (and limit) queries on a "
                      "multidimensional dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_multidim_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and range queries on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_in_multidim_dataset_range_and_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance In and range queries with limits on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_and_limit_queries()
        self._rebalance_cluster(data_set)

# Rebalance Out
    def test_rebalance_out_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and  limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_out_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_out_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and bounding box queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_out_simple_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and range queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_out_multidim_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and limit queries on a multidimensional "
                      "dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_out_multidim_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and skip (and limit) queries on a "
                      "multidimensional dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_out_multidim_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and range queries on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._rebalance_cluster(data_set)

    def test_rebalance_out_multidim_dataset_range_and_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and range queries with limits on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_and_limit_queries()
        self._rebalance_cluster(data_set)

# Warmup Tests

    def test_warmup_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with skip and limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init_integration(data_set)

    def test_warmup_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with  skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init_integration(data_set)

    def test_warmup_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with  bounding box queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._query_test_init_integration(data_set)

    def test_warmup_simple_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with  range queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init_integration(data_set)

    def test_warmup_multidim_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with  limit queries on a multidimensional "
                      "dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init_integration(data_set)

    def test_warmup_multidim_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with  skip (and limit) queries on a "
                      "multidimensional dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init_integration(data_set)

    def test_warmup_multidim_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with  range queries on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init_integration(data_set)

    def test_warmup_multidim_dataset_range_and_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Warmup with  range queries with limits on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_and_limit_queries()
        self._query_test_init_integration(data_set)


# Reboot Tests
    def test_reboot_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot and limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init_integration(data_set)

    def test_reboot_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot with  skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init_integration(data_set)

    def test_reboot_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot with  bounding box queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._query_test_init_integration(data_set)

    def test_reboot_simple_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot with  range queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init_integration(data_set)

    def test_reboot_multidim_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot with  limit queries on a multidimensional "
                      "dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._query_test_init_integration(data_set)

    def test_reboot_multidim_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot with  skip (and limit) queries on a "
                      "multidimensional dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._query_test_init_integration(data_set)

    def test_reboot_multidim_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot with  range queries on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._query_test_init_integration(data_set)

    def test_reboot_multidim_dataset_range_and_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Reboot with  range queries with limits on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_and_limit_queries()
        self._query_test_init_integration(data_set)

# Failover Tests
    def test_failover_simple_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Failover and limit queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._failover_cluster(data_set)

    def test_failover_simple_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and skip (and limit) queries on a "
                      "simple dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._failover_cluster(data_set)

    def test_failover_simple_dataset_bbox_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and bounding box queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_bbox_queries()
        self._failover_cluster(data_set)

    def test_failover_simple_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and range queries on a simple "
                      "dataset with {0} docs".format(num_docs))

        data_set = SimpleDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._failover_cluster(data_set)

    def test_failover_multidim_dataset_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and limit queries on a multidimensional "
                      "dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_limit_queries()
        self._failover_cluster(data_set)

    def test_failover_multidim_dataset_skip_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and skip (and limit) queries on a "
                      "multidimensional dataset with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_skip_queries()
        self._failover_cluster(data_set)

    def test_failover_multidim_dataset_range_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and range queries on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_queries()
        self._failover_cluster(data_set)

    def test_failover_multidim_dataset_range_and_limit_queries(self):
        num_docs = self.helper.input.param("num-docs")
        self.log.info("description : Rebalance Out and range queries with limits on a "
                      "multidimensional with {0} docs".format(num_docs))

        data_set = MultidimDataSet(self.helper, num_docs)
        data_set.add_range_and_limit_queries()
        self._failover_cluster(data_set)

    ###
    # Load the data defined for this data set, create views, and query
    # the data as it loads. Verification is optional; best practice is
    # to set it to False if you plan to run _query_all_views() later in
    # the test case. (A standalone sketch of this load-while-querying
    # pattern follows the class.)
    ###
    def _query_test_init(self, data_set, verify_results=True):
        views = data_set.views

        # start loading data
        t = Thread(target=data_set.load,
                   name="load_data_set",
                   args=())
        t.start()

        # run queries while loading data
        while t.is_alive():
            self._query_all_views(views, False)
            time.sleep(5)
        t.join()

        # verify results if requested; otherwise only check for view errors
        if verify_results:
            self._query_all_views(views, verify_results)
        else:
            self._check_view_integrity(views)

    def _query_test_init_integration(self, data_set, verify_results=True):
        views = data_set.views
        data_set.load()

        # pick the cluster disruption for this scenario; keep the bound
        # method (do not call it yet) so it can run while queries execute
        if self.helper.num_nodes_reboot >= 1:
            disrupt_fn = self._reboot_cluster
        elif self.helper.num_nodes_warmup >= 1:
            disrupt_fn = self._warmup_cluster
        elif self.helper.num_nodes_to_add >= 1 or self.helper.num_nodes_to_remove >= 1:
            disrupt_fn = self._rebalance_cluster
        else:
            disrupt_fn = None

        # run queries in the background while the cluster is disrupted
        t = Thread(target=self._query_all_views, args=(views, False))
        t.start()
        if disrupt_fn is not None:
            disrupt_fn(data_set)
        t.join()

        # verify results if requested; otherwise only check for view errors
        if verify_results:
            self._query_all_views(views, verify_results)
        else:
            self._check_view_integrity(views)

    ##
    # run all queries for all views in parallel
    ##
    def _query_all_views(self, views, verify_results=True):
        query_threads = []
        for view in views:
            t = RunQueriesThread(view, verify_results)
            query_threads.append(t)
            t.start()

        for t in query_threads:
            t.join()

        self._check_view_integrity(query_threads)

    ##
    # If an error occurred while loading or querying data for a view,
    # it is queued and checked here. Fail on the first error that
    # occurs.
    ##
    def _check_view_integrity(self, thread_results):
        for result in thread_results:
            if result.test_results.errors:
                self.fail(result.test_results.errors[0][1])
            if result.test_results.failures:
                self.fail(result.test_results.failures[0][1])

    ###
    # Rebalance
    ###
    def _rebalance_cluster(self, data_set):
        if self.helper.num_nodes_to_add >= 1:
            rebalance = self.cluster.async_rebalance(self.servers[:1],
                self.servers[1:self.helper.num_nodes_to_add + 1],
                [])
            self._query_test_init(data_set)
            rebalance.result()

        elif self.helper.num_nodes_to_remove >= 1:
            rebalance = self.cluster.async_rebalance(self.servers[:1], [],
                self.servers[1:self.helper.num_nodes_to_remove + 1])
            self._query_test_init(data_set)
            rebalance.result()

    def _failover_cluster(self, data_set):
        failover_nodes = self.servers[1 : self.helper.failover_factor + 1]
        try:
            # fail over the nodes, then rebalance them out and verify
            self.cluster.failover(self.servers, failover_nodes)
            self.log.info("120 seconds sleep after failover before invoking rebalance...")
            time.sleep(120)
            rebalance = self.cluster.async_rebalance(self.servers,
                [], failover_nodes)

            self._query_test_init(data_set)

            msg = "rebalance failed while removing failover nodes {0}".format(failover_nodes)
            self.assertTrue(rebalance.result(), msg=msg)

            #verify queries after failover
            self._query_test_init(data_set)
        finally:
            self.log.info("Completed the failover testing for spatial querying")

    ###
    # Warmup
    ###
    def _warmup_cluster(self, data_set):
        for server in self.servers[0:self.helper.num_nodes_warmup]:
            remote = RemoteMachineShellConnection(server)
            remote.stop_server()
            remote.start_server()
            remote.disconnect()
            self.log.info("Node {0} should be warming up ".format(server.ip))
            time.sleep(120)
        self._query_test_init(data_set)

    # REBOOT
    def _reboot_cluster(self, data_set):
        try:
            for server in self.servers[0:self.helper.num_nodes_reboot]:
                shell = RemoteMachineShellConnection(server)
                if shell.extract_remote_info().type.lower() == 'windows':
                    o, r = shell.execute_command("shutdown -r -f -t 0")
                    shell.log_command_output(o, r)
                    shell.disconnect()
                    self.log.info("Node {0} is being stopped".format(server.ip))
                elif shell.extract_remote_info().type.lower() == 'linux':
                    o, r = shell.execute_command("reboot")
                    shell.log_command_output(o, r)
                    shell.disconnect()
                    self.log.info("Node {0} is being stopped".format(server.ip))

                    time.sleep(120)
                    shell = RemoteMachineShellConnection(server)
                    command = "/sbin/iptables -F"
                    o, r = shell.execute_command(command)
                    shell.log_command_output(o, r)
                    shell.disconnect()
                    self.log.info("Node {0} backup".format(server.ip))
        finally:
            self.log.info("Warming-up server ..".format(server.ip))
            time.sleep(100)
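
The load-while-querying pattern in _query_test_init above is worth isolating. The following is a minimal, self-contained Python 3 sketch of the same idea using only the standard library; load_docs and run_query are hypothetical stand-ins for the dataset loader and the spatial view query, not part of the testrunner API.

import time
from threading import Thread

def load_docs(count, sink):
    # hypothetical loader: simulate slow document inserts
    for i in range(count):
        sink.append({"id": i, "geometry": [i % 180, i % 90]})
        time.sleep(0.001)

def run_query(sink):
    # hypothetical query: report how many docs are visible so far
    return len(sink)

docs = []
loader = Thread(target=load_docs, name="load_data_set", args=(500, docs))
loader.start()

# query repeatedly while the load is still in flight (no verification yet)
while loader.is_alive():
    print("visible docs while loading: {0}".format(run_query(docs)))
    time.sleep(0.1)
loader.join()

# final, verifying query once loading has finished
assert run_query(docs) == 500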
Example #2
# NOTE: the imports below are assumptions based on the Couchbase testrunner
# layout; exact module paths may differ between versions.
import re
import time
import traceback

import logger
from TestInput import TestInputSingleton
from basetestcase import BaseTestCase
from couchbase_helper.cluster import Cluster
from membase.api.rest_client import RestConnection, RestHelper
from membase.helper.bucket_helper import BucketOperationHelper
from membase.helper.cluster_helper import ClusterOperationHelper
from remote.remote_util import RemoteMachineShellConnection
class BucketConfig(BaseTestCase):

    def setUp(self):
        super(BucketConfig, self).setUp()
        self.testcase = '2'
        self.log = logger.Logger.get_logger()
        self.input = TestInputSingleton.input
        self.servers = self.input.servers
        #self.time_synchronization = self.input.param("time_sync", "enabledWithoutDrift")
        self.lww = self.input.param("lww", True)
        self.drift = self.input.param("drift", False)
        self.bucket='bucket-1'
        self.master = self.servers[0]
        self.rest = RestConnection(self.master)
        self.cluster = Cluster()
        self.skip_rebalance = self.input.param("skip_rebalance", False)

        node_ram_ratio = BucketOperationHelper.base_bucket_ratio(self.servers)
        mem_quota = int(self.rest.get_nodes_self().mcdMemoryReserved *
                        node_ram_ratio)

        if not self.skip_rebalance:
            self.rest.init_cluster(self.master.rest_username,
                self.master.rest_password)
            self.rest.init_cluster_memoryQuota(self.master.rest_username,
                self.master.rest_password,
                memoryQuota=mem_quota)
            for server in self.servers:
                ClusterOperationHelper.cleanup_cluster([server])
                ClusterOperationHelper.wait_for_ns_servers_or_assert(
                    [self.master], self.testcase)
            try:
                ClusterOperationHelper.add_and_rebalance(self.servers)
            except Exception as e:
                self.fail('cluster is not rebalanced: {0}'.format(e))

        self._create_bucket(self.lww, self.drift)

    def tearDown(self):
        super(BucketConfig, self).tearDown()
        if "skip_cleanup" not in TestInputSingleton.input.test_params:
            BucketOperationHelper.delete_all_buckets_or_assert(
                self.servers, self.testcase)
            ClusterOperationHelper.cleanup_cluster(self.servers)
            ClusterOperationHelper.wait_for_ns_servers_or_assert(
                self.servers, self.testcase)

    def test_modify_bucket_params(self):
        try:
            self.log.info("Modifying timeSynchronization value after bucket creation .....")
            self._modify_bucket()
        except Exception as e:
            traceback.print_exc()
            self.fail('[ERROR] Modify bucket testcase failed: {0}'.format(e))

    def test_restart(self):
        try:
            self.log.info("Restarting the servers ..")
            self._restart_server(self.servers[:])
            self.log.info("Verifying bucket settings after restart ..")
            self._check_config()
        except Exception as e:
            traceback.print_exc()
            self.fail("[ERROR] Check data after restart failed with exception {0}".format(e))

    def test_failover(self):
        num_nodes = 1
        try:
            self.log.info("Failing over 1 of the servers ..")
            self.cluster.failover(self.servers, self.servers[1:num_nodes + 1])
            self.cluster.rebalance(self.servers, [], self.servers[1:num_nodes + 1])
            self.log.info("Verifying bucket settings after failover ..")
            self._check_config()
        except Exception as e:
            traceback.print_exc()
            self.fail('[ERROR] Failed to failover: {0}'.format(e))

    def test_rebalance_in(self):
        try:
            self.log.info("Rebalancing 1 of the servers ..")
            ClusterOperationHelper.add_and_rebalance(
                self.servers)
            self.log.info("Verifying bucket settings after rebalance ..")
            self._check_config()
        except Exception as e:
            self.fail('[ERROR]Rebalance failed .. , {0}'.format(e))

    def test_backup_same_cluster(self):
        self.shell = RemoteMachineShellConnection(self.master)
        self.buckets = RestConnection(self.master).get_buckets()
        self.couchbase_login_info = "%s:%s" % (self.input.membase_settings.rest_username,
                                               self.input.membase_settings.rest_password)
        self.backup_location = "/tmp/backup"
        self.command_options = self.input.param("command_options", '')
        try:
            self.shell.execute_cluster_backup(self.couchbase_login_info, self.backup_location, self.command_options)

            time.sleep(5)
            self.shell.restore_backupFile(self.couchbase_login_info, self.backup_location, [bucket.name for bucket in self.buckets])

        finally:
            self._check_config()

    def test_backup_diff_bucket(self):
        self.shell = RemoteMachineShellConnection(self.master)
        self.buckets = RestConnection(self.master).get_buckets()
        self.couchbase_login_info = "%s:%s" % (self.input.membase_settings.rest_username,
                                               self.input.membase_settings.rest_password)
        self.backup_location = "/tmp/backup"
        self.command_options = self.input.param("command_options", '')
        try:
            self.shell.execute_cluster_backup(self.couchbase_login_info, self.backup_location, self.command_options)

            time.sleep(5)
            self._create_bucket(lww=False, name="new_bucket")
            self.buckets = RestConnection(self.master).get_buckets()
            self.shell.restore_backupFile(self.couchbase_login_info, self.backup_location, ["new_bucket"])

        finally:
            self._check_config()

    ''' Helper functions for the above test cases
    '''
    # Create a bucket if it doesn't exist. The drift parameter is currently unused.
    def _create_bucket(self, lww=True, drift=False, name=None):
        # assign unconditionally so lww=False is honoured as well
        self.lww = lww
        if name:
            self.bucket = name

        helper = RestHelper(self.rest)
        if not helper.bucket_exists(self.bucket):
            self.rest.create_bucket(bucket=self.bucket,
                ramQuotaMB=512, authType='sasl', lww=self.lww)
            try:
                BucketOperationHelper.wait_for_memcached(self.master,
                    self.bucket)
            except Exception as e:
                self.fail('unable to create bucket {0}: {1}'.format(self.bucket, e))

    # KETAKI: to change this
    def _modify_bucket(self):
        status, content = self.rest.change_bucket_props(bucket=self.bucket,
            ramQuotaMB=512, authType='sasl', timeSynchronization='enabledWithOutDrift')
        # the search pattern is kept verbatim (spelling included) to match the server response
        if re.search('TimeSyncronization not allowed in update bucket', content):
            self.log.info('[PASS] Expected modify bucket to disallow Time Synchronization.')
        else:
            self.fail('[ERROR] Expected modify bucket to disallow Time Synchronization, but it was allowed')

    def _restart_server(self, servers):
        for server in servers:
            shell = RemoteMachineShellConnection(server)
            shell.stop_couchbase()
            time.sleep(10)
            shell.start_couchbase()
            shell.disconnect()
        ClusterOperationHelper.wait_for_ns_servers_or_assert(servers, self, wait_if_warmup=True)

    # REBOOT
    def _reboot_server(self):
        try:
            for server in self.servers[:]:
                shell = RemoteMachineShellConnection(server)
                if shell.extract_remote_info().type.lower() == 'windows':
                    o, r = shell.execute_command("shutdown -r -f -t 0")
                    shell.log_command_output(o, r)
                    shell.disconnect()
                    self.log.info("Node {0} is being stopped".format(server.ip))
                elif shell.extract_remote_info().type.lower() == 'linux':
                    o, r = shell.execute_command("reboot")
                    shell.log_command_output(o, r)
                    shell.disconnect()
                    self.log.info("Node {0} is being stopped".format(server.ip))

                    ClusterOperationHelper.wait_for_ns_servers_or_assert([server], self, wait_if_warmup=True)
                    shell = RemoteMachineShellConnection(server)
                    command = "/sbin/iptables -F"
                    o, r = shell.execute_command(command)
                    shell.log_command_output(o, r)
                    shell.disconnect()
                    self.log.info("Node {0} backup".format(server.ip))
        finally:
            self.log.info("Warming-up servers ..")
            ClusterOperationHelper.wait_for_ns_servers_or_assert(self.servers, self, wait_if_warmup=True)



    def _check_config(self):
        rc = self.rest.get_bucket_json(self.bucket)
        if 'conflictResolutionType' in rc:
            conflict_resolution = rc['conflictResolutionType']
            self.assertTrue(conflict_resolution == 'lww',
                'Expected conflict resolution of lww but got {0}'.format(conflict_resolution))


        """ drift is disabled in 4.6, commenting out for now as it may come back later