Code example #1
0
 def setUp(self):
     """Read the secondary-indexing test-input parameters, build the query
     definitions for the configured dataset, and prepare cluster helpers.
     """
     super(BaseSecondaryIndexingTests, self).setUp()
     param = self.input.param  # shorthand for the test-framework param reader
     self.index_lost_during_move_out = []
     self.verify_using_index_status = param("verify_using_index_status", False)
     self.use_replica_when_active_down = param(
         "use_replica_when_active_down", True)
     self.use_where_clause_in_index = param("use_where_clause_in_index", False)
     self.scan_consistency = param("scan_consistency", "request_plus")
     self.scan_vector_per_values = param("scan_vector_per_values", None)
     self.timeout_for_index_online = param("timeout_for_index_online", 600)
     self.verify_query_result = param("verify_query_result", True)
     self.verify_explain_result = param("verify_explain_result", True)
     self.defer_build = param("defer_build", True)
     self.run_query_with_explain = param("run_query_with_explain", True)
     self.run_query = param("run_query", True)
     self.graceful = param("graceful", False)
     self.groups = param("groups", "all").split(":")
     self.use_rest = param("use_rest", False)
     if not self.use_rest:
         generator = SQLDefinitionGenerator()
         # Map each dataset name to the generator method that produces its
         # query definitions.  An unrecognised dataset intentionally leaves
         # self.query_definitions untouched, matching the original if-chain.
         dataset_generators = {
             "default": generator.generate_employee_data_query_definitions,
             "employee": generator.generate_employee_data_query_definitions,
             "simple": generator.generate_simple_data_query_definitions,
             "sabre": generator.generate_sabre_data_query_definitions,
             "bigdata": generator.generate_big_data_query_definitions,
             "array": generator.generate_airlines_data_query_definitions,
         }
         if self.dataset in dataset_generators:
             self.query_definitions = dataset_generators[self.dataset]()
         self.query_definitions = generator.filter_by_group(
             self.groups, self.query_definitions)
     self.ops_map = self._create_operation_map()
     self.find_nodes_in_list()
     self.generate_map_nodes_out_dist()
     self.memory_create_list = []
     self.memory_drop_list = []
     self.skip_cleanup = param("skip_cleanup", False)
     self.index_loglevel = param("index_loglevel", None)
     if self.index_loglevel:
         self.set_indexer_logLevel(self.index_loglevel)
     if self.dgm_run:
         self._load_doc_data_all_buckets(gen_load=self.gens_load)
     self.gsi_thread = Cluster()
     # Deferred builds only apply when GSI backs the secondary indexes.
     self.defer_build = self.defer_build and self.use_gsi_for_secondary
Code example #2
0
 def setUp(self):
     """Collect query-helper test parameters, generate the document load,
     and prepare the query definitions for the configured dataset.
     """
     super(QueryHelperTests, self).setUp()
     param = self.input.param  # shorthand for the test-framework param reader
     self.create_primary_index = param("create_primary_index", True)
     self.use_gsi_for_primary = param("use_gsi_for_primary", True)
     self.use_gsi_for_secondary = param("use_gsi_for_secondary", True)
     self.scan_consistency = param("scan_consistency", "request_plus")
     self.skip_host_login = param("skip_host_login", False)
     # Only open a shell to the master node when host login is allowed.
     if self.skip_host_login:
         self.shell = None
     else:
         self.shell = RemoteMachineShellConnection(self.master)
     if not self.skip_init_check_cbserver:  # for upgrade tests
         self.buckets = RestConnection(self.master).get_buckets()
     self.docs_per_day = param("doc-per-day", 49)
     self.use_rest = param("use_rest", True)
     self.max_verify = param("max_verify", None)
     self.item_flag = param("item_flag", 0)
     self.n1ql_port = param("n1ql_port", 8093)
     self.dataset = param("dataset", "default")
     self.groups = param("groups", "all").split(":")
     self.doc_ops = param("doc_ops", False)
     self.batch_size = param("batch_size", 1)
     self.create_ops_per = param("create_ops_per", 0)
     self.expiry_ops_per = param("expiry_ops_per", 0)
     self.delete_ops_per = param("delete_ops_per", 0)
     self.update_ops_per = param("update_ops_per", 0)
     self.gens_load = self.generate_docs(self.docs_per_day)
     self.full_docs_list = self.generate_full_docs_list(self.gens_load)
     self.gen_results = TuqGenerators(self.log, full_set=self.full_docs_list)
     if not self.skip_init_check_cbserver:  # for upgrade tests
         self.n1ql_server = self.get_nodes_from_services_map(
             service_type="n1ql")
     generator = SQLDefinitionGenerator()
     # Map each dataset name to the generator method that produces its query
     # definitions.  An unrecognised dataset intentionally leaves
     # self.query_definitions untouched, matching the original if-chain.
     dataset_generators = {
         "default": generator.generate_employee_data_query_definitions,
         "employee": generator.generate_employee_data_query_definitions,
         "simple": generator.generate_simple_data_query_definitions,
         "sabre": generator.generate_sabre_data_query_definitions,
         "bigdata": generator.generate_big_data_query_definitions,
         "array": generator.generate_airlines_data_query_definitions,
     }
     if self.dataset in dataset_generators:
         self.query_definitions = dataset_generators[self.dataset]()
     self.query_definitions = generator.filter_by_group(
         self.groups, self.query_definitions)
     self.num_index_replicas = param("num_index_replica", 0)
Code example #3
0
File: base_2i.py — Project: pm48/testrunner
 def _pick_query_definitions_employee(self):
     """Select employee-dataset query definitions according to how the
     index is meant to be created (WHERE clause, index expressions, or
     plain).
     """
     generator = SQLDefinitionGenerator()
     usage = self.create_index_usage
     if usage == "where":
         self.query_definitions = (
             generator.generate_employee_data_query_definitions_for_where_clause())
         self.use_where_clause_in_index = True
     elif usage == "expressions":
         self.query_definitions = (
             generator.generate_employee_data_query_definitions_for_index_expressions())
         self.use_where_clause_in_index = True
     else:
         self.query_definitions = (
             generator.generate_employee_data_query_definitions())
Code example #4
0
File: base_2i.py — Project: EricACooper/testrunner
 def setUp(self):
     """Read the secondary-indexing test-input parameters, build the query
     definitions for the configured dataset, and prepare cluster helpers.

     Fixes versus the previous revision:
     - adds the missing "array" dataset branch, which the sibling setUp
       variants already handle; previously dataset=array left
       self.query_definitions unset and crashed at filter_by_group
     - normalizes spacing around assignments and keyword arguments (PEP 8)
     """
     super(BaseSecondaryIndexingTests, self).setUp()
     self.initial_stats = None
     self.final_stats = None
     self.index_lost_during_move_out = []
     self.verify_using_index_status = self.input.param(
         "verify_using_index_status", False)
     self.use_replica_when_active_down = self.input.param(
         "use_replica_when_active_down", True)
     self.use_where_clause_in_index = self.input.param(
         "use_where_clause_in_index", False)
     self.check_stats = self.input.param("check_stats", True)
     self.create_index_usage = self.input.param(
         "create_index_usage", "no_usage")
     self.scan_consistency = self.input.param(
         "scan_consistency", "request_plus")
     self.scan_vector_per_values = self.input.param(
         "scan_vector_per_values", None)
     self.timeout_for_index_online = self.input.param(
         "timeout_for_index_online", 600)
     self.max_attempts_check_index = self.input.param(
         "max_attempts_check_index", 10)
     self.max_attempts_query_and_validate = self.input.param(
         "max_attempts_query_and_validate", 10)
     self.index_present = self.input.param("index_present", True)
     self.run_create_index = self.input.param("run_create_index", True)
     self.verify_query_result = self.input.param("verify_query_result", True)
     self.verify_explain_result = self.input.param(
         "verify_explain_result", True)
     self.defer_build = self.input.param("defer_build", True)
     self.deploy_on_particular_node = self.input.param(
         "deploy_on_particular_node", None)
     self.run_drop_index = self.input.param("run_drop_index", True)
     self.run_query_with_explain = self.input.param(
         "run_query_with_explain", True)
     self.run_query = self.input.param("run_query", True)
     self.graceful = self.input.param("graceful", False)
     self.groups = self.input.param("groups", "all").split(":")
     self.use_rest = self.input.param("use_rest", False)
     if not self.use_rest:
         query_definition_generator = SQLDefinitionGenerator()
         if self.dataset == "default" or self.dataset == "employee":
             self.query_definitions = query_definition_generator.generate_employee_data_query_definitions()
         if self.dataset == "simple":
             self.query_definitions = query_definition_generator.generate_simple_data_query_definitions()
         if self.dataset == "sabre":
             self.query_definitions = query_definition_generator.generate_sabre_data_query_definitions()
         if self.dataset == "bigdata":
             self.query_definitions = query_definition_generator.generate_big_data_query_definitions()
         # Previously missing: keep this variant consistent with the other
         # setUp implementations, which support the "array" dataset.
         if self.dataset == "array":
             self.query_definitions = query_definition_generator.generate_airlines_data_query_definitions()
         self.query_definitions = query_definition_generator.filter_by_group(
             self.groups, self.query_definitions)
     self.ops_map = self._create_operation_map()
     self.find_nodes_in_list()
     self.generate_map_nodes_out_dist()
     self.memory_create_list = []
     self.memory_drop_list = []
     self.n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
     self.skip_cleanup = self.input.param("skip_cleanup", False)
     self.index_loglevel = self.input.param("index_loglevel", None)
     if self.index_loglevel:
         self.set_indexer_logLevel(self.index_loglevel)
     if self.dgm_run:
         self._load_doc_data_all_buckets(gen_load=self.gens_load)
     self.gsi_thread = Cluster()
     self.index_op = self.input.param("index_op", None)
     # Deferred builds only apply when GSI backs the secondary indexes.
     self.defer_build = self.defer_build and self.use_gsi_for_secondary
Code example #5
0
File: base_2i.py — Project: rayleyva/testrunner
 def setUp(self):
     """Read the run-control parameters and build the query definitions for
     the default/employee or simple dataset.
     """
     super(BaseSecondaryIndexingTests, self).setUp()
     param = self.input.param  # shorthand for the test-framework param reader
     self.run_create_index = param("run_create_index", True)
     self.run_drop_index = param("run_drop_index", True)
     self.run_query_with_explain = param("run_query_with_explain", True)
     self.run_query = param("run_query", True)
     self.graceful = param("graceful", False)
     self.groups = param("groups", "simple").split(":")
     generator = SQLDefinitionGenerator()
     # Unrecognised datasets intentionally leave self.query_definitions
     # untouched, matching the original if-chain.
     if self.dataset in ("default", "employee"):
         self.query_definitions = (
             generator.generate_employee_data_query_definitions())
     if self.dataset == "simple":
         self.query_definitions = (
             generator.generate_simple_data_query_definitions())
     self.query_definitions = generator.filter_by_group(
         self.groups, self.query_definitions)
     self.ops_map = self._create_operation_map()
     self.find_nodes_in_list()
     self.generate_map_nodes_out_dist()
     self.memory_create_list = []
     self.memory_drop_list = []