def test_multi_create_query_explain_drop_index_scan_consistency_with_where_clause(self):
    """Run the create/query/explain/drop scan-consistency flow using
    employee-data query definitions whose indexes carry a WHERE clause.

    Builds the definitions, enables the where-clause-in-index flag,
    narrows the definitions to the configured groups, then delegates to
    test_multi_create_query_explain_drop_index_scan_consistency().
    """
    generator = SQLDefinitionGenerator()
    definitions = generator.generate_employee_data_query_definitions_for_index_where_clause()
    self.use_where_clause_in_index = True
    # Keep only the definitions belonging to the groups this run targets.
    self.query_definitions = generator.filter_by_group(self.groups, definitions)
    self.test_multi_create_query_explain_drop_index_scan_consistency()
def test_gsi_with_flush_bucket_redaction_enabled(self):
    """Verify GSI survives a bucket flush while log redaction is enabled,
    and that collected logs are properly redacted.

    Flow: raise indexer/projector log verbosity, load docs, create a
    primary index plus 'simple'-group secondary indexes, run scans,
    flush the first bucket, reload it, re-run scans, drop the indexes,
    then collect the cluster logs and verify the redacted/non-redacted
    log file pair exists and passes redaction verification.
    """
    # Trace-level logging ensures the log files contain material that
    # redaction must scrub, making the later verification meaningful.
    self.set_indexer_logLevel("trace")
    self.set_projector_logLevel("trace")
    json_generator = JsonGenerator()
    gen_docs = json_generator.generate_all_type_documents_for_gsi(
        docs_per_day=self.doc_per_day, start=0)
    full_docs_list = self.generate_full_docs_list(gen_docs)
    n1ql_helper = N1QLHelper(use_rest=True, buckets=self.buckets,
                             full_docs_list=full_docs_list,
                             log=log, input=self.input,
                             master=self.master)
    self.load(gen_docs)
    n1ql_node = self.get_nodes_from_services_map(service_type="n1ql")
    n1ql_helper.create_primary_index(using_gsi=True, server=n1ql_node)
    query_definition_generator = SQLDefinitionGenerator()
    query_definitions = query_definition_generator.generate_airlines_data_query_definitions()
    query_definitions = query_definition_generator.filter_by_group(
        ["simple"], query_definitions)
    # set log redaction level, collect logs, verify log files exist and
    # verify them for redaction
    self.set_redaction_level()
    self.start_logs_collection()
    # Create the secondary indexes for every definition on every bucket.
    for query_definition in query_definitions:
        for bucket in self.buckets:
            create_query = query_definition.generate_index_create_query(
                bucket.name)
            n1ql_helper.run_cbq_query(query=create_query, server=n1ql_node)
    # Scan before the flush to confirm the indexes serve queries.
    self._run_scan_queries(query_definitions, n1ql_helper, n1ql_node)
    rest = RestConnection(self.master)
    rest.flush_bucket(self.buckets[0].name)
    # Allow the flush and index cleanup to settle before reloading.
    self.sleep(100)
    self.load(gen_docs, buckets=[self.buckets[0]])
    # Scan again after reload to confirm the indexes recovered.
    self._run_scan_queries(query_definitions, n1ql_helper, n1ql_node)
    for query_definition in query_definitions:
        for bucket in self.buckets:
            drop_query = query_definition.generate_index_drop_query(
                bucket=bucket.name)
            n1ql_helper.run_cbq_query(query=drop_query, server=n1ql_node)
    result = self.monitor_logs_collection()
    log.info(result)
    try:
        logs_path = result["perNode"]["ns_1@" + str(self.master.ip)]["path"]
    except KeyError:
        # NOTE(review): fallback node key looks garbled (possibly a
        # redacted node name such as ns_1@<ip>) — confirm against the
        # cluster's actual perNode keys.
        logs_path = result["perNode"]["[email protected]"]["path"]
    redactFileName = logs_path.split('/')[-1]
    nonredactFileName = logs_path.split('/')[-1].replace('-redacted', '')
    remotepath = logs_path[0:logs_path.rfind('/') + 1]
    log_file = self.input.param("log_file_name", "indexer.log")
    self.verify_log_files_exist(remotepath=remotepath,
                                redactFileName=redactFileName,
                                nonredactFileName=nonredactFileName)
    self.verify_log_redaction(remotepath=remotepath,
                              redactFileName=redactFileName,
                              nonredactFileName=nonredactFileName,
                              logFileName="ns_server.{0}".format(log_file))

def _run_scan_queries(self, query_definitions, n1ql_helper, n1ql_node):
    """Run every scan query from *query_definitions* against each bucket.

    Extracted helper: the same scan pass is executed both before the
    bucket flush and after the reload.
    """
    for query_definition in query_definitions:
        for bucket in self.buckets:
            scan_query = query_definition.generate_query(bucket=bucket.name)
            n1ql_helper.run_cbq_query(query=scan_query, server=n1ql_node)
def test_multi_create_query_explain_drop_index_with_index_expressions(self):
    """Run the create/query/explain/drop flow using employee-data query
    definitions built around index expressions.

    Generates the expression-based definitions, filters them down to the
    configured groups, and delegates to
    test_multi_create_query_explain_drop_index().
    """
    generator = SQLDefinitionGenerator()
    self.query_definitions = generator.filter_by_group(
        self.groups,
        generator.generate_employee_data_query_definitions_for_index_expressions())
    self.test_multi_create_query_explain_drop_index()