def _convert_template_query_info(self, n1ql_queries=None, table_map=None,
                                 define_gsi_index=True, gen_expected_result=False,
                                 ansi_joins=False, aggregate_pushdown=False,
                                 partitioned_indexes=False):
    """Convert n1ql query templates into concrete sql/n1ql query maps with index info.

    For each template, picks the appropriate QueryHelper conversion based on
    whether the template contains SUBTABLE/SUBQUERY markers or deeper set
    operators (UNION/INTERSECT/EXCEPT), optionally attaches the expected SQL
    result, and collects the resulting maps.

    :param n1ql_queries: list of n1ql query template strings (default: empty list)
    :param table_map: table metadata map used for template substitution
    :param define_gsi_index: whether GSI index definitions should be generated
    :param gen_expected_result: if True, run the SQL query and store its JSON
        result under "expected_result" (failures are printed and ignored,
        best-effort)
    :param ansi_joins: enable ANSI-join template handling
    :param aggregate_pushdown: enable aggregate-pushdown template handling
    :param partitioned_indexes: generate partitioned index definitions
    :return: list of sql/n1ql/index maps, one per input template
    """
    # Avoid shared mutable default arguments.
    if n1ql_queries is None:
        n1ql_queries = []
    if table_map is None:
        table_map = {}
    helper = QueryHelper()
    query_input_list = []
    for n1ql_query in n1ql_queries:
        # _check_deeper_query_condition detects set operators such as
        # "UNION ALL", "INTERSECT ALL", "EXCEPT ALL", "UNION", "INTERSECT",
        # "EXCEPT" which need sub-query index handling.
        if not helper._check_deeper_query_condition(n1ql_query):
            if "SUBTABLE" in n1ql_query:
                sql_n1ql_index_map = helper._convert_sql_template_to_value_with_subqueryenhancements(
                    n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index)
            elif "SUBQUERY" in n1ql_query:
                sql_n1ql_index_map = helper._convert_sql_template_to_value_with_subqueries(
                    n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index)
            else:
                # Takes in sql and n1ql queries and creates indexes for them.
                sql_n1ql_index_map = helper._convert_sql_template_to_value_for_secondary_indexes(
                    n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index,
                    ansi_joins=ansi_joins, aggregate_pushdown=aggregate_pushdown,
                    partitioned_indexes=partitioned_indexes)
        else:
            sql_n1ql_index_map = helper._convert_sql_template_to_value_for_secondary_indexes_sub_queries(
                n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index,
                partitioned_indexes=partitioned_indexes)
        if gen_expected_result:
            sql_query = sql_n1ql_index_map["sql"]
            try:
                sql_n1ql_index_map["expected_result"] = self._query_and_convert_to_json(sql_query)
            except Exception as ex:  # best-effort: missing expected_result is tolerated
                print(ex)
        query_input_list.append(sql_n1ql_index_map)
    # Fix: the original built this list but never returned it, making the
    # method a no-op for callers (sibling converters return their lists).
    return query_input_list
def _convert_template_query_info_with_gsi(self, file_path, gsi_index_file_path=None,
                                          table_map=None, table_name="simple_table",
                                          define_gsi_index=True, gen_expected_result=False):
    """Convert n1ql templates read from *file_path* and write the resulting
    query maps as JSON lines to *gsi_index_file_path*.

    A template whose expected-result generation fails is skipped (not written).

    :param file_path: input file of n1ql query templates, one per line
    :param gsi_index_file_path: output file; each line is a JSON-encoded query map
    :param table_map: table metadata map used for template substitution
    :param table_name: table used for secondary-index conversions
    :param define_gsi_index: whether GSI index definitions should be generated
    :param gen_expected_result: if True, run the SQL query and store the JSON
        result under "expected_result"; on failure the entry is not written
    """
    # Avoid shared mutable default argument.
    if table_map is None:
        table_map = {}
    helper = QueryHelper()
    n1ql_queries = self._read_from_file(file_path)
    # Fix: use a context manager so the output file is always closed
    # (the original leaked the handle on any exception).
    with open(gsi_index_file_path, 'w') as f:
        for n1ql_query in n1ql_queries:
            check = True
            if not helper._check_deeper_query_condition(n1ql_query):
                if "SUBQUERY" in n1ql_query:
                    query_map = helper._convert_sql_template_to_value_with_subqueries(
                        n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index)
                else:
                    query_map = helper._convert_sql_template_to_value_for_secondary_indexes(
                        n1ql_query, table_map=table_map, table_name=table_name,
                        define_gsi_index=define_gsi_index)
            else:
                query_map = helper._convert_sql_template_to_value_for_secondary_indexes_sub_queries(
                    n1ql_query, table_map=table_map, table_name=table_name,
                    define_gsi_index=define_gsi_index)
            if gen_expected_result:
                query = query_map["sql"]
                try:
                    query_map["expected_result"] = self._query_and_convert_to_json(query)
                except Exception as ex:
                    print(ex)
                    # Do not persist entries whose expected result failed.
                    check = False
            if check:
                f.write(json.dumps(query_map) + "\n")
def _gen_data_simple_table(self, number_of_rows=1000):
    """Insert *number_of_rows* generated rows into every table that exposes a
    primary-key column, using 1-based string primary keys ("1", "2", ...)."""
    helper = QueryHelper()
    pkey_map = self._get_pkey_map_for_tables_wit_primary_key_column()
    for table_name, table_info in pkey_map.items():
        for row_id in range(1, number_of_rows + 1):
            insert_statement = helper._generate_insert_statement(
                table_name, table_info, "\"" + str(row_id) + "\"")
            self._insert_execute_query(insert_statement)
def _convert_template_query_info(self, n1ql_queries=None, table_map=None,
                                 define_gsi_index=True, gen_expected_result=False):
    """Convert n1ql query templates into concrete sql/n1ql query maps.

    NOTE(review): this file contains another `_convert_template_query_info`
    with extra keyword parameters; if both live in the same class the later
    definition silently shadows the earlier one — confirm which is intended.

    :param n1ql_queries: list of n1ql query template strings (default: empty list)
    :param table_map: table metadata map used for template substitution
    :param define_gsi_index: whether GSI index definitions should be generated
    :param gen_expected_result: if True, run the SQL query and store its JSON
        result under "expected_result" (failures are printed and ignored)
    :return: list of sql/n1ql/index maps, one per input template
    """
    # Avoid shared mutable default arguments.
    if n1ql_queries is None:
        n1ql_queries = []
    if table_map is None:
        table_map = {}
    helper = QueryHelper()
    query_input_list = []
    for n1ql_query in n1ql_queries:
        # Templates with set operators (UNION/INTERSECT/EXCEPT) need the
        # sub-query conversion path.
        if not helper._check_deeper_query_condition(n1ql_query):
            if "SUBTABLE" in n1ql_query:
                query_map = helper._convert_sql_template_to_value_with_subqueryenhancements(
                    n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index)
            elif "SUBQUERY" in n1ql_query:
                query_map = helper._convert_sql_template_to_value_with_subqueries(
                    n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index)
            else:
                query_map = helper._convert_sql_template_to_value_for_secondary_indexes(
                    n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index)
        else:
            query_map = helper._convert_sql_template_to_value_for_secondary_indexes_sub_queries(
                n1ql_query, table_map=table_map, define_gsi_index=define_gsi_index)
        if gen_expected_result:
            query = query_map["sql"]
            try:
                query_map["expected_result"] = self._query_and_convert_to_json(query)
            except Exception as ex:  # best-effort: the entry is still collected
                print(ex)
        # Removed the dead `check` flag the original assigned but never read.
        query_input_list.append(query_map)
    # Fix: the original built this list but never returned it.
    return query_input_list
def _convert_delete_template_query_info_with_merge(self, source_table="copy_simple_table", target_table="simple_table" ,n1ql_queries=[], table_map={}):
    """Build MERGE-based delete query values for every template in
    *n1ql_queries*, merging *source_table* into *target_table*.

    :return: list of converted query maps, one per input template
    """
    helper = QueryHelper()
    return [
        helper._delete_sql_template_to_values_with_merge(
            source_table=source_table,
            target_table=target_table,
            sql=template,
            table_map=table_map)
        for template in n1ql_queries
    ]
def _gen_queries_from_template(self, query_path="./queries.txt", table_name=None):
    """Read query templates from *query_path* and convert each one against
    *table_name*'s field/type map via QueryHelper."""
    helper = QueryHelper()
    field_type_map = self._get_values_with_type_for_fields_in_table()
    table_map = field_type_map[table_name]
    with open(query_path) as query_file:
        templates = query_file.readlines()
    for template in templates:
        helper._convert_sql_template_to_value(
            sql=template, table_map=table_map, table_name=table_name)
def _convert_delete_template_query_info(self, n1ql_queries=[], table_map={}):
    """Convert each delete-query template in *n1ql_queries* to concrete
    values using *table_map*.

    :return: list of converted query maps, one per input template
    """
    helper = QueryHelper()
    return [
        helper._delete_sql_template_to_values(sql=template, table_map=table_map)
        for template in n1ql_queries
    ]
def setUp(self):
    """Initialise the RQG Asterix test fixture.

    Reads every test knob from the framework input parameters, prepares the
    query helper and keyword list, optionally loads initial data into the
    cluster, and opens an SSH client when REST is not used. Any failure while
    probing the remote OS triggers tearDown().
    """
    super(RQGASTERIXTests, self).setUp()
    self.client_map = {}
    # NOTE(review): this message says "finished" but is logged at the start of
    # setUp — kept byte-identical; confirm whether it should read "started".
    self.log.info("============== RQGTests setup was finished for test #{0} {1} =============="\
                  .format(self.case_number, self._testMethodName))
    self.skip_setup_cleanup = True
    # --- behaviour toggles and CRUD configuration ---
    self.remove_alias = self.input.param("remove_alias", True)
    self.number_of_buckets = self.input.param("number_of_buckets", 5)
    self.crud_type = self.input.param("crud_type", "update")
    self.populate_with_replay = self.input.param("populate_with_replay", False)
    self.crud_batch_size = self.input.param("crud_batch_size", 1)
    self.skip_cleanup = self.input.param("skip_cleanup", False)
    self.record_failure = self.input.param("record_failure", False)
    self.failure_record_path = self.input.param("failure_record_path", "/tmp")
    self.use_mysql = self.input.param("use_mysql", True)
    self.joins = self.input.param("joins", False)
    self.ansi_joins = self.input.param("ansi_joins", False)
    self.subquery = self.input.param("subquery", False)
    self.initial_loading_to_cb = self.input.param("initial_loading_to_cb", True)
    self.change_bucket_properties = self.input.param(
        "change_bucket_properties", False)
    self.database = self.input.param("database", "flightstats")
    self.merge_operation = self.input.param("merge_operation", False)
    self.load_copy_table = self.input.param("load_copy_table", False)
    # --- credentials for MySQL and the Couchbase cluster ---
    self.user_id = self.input.param("user_id", "root")
    self.user_cluster = self.input.param("user_cluster", "Administrator")
    self.password = self.input.param("password", "")
    self.password_cluster = self.input.param("password_cluster", "password")
    self.generate_input_only = self.input.param("generate_input_only", False)
    self.using_gsi = self.input.param("using_gsi", True)
    self.reset_database = self.input.param("reset_database", True)
    self.items = self.input.param("items", 1000)
    # Underscores in the test-input URL stand in for dots (framework quirk).
    self.mysql_url = self.input.param("mysql_url", "localhost")
    self.mysql_url = self.mysql_url.replace("_", ".")
    self.n1ql_server = self.get_nodes_from_services_map(
        service_type="n1ql")
    # NOTE(review): "concurreny_count" is misspelled but is the externally
    # visible config key — renaming would break existing test configs.
    self.concurreny_count = self.input.param("concurreny_count", 10)
    self.total_queries = self.input.param("total_queries", None)
    self.run_query_with_primary = self.input.param(
        "run_query_with_primary", False)
    self.run_query_with_secondary = self.input.param(
        "run_query_with_secondary", False)
    self.run_explain_with_hints = self.input.param(
        "run_explain_with_hints", False)
    self.test_file_path = self.input.param("test_file_path", None)
    self.db_dump_path = self.input.param("db_dump_path", None)
    self.input_rqg_path = self.input.param("input_rqg_path", None)
    self.set_limit = self.input.param("set_limit", 0)
    self.query_count = 0
    self.use_rest = self.input.param("use_rest", True)
    self.ram_quota = self.input.param("ram_quota", 512)
    self.drop_bucket = self.input.param("drop_bucket", False)
    # An explicit RQG input path overrides the dump/test file locations.
    if self.input_rqg_path is not None:  # fix: identity check instead of != None
        self.db_dump_path = self.input_rqg_path + "/db_dump/database_dump.zip"
        self.test_file_path = self.input_rqg_path + "/input/source_input_rqg_run.txt"
    self.query_helper = QueryHelper()
    self.keyword_list = self.query_helper._read_keywords_from_file(
        "b/resources/rqg/n1ql_info/keywords.txt")
    self._initialize_analytics_helper()
    self.rest = RestConnection(self.master)
    self.indexer_memQuota = self.input.param("indexer_memQuota", 1024)
    if self.initial_loading_to_cb:
        self._initialize_cluster_setup()
    # Fall back to SSH when the REST interface is disabled.
    if not self.use_rest:
        self._ssh_client = paramiko.SSHClient()
        self._ssh_client.set_missing_host_key_policy(
            paramiko.AutoAddPolicy())
    try:
        self.os = self.shell.extract_remote_info().type.lower()
    except Exception as ex:
        self.log.error('SETUP FAILED')
        self.tearDown()