def __get_connection():
     """
     :returns: a lazily-created singleton psycopg2 connection to the postgres
         instance configured with postgres_idx_advisor.so.  The connection is
         cached on PostgresQueryHandler.connection with autocommit enabled;
         subsequent calls reuse it.  Exits the process (exit code 1) if the
         connection cannot be established.
     """
     # Create connection only if it is not done before
     if PostgresQueryHandler.connection is None:
         # Connect to postgres
         try:
             # Read database config from config file
             database_config = Utils.read_config_data(
                 Constants.CONFIG_DATABASE)
             # connect to postgres
             PostgresQueryHandler.connection = psycopg2.connect(
                 database=database_config["dbname"],
                 user=database_config["user"],
                 password=database_config["password"],
                 host=database_config["host"],
                 port=database_config["port"])
             PostgresQueryHandler.connection.autocommit = True
         # Capture connection exception
         except psycopg2.OperationalError as exception:
             # BUG FIX: .format() must be applied to the string literal, not
             # to print()'s return value (None), which raised AttributeError
             # and hid the real connection error.
             print('Unable to connect to postgres \n Reason: {0}'.format(
                 str(exception)))
             # exit code
             sys.exit(1)
         else:
             cursor = PostgresQueryHandler.connection.cursor()
             # Print PostgreSQL version
             cursor.execute("SELECT version();")
             record = cursor.fetchone()
             #print("***You are connected to below Postgres database*** \n ", record, "\n")
     return PostgresQueryHandler.connection
 def __get_default_connection():
     """
     :returns: a lazily-created singleton psycopg2 connection to the default
         postgres instance (no index-advisor extension).  Cached on
         PostgresQueryHandler.connectionDefault with autocommit enabled.
         Exits the process (exit code 1) if the connection fails.
     """
     if PostgresQueryHandler.connectionDefault is None:
         # connect to postgres
         try:
             # read database config from config file
             database_config = Utils.read_config_data(Constants.CONFIG_DATABASE)
             # connect to postgres
             PostgresQueryHandler.connectionDefault = psycopg2.connect(
                 database=database_config["dbname"],
                 user=database_config["user"],
                 password=database_config["password"],
                 host=database_config["host"],
                 port=database_config["port"])
             PostgresQueryHandler.connectionDefault.autocommit = True
         # capture connection exception
         except psycopg2.OperationalError as exception:
             # BUG FIX: .format() must be applied to the string literal, not
             # to print()'s return value (None), which raised AttributeError.
             print('Unable to connect to postgres \n Reason: {0}'.format(str(exception)))
             # exit code
             sys.exit(1)
         else:
             cursor = PostgresQueryHandler.connectionDefault.cursor()
             # Print PostgreSQL version
             cursor.execute("SELECT version();")
             record = cursor.fetchone()
             print("***You are connected to below Postgres database*** \n ", record, "\n")
     return PostgresQueryHandler.connectionDefault
# Example 3
 def generate_next_state(queries_list, action, observation_space):
     """
     :param queries_list: list of queries
     :param action: action given by the agent
     :param observation_space: existing observation
     :return: new observation and the cost of the action in the DB
         Generates the next observation state and the cost of performing an action given by the agent
     :raises ValueError: if the action does not map to any table.column in the
         action space
     """
     # Mark the chosen index column as created in the state row
     observation_space[0, action] = 1
     action_space = Utils.read_json_action_space()
     table_name = col_name = None
     # The action space maps "TABLE.COLUMN" -> action index; find the match
     for key, value in action_space.items():
         if value == action:
             table_name, col_name = key.split(".")
             break
     # BUG FIX: previously table_name/col_name stayed unbound (NameError)
     # when the action was absent from the action space; fail explicitly.
     if table_name is None:
         raise ValueError('Action {0} not found in action space'.format(action))
     # Create a hypothetical index for the chosen column and verify it
     PostgresQueryHandler.create_hypo_index(table_name, col_name)
     PostgresQueryHandler.check_hypo_indexes()
     # Sum the planner cost of every query under the hypothetical index
     query_cost_with_idx_advisor_suggestion = 0.0
     for query in queries_list:
         result = PostgresQueryHandler.execute_select_query(query.query_string, load_index_advisor=False,
                                                            get_explain_plan=True)
         query_cost_with_idx_advisor_suggestion += PostgresQueryHandler.add_query_cost_suggested_indexes(
             result)
     return observation_space, query_cost_with_idx_advisor_suggestion
# Example 4
 def get_gin_properties():
     """
     :returns: offset (as int), train file location, test file location and
         the agent name, all read from the gin-properties config section
     """
     gin_config = Utils.read_config_data(Constants.CONFIG_GINPROPERTIES)
     # k_offset is stored as text in the config, so convert it here
     return (int(gin_config["k_offset"]),
             gin_config["train_file"],
             gin_config["test_file"],
             gin_config["agent"])
# Example 5
 def init_variables(filename):
     """
     :param filename: contains queries
     :return: retrieved queries, predicates and suggested indexes
     """
     # Load table/column metadata from the database before parsing queries
     executor = QueryExecutor()
     executor.initialize_table_information()
     # Parse the SQL file into query objects, their predicates and the
     # indexes the advisor suggests for them
     parsed = Utils.get_queries_from_sql_file(
         executor.column_map, executor.tables_map, filename)
     queries_list, all_predicates, idx_advisor_suggested_indexes = parsed
     print('Suggested indexes', idx_advisor_suggested_indexes)
     return queries_list, all_predicates, idx_advisor_suggested_indexes
# Example 6
    def initialize_table_information():
        """
        Populate QueryExecutor.tables_map (table name -> Table with columns
        and row count) and QueryExecutor.column_map (column name -> list of
        tables containing it) from the live postgres catalog.
        """
        # Table names to inspect come from the config file
        table_names = tuple(Utils.read_config_data(Constants.CONFIG_TABLES).keys())

        # Ask postgres for the column details of those tables
        returned_table_details = PostgresQueryHandler.execute_select_query(
            Constants.QUERY_GET_TABLE_DETAILS.format(table_names))

        for table_column in returned_table_details:
            # Row layout: 0 = table_name, 1 = column_name,
            #             2 = data type (possibly with size), 3 = primary-key flag
            tbl = table_column[0]
            col = table_column[1]
            dtype = table_column[2]

            if dtype in Constants.POSTGRES_DATA_TYPE_SIZE_MAP:
                # Fixed-length data type: size comes straight from the map
                size = Constants.POSTGRES_DATA_TYPE_SIZE_MAP[dtype]
            else:
                # Variable-length data type: the size is embedded in the text
                # between parentheses, e.g. "character varying(44)",
                # "numeric(15,2)", "character(25)"
                open_paren = dtype.find("(")
                close_paren = dtype.find(")")
                inner = str(dtype[open_paren + 1:close_paren])
                size = sum(int(part) for part in inner.split(','))

            if tbl not in QueryExecutor.tables_map:
                # First time we see this table: create its entry, fetching
                # the row count from postgres
                QueryExecutor.tables_map[tbl] = Table(
                    tbl, PostgresQueryHandler.get_table_row_count(tbl))

            # Attach the column to its table object
            QueryExecutor.tables_map[tbl].add_column(Column(col, dtype, size))
            # Maintain the reverse index (column name -> tables) for lookups
            QueryExecutor.column_map.setdefault(col, list()).append(tbl)
 def get_observation_space(queries_list):
     """
     :param queries_list: list of queries
     :return: observation matrix containing the selectivity factor
         Calculates the initial observation space with the selectivity factors.
         Currently a static matrix as the observation space has only 7 queries and 61 coloumns.
         To work on dynamic method a whole new mechanism needs to be written on how to construct the action_space_json file
     """
     # Row 0 tracks created indexes (all zero initially); rows 1..7 hold
     # per-query selectivities, defaulting to 1
     observation_space = np.ones((8, 61))
     observation_space[0, :] = np.zeros((61,))
     action_space = Utils.read_json_action_space()
     for row_idx, query in enumerate(queries_list, start=1):
         for key in query.where_clause_columns_query:
             table_name, col_name = key.split(
                 Constants.MULTI_KEY_CONCATENATION_STRING)
             # Map "TABLE.COLUMN" to its column index in the action space
             col_idx = action_space[(table_name + "." + col_name).upper()]
             observation_space[row_idx, col_idx] = \
                 query.selectivity_for_where_clause_columns[key]
     return observation_space