Example #1
def test_broken_aqo_tables(install_postgres):
    """
    Learn aqo with specific SQL query, partially remove records
    for this query from aqo tables and make sure aqo will add it again.
    """

    SQL_QUERY = "SELECT 1"

    install_postgres.load_extension('aqo')
    conn = psycopg2.connect(install_postgres.connstring)
    execute(conn, 'SET aqo.mode TO intelligent')
    execute(conn, SQL_QUERY)

    num = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts"
        " WHERE query_text = '%s'" % SQL_QUERY)[0][0]
    assert num == 1

    execute(conn,
            "DELETE FROM aqo_query_texts WHERE query_text = '%s'" % SQL_QUERY)
    execute(conn, SQL_QUERY)
    num = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts"
        "WHERE query_text = '%s'" % SQL_QUERY)[0][0]

    assert num == 1
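Note: the examples in this suite interpolate query text into SQL with the % operator, which breaks as soon as the query itself contains a single quote. A minimal sketch of the same lookup using psycopg2 parameter binding instead (count_query_texts is a hypothetical helper, not part of the suite):

import psycopg2

def count_query_texts(connstring, sql_query):
    # Parameter binding quotes sql_query safely, unlike '%s' interpolation.
    conn = psycopg2.connect(connstring)
    with conn.cursor() as cur:
        cur.execute("SELECT COUNT(*) FROM aqo_query_texts"
                    " WHERE query_text = %s", (sql_query,))
        num = cur.fetchone()[0]
    conn.close()
    return num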
Example #2
def populate_tpch(request):
    """ This method setup tables for TPC-H benchmark.
    """

    import psycopg2
    from helpers.sql_helpers import execute

    CONN_STRING = "host='localhost' user='******'"

    # GETTING A BENCHMARK

    TPCH_SCALE = "1"    # 1, 10, 100, 300, 1000, 3000, 10000, 30000, 100000
    COMMIT_HASH = "c5cd7711cc35"
    TPCH_BENCHMARK = "https://bitbucket.org/tigvarts/tpch-dbgen/get/" \
        "%s.zip" % COMMIT_HASH
    tbls = ["region.tbl", "nation.tbl", "customer.tbl", "supplier.tbl",
            "part.tbl", "partsupp.tbl", "orders.tbl", "lineitem.tbl"]
    sqls = ["postgres_dll.sql", "postgres_load.sql", "postgres_ri.sql"]

    tpch_archive = os.path.join(TMP_DIR,
                                "tpch-benchmark-%s.zip" % COMMIT_HASH)
    if not os.path.exists(tpch_archive):
        download_file(TPCH_BENCHMARK, tpch_archive)

    tpch_dir = os.path.join(TMP_DIR, "tigvarts-tpch-dbgen-%s" % COMMIT_HASH)
    if not os.path.exists(tpch_dir):
        os.mkdir(tpch_dir)
        subprocess.check_output(["unzip", tpch_archive, "-d", TMP_DIR])
    os.chdir(tpch_dir)

    # SETUP DATABASE (see ./install.sh)
    subprocess.check_output(["make"])
    # TODO: run multiple parallel streams when
    #  generating large amounts of data
    subprocess.check_output(["./dbgen", "-s", TPCH_SCALE, "-vf"])

    assert TMP_DIR == '/tmp'
    for t in tbls:
        if os.path.exists(os.path.join(TMP_DIR, t)):
            os.remove(os.path.join(TMP_DIR, t))
        shutil.move(os.path.join(tpch_dir, t), TMP_DIR)

    conn = psycopg2.connect(CONN_STRING)
    for sql_file in sqls:
        with open(os.path.join(tpch_dir, sql_file)) as f:
            execute(conn, f.read())
    conn.close()

    for t in tbls:
        tbl_path = os.path.join(TMP_DIR, t)
        if os.path.exists(tbl_path):
            os.remove(tbl_path)
Example #3
def populate_imdb(request):
    """ This method needed for creating tables and populate
        them with IMDb dataset.

    http://www.imdb.com/interfaces
    """

    import psycopg2
    from helpers.sql_helpers import execute

    CONN_STRING = "host='localhost' user='******'"

    job_file = os.path.join(TMP_DIR, "join-order-benchmark.tar.gz")
    job_dir = os.path.join(TMP_DIR, "join-order-benchmark-0.1")
    job_url = "https://codeload.github.com/" \
        "ligurio/join-order-benchmark/tar.gz/0.1"
    if not os.path.exists(job_file):
        download_file(job_url, job_file)

    subprocess.check_output(["tar", "xvzf", job_file, "-C", TMP_DIR])

    # CUSTOM METHOD OF DATABASE SETUP

    imdb_tgz = os.path.join(TMP_DIR, "imdb.tgz")
    if not os.path.exists(imdb_tgz):
        download_file("http://homepages.cwi.nl/~boncz/job/imdb.tgz", imdb_tgz)

    imdb_csv = os.path.join(TMP_DIR, "imdb_csv")
    if not os.path.exists(imdb_csv):
        os.mkdir(imdb_csv)
    subprocess.check_output(["tar", "xvzf", imdb_tgz, "-C", imdb_csv])

    os.chdir(imdb_csv)
    for csv in glob.glob('*.csv'):
        csv_file = os.path.join(imdb_csv, csv)
        with open(csv_file) as f:
            data = f.read().split('\n')
        for i in range(len(data)):
            data[i] = data[i].replace(r'\\', '').replace(r'\"', '')
        with open(csv_file, 'w') as f:
            f.write('\n'.join(data))

    assert TMP_DIR == '/tmp'
    sql_files = ["schema", "imdb_load", "fkindexes", "imdb_analyse"]
    conn = psycopg2.connect(CONN_STRING)
    for f in sql_files:
        sql_path = os.path.join(job_dir, f + '.sql')
        with open(sql_path, 'r') as file:
            execute(conn, file.read())
    conn.close()
Example #4
def set_query_aqo_param(sql_query, param_name, value, connstring):
    """
    Set aqo parameter for specified SQL query.
    Possible parameters: learn_aqo, use_aqo, fspace_hash, auto_tuning
    """

    conn = psycopg2.connect(connstring)

    execute(
        conn, "UPDATE aqo_queries SET %s = %s \
            WHERE query_hash = \
            (SELECT query_hash from aqo_query_texts \
            WHERE query_text = '%s')" % (param_name, value, sql_query))
    conn.close()

    assert get_query_aqo_param(sql_query, param_name, connstring) == value
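A usage sketch (an assumption, not from the suite): because the value is interpolated into the UPDATE and then compared against what get_query_aqo_param returns, passing Python booleans keeps both sides consistent.

# Hypothetical usage; connstring points at a database with aqo loaded.
sql = "SELECT * FROM pg_class WHERE relpages > 455"
set_query_aqo_param(sql, 'use_aqo', False, connstring)   # stop using learned plans
set_query_aqo_param(sql, 'learn_aqo', True, connstring)  # keep collecting statistics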
Example #5
def test_default_aqo_mode(install_postgres):
    """
    Make sure default aqo mode stay without changes
    """

    install_postgres.load_extension('aqo')
    conn = psycopg2.connect(install_postgres.connstring)
    SQL_QUERY = "SELECT boot_val FROM pg_settings WHERE name='aqo.mode'"
    mode = execute(conn, SQL_QUERY)[0][0]
    conn.close()

    assert mode == "controlled"
Example #6
def query_to_hash(sql_query, connstring):
    """
    Return query hash for specified SQL query.
    """

    conn = psycopg2.connect(connstring)
    query_hash_sql = execute(
        conn, "SELECT query_hash \
                            FROM aqo_query_texts \
                            WHERE query_text='%s'" % sql_query)
    conn.close()

    return query_hash_sql
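Since query_to_hash returns raw result rows rather than a scalar, callers need to unpack them; a usage sketch (an assumption, not from the suite):

# Hypothetical usage: look up the hash AQO assigned to a query it has seen.
rows = query_to_hash("SELECT 1", connstring)
query_hash = rows[0][0] if rows else None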
Example #7
def test_available_modes(install_postgres):
    """
    Make sure aqo extension provides only known modes.
    """

    install_postgres.load_extension('aqo')
    known_modes = ['intelligent', 'forced', 'controlled', 'disabled']

    conn = psycopg2.connect(install_postgres.connstring)
    SQL_QUERY = "SELECT enumvals FROM pg_settings WHERE name='aqo.mode';"
    actual_modes = execute(conn, SQL_QUERY)[0][0]
    conn.close()

    assert known_modes == actual_modes
Example #8
def test_in_honor_of_teodor(install_postgres):
    """
    Train AQO on self-joins and joins over tables
    with different data distributions.
    """

    PREP_TABLE_QUERY = """
DROP TABLE IF EXISTS t;
DROP TABLE IF EXISTS a;
DROP TABLE IF EXISTS b;

SELECT id::int4, (id/3)::int4 AS v INTO t FROM GENERATE_SERIES(1,10000) AS id;
CREATE UNIQUE INDEX it ON t (id, v);

SELECT (100.0 * RANDOM())::int4 AS id, (100.0 * RANDOM())::int4 AS v
INTO a FROM GENERATE_SERIES(1,10000) AS id;
CREATE INDEX ia ON a (id, v);

SELECT (100000.0 * RANDOM())::int4 AS id, (100000.0 * RANDOM())::int4 AS v
INTO b FROM GENERATE_SERIES(1,10000) AS id;
CREATE INDEX ib ON b (id, v);
"""

    queries = [
        "SELECT * FROM t t1, t t2 WHERE t1.id = t2.id AND t1.v = t2.v;",
        "SELECT * FROM a t1, a t2 WHERE t1.id = t2.id AND t1.v = t2.v;",
        "SELECT * FROM b t1, b t2 WHERE t1.id = t2.id AND t1.v = t2.v;",
        "SELECT * FROM a t1, t t2 WHERE t1.id = t2.id AND t1.v = t2.v;",
        "SELECT * FROM b t1, t t2 WHERE t1.id = t2.id AND t1.v = t2.v;",
        "SELECT * FROM a t1, b t2 WHERE t1.id = t2.id AND t1.v = t2.v;"
    ]

    install_postgres.load_extension('aqo')
    connstring = install_postgres.connstring
    conn = psycopg2.connect(connstring)
    execute(conn, PREP_TABLE_QUERY)
    execute(conn, 'VACUUM ANALYZE t')
    execute(conn, 'VACUUM ANALYZE a')
    execute(conn, 'VACUUM ANALYZE b')
    conn.close()

    time.sleep(10)

    reset_aqo_stats(connstring)

    install_postgres.set_option('aqo.mode', 'intelligent')
    for q in queries:
        print(q)
        stats = learn_aqo(q, connstring, 100)
        plot_stats(stats, connstring)
        evaluate_aqo(stats)
Example #9
def get_query_aqo_stat(sql_query, param_name, connstring):
    """
    Return aqo statistics for a specific query:
        - execution time with/without aqo
        - planning time with/without aqo
        - cardinality error with/without aqo
        - executions with/without aqo
    """

    conn = psycopg2.connect(connstring)

    response = execute(
        conn, "SELECT %s FROM aqo_query_stat \
                       WHERE query_hash = \
                       (SELECT query_hash from aqo_query_texts \
                       WHERE query_text = '%s')" % (param_name, sql_query))
    conn.close()

    return response
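The statistics columns in aqo_query_stat are arrays, one element per execution, so each fetched cell is itself a list; a usage sketch (an assumption, not from the suite; sql_query is a placeholder):

# Hypothetical usage: the last array element is the most recent execution.
times = get_query_aqo_stat(sql_query, 'execution_time_with_aqo', connstring)
last_time = times[0][0][-1] if times else None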
Example #10
def get_query_aqo_param(sql_query, param_name, connstring):
    """
    Return aqo parameter for specified SQL query.
    Possible parameters: learn_aqo, use_aqo, fspace_hash, auto_tuning
    """

    conn = psycopg2.connect(connstring)

    value = execute(
        conn, "SELECT %s FROM aqo_queries \
                       WHERE query_hash = \
                       (SELECT query_hash from aqo_query_texts \
                       WHERE query_text = '%s')" % (param_name, sql_query))
    conn.close()

    if len(value) != 0:
        return value[0][0]
    else:
        return None
Example #11
def test_max_query_length(install_postgres):
    """
    Maximum SQL query length in PostgreSQL is about 1Gb.
    AQO extension stores queries in a special table - aqo_query_texts
    and then uses this column to retrieve query statistics.
    We should make sure that column can store queries
    with maximum length there.
    """

    install_postgres.load_extension('aqo')
    reset_aqo_stats(install_postgres.connstring)
    conn = psycopg2.connect(install_postgres.connstring)

    column_name = 'query_text'
    column_type_query = "SELECT format_type(atttypid, atttypmod) \
                        AS type FROM pg_attribute \
                        WHERE attrelid = 'aqo_query_texts'::regclass \
                        AND attname = '%s';" % column_name

    column_type = execute(conn, column_type_query)[0][0]
    conn.close()

    assert column_type == "character varying"
Example #12
def test_similar_queries(install_postgres):
    """
    Make sure AQO uses common statistic data for queries
    with the same constants in SQL clauses.
    """

    install_postgres.load_extension('aqo')
    reset_aqo_stats(install_postgres.connstring)

    conn = psycopg2.connect(install_postgres.connstring)
    execute(conn, 'SELECT 1')
    num1 = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts"
        " WHERE query_text = 'SELECT 1'")[0][0]
    execute(conn, 'SELECT 2')
    num2 = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts"
        " WHERE query_text = 'SELECT 2'")[0][0]
    conn.close()

    assert num1 == 1
    assert num2 == 0
Example #13
def test_tuning_max_iterations(install_postgres):
    """
    AQO will disable query optimization after N unsussessful attempts.
    N is defined in auto_tuning_max_iterations in aqo.c
    """

    install_postgres.load_extension('aqo')
    reset_aqo_stats(install_postgres.connstring)

    conn = psycopg2.connect(install_postgres.connstring)
    execute(conn, 'SELECT 1')
    num1 = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts"
        " WHERE query_text = 'SELECT 1'")[0][0]
    execute(conn, 'SELECT 2')
    num2 = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts "
        "WHERE query_text = 'SELECT 2'")[0][0]
    conn.close()

    assert num1 == 1
    assert num2 == 0
Example #14
def learn_aqo(sql_query, connstring, number=AQO_AUTO_TUNING_MAX_ITERATIONS):
    """
    This function is intended to learn AQO with a specific function
    with exact number of iterations equal to AQO_AUTO_TUNING_MAX_ITERATIONS
    """

    conn = psycopg2.connect(connstring)

    stats = {
        'default': [],
        'aqo': [],
        'aqo_stat': [],
        'learn_aqo_true': '',
        'learn_aqo_false': '',
        'use_aqo_true': '',
        'use_aqo_false': '',
        'query': sql_query
    }

    sql_query_analyze = 'EXPLAIN ANALYZE ' + sql_query

    aqo_mode = execute(conn, 'SHOW aqo.mode')[0][0]
    pg_set_option(connstring, 'aqo.mode', 'disabled')
    for i in range(0, number):
        stat = parse_explain_analyze_stat(execute(conn, sql_query_analyze))
        stats['default'].append(stat)

    pg_set_option(connstring, 'aqo.mode', aqo_mode)
    aqo_stat = []
    for i in range(0, number):
        use_aqo = get_query_aqo_param(sql_query_analyze, 'use_aqo', connstring)
        learn_aqo = get_query_aqo_param(sql_query_analyze, 'learn_aqo',
                                        connstring)
        auto_tuning = get_query_aqo_param(sql_query_analyze, 'auto_tuning',
                                          connstring)

        stat = parse_explain_analyze_stat(execute(conn, sql_query_analyze))
        stats['aqo'].append(stat)

        if learn_aqo:
            if stats['learn_aqo_true'] == '':
                stats['learn_aqo_true'] = i
            if use_aqo:
                cardinality_error = get_query_aqo_stat(
                    sql_query_analyze, 'cardinality_error_with_aqo',
                    connstring)[0][0][-1]
            else:
                cardinality_error = get_query_aqo_stat(
                    sql_query_analyze, 'cardinality_error_without_aqo',
                    connstring)[0][0][-1]
        else:
            if stats['learn_aqo_false'] == '':
                stats['learn_aqo_false'] = i
            cardinality_error = 0

        if use_aqo:
            if stats['use_aqo_true'] == '':
                stats['use_aqo_true'] = i
            execution_time = get_query_aqo_stat(sql_query_analyze,
                                                'execution_time_with_aqo',
                                                connstring)[0][0][-1]
            planning_time = get_query_aqo_stat(sql_query_analyze,
                                               'planning_time_with_aqo',
                                               connstring)[0][0][-1]
        elif learn_aqo or auto_tuning:
            execution_time = get_query_aqo_stat(sql_query_analyze,
                                                'execution_time_without_aqo',
                                                connstring)[0][0][-1]
            planning_time = get_query_aqo_stat(sql_query_analyze,
                                               'planning_time_without_aqo',
                                               connstring)[0][0][-1]
        else:
            if stats['use_aqo_false'] == '':
                stats['use_aqo_false'] = i
            execution_time = 0
            planning_time = 0

        stat = {
            'execution_time': execution_time,
            'planning_time': planning_time,
            'cardinality_error': cardinality_error
        }
        aqo_stat.append(stat)

    stats['aqo_stat'] = aqo_stat
    conn.close()

    return stats
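A usage sketch mirroring the benchmark tests elsewhere in this suite:

# Train AQO on one query, then plot and evaluate the collected statistics.
stats = learn_aqo("SELECT * FROM pg_class WHERE relpages > 455", connstring)
plot_stats(stats, connstring)
evaluate_aqo(stats)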
Example #15
    def test_pgprobackup_compression_continious_backup(self,
                                                       request,
                                                       install_postgres):
        """Test pg_probackup with compression feature
            and full continous backup
        Scenario:
        1. Create backup dir
        2. Set data dir to environment variables
        3. Init backup dir
        4. Set options for pg_probackup
        5. Create tablespace with compression
        6. Create table in compression tablespace
        7. Edit pg_hba_config
        8. Make full backup
        9. Get backup id
        10. Check that backup status is OK
        11. Check that backup validation is OK

        """
        # Step 1
        backup_dir = self.create_backup_directory()
        # Step 2
        os.environ["PGDATA"] = install_postgres.get_option('data_directory')
        # Step 3
        install_postgres.edit_pg_hba_conf(self.PG_HBA_CONFIG)
        install_postgres.manage_psql('restart')
        # Step 4
        self.execute_pg_probackup("init")
        for root, dirs, files in os.walk(backup_dir):
            for d in dirs:
                os.chown(os.path.join(root, d),
                         pwd.getpwnam("postgres").pw_uid,
                         grp.getgrnam("postgres").gr_gid)
            for f in files:
                os.chown(os.path.join(root, f),
                         pwd.getpwnam("postgres").pw_uid,
                         grp.getgrnam("postgres").gr_gid)
        # Step 5
        install_postgres.set_option("ptrack_enable", "on")
        install_postgres.set_option("wal_level", "archive")
        install_postgres.set_option("archive_mode", "on")
        install_postgres.set_option(
            "archive_command",
            "test ! -f {0}/wal/%f && cp %p {1}/wal/%f".format(
                backup_dir, backup_dir))
        install_postgres.set_option("cfs_gc_workers", "0")
        # Step 6
        tablespace_path = os.path.join(TMP_DIR, 'pgprobackup_compression')
        os.mkdir(tablespace_path)
        os.chown(tablespace_path,
                 pwd.getpwnam("postgres").pw_uid,
                 grp.getgrnam("postgres").gr_gid)
        conn = psycopg2.connect(install_postgres.connstring)
        execute(
            conn,
            'CREATE TABLESPACE pgprobackup_compression'
            ' LOCATION \'%s\' WITH(compression = true);' % tablespace_path)
        # Step 7
        execute(conn,
                'CREATE TABLE tbl TABLESPACE pgprobackup_compression'
                ' AS SELECT i, md5(random()::text)'
                ' FROM generate_series(0,1e05) AS i;')
        # Step 8
        self.execute_pg_probackup("backup", "-b", "full",
                                  "-d", "postgres", "-U", "postgres")
        # Step 9
        # Get last backup id and get out for show command with this backup
        pgprobackup_show = subprocess.Popen(
            ["%s/pg_probackup" % pg_bindir(), "show", "-U", "postgres"],
            stdout=subprocess.PIPE)
        awk_backup_id = subprocess.Popen(
            ["awk", "FNR == 4 {print $1}"], stdin=pgprobackup_show.stdout,
            stdout=subprocess.PIPE)
        backup_id = awk_backup_id.communicate()[0].strip()
        pgprobackup_show.stdout.close()
        # Step 10
        backup_info = self.parse_pgprobackup_show_command_out(
            self.execute_pg_probackup("show", backup_id))
        assert backup_info['STATUS'] == 'OK'
        # Step 11
        self.execute_pg_probackup("validate", backup_id)
Example #16
def reset_aqo_stats(connstring, sql_query=None):
    """
    Remove the AQO records for the given query, or truncate
    all AQO tables when no query is given.
    """

    conn = psycopg2.connect(connstring)

    if sql_query is not None:
        execute(
            conn, "DELETE FROM aqo_query_stat \
                WHERE query_hash = (SELECT query_hash from aqo_query_texts \
                WHERE query_text = '%s')" % sql_query)
        execute(
            conn, "DELETE FROM aqo_queries \
                WHERE query_hash = (SELECT query_hash from aqo_query_texts \
                WHERE query_text = '%s')" % sql_query)
        execute(
            conn, "DELETE FROM aqo_query_texts \
                WHERE query_text = '%s'" % sql_query)
    else:
        execute(conn, "TRUNCATE aqo_data CASCADE")
        execute(conn, "TRUNCATE aqo_queries CASCADE")
        execute(conn, "TRUNCATE aqo_query_stat CASCADE")
        execute(conn, "TRUNCATE aqo_query_texts CASCADE")
    conn.close()
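A usage sketch (an assumption, not from the suite):

# Hypothetical usage of the helper above.
reset_aqo_stats(connstring)              # truncate all AQO tables
reset_aqo_stats(connstring, "SELECT 1")  # forget a single query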
Example #17
def test_tpcds_benchmark(install_postgres):
    """
    TPC-DS benchmark
    """

    # https://github.com/tigvarts/tpcds-kit
    # tools/How_To_Guide.doc

    TPCDS_SCALE = "1"  # 100GB, 300GB, 1TB, 3TB, 10TB, 30TB and 100TB.
    TPCDS_BENCHMARK = "https://github.com/ligurio/tpcds-kit/archive/0.1.zip"

    # GETTING A BENCHMARK

    tpcds_archive = os.path.join(TMP_DIR, "tpcds-kit-0.1.zip")
    if not os.path.exists(tpcds_archive):
        download_file(TPCDS_BENCHMARK, tpcds_archive)

    tpcds_dir = os.path.join(TMP_DIR, "tpcds-kit-0.1")
    if not os.path.exists(tpcds_dir):
        os.mkdir(tpcds_dir)
        subprocess.check_output(["unzip", tpcds_archive, "-d", TMP_DIR])
    tpcds_dir = os.path.join(TMP_DIR, "tpcds-kit-0.1/tools")
    os.chdir(tpcds_dir)

    # SETUP DATABASE (see ./install.sh)

    if platform.system() == 'Darwin':
        shutil.copy("Makefile.osx", "Makefile")
    else:
        shutil.copy("Makefile.suite", "Makefile")
    subprocess.check_call(["make"])
    p = subprocess.Popen([
        "dsdgen", "-verbose", "-force", "-dir", TMP_DIR, "-scale", TPCDS_SCALE,
        "-filter"
    ],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    print(stdout, stderr)
    assert p.wait() == 0
    # FIXME: assert -11 == 0

    connstring = install_postgres.connstring
    conn = psycopg2.connect(connstring)
    execute(conn, "DROP DATABASE IF EXISTS tpcds")
    # create_test_database("tpcds")
    execute(conn, "CREATE DATABASE tpcds")
    conn.close()

    sqls = ["tpcds.sql", "tpcds_load.sql", "tpcds_ri.sql"]
    for sql_file in sqls:
        sql_path = os.path.join(tpcds_dir, sql_file)
        sql = subprocess.Popen(["cat", sql_path], stdout=subprocess.PIPE)
        psql = subprocess.Popen(["psql", "-d", "tpcds"], stdin=sql.stdout)
        assert psql.wait() == 0
    # os.remove(os.path.join(TMP_DIR, '*.dat'))

    # RUN WORKLOAD

    # relationship between database size (SF) and query streams (N)
    # see tools/How_To_Guide.doc
    # query_streams = {"100": "7",
    #                "300": "9",
    #                "1000": "11",
    #                "3000": "13",
    #                "10000": "15",
    #                "30000": "17",
    #                "100000": "19"}

    install_postgres.load_extension('aqo')
    reset_aqo_stats(connstring)
    query_templates = os.path.join(tpcds_dir, "query_variants/*.tpl")
    for template in glob.glob(query_templates):

        stats = learn_aqo(template, connstring, 100)
        plot_stats(stats, connstring)
        evaluate_aqo(stats)
Example #18
def test_1C_sample(install_postgres):
    """
    Train AQO on a production-style 1C:Enterprise query
    over a restored dump.
    """

    SQL_QUERY = """
SELECT * FROM _AccRgAT31043 , _AccRgAT31043_ttgoab153 T2 WHERE
     (T2._Period = _AccRgAT31043._Period AND
      T2._AccountRRef = _AccRgAT31043._AccountRRef AND
      T2._Fld1009RRef = _AccRgAT31043._Fld1009RRef AND
      (COALESCE(T2._Fld1010RRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Fld1010RRef,'\\377 '::bytea)) AND
      (COALESCE(T2._Fld1011RRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Fld1011RRef,'\\377'::bytea)) AND
      T2._Fld995 = _AccRgAT31043._Fld995 AND
      (COALESCE(T2._Value1_TYPE,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value1_TYPE,'\\377'::bytea) AND
     COALESCE(T2._Value1_RTRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value1_RTRef,'\\377'::bytea) AND
     COALESCE(T2._Value1_RRRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value1_RRRef,'\\377'::bytea)) AND
      (COALESCE(T2._Value2_TYPE,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value2_TYPE,'\\377'::bytea) AND
       COALESCE(T2._Value2_RTRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value2_RTRef,'\\377'::bytea) AND
     COALESCE(T2._Value2_RRRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value2_RRRef, '\\377'::bytea)) AND
     (COALESCE(T2._Value3_TYPE,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value3_TYPE,'\\377'::bytea) AND
     COALESCE(T2._Value3_RTRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value3_RTRef,'\\377'::bytea) AND
     COALESCE(T2._Value3_RRRef,'\\377'::bytea) =
COALESCE(_AccRgAT31043._Value3_RRRef,'\\377'::bytea)) AND
     _AccRgAT31043._Splitter = 0) AND (T2._EDCount = 3) AND
     (_AccRgAT31043._Fld995 = 271602);
"""

    DUMP_URL = "http://dist.l.postgrespro.ru/vm-images/" \
        "test/blobs/pg.sql.bz"

    archive_path = os.path.join(TMP_DIR, "pg.sql.bz")
    plain_path = os.path.join(TMP_DIR, "pg.sql")
    if not os.path.exists(plain_path):
        if not os.path.exists(archive_path):
            download_file(DUMP_URL, archive_path)
        os.chdir(TMP_DIR)
        subprocess.check_output(['bunzip2', archive_path])

    # with open(plain_path, 'r') as file:
    #     psql = subprocess.Popen(["psql"], stdin=file)
    #     psql.wait()
    #     assert psql.returncode == 0

    connstring = install_postgres.connstring
    conn = psycopg2.connect(connstring)
    with open(plain_path, "r") as f:
        execute(conn, f.read())
    execute(conn, 'VACUUM ANALYZE _accrgat31043')
    conn.close()

    time.sleep(10)

    install_postgres.load_extension('aqo')
    reset_aqo_stats(connstring)
    # FIXME: execute(conn, 'SET escape_string_warning=off')
    # FIXME: execute(conn, 'SET enable_mergejoin=off')

    stats = learn_aqo(SQL_QUERY, connstring)
    plot_stats(stats, connstring)
    evaluate_aqo(stats)
Example #19
def test_aqo_mode(aqo_mode, install_postgres):
    """
    This test case validates the available AQO modes.

    +-------------+-------------+-------------+-----------------+
    |             | optimize    | optimize    | separate record |
    |  aqo mode   | new queries | old queries | for each query  |
    +-------------+-------------+-------------+-----------------+
    | intelligent | Yes         | Yes         | Yes             |
    | manual      | No          | Yes         | Yes             |
    | forced      | Yes         | Yes         | No              |
    | disabled    | No          | No          | -               |
    +-------------+-------------+-------------+-----------------+

    """

    OLD_SQL_QUERY = "SELECT * FROM pg_class WHERE relpages > 455"
    NEW_SQL_QUERY = "SELECT * FROM pg_class" \
        " WHERE reltablespace != reltoastrelid"
    old_sql_query_explain = 'EXPLAIN ANALYZE ' + OLD_SQL_QUERY
    new_sql_query_explain = 'EXPLAIN ANALYZE ' + NEW_SQL_QUERY

    install_postgres.load_extension('aqo')
    connstring = install_postgres.connstring
    reset_aqo_stats(connstring)

    install_postgres.set_option('aqo.mode', 'intelligent')
    learn_aqo(OLD_SQL_QUERY, connstring)
    install_postgres.set_option('aqo.mode', aqo_mode)
    learn_aqo(NEW_SQL_QUERY, connstring)

    conn = psycopg2.connect(install_postgres.connstring)
    num_old_sql_query = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts"
        " WHERE query_text = '%s'" % old_sql_query_explain)[0][0]
    num_new_sql_query = execute(
        conn, "SELECT COUNT(*) FROM aqo_query_texts"
        " WHERE query_text = '%s'" % new_sql_query_explain)[0][0]

    if aqo_mode == 'forced' or aqo_mode == 'disabled':
        executions_w_aqo = execute(
            conn, "SELECT executions_with_aqo FROM aqo_query_stat"
            " WHERE query_hash = 0;")
        executions_w_aqo = executions_w_aqo[0][0] if executions_w_aqo else 0
        executions_wo_aqo = execute(
            conn, "SELECT executions_without_aqo FROM aqo_query_stat"
            " WHERE query_hash = 0;")
        executions_wo_aqo = executions_wo_aqo[0][0] if executions_wo_aqo else 0
    elif aqo_mode == 'intelligent':
        new_executions_w_aqo = get_query_aqo_stat(new_sql_query_explain,
                                                  'executions_with_aqo',
                                                  connstring)[0][0]
        new_executions_wo_aqo = get_query_aqo_stat(new_sql_query_explain,
                                                   'executions_without_aqo',
                                                   connstring)[0][0]
        old_executions_w_aqo = get_query_aqo_stat(old_sql_query_explain,
                                                  'executions_with_aqo',
                                                  connstring)[0][0]
        old_executions_wo_aqo = get_query_aqo_stat(old_sql_query_explain,
                                                   'executions_without_aqo',
                                                   connstring)[0][0]
    conn.close()

    if aqo_mode == 'intelligent':
        assert num_old_sql_query == 1
        assert num_new_sql_query == 1
        assert new_executions_w_aqo + new_executions_wo_aqo == \
            AQO_AUTO_TUNING_MAX_ITERATIONS
        assert old_executions_w_aqo + old_executions_wo_aqo == \
            AQO_AUTO_TUNING_MAX_ITERATIONS
        # TODO: check optimization of old query
        # TODO: check optimization of new query
    elif aqo_mode == 'forced':
        assert num_old_sql_query == 1
        assert num_new_sql_query == 0
        assert executions_w_aqo + executions_wo_aqo == \
            AQO_AUTO_TUNING_MAX_ITERATIONS
        # TODO: check optimization of old query
        # TODO: check optimization of new query
    elif aqo_mode == 'manual':
        assert num_old_sql_query == 1
        assert num_new_sql_query == 0
        # TODO: check optimization of old query
        # TODO: check optimization of new query
    elif aqo_mode == 'disabled':
        assert num_old_sql_query == 1
        assert num_new_sql_query == 0
        assert executions_w_aqo + executions_wo_aqo == 0
        # TODO: check optimization of old queries
        # TODO: check optimization of new queries
    else:
        pytest.fail("Unknown AQO mode - %s" % aqo_mode)
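test_aqo_mode receives aqo_mode as a fixture; the wiring is not shown in these examples, but one plausible sketch (an assumption) is pytest parametrization:

import pytest

# Hypothetical fixture for the aqo_mode argument above; the original
# definition is not part of these examples.
@pytest.fixture(params=['intelligent', 'manual', 'forced', 'disabled'])
def aqo_mode(request):
    return request.param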