def insert_results_into_table(table, database, *args):
    """Insert one row of values into *table* on *database*.

    Parameters
    ----------
    table : object
        Table descriptor providing ``insert_string`` (a parameterized
        INSERT statement) and ``table_name``.
    database : object
        Database descriptor providing ``db_type`` and ``connection``
        (the DB-API driver module used to connect).
    *args
        Positional values bound to the placeholders of the INSERT.
    """
    connection_config_dict = get_db_info.get_values_from_json(
        False, database.db_type)
    connection = None
    cursor = None

    try:
        connect_string = database.connection
        connection = connect_string.connect(**connection_config_dict)
        cursor = connection.cursor()
        # Parameterized execution: the driver quotes the values, which
        # avoids SQL injection and type-formatting issues.
        cursor.execute(table.insert_string, tuple(args))
        connection.commit()
        print("Record inserted successfully into the " + table.table_name +
              " table")

    except Exception as error:
        # Only roll back if a connection was actually established;
        # calling rollback() on None would raise AttributeError and
        # mask the original error.
        if connection is not None:
            connection.rollback()
        print("Failed to insert into " + table.table_name +
              " table {}".format(error))

    finally:
        # closing database connection; the cursor may be None when
        # connection.cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
# Example no. 2
def create_database():
    """Create the ``spyda`` database on the configured PostgreSQL server.

    PostgreSQL refuses CREATE DATABASE inside a transaction block, and
    psycopg2 opens one implicitly, so the connection must be switched to
    autocommit before executing the statement.
    """
    database = get_db_type.PostgreSQL
    connection_config_dict = dbci.get_values_from_json(True, database.db_type)
    connection = psycopg2.connect(**connection_config_dict)
    # CREATE DATABASE cannot run inside a transaction block.
    connection.autocommit = True
    cursor = connection.cursor()
    try:
        cursor.execute("CREATE DATABASE spyda")
    finally:
        # Release resources even if the statement fails.
        cursor.close()
        connection.close()
# Example no. 3
def drop_database():
    """Drop the ``spyda`` database from the configured PostgreSQL server."""
    database = get_db_type.PostgreSQL
    connection_config_dict = dbci.get_values_from_json(True, database.db_type)
    connection = psycopg2.connect(**connection_config_dict)
    # DROP DATABASE cannot run inside a transaction block.
    connection.autocommit = True
    cursor = connection.cursor()
    try:
        # The original statement copied documentation syntax
        # "[IF EXISTS]" literally; the square brackets are invalid SQL.
        cursor.execute("DROP DATABASE IF EXISTS spyda")
    finally:
        # Release resources even if the statement fails.
        cursor.close()
        connection.close()
def drop_database():
    """Drop the ``spyda`` database from the configured MySQL server.

    NOTE(review): this redefines the PostgreSQL ``drop_database`` above
    if both live in the same module — only this MySQL version survives
    at import time; confirm the intended layout.
    """
    database = get_db_type.MySQL
    connection_config_dict = dbci.get_values_from_json(True, database.db_type)
    connection = mysql.connector.connect(**connection_config_dict)
    cursor = connection.cursor()
    try:
        # IF EXISTS makes the drop idempotent instead of raising when
        # the database is already gone.
        cursor.execute("DROP DATABASE IF EXISTS spyda")
    finally:
        # Release resources even if the statement fails.
        cursor.close()
        connection.close()
def get_display_latest_call_statistics(database):
    """Render a bar chart of error rate per call label.

    Reads rows with ``table.CallStatisticsTable.display_query`` from
    *database* and writes an offline plotly chart to
    ``call_statistics_chart.html``.

    Parameters
    ----------
    database : object
        Descriptor providing ``db_type`` and ``connection`` (the DB-API
        driver module used to connect).
    """
    connection_config_dict = get_db_info.get_values_from_json(False, database.db_type)
    connection = None
    cursor = None
    try:
        connect_string = database.connection
        connection = connect_string.connect(**connection_config_dict)
        cursor = connection.cursor()
        cursor.execute(table.CallStatisticsTable.display_query)
        rows = cursor.fetchall()

        # Column positions must match the SELECT list of display_query.
        df = pd.DataFrame([list(row) for row in rows])
        df.rename(columns={0: 'label', 1: 'average', 2: 'response_time', 3: 'error_count', 4: 'error_rate',
                           5: 'suite_run_history_id'},
                  inplace=True)
        df = df.sort_values(['error_rate'], ascending=[1])

        trace1 = go.Bar(
            x=df['label'],
            y=df['error_rate'],
            name='Bar'
        )

        layout = go.Layout(
            title='Call Statistics History',
            xaxis=dict(title='Call Label'),
            yaxis=dict(title='Error Rate'),
        )
        fig = go.Figure(data=[trace1], layout=layout)
        plotly.offline.plot(fig, filename='call_statistics_chart.html')

    except Error as error:
        # Guard: connection is None when the connect itself failed, and
        # rollback() on None would raise AttributeError.
        if connection is not None:
            connection.rollback()
        # Report the failure instead of swallowing it silently.
        print("Failed to display call statistics: {}".format(error))

    finally:
        # closing database connection; guard cursor and connection
        # independently — the cursor may be None even when the
        # connection succeeded.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def get_display_suite_run_history(database):
    """Render a line chart of average response time over suite runs.

    Reads rows with ``table.SuiteHistoryTable.display_query`` from
    *database* and writes an offline plotly chart to
    ``suite_run_history_chart.html``.

    Parameters
    ----------
    database : object
        Descriptor providing ``db_type`` and ``connection`` (the DB-API
        driver module used to connect).
    """
    connection_config_dict = get_db_info.get_values_from_json(False, database.db_type)
    connection = None
    cursor = None
    try:
        connect_string = database.connection
        connection = connect_string.connect(**connection_config_dict)
        cursor = connection.cursor()
        cursor.execute(table.SuiteHistoryTable.display_query)
        rows = cursor.fetchall()

        # Column positions must match the SELECT list of display_query.
        df = pd.DataFrame([list(row) for row in rows])
        df.rename(columns={0: 'key', 1: 'id', 2: 'date', 3: 'avg', 4: 'total', 5: 'error_rate', 6: 'error_count'},
                  inplace=True)
        df = df.sort_values(['date'], ascending=[1])

        trace1 = go.Scatter(
            x=df['date'],
            y=df['avg'],
            mode='lines+markers',
            name='Scatter'
        )

        layout = go.Layout(
            title='Suite History',
            xaxis=dict(title='Date'),
            yaxis=dict(title='Average Response Time'),
        )
        fig = go.Figure(data=[trace1], layout=layout)
        plotly.offline.plot(fig, filename='suite_run_history_chart.html')

    except Error as error:
        # Guard: connection is None when the connect itself failed, and
        # rollback() on None would raise AttributeError.
        if connection is not None:
            connection.rollback()
        # Report the failure instead of swallowing it silently.
        print("Failed to display suite run history: {}".format(error))

    finally:
        # closing database connection; guard cursor and connection
        # independently — the cursor may be None even when the
        # connection succeeded.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
def create_tables():
    """ create tables in the PostgreSQL database"""
    commands = (""" CREATE TABLE suite_run_history (
                suite_run_history_key SERIAL PRIMARY KEY,
                suite_run_history_id character varying(50) COLLATE pg_catalog."default" NOT NULL,
                run_date timestamp with time zone DEFAULT CURRENT_TIMESTAMP,
                average_response_time integer,
                total_samples integer,
                error_rate double precision,
                error_count integer,
                CONSTRAINT "suite_run_history_id UNIQUE" UNIQUE (suite_run_history_id)
                )
        """, """
           CREATE TABLE call_statistics (
                call_statistics_key SERIAL PRIMARY KEY,
                date timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP,
                label text COLLATE pg_catalog."default",
                samples integer,
                average_response_time integer,
                ninety_percent integer,
                ninety_five_percent integer,
                ninety_nine_percent integer,
                minimum_response_time integer,
                maximum_response_time integer,
                average_latency double precision,
                geo_mean_response_time smallint,
                standard_deviation smallint,
                duration_ms integer,
                average_bytes double precision,
                average_throughput double precision,
                median_response_time integer,
                error_count integer,
                error_rate double precision,
                has_passed_thresholds bit(1) DEFAULT NULL::"bit",
                suite_run_history_id character varying(50) COLLATE pg_catalog."default" NOT NULL,
                CONSTRAINT suite_run_history_id FOREIGN KEY (suite_run_history_id)
                    REFERENCES public.suite_run_history (suite_run_history_id) MATCH SIMPLE
                    ON UPDATE RESTRICT
                    ON DELETE RESTRICT
                )
        """)
    conn = None
    try:
        # gathers the connection parameters
        database = get_db_type.PostgreSQL
        connection_config_dict = dbci.get_values_from_json(
            False, database.db_type)
        # connect to the PostgreSQL server
        conn = psycopg2.connect(**connection_config_dict)
        # autocommit makes every DDL statement take effect immediately,
        # so no explicit commit() calls are needed afterwards.
        conn.autocommit = True
        cur = conn.cursor()
        try:
            # create tables one by one
            for command in commands:
                cur.execute(command)
        finally:
            # release the cursor even if a statement fails (the original
            # leaked it on the exception path)
            cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
# Example no. 8
def create_tables():
    """ create tables in the MySQL database"""
    commands = (""" CREATE TABLE `suite_run_history` (
                `suite_run_history_key` INT(11) NOT NULL AUTO_INCREMENT,
                `suite_run_history_id` VARCHAR(50) NOT NULL,
                `run_date` TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP,
                `average_response_time` INT(64) NULL DEFAULT NULL,
                `total_samples` INT(64) NULL DEFAULT NULL,
                `error_rate` FLOAT NULL DEFAULT NULL,
                `error_count` INT(64) NULL DEFAULT NULL,
                PRIMARY KEY (`suite_run_history_key`),
                UNIQUE INDEX `ID` (`suite_run_history_id`)
                )
                COLLATE='utf8_general_ci'
                ENGINE=InnoDB
                AUTO_INCREMENT=1;
        """, """
           CREATE TABLE `call_statistics` (
                `call_statistics_key` INT(11) NOT NULL AUTO_INCREMENT,
                `date` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                `label_id` TEXT NULL,
                `label` TEXT NULL,
                `samples` INT(11) NULL DEFAULT NULL,
                `average_response_time` INT(11) NULL DEFAULT NULL,
                `ninety_percent` INT(11) NULL DEFAULT NULL,
                `ninety_five_percent` INT(11) NULL DEFAULT NULL,
                `ninety_nine_percent` INT(11) NULL DEFAULT NULL,
                `minimum_response_time` INT(11) NULL DEFAULT NULL,
                `maximum_response_time` INT(11) NULL DEFAULT NULL,
                `average_latency` FLOAT NULL DEFAULT NULL,
                `geo_mean_response_time` SMALLINT(6) NULL DEFAULT NULL,
                `standard_deviation` SMALLINT(6) NULL DEFAULT NULL,
                `duration_ms` INT(11) NULL DEFAULT NULL,
                `average_bytes` FLOAT NULL DEFAULT NULL,
                `average_throughput` FLOAT NULL DEFAULT NULL,
                `median_response_time` INT(11) NULL DEFAULT NULL,
                `error_count` INT(11) NULL DEFAULT NULL,
                `error_rate` FLOAT NULL DEFAULT NULL,
                `has_passed_thresholds` BIT(1) NULL DEFAULT NULL,
                `suite_run_history_id` VARCHAR(50) NOT NULL,
                PRIMARY KEY (`call_statistics_key`),
                INDEX `suite_run_history_id` (`suite_run_history_id`),
                CONSTRAINT `call_statistics_ibfk_1` FOREIGN KEY (`suite_run_history_id`) REFERENCES `suite_run_history` 
                (`suite_run_history_id`)
                )
                COLLATE='utf8_general_ci'
                ENGINE=InnoDB
                AUTO_INCREMENT=1;
        """)
    conn = None
    try:
        # gathers connection parameters
        database = get_db_type.MySQL
        connection_config_dict = dbci.get_values_from_json(
            False, database.db_type)
        # connect to the MySQL server
        conn = mysql.connector.connect(**connection_config_dict)
        cur = conn.cursor()
        try:
            # create tables one by one, committing after each DDL
            for command in commands:
                cur.execute(command)
                conn.commit()
        finally:
            # release the cursor even if a statement fails (the original
            # leaked it on the exception path)
            cur.close()
    except (Exception, mysql.connector.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()