Пример #1
0
 def getLoggingSettings(self, comp):
     """Return logging-related settings of *comp*'s Postgres instance.

     Reads the component's env file, queries pg_settings for every
     parameter listed in the module-level ``log_param_dict``, flattens
     the rows into dicts and returns them sorted by display order.

     :param comp: component name whose env file configures the connection.
     :return: list of dicts with category/name/setting/enabled/short_desc/
              recommended/order keys, sorted by 'order'.
     """
     util.read_env_file(comp)
     conn = psycopg2.connect(
         "dbname='postgres' user='******' host='localhost'")
     try:
         cursor = conn.cursor()
         # Bind the tuple of names client-side; psycopg2 adapts a tuple to
         # a parenthesized SQL list, which (unlike repr()) stays valid for
         # a single-element tuple and quotes values safely.
         cursor.execute(
             "select category, name, setting, short_desc "
             "from pg_settings where name in %s",
             (tuple(map(str, log_param_dict.keys())),))
         columns = [desc[0] for desc in cursor.description]
         result = [dict(zip(columns, row)) for row in cursor]
         cursor.close()
     finally:
         # Close the connection even when the query fails (the original
         # leaked it on any exception).
         conn.close()
     final_result = []
     for k in result:
         f_row = {
             'category': k['category'],
             'name': k['name'],
             'setting': k['setting'],
             'short_desc': k['short_desc'],
             'recommended': log_param_dict[k['name']][1],
             'order': log_param_dict[k['name']][0],
         }
         # Expose boolean settings as a real bool for the UI; non-boolean
         # settings simply get no 'enabled' key (same as before).
         if k['setting'] == 'on':
             f_row['enabled'] = True
         elif k['setting'] == 'off':
             f_row['enabled'] = False
         final_result.append(f_row)
     return sorted(final_result, key=itemgetter('order'))
Пример #2
0
 def pg_settings(self, comp):
     """
     Method to get the settings of cluster, grouped by category.
     :param comp: component name whose instance is queried.
     :return: It yields json string.
     """
     import itertools
     result = dict()
     result['component'] = comp
     port = util.get_column('port', comp)
     try:
         # Reuse the cached PGPASSFILE per component so the env file is
         # only read once.
         if comp in self.pgpassfiles:
             os.environ['PGPASSFILE'] = self.pgpassfiles[comp]
         else:
             util.read_env_file(comp)
             self.pgpassfiles[comp] = os.environ['PGPASSFILE']
         pg = PgInstance("localhost", "postgres", "postgres", int(port))
         pg.connect()
         settings_list = pg.get_pg_settings()
         pg.close()
         final_list = []
         # NOTE: groupby only merges *adjacent* rows, so get_pg_settings()
         # is assumed to return rows already ordered by category — confirm.
         for key, group in itertools.groupby(settings_list,
                                             key=lambda x: x['category']):
             final_list.append({'name': str(key), 'settings': list(group)})
         result['list'] = final_list
     except Exception as e:
         # Surface the failure instead of silently publishing an empty
         # result; callers that only read 'list' are unaffected.
         result['error'] = 1
         result['msg'] = str(e)
     yield self.session.publish('com.bigsql.onPGsettings', result)
Пример #3
0
 def get(self, comp):
     """Return the PGUSER/PGDATABASE/PGPORT values for *comp*.

     Reads the component's env file and reports the resulting environment
     variables; on any failure a dict with ``error``/``msg`` is returned
     instead.
     """
     import util
     try:
         util.read_env_file(comp)
         env = os.environ
         return {
             'PGUSER': env['PGUSER'],
             'PGDATABASE': env['PGDATABASE'],
             'PGPORT': env['PGPORT'],
         }
     except Exception as e:
         return {'error': 1, 'msg': str(e)}
Пример #4
0
    def switchLogfile(self, comp, logFile):
        """Switch (or reset) the server's log_filename and rotate the log.

        :param comp: component name whose env file configures the connection.
        :param logFile: new log_filename pattern; a falsy value resets the
            setting to its default instead.
        :return: True on success, or the exception text (str) on failure.
        """
        util.read_env_file(comp)
        conn = psycopg2.connect(
            "dbname='postgres' user='******' host='localhost'")
        # ALTER SYSTEM cannot run inside a transaction block, so switch to
        # autocommit (isolation level 0) and restore the old level later.
        old_isolation_level = conn.isolation_level
        conn.set_isolation_level(0)
        cursor = conn.cursor()
        try:
            if logFile:
                # NOTE(review): logFile is interpolated directly into the
                # SQL text; a value containing a single quote would break
                # the statement — confirm callers sanitize it.
                alterQuery = "alter system set log_filename='%s';" % (logFile)
                notice_msg = "pgDevOps by BigSQL: Switched logfile to {0}"
            else:
                alterQuery = "alter system reset log_filename;"
                notice_msg = "pgDevOps by BigSQL: Reset logfile to {0}"
            cursor.execute(alterQuery)
            pgReloadQuery = "select pg_reload_conf();"
            cursor.execute(pgReloadQuery)
            # Force a new log file so the changed setting takes effect now.
            rotateQuery = "select pg_rotate_logfile();"
            cursor.execute(rotateQuery)
            # Give the server a moment to reload and rotate before reading
            # the effective setting back.
            time.sleep(2)
            log_file_name_query = "show log_filename;"
            cursor.execute(log_file_name_query)
            curResult = cursor.fetchone()
            log_file_name = curResult[0]
            # Double any '%' (log_filename typically contains strftime
            # escapes like %Y) — presumably to keep later %-style
            # formatting of the message safe; TODO confirm a consumer
            # actually %-formats this text.
            log_file_name = log_file_name.replace("%", "%%")
            notice_msg = notice_msg.format(log_file_name)
            # Emit a RAISE NOTICE at 'notice' level so the switch itself is
            # recorded in the (new) server log.
            query = """BEGIN;
                        SET log_min_messages = notice;
                        DO language plpgsql $$
                        BEGIN
                            RAISE NOTICE '{0}';
                        END
                        $$;
                        RESET log_min_messages;
                        END;"""

            raise_query = query.format(notice_msg)
            cursor.execute(raise_query)

            conn.set_isolation_level(old_isolation_level)
            conn.close()
            return True
        except Exception as e:
            # Best effort: restore the isolation level, close the
            # connection, and hand the error text back to the caller
            # instead of raising.
            conn.set_isolation_level(old_isolation_level)
            conn.close()
            return str(e)
Пример #5
0
 def changeLoggingParams(self, comp, logDict=None):
     """Apply the given logging parameters via ALTER SYSTEM and reload.

     :param comp: component name whose env file configures the connection.
     :param logDict: mapping of setting name -> new value (default: none).
     :return: True on success, or the exception text (str) on failure.
     """
     if logDict is None:
         # Avoid the mutable-default-argument pitfall of the original.
         logDict = {}
     util.read_env_file(comp)
     conn = psycopg2.connect(
         "dbname='postgres' user='******' host='localhost'")
     # ALTER SYSTEM cannot run inside a transaction block, so use
     # autocommit (isolation level 0) and restore the old level after.
     old_isolation_level = conn.isolation_level
     conn.set_isolation_level(0)
     cursor = conn.cursor()
     try:
         for k in logDict.keys():
             # Bind the value client-side so psycopg2 quotes it safely;
             # the setting name is an identifier and cannot be bound, so
             # it is still interpolated (names come from our own UI, not
             # arbitrary user SQL).
             alterQuery = "alter system set %s = %%s;" % (k,)
             cursor.execute(alterQuery, (logDict[k],))
         pgReloadQuery = "select pg_reload_conf();"
         cursor.execute(pgReloadQuery)
         return True
     except Exception as e:
         return str(e)
     finally:
         # Always restore the isolation level and close the connection,
         # on both the success and the failure path.
         conn.set_isolation_level(old_isolation_level)
         conn.close()
Пример #6
0
def do_pgc_tune(p_comp, email, print_json=False):
    """Collect the local Postgres and system configuration, send it to the
    BigTuna tuning service, and print the recommended settings.

    :param p_comp: component whose configuration is to be tuned.
    :param print_json: dump the raw JSON response instead of key = value.
    """
    tune_metadata = "tuning_service_descriptor"
    tuning_service = get_json_file(tune_metadata, print_json)

    # Gather the running server's settings.
    util.read_env_file(p_comp)
    port = util.get_comp_port(p_comp)
    pg = PgInstance("localhost", "postgres", "postgres", int(port))
    pg.connect()
    pg_config = pg.get_raw_pg_settings()
    pg.close()

    pg_config['postgres_version'] = util.get_column('version', p_comp)

    # Assemble the request payload.
    tune_request = {
        'tune_request': {
            'postgres_config': pg_config,
            'email': email,
            'user_agent': 'pgc {}'.format(util.get_pgc_version()),
            'system_config': api.info(True, 'na', 'na', False),
        }
    }

    # Web request
    tune_url = tuning_service['bigtuna']['url']
    tune_url = tune_url + tuning_service['bigtuna']['path']
    req = urllib2.Request(tune_url)
    req.add_header('Content-Type', 'application/json')
    response = urllib2.urlopen(req, json.dumps(tune_request))
    result = json.load(response)

    if print_json:
        print(json.dumps(result, sort_keys=True, indent=2,
                         separators=(',', ': ')))
    else:
        for key in result['tune_result']:
            print("{0} = {1}".format(key, result['tune_result'][key]))
Пример #7
0
    def generate_profiler_reports(self,
                                  hostname="localhost",
                                  username="******",
                                  port=5432,
                                  database="",
                                  password="",
                                  queries="",
                                  report_title="",
                                  report_desc="",
                                  comp=None):
        if comp:
            util.read_env_file(comp)
            username = os.environ.get("PGUSER")
            port = os.environ.get("PGPORT")

        result = {}
        try:
            from ProfilerReport import ProfilerReport
            args = {}
            args['pgPass'] = password
            args['hostName'] = hostname
            args['pgDB'] = database
            args['pgUser'] = username
            args['pgPort'] = port

            plReport = ProfilerReport(args)
            report_file = plReport.generateSQLReports(queries, report_title,
                                                      report_desc)

            result['error'] = 0
            result['report_file'] = report_file

        except Exception as e:
            import traceback
            print traceback.format_exc()
            print e
            result = {}
            result['error'] = 1
            result['msg'] = str(e)
        yield self.session.publish('com.bigsql.profilerReports', result)
Пример #8
0
 def db_list(self, comp):
     """
     Method to get the list of database available in cluster.
     :param comp: component name whose instance is queried.
     :return: It yields json string.
     """
     result = dict()
     result['component'] = comp
     port = util.get_column('port', comp)
     try:
         # Reuse the cached PGPASSFILE per component so the env file is
         # only read once.
         if comp in self.pgpassfiles:
             os.environ['PGPASSFILE'] = self.pgpassfiles[comp]
         else:
             util.read_env_file(comp)
             self.pgpassfiles[comp] = os.environ['PGPASSFILE']
         pg = PgInstance("localhost", "postgres", "postgres", int(port))
         pg.connect()
         database_list = pg.get_database_list()
         pg.close()
         result['list'] = database_list
     except Exception as e:
         # Surface the failure instead of silently publishing an empty
         # result; callers that only read 'list' are unaffected.
         result['error'] = 1
         result['msg'] = str(e)
     yield self.session.publish('com.bigsql.ondblist', result)
Пример #9
0
 def checkExtension(self, database, comp, extension):
     """
     Method to check whether *extension* is installed in *database*.
     :param database: database to connect to for the check.
     :param comp: component name whose instance is queried.
     :param extension: extension name to look for.
     :return: It yields json string.
     """
     result = dict()
     result['component'] = comp
     port = util.get_column('port', comp)
     try:
         # Reuse the cached PGPASSFILE per component so the env file is
         # only read once.
         if comp in self.pgpassfiles:
             os.environ['PGPASSFILE'] = self.pgpassfiles[comp]
         else:
             util.read_env_file(comp)
             self.pgpassfiles[comp] = os.environ['PGPASSFILE']
         pg = PgInstance("localhost", "postgres", database, int(port))
         pg.connect()
         is_extension_installed = pg.is_extension_installed(extension)
         pg.close()
         result['status'] = is_extension_installed
     except Exception as e:
         # Surface the failure instead of silently publishing a result
         # with no 'status'; callers that only read 'status' are
         # unaffected on the success path.
         result['error'] = 1
         result['msg'] = str(e)
     yield self.session.publish('com.bigsql.onCheckExtension', result)
Пример #10
0
import subprocess
import os
import sys

# Make the hub scripts and their libraries importable.
MY_HOME = os.getenv("MY_HOME", "")
sys.path.append(os.path.join(MY_HOME, 'hub', 'scripts'))
sys.path.append(os.path.join(MY_HOME, 'hub', 'scripts', 'lib'))

import util

util.set_lang_path()

pgver = "pg9X"

# e.g. "pg96" -> "9.6"
dotver = "{0}.{1}".format(pgver[2], pgver[3])

datadir = util.get_column('datadir', pgver)

logdir = util.get_column('logdir', pgver)

autostart = util.get_column('autostart', pgver)

pg_ctl = os.path.join(MY_HOME, pgver, "bin", "pg_ctl")
logfile = util.get_column('logdir', pgver) + os.sep + "postgres.log"

util.read_env_file(pgver)

# Start the server silently (-s), waiting (-w) until it accepts
# connections, with the component's data directory and log file.
cmd = '{0} start -s -w -D "{1}" -l "{2}"'.format(pg_ctl, datadir, logfile)
util.system(cmd)
Пример #11
0
    def save_dbstats_data(self):
        """Sample activity/statistics for every local Postgres component.

        For each pg component registered in the local sqlite catalog:
        connect, snapshot cluster stats / current activity / connection
        counts, convert the cumulative transaction and tuple counters into
        per-second rates using the previous sample, and append the result
        to the rolling ``self.db_stats`` window (max 60 samples per
        component). The first sample for a component only seeds
        ``self.previous_db_stats`` and stores nothing in the window.
        """
        try:
            connL = sqlite3.connect(db_local)
            c = connL.cursor()
            # All pg components that have an initialized data directory.
            sql = "SELECT component, port" + \
                  "  FROM components " + \
                  " WHERE project='pg' AND datadir != ''"
            c.execute(sql)
            t_comp = c.fetchall()

            for comp in t_comp:
                pg_comp = str(comp[0])
                try:
                    # Reuse the cached PGPASSFILE per component so the env
                    # file is only read once.
                    if pg_comp in self.pgpassfiles:
                        os.environ['PGPASSFILE'] = self.pgpassfiles[pg_comp]
                    else:
                        util.read_env_file(pg_comp)
                        self.pgpassfiles[pg_comp] = os.environ['PGPASSFILE']
                    port = comp[1]
                    pg = PgInstance("localhost", "postgres", "postgres",
                                    int(port))
                    pg.connect()
                    cluster_stats_data = pg.get_cluster_stats()
                    activity_details = pg.get_current_activity()
                    connections = pg.get_active_connections()
                    self.db_activity[pg_comp] = activity_details
                    pg.close()
                    stats_timestamp = datetime.utcnow()
                    stats_time = stats_timestamp.strftime('%Y/%m/%d %H:%M:%S')
                    if cluster_stats_data and cluster_stats_data[0]:
                        comp_db_stats = cluster_stats_data[0]
                    else:
                        # No stats row returned — skip this component.
                        continue
                    comp_db_stats['time'] = stats_time
                    if pg_comp in self.db_stats:
                        # Keep a rolling window of at most 60 samples.
                        if len(self.db_stats[pg_comp]) == 60:
                            self.db_stats[pg_comp].pop(0)
                        previous = self.previous_db_stats[pg_comp]
                        self.previous_db_stats[pg_comp] = {}
                        time_diff = stats_timestamp - previous['timestamp']
                        # NOTE(review): .seconds ignores the .days part and
                        # is 0 for sub-second gaps (the resulting
                        # ZeroDivisionError is swallowed by the except
                        # below) — presumably samples are taken at a fixed
                        # multi-second cadence; confirm with the scheduler.
                        diff_seconds = time_diff.seconds
                        # Cumulative commit/rollback counters -> per-second
                        # rates since the previous sample.
                        current_xact_commit = (
                            comp_db_stats['xact_commit'] -
                            previous['xact_commit']) / diff_seconds
                        current_xact_rollback = (
                            comp_db_stats['xact_rollback'] -
                            previous['xact_rollback']) / diff_seconds
                        # Save the raw cumulative values BEFORE overwriting
                        # comp_db_stats with the derived rates.
                        self.previous_db_stats[pg_comp][
                            'xact_commit'] = comp_db_stats['xact_commit']
                        self.previous_db_stats[pg_comp][
                            'xact_rollback'] = comp_db_stats['xact_rollback']
                        self.previous_db_stats[pg_comp][
                            'timestamp'] = stats_timestamp
                        comp_db_stats['xact_commit'] = int(
                            round(current_xact_commit))
                        comp_db_stats['xact_rollback'] = int(
                            round(current_xact_rollback))
                        # Same cumulative -> per-second conversion for the
                        # tuple counters.
                        tup_fetched = (comp_db_stats['tup_fetched'] -
                                       previous['tup_fetched']) / diff_seconds
                        tup_inserted = (
                            comp_db_stats['tup_inserted'] -
                            previous['tup_inserted']) / diff_seconds
                        tup_updated = (comp_db_stats['tup_updated'] -
                                       previous['tup_updated']) / diff_seconds
                        tup_deleted = (comp_db_stats['tup_deleted'] -
                                       previous['tup_deleted']) / diff_seconds
                        self.previous_db_stats[pg_comp][
                            'tup_fetched'] = comp_db_stats['tup_fetched']
                        self.previous_db_stats[pg_comp][
                            'tup_inserted'] = comp_db_stats['tup_inserted']
                        self.previous_db_stats[pg_comp][
                            'tup_updated'] = comp_db_stats['tup_updated']
                        self.previous_db_stats[pg_comp][
                            'tup_deleted'] = comp_db_stats['tup_deleted']
                        self.previous_db_stats[pg_comp][
                            'timestamp'] = stats_timestamp
                        comp_db_stats['tup_fetched'] = int(round(tup_fetched))
                        comp_db_stats['tup_inserted'] = int(
                            round(tup_inserted))
                        comp_db_stats['tup_updated'] = int(round(tup_updated))
                        comp_db_stats['tup_deleted'] = int(round(tup_deleted))
                        # Per-state connection counts, e.g. {'active': 3}.
                        comp_db_stats['connections'] = {}
                        for conn in connections:
                            comp_db_stats['connections'][str(
                                conn['state'])] = conn['count']
                        self.db_stats[pg_comp].append(comp_db_stats)
                        self.latest_db_stats[pg_comp] = comp_db_stats
                        pass
                    else:
                        # First sample for this component: seed the
                        # previous-sample cache; rates start next pass.
                        self.db_stats[pg_comp] = []
                        self.previous_db_stats[pg_comp] = {}
                        self.previous_db_stats[pg_comp][
                            'xact_commit'] = comp_db_stats['xact_commit']
                        self.previous_db_stats[pg_comp][
                            'xact_rollback'] = comp_db_stats['xact_rollback']
                        self.previous_db_stats[pg_comp][
                            'tup_fetched'] = comp_db_stats['tup_fetched']
                        self.previous_db_stats[pg_comp][
                            'tup_inserted'] = comp_db_stats['tup_inserted']
                        self.previous_db_stats[pg_comp][
                            'tup_updated'] = comp_db_stats['tup_updated']
                        self.previous_db_stats[pg_comp][
                            'tup_deleted'] = comp_db_stats['tup_deleted']
                        self.previous_db_stats[pg_comp][
                            'timestamp'] = stats_timestamp
                except Exception as e:
                    # Best effort per component: one failing instance must
                    # not stop sampling of the others.
                    pass
            connL.close()

        except sqlite3.Error as e:
            print str(e)
Пример #12
0
    def generate_profiler_reports(self,
                                  hostname="localhost",
                                  username="******",
                                  port=5432,
                                  database="",
                                  password="",
                                  queries="",
                                  report_title="",
                                  report_desc="",
                                  action=None,
                                  comp=None):
        if comp:
            util.read_env_file(comp)
            username = os.environ.get("PGUSER")
            port = os.environ.get("PGPORT")

        result = {}
        result['action'] = action
        try:
            from ProfilerReport import ProfilerReport
            args = {}
            args['pgPass'] = password
            args['hostName'] = hostname
            args['pgDB'] = database
            args['pgUser'] = username
            args['pgPort'] = port

            plReport = ProfilerReport(args)
            report_file = ""

            result['error'] = 0
            if action == "enable":
                plReport.enableProfiler()
                result[
                    'msg'] = "Global profiling statistics has been enabled. Execute a PL/pgSQL workload before viewing the report."
            elif action == "disable":
                plReport.disableProfiler()
                result[
                    'msg'] = "Global profiling statistics has been disabled."
            elif action == "check":
                is_enabled = plReport.is_enabled()
                result['enabled'] = is_enabled
                if is_enabled:
                    result['status'] = 'enabled'
                else:
                    result['status'] = 'disabled'
            elif action == "reset":
                plReport.resetSharedData()
                result['msg'] = "Global profiling statistics reset."
            elif action == "profile_query":
                report_file = plReport.generateQueryReports(
                    queries, report_title, report_desc)
            elif action == "generate":
                if plReport.has_data():
                    report_file = plReport.generateGlobalReports(
                        report_title, report_desc)
                else:
                    result['error'] = 1
                    result['msg'] = "No profiling statistics available."
                    if not plReport.is_enabled():
                        result['msg'] = "Profiler is not enabled."

            result['report_file'] = report_file

        except Exception as e:
            import traceback
            print traceback.format_exc()
            print e
            result = {}
            result['error'] = 1
            result['msg'] = str(e)
        yield self.session.publish('com.bigsql.profilerReports', result)