def avg_rate(self, user_id):
    """Return the average of the non-zero rates given by *user_id*.

    :param user_id: 1-based user id; column ``user_id - 1`` of ``self.R``
        holds that user's rates.
    :return: arithmetic mean of the user's non-zero rates, or ``0.0`` when
        the user has no rates at all.
    """
    # A zero entry means "not rated", so it is excluded from the mean.
    rates = [x for x in get_column(user_id - 1, self.R) if x != 0]
    if not rates:
        # Guard: previously this raised ZeroDivisionError for users
        # with no ratings.
        return 0.0
    return sum(rates) / len(rates)
def pg_settings(self, comp):
    """Fetch the PostgreSQL settings of component *comp*, grouped by category.

    Connects to the component's server on localhost as user "postgres",
    reads its settings, groups them by their 'category' key, and publishes
    the result on the 'com.bigsql.onPGsettings' topic.

    :param comp: component name whose 'port' setting and env file are used.
    :return: yields the ``publish()`` result (WAMP coroutine style).
    """
    import itertools  # hoisted out of the try block; a stdlib import cannot fail here

    result = dict()
    result['component'] = comp
    port = util.get_column('port', comp)
    try:
        # Reuse the cached PGPASSFILE per component so the env file is
        # only parsed once per session.
        if comp in self.pgpassfiles:
            os.environ['PGPASSFILE'] = self.pgpassfiles[comp]
        else:
            util.read_env_file(comp)
            self.pgpassfiles[comp] = os.environ['PGPASSFILE']
        pg = PgInstance("localhost", "postgres", "postgres", int(port))
        pg.connect()
        settings_list = pg.get_pg_settings()
        pg.close()
        final_list = []
        # NOTE(review): itertools.groupby only groups *adjacent* rows, so
        # this relies on get_pg_settings() returning rows already ordered
        # by 'category' — confirm against PgInstance.
        for key, group in itertools.groupby(settings_list,
                                            key=lambda x: x['category']):
            final_list.append({'name': str(key), 'settings': list(group)})
        result['list'] = final_list
    except Exception as e:
        # Best-effort: still publish, but surface the failure instead of
        # silently dropping it (was: bare "pass").
        result['error'] = str(e)
    yield self.session.publish('com.bigsql.onPGsettings', result)
def start_comp(p_comp, p_homedir, p_start_cmd):
    """Start component *p_comp*, initializing its data directory first if needed.

    :param p_comp: component name, used to locate ``init-<comp>.py``.
    :param p_homedir: directory to chdir into before running anything.
    :param p_start_cmd: shell command that actually starts the component.
    :return: always 0.
    """
    print(p_comp + " starting")
    os.chdir(p_homedir)
    data_dir = util.get_column("datadir", p_comp)
    # A missing or empty datadir setting means the component was never
    # initialized, so run its init script before starting it.
    needs_init = (data_dir == "") or (not os.path.isdir(data_dir))
    if needs_init:
        init_cmd = sys.executable + " -u init-" + p_comp + ".py"
        os.system(init_cmd)
    os.system(p_start_cmd)
    return 0
def vectorize_users(self, user, dim=25):
    """Represent a user as a truncated column of the SVD factor V.

    SVD orders latent features by decreasing importance, so keeping only
    the first *dim* components of the user's column retains the most
    informative part of the representation.  Comparing full vectors with
    cosine similarity would let the many low-relevance trailing features
    dominate the comparison, so the truncated form is used instead.

    :param user: 1-based user id; column ``user - 1`` of ``self.V`` is used.
    :param dim: number of leading SVD components to keep.
    :return: the first *dim* elements of the user's column of V.
    """
    full_vector = get_column(user - 1, self.V)
    return full_vector[:dim]
def stop_comp(p_comp):
    """Stop component *p_comp* by killing the process named in its pidfile.

    Progress and errors are printed rather than raised; removing the
    pidfile marks the component as stopped.

    :param p_comp: component name whose "pidfile" setting is looked up.
    :return: always 0.
    """
    pid_path = util.get_column("pidfile", p_comp)
    if not os.path.isfile(pid_path):
        print(p_comp + " is not running")
        return 0
    print(p_comp + " stopping")
    try:
        # First line of the pidfile holds the process id.
        with open(pid_path, 'r') as pf:
            pid_text = pf.readline().rstrip(os.linesep)
        util.kill_pid(int(pid_text))
        os.remove(pid_path)
    except Exception as err:
        # Best-effort shutdown: report the problem and carry on.
        print(str(err))
    return 0
def do_pgc_tune(p_comp, email, print_json=False):
    """Submit this instance's configuration to the BigTuna tuning service
    and print the recommended settings.

    :param p_comp: component whose postgres settings are collected.
    :param email: contact email included in the tuning request.
    :param print_json: when True, dump the raw JSON response instead of
        "key = value" lines.
    """
    tuning_service = get_json_file("tuning_service_descriptor", print_json)

    # Collect the live postgres configuration from the local server.
    util.read_env_file(p_comp)
    port = util.get_comp_port(p_comp)
    pg = PgInstance("localhost", "postgres", "postgres", int(port))
    pg.connect()
    pg_config = pg.get_raw_pg_settings()
    pg.close()
    pg_config['postgres_version'] = util.get_column('version', p_comp)

    request_body = {
        'tune_request': {
            'postgres_config': pg_config,
            'email': email,
            'user_agent': 'pgc {}'.format(util.get_pgc_version()),
            'system_config': api.info(True, 'na', 'na', False),
        }
    }

    # Web request
    tune_url = (tuning_service['bigtuna']['url'] +
                tuning_service['bigtuna']['path'])
    req = urllib2.Request(tune_url)
    req.add_header('Content-Type', 'application/json')
    response = urllib2.urlopen(req, json.dumps(request_body))
    result = json.load(response)

    if print_json:
        print(json.dumps(result, sort_keys=True, indent=2,
                         separators=(',', ': ')))
    else:
        for key in result['tune_result']:
            print("{0} = {1}".format(key, result['tune_result'][key]))
def checkExtension(self, database, comp, extension):
    """Check whether *extension* is installed in *database* of component *comp*.

    Publishes a result dict on the 'com.bigsql.onCheckExtension' topic;
    its 'status' key carries the installed-or-not answer.

    :param database: database name to connect to.
    :param comp: component name whose 'port' setting and env file are used.
    :param extension: postgres extension name to look up.
    :return: yields the ``publish()`` result (WAMP coroutine style).
    """
    result = dict()
    result['component'] = comp
    port = util.get_column('port', comp)
    try:
        # Reuse the cached PGPASSFILE per component when available.
        if comp in self.pgpassfiles:
            os.environ['PGPASSFILE'] = self.pgpassfiles[comp]
        else:
            util.read_env_file(comp)
            self.pgpassfiles[comp] = os.environ['PGPASSFILE']
        pg = PgInstance("localhost", "postgres", database, int(port))
        pg.connect()
        is_extension_installed = pg.is_extension_installed(extension)
        pg.close()
        result['status'] = is_extension_installed
    except Exception as e:
        # Best-effort: still publish, but surface the failure instead of
        # silently dropping it (was: bare "pass").
        result['error'] = str(e)
    yield self.session.publish('com.bigsql.onCheckExtension', result)
def db_list(self, comp):
    """Get the list of databases available in the cluster of component *comp*.

    Publishes a result dict on the 'com.bigsql.ondblist' topic; its 'list'
    key carries the database list.

    :param comp: component name whose 'port' setting and env file are used.
    :return: yields the ``publish()`` result (WAMP coroutine style).
    """
    result = dict()
    result['component'] = comp
    port = util.get_column('port', comp)
    try:
        # Reuse the cached PGPASSFILE per component when available.
        if comp in self.pgpassfiles:
            os.environ['PGPASSFILE'] = self.pgpassfiles[comp]
        else:
            util.read_env_file(comp)
            self.pgpassfiles[comp] = os.environ['PGPASSFILE']
        pg = PgInstance("localhost", "postgres", "postgres", int(port))
        pg.connect()
        database_list = pg.get_database_list()
        pg.close()
        result['list'] = database_list
    except Exception as e:
        # Best-effort: still publish, but surface the failure instead of
        # silently dropping it (was: bare "pass").
        result['error'] = str(e)
    yield self.session.publish('com.bigsql.ondblist', result)
import os

from util import get_column

__author__ = 'bingjun'

# One-off inspection script: jump into the hard-coded experiment output
# directory and dump one column of a population file.
os.chdir("/Volumes/BigTwins/MutatorModelData/Expo_M0.0_R0.0_G1000_N500_BeneMR3.0E-5_DeleMR0.05_BeneE0.03_DeleE0.03_MutStr2_MutMR1.0E-4_AntiMutMR0.0_MutaE0.03")

# Print column 1 of the tab-separated population file.
print(get_column('37930029619894_Pop.txt', "\t", 1))
import subprocess
import os
import sys

# Start script for the pgXX component: locate settings, then launch the
# server with pg_ctl.
MY_HOME = os.getenv("MY_HOME", "")
sys.path.append(os.path.join(MY_HOME, 'hub', 'scripts'))
sys.path.append(os.path.join(MY_HOME, 'hub', 'scripts', 'lib'))

import util

util.set_lang_path()

pgver = "pg9X"
dotver = pgver[2] + "." + pgver[3]

datadir = util.get_column('datadir', pgver)
logdir = util.get_column('logdir', pgver)
autostart = util.get_column('autostart', pgver)
pg_ctl = os.path.join(MY_HOME, pgver, "bin", "pg_ctl")
# Reuse the logdir fetched above instead of querying the settings a
# second time (was a redundant util.get_column('logdir', ...) call).
logfile = logdir + os.sep + "postgres.log"

util.read_env_file(pgver)

# -s: silent, -w: wait for startup to finish, -D: data dir, -l: log file.
cmd = pg_ctl + ' start -s -w -D "' + datadir + '" ' + '-l "' + logfile + '"'
util.system(cmd)
# Configure-script fragment: parse CLI options and refuse to change
# settings while the server is running.  (Fragment is truncated: the
# dangling `else:` branch continues past this view.)
logdir = os.path.join(homedir, pgver)

parser = argparse.ArgumentParser()
parser.add_argument("--port", type=int, default=0)
parser.add_argument("--autostart", choices=["on", "off"])
parser.add_argument("--datadir", type=str, default="")
parser.add_argument("--logdir", type=str, default="")
parser.add_argument("--svcname", type=str, default="")
# Show GNU-style "--autostart={on,off}" in the usage text.
parser.usage = parser.format_usage().replace("--autostart {on,off}", "--autostart={on,off}")
args = parser.parse_args()

# isJson selects machine-readable output for the hub UI.
isJson = os.getenv("isJson", None)

autostart = util.get_column('autostart', pgver)
app_datadir = util.get_comp_datadir(pgver)
port = util.get_comp_port(pgver)

is_running = False
# An initialized datadir plus a busy port means the server is up.
if app_datadir != "" and util.is_socket_busy(int(port), pgver):
    is_running = True
    msg = "You cannot change the configuration when the server is running."
    if isJson:
        jsonMsg = {}
        jsonMsg['status'] = "error"
        jsonMsg['component'] = pgver
        jsonMsg['msg'] = msg
        print(json.dumps([jsonMsg]))
    else:
###### Copyright (c) 2015-2018 BigSQL ########## #################################################################### import os, sys, subprocess, json import util, startup pgver = "pg11" dotver = pgver[2] + "." + pgver[3] PGC_HOME = os.getenv('PGC_HOME', '') homedir = os.path.join(PGC_HOME, pgver) logdir = os.path.join(homedir, pgver) datadir = util.get_column('datadir', pgver) isJson = os.getenv("isJson", None) first_time="no" if not os.path.isdir(datadir): rc=os.system(sys.executable + ' -u ' + homedir + os.sep + 'init-' + pgver + '.py') if rc == 0: rc=os.system(sys.executable + ' -u ' + homedir + os.sep + 'config-' + pgver + '.py') else: sys.exit(rc) datadir = util.get_column('datadir', pgver) first_time="yes" autostart = util.get_column('autostart', pgver) logfile = util.get_column('logdir', pgver) + os.sep + "postgres.log"
# stop-<pgver> script fragment: read the postmaster pid and stop the
# server.  (Fragment is truncated: the isJson branch continues past
# this view.)
MY_HOME = os.getenv("MY_HOME", "")
scripts_path = os.path.join(MY_HOME, 'hub', 'scripts')
scripts_lib_path = os.path.join(MY_HOME, 'hub', 'scripts', 'lib')
# Avoid duplicate sys.path entries when the script runs repeatedly.
if scripts_path not in sys.path:
    sys.path.append(scripts_path)
if scripts_lib_path not in sys.path:
    sys.path.append(scripts_lib_path)

import util, startup

pgver = "pg9X"
homedir = os.path.join(MY_HOME, pgver)
datadir = util.get_column('datadir', pgver)
pidfile = os.path.join(datadir, "postmaster.pid")
isJson = os.getenv("isJson", None)

# The first line of postmaster.pid is the server's process id.
if os.path.isfile(pidfile):
    with open(pidfile, 'r') as f:
        pid = f.readline().rstrip(os.linesep)
else:
    # No pidfile means the server is already down.
    print(pgver + " stopped")
    sys.exit(0)

msg = pgver + " stopping"
if isJson:
    jsonMsg = {}
# Configure-script fragment (variant of the one above): parse CLI options
# and refuse to change settings while the server is running.  (Fragment
# is truncated: the dangling `else:` branch continues past this view.)
homedir = os.path.join(MY_HOME, pgver)
logdir = os.path.join(homedir, pgver)

parser = argparse.ArgumentParser()
parser.add_argument("--port", type=int, default=0)
parser.add_argument("--autostart", choices=["on", "off"])
parser.add_argument("--datadir", type=str, default="")
parser.add_argument("--logdir", type=str, default="")
parser.add_argument("--svcname", type=str, default="")
# Show GNU-style "--autostart={on,off}" in the usage text.
parser.usage = parser.format_usage().replace("--autostart {on,off}","--autostart={on,off}")
args = parser.parse_args()

# isJson selects machine-readable output for the hub UI.
isJson = os.getenv("isJson", None)

autostart = util.get_column('autostart', pgver)
app_datadir = util.get_comp_datadir(pgver)
port = util.get_comp_port(pgver)

is_running = False
# An initialized datadir plus a busy port means the server is up.
if app_datadir != "" and util.is_socket_busy(int(port), pgver):
    is_running = True
    msg = "You cannot change the configuration when the server is running."
    if isJson:
        jsonMsg = {}
        jsonMsg['status'] = "error"
        jsonMsg['component'] = pgver
        jsonMsg['msg'] = msg
        print(json.dumps([jsonMsg]))
    else:
# Configure pgXX for pglogical conflict handling and CSV-log auditing.
# (Fragment is truncated: the SQL literal continues past this view.)
util.change_pgconf_keyval("pgXX", "track_commit_timestamp", "on", True)
util.change_pgconf_keyval("pgXX", "pglogical.conflict_resolution", "last_update_wins", True)
#util.change_pgconf_keyval("pgXX", "log_min_messages", "debug3", True)
util.change_pgconf_keyval("pgXX", "log_destination", "stderr, csvlog")

# Expose the server's own CSV log as a foreign table via file_fdw.
util.run_sql_cmd("pgXX", "CREATE EXTENSION file_fdw", True)
util.run_sql_cmd("pgXX", "CREATE SERVER pglog FOREIGN DATA WRAPPER file_fdw", True)
# NOTE(review): duplicate of the conflict_resolution call above — confirm intended.
util.change_pgconf_keyval("pgXX", "pglogical.conflict_resolution", "last_update_wins", True)

# CSV logs rotate per weekday: postgresql-<Ddd>.csv
day = datetime.datetime.now().strftime('%a')
logdir = util.get_column("logdir", "pgXX")
csvlogfile = logdir + os.sep + "postgresql-" + day + ".csv"

# Column layout must match postgres' csvlog format.
sql = \
"CREATE FOREIGN TABLE pglog ( \
log_time timestamp(3) with time zone, \
user_name text, \
database_name text, \
process_id integer, \
connection_from text, \
session_id text, \
session_line_num bigint, \
command_tag text, \
session_start_time timestamp with time zone, \
virtual_transaction_id text, \
transaction_id bigint, \
error_severity text, \
from __future__ import print_function, division
####################################################################
###### Copyright (c) 2015-2019 BigSQL ##########
####################################################################
import os, sys
import util, startup

# Remove the OS-level autostart registration for this postgres component.
pgver = "pg9X"

autostart = util.get_column('autostart', pgver)
if autostart != "on":
    # Nothing was registered for autostart, so there is nothing to remove.
    sys.exit(0)

dotver = pgver[2] + "." + pgver[3]
APG_HOME = os.getenv('APG_HOME', '')
svcname = util.get_column('svcname', pgver, 'PostgreSQL ' + dotver + ' Server')

if util.get_platform() == "Windows":
    # Delete the Windows service via sc.exe (requires elevation).
    sc_path = os.getenv("SYSTEMROOT", "") + os.sep + "System32" + os.sep + "sc"
    command = sc_path + ' delete "' + svcname + '"'
    util.system(command, is_admin=True)
elif util.get_platform() == "Linux":
    # Drop the SysV init links (start priority 85, stop priority 15).
    startup.remove_linux("postgresql" + pgver[2:4], "85", "15")
from __future__ import print_function, division
####################################################################
###### Copyright (c) 2020-2021 PGSQL.IO ##########
####################################################################
import os, sys
import util, startup

# Unregister this postgres component from Linux autostart.
pgver = "pg9X"

autostart = util.get_column('autostart', pgver)
if autostart != "on":
    # No autostart entry exists, so there is nothing to unregister.
    sys.exit(0)

# e.g. pgver "pg96" -> service name "postgresql96".
service_name = "postgresql" + pgver[2:4]
startup.remove_linux(service_name, pgver)