def getConnection(connUser):
    """Return a database connection for connUser.

    Reuses a cached connection when one exists (and it passes the optional
    connection_test); otherwise opens a new one, caches it, and returns it.

    Raises SkunkStandardError if the user is unknown or the connect fails.
    """
    if connUser not in _users:
        raise SkunkStandardError('user %s is not initialized' % connUser)
    connectParams = _users[connUser]
    # Derive the optional host from the first ':'-separated component of the
    # connect string; a '|' in it stands in for ':' before an explicit port.
    # BUG FIX: host was previously computed only on the fresh-connect path,
    # so the reconnect in the else-branch below raised NameError.
    host = None
    connectArgs = connectParams.split(':')
    if connectArgs[0]:
        if '|' in connectArgs[0]:  # host carries an explicit port
            host = connectArgs[0].replace('|', ':')
    if connectParams not in _connections:
        try:
            db = _connections[connectParams] = pgdb.connect(connectParams,
                                                            host=host)
        except pgdb.Error:
            # Do not include the connect string in the message: the
            # traceback may be seen by users.
            raise SkunkStandardError('cannot connect to PostgreSQL: %s'
                                     % (sys.exc_info()[1],))
    else:
        db = _connections[connectParams]
        if connection_test and not connection_test(db):
            # Stale connection: drop it and reconnect.
            del db
            del _connections[connectParams]
            db = _connections[connectParams] = pgdb.connect(connectParams,
                                                            host=host)
    return db
def getConnection(connUser):
    """Return a database connection for connUser.

    Reuses a cached connection when one exists (and it passes the optional
    connection_test); otherwise opens a new one, caches it, and returns it.

    Raises SkunkStandardError if the user is unknown or the connect fails.
    """
    if connUser not in _users:
        raise SkunkStandardError('user %s is not initialized' % connUser)
    connectParams = _users[connUser]
    # Derive the optional host from the first ':'-separated component of the
    # connect string; a '|' in it stands in for ':' before an explicit port.
    # BUG FIX: host was previously computed only on the fresh-connect path,
    # so the reconnect in the else-branch below raised NameError.
    host = None
    connectArgs = connectParams.split(':')
    if connectArgs[0]:
        if '|' in connectArgs[0]:  # host carries an explicit port
            host = connectArgs[0].replace('|', ':')
    if connectParams not in _connections:
        try:
            db = _connections[connectParams] = pgdb.connect(connectParams,
                                                            host=host)
        except pgdb.Error:
            # Do not include the connect string in the message: the
            # traceback may be seen by users.
            raise SkunkStandardError('cannot connect to PostgreSQL: %s'
                                     % (sys.exc_info()[1],))
    else:
        db = _connections[connectParams]
        if connection_test and not connection_test(db):
            # Stale connection: drop it and reconnect.
            del db
            del _connections[connectParams]
            db = _connections[connectParams] = pgdb.connect(connectParams,
                                                            host=host)
    return db
def __init__(self, connectArgs):
    """Parse a ':'-separated connect string and open a connection.

    Recognized trailing components: 'verbose' (sets self.verbose) and
    'cache' (route the connection through the PostgreSql cache module).
    The first component, if non-empty, is the host; a '|' inside it
    separates host from port and is rewritten to ':'.

    BUG FIX: the original used the Python 2-only ``string.split`` /
    ``string.join`` module functions (removed in Python 3); str methods
    are used instead.
    """
    parts = connectArgs.split(':')
    host = None
    if parts and parts[0]:  # host component present
        if '|' in parts[0]:  # host carries an explicit port
            host = parts[0].replace('|', ':')
    if parts and parts[-1] == 'verbose':
        self.verbose = 1
        parts = parts[:-1]
    else:
        self.verbose = 0
    if parts and parts[-1] == 'cache':
        self.useCacheMod = 1
        connectArgs = ':'.join(parts[:-1])
    else:
        connectArgs = ':'.join(parts)
        self.useCacheMod = 0
    self.connectArgs = connectArgs
    if self.useCacheMod:
        # Delegate to the caching module, which maintains shared
        # connections keyed by connect string.
        import PostgreSql
        self.conn = PostgreSql.getConnection(connectArgs)
    else:
        if host is not None:
            self.conn = pgdb.connect(connectArgs, host=host)
        else:
            self.conn = pgdb.connect(connectArgs)
    self.bindVariables = 0
def connect_and_authenticate(secret_dict, port, dbname, use_ssl):
    """Attempt to connect and authenticate to a PostgreSQL instance

    This helper function tries to connect to the database using connectivity
    info passed in. If successful, it returns the connection, else None

    Args:
        - secret_dict (dict): The Secret Dictionary
        - port (int): The database port to connect to
        - dbname (str): Name of the database
        - use_ssl (bool): Flag indicating whether connection should use SSL/TLS

    Returns:
        Connection: The pgdb connection object if successful. None otherwise
        (BUG FIX: the original docstring wrongly described a
        pymongo.database.Database return type.)

    Raises:
        KeyError: If the secret json does not contain the expected keys
    """
    # Try to obtain a connection to the db
    try:
        if use_ssl:
            # Setting sslmode='verify-full' will verify the server's certificate and check the server's host name
            conn = pgdb.connect(host=secret_dict['host'],
                                user=secret_dict['username'],
                                password=secret_dict['password'],
                                database=dbname,
                                port=port,
                                connect_timeout=5,
                                sslrootcert='/etc/pki/tls/cert.pem',
                                sslmode='verify-full')
        else:
            conn = pgdb.connect(host=secret_dict['host'],
                                user=secret_dict['username'],
                                password=secret_dict['password'],
                                database=dbname,
                                port=port,
                                connect_timeout=5,
                                sslmode='disable')
        logger.info(
            "Successfully established %s connection as user '%s' with host: '%s'"
            % ("SSL/TLS" if use_ssl else "non SSL/TLS",
               secret_dict['username'], secret_dict['host']))
        return conn
    except pg.InternalError as e:
        # Classify the common SSL-related failure modes for clearer logs;
        # any other InternalError falls through silently and returns None.
        if "server does not support SSL, but SSL was required" in e.args[0]:
            logger.error(
                "Unable to establish SSL/TLS handshake, SSL/TLS is not enabled on the host: %s"
                % secret_dict['host'])
        elif re.search('server common name ".+" does not match host name ".+"',
                       e.args[0]):
            # BUG FIX: corrected the "estlablishing" typo in the log message.
            logger.error(
                "Hostname verification failed when establishing SSL/TLS Handshake with host: %s"
                % secret_dict['host'])
        elif re.search('no pg_hba.conf entry for host ".+", SSL off',
                       e.args[0]):
            logger.error(
                "Unable to establish SSL/TLS handshake, SSL/TLS is enforced on the host: %s"
                % secret_dict['host'])
        return None
def PyPostgreConnect(database, user, password, hostname):
    """Connect to PostgreSQL, creating the database if necessary.

    Stores the connection in the module-level ``connection`` global.

    Returns:
        1  -- database existed and the connection was opened,
        -2 -- database exists (found via the 'postgres' maintenance DB),
        0  -- database was created and the connection opened,
        -1 -- failure.
    """
    global connection
    import pgdb
    postgres_host = hostname.strip()
    try:
        connection = pgdb.connect(host=postgres_host, user=user,
                                  password=password, database=database)
        return 1
    except Exception:
        # Database (or server) unreachable -- probe the server through the
        # 'postgres' maintenance database.  (BUG FIX: the original used
        # bare `except:`, which also swallows SystemExit/KeyboardInterrupt.)
        try:
            connection = pgdb.connect(host=postgres_host, user=user,
                                      password=password, database='postgres')
            cursor = connection.cursor()
            # BUG FIX: bind the database name instead of concatenating it
            # into the SQL string (injection / quoting hazard).
            cursor.execute(
                'select datname from pg_database where datname=%(db)s',
                {'db': database})
            r = cursor.fetchone()
            if r is not None:
                return -2  # database found
            cursor.execute('END')
            # Identifiers cannot be bound as parameters; `database` must be
            # trusted by the caller here.
            cursor.execute('create database ' + database)
            connection = pgdb.connect(host=postgres_host, database=database,
                                      user=user, password=password)
            return 0
        except Exception:
            return -1  # failed
    # (The original's trailing `return connection` was unreachable: every
    # path above returns.)
def db_connect(pwd): mysql.connector.connect(host='localhost', user='******', password='') # Noncompliant # ^^^^^^^^^^^ mysql.connector.connect(host='localhost', password='', user='******') # Noncompliant mysql.connector.connect('localhost', 'sonarsource', '') # Noncompliant # ^^ mysql.connector.connect('localhost', 'sonarsource', 'hello') # OK mysql.connector.connect('localhost', 'sonarsource', password='******') # OK mysql.connector.connect('localhost', 'sonarsource', password=pwd) # OK mysql.connector.connect('localhost', 'sonarsource', pwd) # OK mysql.connector.connect('localhost', 'sonarsource') # OK mysql.connector.connect('localhost', 'sonarsource', **dict) # OK mysql.connector.connection.MySQLConnection(host='localhost', user='******', password='') # Noncompliant pymysql.connect(host='localhost', user='******', password='') # Noncompliant pymysql.connections.Connection(host='localhost', user='******', password='') # Noncompliant psycopg2.connect(host='localhost', user='******', password='') # Noncompliant pgdb.connect(host='localhost', user='******', password='') # Noncompliant pg.DB(host='localhost', user='******', passwd='') # Noncompliant pg.DB('dbname', 'localhost', 5432, 'opt', 'postgres', '') # Noncompliant pg.connect(host='localhost', user='******', passwd='') # Noncompliant pg.DB(host='localhost', user='******', passwd=pwd) # Compliant pg.DB('dbname', 'localhost', 5432, 'opt', 'postgres', pwd) # Compliant
def load_ddl(iamconnectioninfo, working_dir):
    """Create the TPC-DS / TPC-H benchmark schemas on the target cluster,
    then the task-status tables on the local monitoring database.

    Schemas are created (and search_path set) only for the data sets whose
    scale setting is non-empty on `iamconnectioninfo`.
    """
    with connect(database=iamconnectioninfo.db,
                 host=iamconnectioninfo.hostname_plus_port,
                 user=iamconnectioninfo.username,
                 password=iamconnectioninfo.password) as conn:
        cursor = conn.cursor()
        # tpcds
        if iamconnectioninfo.tpcds != '':
            ddl = open(f'{working_dir}/ddl/tpcds-ddl.sql', 'r').read()
            schema = 'tpcds_{}'.format(iamconnectioninfo.tpcds)
            cursor.execute('create schema if not exists %s' % (schema))
            cursor.execute('set search_path to %s' % (schema))
            cursor.execute(ddl)
        # tpch
        # BUG FIX: the original tested `iamconnectioninfo.tpcds` here (a
        # copy-paste of the block above), so the TPC-H DDL ran or was
        # skipped based on the wrong configuration value.
        if iamconnectioninfo.tpch != '':
            ddl = open(f'{working_dir}/ddl/tpch-ddl.sql', 'r').read()
            schema = 'tpch_{}'.format(iamconnectioninfo.tpch)
            cursor.execute('create schema if not exists %s' % (schema))
            cursor.execute('set search_path to %s' % (schema))
            cursor.execute(ddl)
    # task status tables for monitoring
    with connect(dbname='postgres', host='0.0.0.0', user='******') as conn:
        cursor = conn.cursor()
        cursor.execute(open(f'{working_dir}/ddl/task_status.sql', 'r').read())
        cursor.execute(
            open(f'{working_dir}/ddl/task_load_status.sql', 'r').read())
        cursor.execute(
            open(f'{working_dir}/ddl/benchmark_query_status.sql', 'r').read())
        cursor.execute(
            open(f'{working_dir}/ddl/benchmark_table_row_counts.sql', 'r').read())
def __init__(self, connectArgs):
    """Parse a ':'-separated connect string and open a connection.

    Recognized trailing components: 'verbose' (sets self.verbose) and
    'cache' (route the connection through the PostgreSql cache module).
    The first component, if non-empty, is the host; a '|' inside it
    separates host from port and is rewritten to ':'.

    BUG FIX: the original used the Python 2-only ``string.split`` /
    ``string.join`` module functions (removed in Python 3); str methods
    are used instead.
    """
    parts = connectArgs.split(':')
    host = None
    if parts and parts[0]:  # host component present
        if '|' in parts[0]:  # host carries an explicit port
            host = parts[0].replace('|', ':')
    if parts and parts[-1] == 'verbose':
        self.verbose = 1
        parts = parts[:-1]
    else:
        self.verbose = 0
    if parts and parts[-1] == 'cache':
        self.useCacheMod = 1
        connectArgs = ':'.join(parts[:-1])
    else:
        connectArgs = ':'.join(parts)
        self.useCacheMod = 0
    self.connectArgs = connectArgs
    if self.useCacheMod:
        # Delegate to the caching module, which maintains shared
        # connections keyed by connect string.
        import PostgreSql
        self.conn = PostgreSql.getConnection(connectArgs)
    else:
        if host is not None:
            self.conn = pgdb.connect(connectArgs, host=host)
        else:
            self.conn = pgdb.connect(connectArgs)
    self.bindVariables = 0
def getConnection():
    """ Returns an open database connection """
    # NOTE(review): credentials are hard-coded in source (blanked-out for
    # production, literal values for development).  They should come from
    # configuration or a secrets store rather than the code.
    if controller.IS_PROD:
        return pgdb.connect(database="", host="", user="", password="")
    else:
        # Local development database.
        return pgdb.connect(database="cpcc", host="localhost",
                            user="******", password="******")
def load_worker(queue, data_set, task_uuid):
    """Worker loop: pull table names from `queue`, COPY each table's data
    from S3 into the target cluster, and record load status/metrics in the
    local monitoring database.  Runs until the process is terminated."""
    while True:
        tbl = queue.get()
        print('Processing %s (MP: %s) ' % (tbl, mp.current_process().name))
        iamconnectioninfo = IamConnection()
        # The attribute named after the data set holds its scale factor.
        scale = getattr(iamconnectioninfo, data_set)
        schema = '{}_{}'.format(data_set, scale)
        bucket = 'redshift-managed-loads-datasets-us-east-1'
        copy_sql = f"COPY {tbl} FROM 's3://{bucket}/dataset={data_set}/size={scale}/table={tbl}/{tbl}.manifest' iam_role '{iamconnectioninfo.iamrole}' gzip delimiter '|' COMPUPDATE OFF MANIFEST"
        # Double the single quotes so the COPY text can be embedded as a SQL
        # string literal in the status INSERT below.
        # NOTE(review): values are interpolated directly into SQL throughout
        # this function; this is only safe while table/dataset names come
        # from trusted configuration.
        copy_sql_double_quoted = copy_sql.translate(str.maketrans({"'": r"''"}))
        # Mark the load as in-flight in the monitoring DB.
        with connect(dbname='postgres', host='0.0.0.0', user='******') as conn:
            cursor = conn.cursor()
            cursor.execute(
                f"INSERT INTO task_load_status(task_uuid,tablename,dataset,status,load_start,querytext) values('{task_uuid}','{tbl}','{schema}','inflight',timezone('utc', now()),'{copy_sql_double_quoted}')"
            )
        # Run the COPY on the target cluster and collect metrics.
        with connect(database=iamconnectioninfo.db,
                     host=iamconnectioninfo.hostname_plus_port,
                     user=iamconnectioninfo.username,
                     password=iamconnectioninfo.password) as conn:
            cursor = conn.cursor()
            cursor.execute('set search_path to %s' % (schema))
            cursor.execute(copy_sql)
            # pg_last_copy_id() identifies the COPY we just ran; the
            # digit-filtering below extracts the bare integer from the
            # fetched row's string form.
            cursor.execute('select pg_last_copy_id()')
            query_id = int("".join(filter(str.isdigit, str(cursor.fetchone()))))
            cursor.execute('select count(*) from %s' % (tbl))
            row_count = int("".join(filter(str.isdigit, str(cursor.fetchone()))))
            cursor.execute(
                'select count(*) from stv_blocklist where tbl=\'%s.%s\'::regclass::oid'
                % (schema, tbl))
            block_count = int("".join(
                filter(str.isdigit, str(cursor.fetchone()))))
        # Record completion and the collected metrics.
        with connect(dbname='postgres', host='0.0.0.0', user='******') as conn:
            cursor = conn.cursor()
            cursor.execute(
                'UPDATE task_load_status SET status=\'complete\',load_end=timezone(\'utc\', now()), '
                'query_id=%s,rows_d=%s, size_d=%s WHERE tablename=\'%s\' and dataset=\'%s\''
                % (query_id, row_count, block_count, tbl, schema))
        queue.task_done()
def listColumns(dbName, dbHost, dbPort, tbl, serverType, passwd, dbUser):
    """Print an HTML table listing the columns of `tbl` (name, type,
    nullable, default) from a PostgreSQL or MySQL server.

    BUG FIXES: Python 2 `print` statements converted to calls (the original
    was a SyntaxError on Python 3); the dead duplicate `qstr` assignment
    removed; the PostgreSQL table name is bound as a parameter instead of
    interpolated into the SQL.
    """
    if tbl.find(".") >= 0:
        schema, tname = tbl.split('.')
    else:
        tname = tbl
    print("<h2>Columns in table: " + tbl + "</h2>")
    print("<table width='75%' class='graybox' cellspacing='0' cellpadding='5'>")
    print("<tr><th align='left'>Name</th><th align='left'>Type</th><th align='left'>Nullable</th><th align='left'>Default</th></tr>")
    if serverType == 'postgresql':
        mycon = pgdb.connect(database=dbName, host=dbHost + ':' + dbPort,
                             user=dbUser)
        qstr = """select ordinal_position, column_name, data_type, is_nullable, column_default from information_schema.columns where table_name=%(tname)s order by ordinal_position"""
        curs = mycon.cursor()
        curs.execute(qstr, {'tname': tname})
        for row in curs.fetchall():
            print("<tr><td>%s</td><td class=\"datatype\">%s</td><td>%s</td><td>%s</td></tr>"
                  % (row[1], row[2], row[3], row[4]))
    else:
        mycon = MySQLdb.connect(db=dbName, host=dbHost, port=int(dbPort),
                                user=dbUser, passwd=passwd)
        # MySQL identifiers cannot be bound as parameters; `tname` must be
        # trusted by the caller here.
        qstr = "show columns in " + tname
        curs = mycon.cursor()
        curs.execute(qstr)
        for row in curs.fetchall():
            print("<tr><td>%s</td><td class=\"datatype\">%s</td><td>%s</td><td>%s</td></tr>"
                  % (row[0], row[1], row[2], row[4]))
    print("</table>")
def open_database_connection(config_registry, pkgdbu=False, db_server=None):
    """Open a pgdb connection to the 'pkgdb' database.

    With pkgdbu=True, connect locally over the Unix-domain socket using the
    pkgdb superuser secret; otherwise connect to `db_server` (looked up from
    the domain when not given) as the machine account.  Returns None when no
    database server can be determined.
    """
    connection_info = {
        # see <http://www.postgresql.org/docs/8.4/static/libpq-connect.html>
        'dbname': 'pkgdb',
    }
    if config_registry.is_true('pkgdb/requiressl'):
        connection_info['sslmode'] = 'require'
    if pkgdbu:
        # 'host' not specified -> localhost over Unix-domain socket (connection type "local")
        connection_info['user'] = '******'
        password_file = '/etc/postgresql/pkgdb.secret'
    else:
        if db_server is None:
            db_server = get_dbservername(config_registry['domainname'])
            if db_server is None:
                return None
        connection_info['host'] = db_server
        connection_info['user'] = config_registry.get(
            'pkgdb/user', '%s$' % (config_registry['hostname'], ))
        password_file = config_registry.get('pkgdb/pwdfile', '/etc/machine.secret')
    # NOTE(review): the file is opened in binary mode but .rstrip('\n') takes
    # a str argument -- this only works on Python 2 (it raises TypeError on
    # Python 3).  Confirm the target interpreter.
    connection_info['password'] = open(password_file, 'rb').read().rstrip('\n')
    # Build a libpq-style conninfo string, escaping backslashes and quotes
    # inside each single-quoted value.
    connectstring = ' '.join([
        "%s='%s'" % (
            key,
            value.replace('\\', '\\\\').replace("'", "\\'"),
        ) for (
            key,
            value,
        ) in connection_info.items()
    ])
    # NOTE(review): the whole conninfo string is passed as `database=`;
    # verify this is the intended usage for the installed pgdb version.
    connection = pgdb.connect(database=connectstring)
    return connection
def add_book_info(isbn13, book_info):
    """
    Add book information to PostgreSQL server.

    isbn13 is the canonical 13-digit ISBN number; book_info is a dict of
    column-name -> value pairs.  Returns False when book_info is None;
    otherwise inserts a stub row for the ISBN if needed and updates each
    given column.
    """
    if book_info is None:  # BUG FIX: identity, not equality, for None
        return False
    session = pgdb.connect(user='******', password='******', database='dist')
    cursor = session.cursor()
    # Add book record if ISBN13 (primary key) does not exist.
    cursor.execute('SELECT book_info FROM book_info WHERE isbn13=%(isbn)s',
                   {'isbn': isbn13})
    if cursor.fetchall() == []:
        cursor.execute('INSERT INTO book_info(isbn13) VALUES (%(isbn)s)',
                       {'isbn': isbn13})
    # Add book info.  Column names cannot be bound as parameters, so the
    # keys of book_info must be trusted; the values are bound.
    # BUG FIX: the original interpolated values directly into the SQL,
    # breaking on quotes and allowing SQL injection.
    for col in book_info.keys():
        cursor.execute(
            "UPDATE book_info SET %s=%%(val)s WHERE isbn13=%%(isbn)s" % col,
            {'val': book_info[col], 'isbn': isbn13})
    cursor.close()
    session.commit()
    session.close()
    print("Done.")  # BUG FIX: Python 2 print statement -> function call
def printData(dbName, dbHost, dbPort, tbl, serverType, passwd, dbUser):
    """Print an HTML table with (up to) the first 10 rows of `tbl` from a
    PostgreSQL or MySQL server.

    BUG FIX: Python 2 `print` statements converted to calls (the original
    was a SyntaxError on Python 3).
    """
    if tbl.find(".") >= 0:
        schema, tname = tbl.split('.')
    else:
        tname = tbl
    print("<h2>Sample of Data in: " + tbl + "</h2>")
    print("<table class='graybox' cellspacing='0' cellpadding='5'>")
    if serverType == 'postgresql':
        mycon = pgdb.connect(database=dbName, host=dbHost + ':' + dbPort,
                             user=dbUser)
    else:
        mycon = MySQLdb.connect(db=dbName, host=dbHost, port=int(dbPort),
                                user=dbUser, passwd=passwd)
    # Table names cannot be bound as SQL parameters, so `tname` must be
    # trusted by the caller here.
    qstr = """select * from %s limit 10""" % (tname)
    curs = mycon.cursor()
    curs.execute(qstr)
    resList = curs.fetchall()
    # Header row from the cursor description.
    print("<tr>")
    for col in curs.description:
        print("<th>%s</th>" % (col[0]))
    print("</tr>")
    for row in resList:
        print("<tr>")
        for col in row:
            # Truncate each value to 30 characters for display.
            print("<td>%s</td>" % (str(col)[0:30]))
        print("</tr>")
    print("</table>")
def __init__(self, *args, **kwargs):
    """Read output filenames and database settings from the module config
    and open a database handle (PostgreSQL via pgdb, or SQLite)."""
    AddaModule.__init__(self, *args, **kwargs)
    self.mFilenameGraph = self.mConfig.get("files", "output_graph")
    self.mFilenameIndex = self.mConfig.get("files", "output_index")
    self.mFilenameTransfers = self.mConfig.get("files", "output_fit_transfer")
    self.mFilenameFit = self.mConfig.get("files", "output_fit")
    self.mFilenameNids = self.mConfig.get("files", "output_nids")
    self.mFilenameDomains = self.mConfig.get("files", "output_families")
    # NOTE(review): both lookups below read the same option ("database")
    # from section "load" with different third arguments (presumably
    # defaults) -- mBackend reading "database" rather than e.g. "backend"
    # looks suspicious; confirm the intended option names.
    self.mDatabase = self.mConfig.get("load", "database", "adda.db")
    self.mBackend = self.mConfig.get("load", "database", "sqlite")
    self.mGuessSize = 1000
    self.mInsertQuick = False
    if self.mBackend == "pg":
        import pgdb
        # NOTE(review): `options` is not defined in this method --
        # presumably a module-level global; verify it is in scope.
        self.dbhandle = pgdb.connect(options.psql_connection)
        self.mError = pgdb.DatabaseError
        self.mNull = "NULL"
        self.mStringValue = "'%s'"
        self.mInsertMany = False
    elif self.mBackend == "sqlite":
        import sqlite3
        self.dbhandle = sqlite3.connect(options.database)
        self.mError = sqlite3.OperationalError
        self.mNull = None  # "NULL"
        self.mStringValue = "%s"  # "'%s'"
        self.mInsertMany = True
def main():
    """Sample random annotations from the database and report how well each
    S3 screenshot matches the corresponding video frame (SSIM statistics)."""
    SSIMs = []
    con = connect(database=DB_NAME, host=DB_HOST, user=DB_USER, password=DB_PASSWORD)
    cursor = con.cursor()
    # This annotation has a error with the video playing after screenshot
    # So the s3 image should be correct but the timeinvideo is off
    cursor.execute('SELECT \
        videoid, timeinvideo, image, id \
        FROM \
        annotations \
        WHERE \
        userid!=32 and DATE(dateannotated) between DATE(\'01/01/2019\') and DATE(\'05/18/2019\') \
        ORDER BY \
        random() \
        LIMIT 500')
    # id=4960669'
    rows = cursor.fetchall()
    # All rows are fetched above, so the connection can be closed before the
    # (slow) image comparisons below.
    con.close()
    for row in rows:
        try:
            SSIMs.append(compareVideoFramesAndS3(row))
        except Exception as e:
            # Best-effort: log and skip annotations that fail to compare.
            print(e)
    print("SSIM table")
    print(pd.Series(SSIMs).value_counts(bins=5))
    print("\'Good\' SSIM (SSIM>.8)")
    counts = len(SSIMs)
    print(sum(SSIM > .8 for SSIM in SSIMs) / counts)
    print("Average SSIM")
    print(np.mean(SSIMs))
def open_database_connection(config_registry, pkgdbu=False, db_server=None):
    """Open a pgdb connection to the 'pkgdb' database.

    With pkgdbu=True, connect locally over the Unix-domain socket using the
    pkgdb superuser secret; otherwise connect to `db_server` (looked up from
    the domain when not given) as the machine account.  Returns None when no
    database server can be determined.
    """
    connection_info = {
        # see <http://www.postgresql.org/docs/8.4/static/libpq-connect.html>
        'dbname': 'pkgdb',
    }
    if config_registry.is_true('pkgdb/requiressl'):
        connection_info['sslmode'] = 'require'
    if pkgdbu:
        # 'host' not specified -> localhost over Unix-domain socket (connection type "local")
        connection_info['user'] = '******'
        password_file = '/etc/postgresql/pkgdb.secret'
    else:
        if db_server is None:
            db_server = get_dbservername(config_registry['domainname'])
            if db_server is None:
                return None
        connection_info['host'] = db_server
        connection_info['user'] = config_registry.get('pkgdb/user', '%s$' % (config_registry['hostname'], ))
        password_file = config_registry.get('pkgdb/pwdfile', '/etc/machine.secret')
    # NOTE(review): the file is opened in binary mode but .rstrip('\n') takes
    # a str argument -- this only works on Python 2 (it raises TypeError on
    # Python 3).  Confirm the target interpreter.
    connection_info['password'] = open(password_file, 'rb').read().rstrip('\n')
    # Build a libpq-style conninfo string, escaping backslashes and quotes
    # inside each single-quoted value.
    connectstring = ' '.join(
        [
            "%s='%s'" % (key,
                         value.replace('\\', '\\\\').replace("'", "\\'"),
                         ) for (key,
                                value,
                                ) in connection_info.items()
        ]
    )
    # NOTE(review): the whole conninfo string is passed as `database=`;
    # verify this is the intended usage for the installed pgdb version.
    connection = pgdb.connect(database=connectstring)
    return connection
def __create_red_connection(self):
    """Open and return a fresh connection using the stored DSN and the
    cached (user, password) credential pair."""
    usr, pwd = self.__credentials
    endpoint = self.__dsn.host + ':' + str(self.__dsn.port)
    return pgdb.connect(database=self.__dsn.dbname,
                        host=endpoint,
                        user=usr,
                        password=pwd)
def setUp(self):
    """Test fixture: start nufw, point nuauth logging at PostgreSQL or
    MySQL (per the POSTGRESQL flag), open the matching DB connection,
    install a test user/ACL set, and start nuauth."""
    startNufw(["-s"])
    config = NuauthConf()
    config["nuauth_log_users"] = '9'
    config["mysql_prefix_version"] = '1'
    if POSTGRESQL:
        config.need_restart = True
        self.conn = pgdb.connect(host=DB_SERVER, user=DB_USER,
                                 password=DB_PASSWORD, database=DB_DBNAME)
        config["nuauth_user_logs_module"] = '"pgsql"'
        config["nuauth_user_session_logs_module"] = '"pgsql"'
    else:
        self.conn = MySQLdb.Connect(host=DB_SERVER, user=DB_USER,
                                    passwd=DB_PASSWORD, db=DB_DBNAME)
        config["nuauth_user_logs_module"] = '"mysql"'
        config["nuauth_user_session_logs_module"] = '"mysql"'
    self.users = USERDB
    self.user = self.users[0]
    self.acls = PlaintextAcl()
    self.acls.addAcl("web", VALID_PORT, self.user.gid, log_prefix=LOG_PREFIX)
    self.users.install(config)
    self.acls.install(config)
    self.nuauth = Nuauth(config)
    # Record a start time slightly in the past so log entries written by
    # the just-started services are not filtered out by timestamp.
    self.start_time = int(time() - 1.1)
def setUp(self):
    """Test fixture: start nufw, point nuauth logging at PostgreSQL or
    MySQL (per the POSTGRESQL flag), open the matching DB connection,
    install a test user/ACL set, and start nuauth."""
    startNufw(["-s"])
    config = NuauthConf()
    config["nuauth_log_users"] = '9'
    config["mysql_prefix_version"] = '1'
    if POSTGRESQL:
        config.need_restart = True
        self.conn = pgdb.connect(
            host=DB_SERVER,
            user=DB_USER,
            password=DB_PASSWORD,
            database=DB_DBNAME)
        config["nuauth_user_logs_module"] = '"pgsql"'
        config["nuauth_user_session_logs_module"] = '"pgsql"'
    else:
        self.conn = MySQLdb.Connect(
            host=DB_SERVER,
            user=DB_USER,
            passwd=DB_PASSWORD,
            db=DB_DBNAME)
        config["nuauth_user_logs_module"] = '"mysql"'
        config["nuauth_user_session_logs_module"] = '"mysql"'
    self.users = USERDB
    self.user = self.users[0]
    self.acls = PlaintextAcl()
    self.acls.addAcl("web", VALID_PORT, self.user.gid, log_prefix=LOG_PREFIX)
    self.users.install(config)
    self.acls.install(config)
    self.nuauth = Nuauth(config)
    # Record a start time slightly in the past so log entries written by
    # the just-started services are not filtered out by timestamp.
    self.start_time = int(time()-1.1)
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.

    BUG FIXES: Python 2 print statements converted to calls (SyntaxError on
    Python 3); `argv == None` replaced with an identity check.
    """
    if argv is None:
        argv = sys.argv
    parser = E.OptionParser(
        version=
        "%prog version: $Id: gpipe/get_predictions.py 2781 2009-09-10 11:33:14Z andreas $"
    )
    parser.add_option("-s", "--schema", dest="schema", type="string",
                      help="database schema to use.")
    parser.set_defaults(
        schema=None,
        method="ranges",
        result="exons",
    )
    (options, args) = E.Start(parser, add_psql_options=True)
    dbhandle = pgdb.connect(options.psql_connection)
    keys = {'schema': options.schema}
    ntested, nmissed = 0, 0
    if options.method == "ranges":
        # Each non-comment input line: sbjct_token, sbjct_strand,
        # range_from, range_to (tab-separated).
        lines = [x for x in sys.stdin.readlines() if x[0] != "#"]
        for line in lines:
            sbjct_token, sbjct_strand, range_from, range_to = line.split(
                "\t")[:4]
            range_from, range_to = map(int, (range_from, range_to))
            keys["sbjct_token"] = sbjct_token
            keys["sbjct_strand"] = sbjct_strand
            keys["exon_from"] = range_from
            keys["exon_to"] = range_to
            ntested += 1
            result = GetResult(dbhandle, keys, options)
            if len(result) == 0:
                nmissed += 1
                continue
            print("########################")
            print("# matches for:", line[:-1])
            for r in result:
                print("\t".join(map(str, r)))
    print("# ntested=%i, nmissed=%i" % (ntested, nmissed))
    E.Stop()
def check_db_info(adminuser, adminpass, mysql):
    """Validate database admin login information.

    Returns True when a connection can be opened with the given credentials
    (MySQL via mysql.connector or MySQLdb when `mysql` is truthy, otherwise
    PostgreSQL via pgdb), False otherwise.
    """
    try:
        if mysql:
            try:
                import mysql.connector as mysqld
                db = mysqld.connect(user=adminuser, password=adminpass)
            except ImportError:
                try:
                    import MySQLdb as mysqld
                    db = mysqld.connect(user=adminuser, passwd=adminpass)
                except ImportError as e:
                    # BUG FIX: the original printed an undefined name `e`
                    # (NameError) -- the exception is now bound.
                    print(e)
                    return False
        else:
            import pgdb
            db = pgdb.connect(database="postgres", user=adminuser,
                              password=adminpass)
    except Exception as e:
        # BUG FIX: the original caught `mysqld.Error`, which itself raised
        # NameError on the PostgreSQL path (mysqld never imported).  Catch
        # connection failures from either driver and report them.
        print(e)
        return False
    db.close()
    return True
def get_db_cursor(dbhost, dbname, dbuser, dbpass, mysql=True):
    """Create a database connection and return (connection, cursor).

    Uses mysql.connector (falling back to MySQLdb) when `mysql` is truthy,
    otherwise PostgreSQL via pgdb.  Exits the process when no MySQL driver
    is importable.
    """
    if mysql:
        try:
            import mysql.connector as mysqld
            db = mysqld.connect(buffered=True, autocommit=True,
                                use_unicode=True, charset="utf8",
                                host=dbhost, database=dbname,
                                user=dbuser, password=dbpass)
        except ImportError:
            try:
                import MySQLdb as mysqld
                db = mysqld.connect(charset="utf8", host=dbhost, db=dbname,
                                    user=dbuser, passwd=dbpass)
            except ImportError as e:
                # BUG FIX: the original printed an undefined name `e`
                # (NameError) -- the exception is now bound.
                print(e)
                exit()
    else:
        import pgdb
        # NOTE(review): pgdb.connect() may not accept use_unicode/charset,
        # and the connection's autocommit attribute may not be callable --
        # confirm against the installed PyGreSQL version.
        db = pgdb.connect(use_unicode=True, charset="utf8",
                          database=dbname, user=dbuser, password=dbpass)
        db.autocommit(True)
    cursor = db.cursor()
    return db, cursor
def myemsldb_connect(dbconf=None, myemsl_schema_versions=None):
    """Connect to the MyEMSL metadata database and verify its schema version.

    myemsl_schema_versions is a list of supported 'major.minor' strings;
    the database's major version must be listed and its minor version must
    be at least the listed minimum.  Returns the open connection.

    Raises Exception when no versions are given, when the schema version
    cannot be read, or when it is unsupported.
    """
    if dbconf is None:  # BUG FIX: identity, not equality, for None
        dbconf = getconfig_secret()
    if myemsl_schema_versions is None:
        raise Exception("You did not specify any supported versions")
    # Map supported major version -> minimum supported minor version.
    versions = {}
    for x in myemsl_schema_versions:
        (major, minor) = x.split('.')
        versions[major] = minor
    cnx = pgdb.connect(database=dbconf.get('metadata', 'database'),
                       user=dbconf.get('metadata', 'user'),
                       host=dbconf.get('metadata', 'host'),
                       password=dbconf.get('metadata', 'password'))
    sql = """ select value from myemsl.system where key='schema_version' """
    cursor = cnx.cursor()
    cursor.execute(sql)
    rows = cursor.fetchall()
    schema_version = None
    for row in rows:
        schema_version = row[0]
    if schema_version is None:
        # BUG FIX: previously fell through to schema_version.split('.') and
        # raised AttributeError when the version row was missing.
        err = "Unable to determine schema version"
        logger.error(err)
        raise Exception(err)
    (major, minor) = schema_version.split('.')
    if major in versions:
        if int(minor) < int(versions[major]):
            err = "Schema version too old. Got %s.%s, support %s" % (
                major, minor, str(versions))
            logger.error(err)
            raise Exception(err)
    else:
        err = "Schema version mismatch. Got %s.%s, support %s" % (
            major, minor, str(versions))
        logger.error(err)
        raise Exception(err)
    return cnx
def processdelete(self, event_id=None):
    """Delete the event with the given id (admin only) and return a
    status page describing the outcome."""
    # Verify user is logged-in admin.
    if not pageutils.is_admin_p():
        raise cherrypy.HTTPRedirect("/")
    # Verify we have an event_id.
    if event_id is None:  # BUG FIX: identity, not equality, for None
        return pageutils.generate_page("No Event Id Specified",
                                       "Nothing to delete!")
    try:
        # Connect to the database and delete the given event.
        dbconnection = pgdb.connect(database_connect_fields)
        dbcursor = dbconnection.cursor()
        dbcursor.execute("DELETE FROM events WHERE event_id=%s",
                         [str(event_id)])
        dbconnection.commit()
        # Close the database cursor and connection.
        dbcursor.close()
        dbconnection.close()
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallows SystemExit
        # and KeyboardInterrupt.
        return pageutils.generate_page("Database Error", "Deletion failed!")
    return pageutils.generate_page("Successful Deletion",
                                   "The specified event has been deleted.")
def db_reader(name, status, req_queue, config_file):
    """Poll the req_resp table for unreported requests, pack each request
    with the matching message processor, enqueue the packed bytes on
    req_queue, and mark the row reported.  Runs until status.value becomes
    non-zero."""
    LogUtil.get_instance(config_file, "db_reader")
    LogUtil.info("db_reader:"+name+" begin")
    config=configparser.ConfigParser()
    config.read(config_file)
    factory=msg.MessageProcessorFactory()
    db_file=config.get("message_config", "db_file")
    factory.load_from_db(db_file, [])
    read_interval=config.getfloat("reqresp","read_interval")
    # NOTE(review): ConfigParser.getfloat raises on a missing option rather
    # than returning None, so this fallback may be dead code -- confirm.
    if read_interval==None:
        read_interval=0.5
    host=config.get("reqresp", "host")
    database=config.get("reqresp", "database")
    user=config.get("reqresp","user")
    password=config.get("reqresp", "password")
    last_req_num=0
    conn=pgdb.connect(database=database, host=host, user=user, password=password)
    query_curs=conn.cursor()
    update_curs=conn.cursor()
    # Unreported rows (rept_status='0') newer than the last req_num seen.
    query_unreported="""SELECT req_num,message_type,appid,oms_order_id,rept_status,req_text FROM req_resp WHERE rept_status='0' AND req_num>%(req_num)s ORDER BY req_num """
    query_dict={'req_num':0}
    update_reported="""UPDATE req_resp SET rept_status=%(rept_status)s,report_time=localtimestamp WHERE req_num in (%(req_num)s) """
    update_dict={'rept_status':'0', 'req_num':0}
    last_read_cnt=1
    while status.value==0:
        if last_read_cnt==0:
            # Nothing found on the previous pass: back off before polling.
            time.sleep(read_interval)
        last_read_cnt=0
        query_dict['req_num']=last_req_num
        query_curs.execute(query_unreported,query_dict)
        for (req_num,message_type,appid,message_id,rept_status,req_text) in query_curs.fetchall():
            message_processor=factory.build_message_processor(message_type)
            send_buff=message_processor.pack(req_text)
            req_queue.put(send_buff)
            last_req_num=req_num
            # Mark the row reported ('2') immediately after queueing it.
            update_dict['rept_status']='2'
            update_dict['req_num']=last_req_num
            update_curs.execute(update_reported,update_dict)
            last_read_cnt=last_read_cnt+1
            LogUtil.debug("db_reader putQ:"+binascii.hexlify(send_buff).decode())
        # One commit per polling pass covers all UPDATEs issued above.
        conn.commit()
    LogUtil.info("db_reader:"+name+" end")
def compose_get_dbh():
    """Open and return a pgdb connection built from the instance's
    database configuration."""
    cfg = instance.config
    return pgdb.connect(database=cfg.dbname,
                        host=cfg.dbhost,
                        user=cfg.dbuser,
                        password=cfg.dbpasswd)
def connect(self, database=None):
    """Open the pgdb connection described by `database` (a DSN string,
    e.g. 'localhost:OPD:user:password') and store it on self.db.

    Alternative connection styles, kept for reference:
        pg.connect(dbname='OPD', host='localhost', user='******', passwd='...')
        pgdb.connect(host=..., database=..., user=..., password=...)
    """
    self.db = pgdb.connect(database)
def getDefaultZoom(self):
    """Return a default zoom (pixels per projected unit).

    Estimates the rendered extent from the osm2pgsql point/line/polygon
    tables and scales it to a 1024x768 viewport; falls back to 25 when no
    extent can be determined, or to the superclass default when no
    database arguments are configured.
    """
    zoom = None
    if not self.db_args:
        return super(osm2pgsql_MapnikLayer, self).getDefaultZoom()
    host = self.db_args["host"]
    if "port" in self.db_args:
        host += ":" + str(self.db_args["port"])
    con = DBAPI.connect(user=self.db_args["user"],
                        password=self.db_args["password"],
                        host=host, database=self.db_args["database"])
    for suffix in ["_point", "_line", "_polygon"]:
        try:
            cur = con.cursor()
            cur.execute("select abs(ST_XMax(extent) - ST_XMin(extent)) as x, abs(ST_YMax(extent) - ST_YMin(extent)) as y from ST_estimated_extent('%s','way') as extent" % (self.mapName + suffix))
            # BUG FIX: map() is a lazy iterator on Python 3; it must be
            # materialized before indexing.
            size = list(map(float, cur.fetchall()[0]))
            zoom = int(max(size[0] / 1024, size[1] / 768))
            break
        except Exception as error:
            # BUG FIX: Python 2 `print error` statement -> function call.
            print(error)
            con.rollback()
    if not zoom:
        zoom = 25
    con.close()
    return zoom
def getrefname(table, term, config):
    """Return the stored refname (or item text) case-insensitively matching
    `term` in `table`, or '' when the term is empty, absent, or on error.

    BUG FIXES: the empty-term early return now happens before connecting
    (the original opened -- and never closed -- a connection it did not
    use); the unreachable `raise` after the except-branch `return` was
    removed; the bare `except:` was narrowed.
    """
    if term is None or term == '':
        return ''
    dbconn = pgdb.connect(database=config.get('connect', 'connect_string'))
    objects = dbconn.cursor()
    objects.execute(timeoutcommand)
    # A few tables store the searchable text in 'item' rather than 'refname'.
    if table in ('collectionobjects_common_fieldcollectors',
                 'collectionobjects_common_briefdescriptions',
                 'acquisitions_common_owners'):
        column = 'item'
    else:
        column = 'refname'
    # NOTE(review): queries are built by interpolation with manual quote
    # doubling; the generic branch's doubled-quote ILIKE pattern depends on
    # the refname literal form, so binding is not a drop-in replacement.
    if table == 'collectionobjects_common_briefdescriptions':
        query = "SELECT item FROM collectionobjects_common_briefdescriptions WHERE item ILIKE '%s' LIMIT 1" % (
            term.replace("'", "''"))
    elif table == 'pahmaaltnumgroup':
        query = "SELECT pahmaaltnum FROM pahmaaltnumgroup WHERE pahmaaltnum ILIKE '%s' LIMIT 1" % (
            term.replace("'", "''"))
    elif table == 'pahmaaltnumgroup_type':
        query = "SELECT pahmaaltnumtype FROM pahmaaltnumgroup WHERE pahmaaltnum ILIKE '%s' LIMIT 1" % (
            term.replace("'", "''"))
    else:
        query = "select %s from %s where %s ILIKE '%%''%s''%%' LIMIT 1" % (
            column, table, column, term.replace("'", "''"))
    try:
        objects.execute(query)
        return objects.fetchone()[0]
    except Exception:
        return ''
def lateststatsforstatgroupbycounttype(dbsource, statgroup, statmetric, config):
    """Return the most recent utils.collectionstats rows for the given
    source, group, and metric, ordered by statvalue descending.

    SECURITY FIX: the original interpolated all values into the SQL text;
    they are now bound as parameters (pgdb's 'pyformat' paramstyle).
    """
    #pahmadb = pgdb.connect(config.get('connect','connect_string2'))
    pahmadb = pgdb.connect(database=config.get('connect', 'connect_string'))
    objects = pahmadb.cursor()
    objects.execute(timeoutcommand)
    # Find the most recent run date for this stat group.
    objects.execute(
        """SELECT MAX(date_trunc('day',isoruntime))
           FROM utils.collectionstats
           WHERE statgroup = %(statgroup)s""",
        {'statgroup': str(statgroup)})
    latestruntimeresult = objects.fetchone()
    latestruntime = latestruntimeresult[0]
    # Fetch all stats recorded at (or after) that run, largest first.
    objects.execute(
        """SELECT label, statvalue, statpercent, isoruntime, statmetric
           FROM utils.collectionstats
           WHERE dbsource = %(dbsource)s
             AND statgroup = %(statgroup)s
             AND statmetric = %(statmetric)s
             AND isoruntime >= %(latestruntime)s
           ORDER BY statvalue DESC""",
        {'dbsource': str(dbsource), 'statgroup': str(statgroup),
         'statmetric': str(statmetric), 'latestruntime': str(latestruntime)})
    count = objects.fetchall()
    return count
def get_connection(self, create=True):
    """Return the logical connection for this instance.

    An already-open connection is returned as-is.  Otherwise a new one is
    created (unless create is False, in which case None is returned),
    cached on the instance for re-use, and configured with the driver's
    default transaction isolation level.
    """
    # Fast path: re-use the existing connection when one is open.
    if self.connection:
        return self.connection
    if not create:
        return None
    # Lazily create and cache the connection for later calls.
    self.connection = pgdb.connect(host=self.host,
                                   user=self.user,
                                   password=self.password,
                                   database=self.database)
    self.transaction_level = 0
    # Apply the default isolation level to the fresh session.
    self._execute_query(
        "set session characteristics as transaction isolation level %s"
        % self.isolation
    ).close()
    return self.connection
def connect():
    """Return a DBWrapper around the per-thread PostgreSQL connection.

    Re-uses the thread-local cached connection when it is still usable
    (resetting its transaction state first); otherwise discards it and
    opens a fresh connection, caching it for subsequent calls.
    """
    logger = logging.getLogger('koji.db')
    global _DBconn
    if hasattr(_DBconn, 'conn'):
        # Make sure the previous transaction has been
        # closed. This is safe to call multiple times.
        conn = _DBconn.conn
        try:
            # Under normal circumstances, the last use of this connection
            # will have issued a raw ROLLBACK to close the transaction. To
            # avoid 'no transaction in progress' warnings (depending on postgres
            # configuration) we open a new one here.
            # Should there somehow be a transaction in progress, a second
            # BEGIN will be a harmless no-op, though there may be a warning.
            conn.cursor().execute('BEGIN')
            conn.rollback()
            return DBWrapper(conn)
        except pgdb.Error:
            # Cached connection is broken: discard it and fall through to
            # create a fresh one below.
            del _DBconn.conn
    #create a fresh connection
    opts = _DBopts
    if opts is None:
        opts = {}
    try:
        conn = pgdb.connect(**opts)
    except Exception:
        logger.error(''.join(traceback.format_exception(*sys.exc_info())))
        raise
    # XXX test
    # return conn
    _DBconn.conn = conn
    return DBWrapper(conn)
def connect_database(dbtype, *args, **kwargs):
    '''
    Connect to a database and return a (connection, cursor) pair.

    Required Arguments:

        *dbtype*: [ 'sqlite' | 'mysql' | 'postgres' ]
            The type of database

    All remaining positional and keyword arguments are forwarded to the
    backend driver's connect function:

        - sqlite3.connect() for SQLite
        - MySQLdb.connect() for MySQL
        - pgdb.connect() for PostgreSQL
    '''
    if dbtype == 'sqlite':
        _check_sqlite3_installed()
        conn = sqlite3.connect(*args, **kwargs)
    elif dbtype == 'mysql':
        _check_MySQLdb_installed()
        conn = MySQLdb.connect(*args, **kwargs)
    elif dbtype == 'postgres':
        _check_PyGreSQL_installed()
        conn = pgdb.connect(*args, **kwargs)
    else:
        raise Exception('dbtype should be one of sqlite/mysql/postgres')
    return conn, conn.cursor()
def getloclist(searchType, location1, location2, num2ret, config):
    """Return up to num2ret location rows keyed on locationkey.

    searchType selects the match mode:
      'set'    -- the next num2ret locations at or after location1
      'prefix' -- locations whose key starts with location1
      'range'  -- locations with keys between location1 and location2

    The location values are bound as query parameters instead of being
    concatenated into the SQL text, closing the SQL-injection hole the
    original string building had.

    Raises ValueError for an unknown searchType (previously this fell
    through and crashed later with a NameError on whereclause).
    """
    if searchType == 'set':
        whereclause = "WHERE locationkey >= replace(%s,' ','0')"
        params = [location1]
    elif searchType == 'prefix':
        # the wildcard is appended to the value, then spaces padded to '0'
        whereclause = "WHERE locationkey LIKE replace(%s,' ','0')"
        params = [location1 + '%']
    elif searchType == 'range':
        whereclause = ("WHERE locationkey >= replace(%s,' ','0') "
                       "AND locationkey <= replace(%s,' ','0')")
        params = [location1, location2]
    else:
        raise ValueError('unknown searchType: %r' % (searchType,))
    dbconn = pgdb.connect(config.get('connect', 'connect_string'))
    objects = dbconn.cursor()
    objects.execute(timeoutcommand)
    # clamp the row limit to [1, 30000]
    num2ret = max(1, min(30000, int(num2ret)))
    getobjects = """
        select * from
        (
        select termdisplayname,replace(termdisplayname,' ','0') locationkey
        FROM loctermgroup ltg
        INNER JOIN hierarchy h_ltg ON h_ltg.id=ltg.id
        INNER JOIN hierarchy h_loc ON h_loc.id=h_ltg.parentid
        INNER JOIN misc ON misc.id=h_loc.id and misc.lifecyclestate <> 'deleted'
        ) as t
        """ + whereclause + """
        order by locationkey limit """ + str(num2ret)
    objects.execute(getobjects, params)
    #for object in objects.fetchall(): #print object
    return objects.fetchall()
def index(req): message = "<html><head><title>Sample Python Script</title></head>\n" # connect dbcnx = pgdb.connect(host="dbdev.cs.uiowa.edu:5432:dbdevtest", user="******", password="******") sqlquery = """SELECT ename, salary, dno FROM Emp;""" # create a database cursor cursor = dbcnx.cursor() # execute SQL select cursor.execute(sqlquery) message += "<h2> %s </h2>" message %= sqlquery message += "<table><tr><th>Name</th><th>Salary</th><th>dno</th></tr>\n" # get the number of rows in the resultset numrows = int(cursor.rowcount) # get and display one row at a time for x in range(0, numrows): row = cursor.fetchone() message += "<tr><td> %s </td>\n" message %= row[0] message += " <td> %s </td>\n" message %= row[1] message += " <td> %s </td></tr>\n" message %= row[2] message += "</table>\n" message += "</body></html>\n" return s % (message)
def getobjinfo(museumNumber, config):
    """Look up one collection object by its object number.

    Returns the single matching row (objectnumber, objectname,
    numberofobjects, fieldcollectionplace, culturalgroup,
    ethnographicfilecode), or None when nothing matches.

    The museum number is bound as a query parameter rather than
    %-formatted into the SQL text, eliminating the SQL-injection
    vector the original interpolation had.
    """
    dbconn = pgdb.connect(database=config.get('connect', 'connect_string'))
    objects = dbconn.cursor()
    objects.execute(timeoutcommand)
    getobjects = """
        SELECT co.objectnumber, n.objectname, co.numberofobjects,
        regexp_replace(fcp.item, '^.*\\)''(.*)''$', '\\1') AS fieldcollectionplace,
        regexp_replace(apg.assocpeople, '^.*\\)''(.*)''$', '\\1') AS culturalgroup,
        regexp_replace(pef.item, '^.*\\)''(.*)''$', '\\1') AS ethnographicfilecode
        FROM collectionobjects_common co
        LEFT OUTER JOIN hierarchy h1 ON (co.id = h1.parentid AND h1.primarytype='objectNameGroup' AND h1.pos=0)
        LEFT OUTER JOIN objectnamegroup n ON (n.id=h1.id)
        LEFT OUTER JOIN collectionobjects_pahma_pahmafieldcollectionplacelist fcp ON (co.id=fcp.id AND fcp.pos=0)
        LEFT OUTER JOIN collectionobjects_pahma_pahmaethnographicfilecodelist pef on (pef.id=co.id and pef.pos=0)
        LEFT OUTER JOIN collectionobjects_common_responsibledepartments cm ON (co.id=cm.id AND cm.pos=0)
        LEFT OUTER JOIN hierarchy h2 ON (co.id=h2.parentid AND h2.primarytype='assocPeopleGroup' AND h2.pos=0)
        LEFT OUTER JOIN assocpeoplegroup apg ON apg.id=h2.id
        JOIN misc ON misc.id = co.id AND misc.lifecyclestate <> 'deleted'
        WHERE co.objectnumber = %s
        LIMIT 1"""
    objects.execute(getobjects, [museumNumber])
    return objects.fetchone()
def add_book_data(path, isbn): """ Add local book data to PostgreSQL server. """ data = get_book_data(path=path, isbn=isbn) if not data : return False session = pgdb.connect(user='******', password='******', database='dist') cursor = session.cursor() # what if previous record already exists? sql = '''INSERT INTO book_data(isbn,name,mime,size,uri,data) VALUES(%(isbn)d, '%(name)s', '%(mime)s', %(size)d, '%(uri)s', lo_import('%(uri)s') )''' % data try : cursor.execute(sql) except pgdb.DatabaseError: return False cursor.close() session.commit() session.close() print "Done" return True
def __init__(self):
    """Open the local 'nethub' connection and set up session defaults."""
    # Open connection & cursor to local database.
    self.conn = pgdb.connect(host='localhost', user='******',
                             database='nethub', password='******')
    self.cur = self.conn.cursor()
    # Searching by movies or series?  "Movies" selects the movie path.
    self.chosen_category = "Movies"
    # Which user is currently logged in?
    self.user_id = 50005
    # Only one query may be processed at a time.
    self.query = ""
    # These provide the actual query text per category.
    self.movie_query = MoviesQueries()
    self.series_query = SeriesQueries()
    # Check if user is a child: 0 for not child, 1 for is child.
    self.is_child = 0
    self.cur.execute("select * from parents")
    for record in self.cur.fetchall():
        # column 1 of the parents table holds the child's user id
        if record[1] == self.user_id:
            self.is_child = 1
def gethierarchy(query, config):
    """Fetch (Child, Parent, ChildKey, ParentKey) rows for an authority
    hierarchy: 'taxonomy', 'places', or any concept authority named by
    *query*.

    NOTE(review): *institution* (from config) and *query* are spliced
    into the SQL via % formatting and str.format -- confirm both are
    trusted values; an attacker-controlled *query* would be injected
    into the LIKE pattern of the concept branch.
    """
    dbconn = pgdb.connect(database=config.get('connect', 'connect_string'))
    institution = config.get('info', 'institution')
    objects = dbconn.cursor()
    objects.execute(timeoutcommand)
    if query == 'taxonomy':
        # taxon-to-taxon parent/child pairs
        gethierarchy = """
        SELECT DISTINCT
        regexp_replace(child.refname, '^.*\\)''(.*)''$', '\\1') AS Child,
        regexp_replace(parent.refname, '^.*\\)''(.*)''$', '\\1') AS Parent,
        h1.name AS ChildKey, h2.name AS ParentKey
        FROM taxon_common child
        JOIN misc ON (misc.id = child.id)
        FULL OUTER JOIN hierarchy h1 ON (child.id = h1.id)
        FULL OUTER JOIN relations_common rc ON (h1.name = rc.subjectcsid)
        FULL OUTER JOIN hierarchy h2 ON (rc.objectcsid = h2.name)
        FULL OUTER JOIN taxon_common parent ON (parent.id = h2.id)
        WHERE child.refname LIKE 'urn:cspace:%s.cspace.berkeley.edu:taxonomyauthority:name(taxon):item:name%%'
        AND misc.lifecyclestate <> 'deleted'
        ORDER BY Parent, Child
        """ % institution
    elif query != 'places':
        # any concept authority: the authority name is substituted via
        # str.format into the refname LIKE pattern below
        gethierarchy = """
        SELECT DISTINCT
        regexp_replace(child.refname, '^.*\\)''(.*)''$', '\\1') AS Child,
        regexp_replace(parent.refname, '^.*\\)''(.*)''$', '\\1') AS Parent,
        h1.name AS ChildKey, h2.name AS ParentKey
        FROM concepts_common child
        JOIN misc ON (misc.id = child.id)
        FULL OUTER JOIN hierarchy h1 ON (child.id = h1.id)
        FULL OUTER JOIN relations_common rc ON (h1.name = rc.subjectcsid)
        FULL OUTER JOIN hierarchy h2 ON (rc.objectcsid = h2.name)
        FULL OUTER JOIN concepts_common parent ON (parent.id = h2.id)
        WHERE child.refname LIKE 'urn:cspace:%s.cspace.berkeley.edu:conceptauthorities:name({0})%%'
        AND misc.lifecyclestate <> 'deleted'
        ORDER BY Parent, Child""" % institution
        gethierarchy = gethierarchy.format(query)
    else:
        # place-to-place parent/child pairs (no institution filter)
        gethierarchy = """
        SELECT DISTINCT
        regexp_replace(child.refname, '^.*\\)''(.*)''$', '\\1') AS Place,
        regexp_replace(parent.refname, '^.*\\)''(.*)''$', '\\1') AS ParentPlace,
        h1.name AS ChildKey, h2.name AS ParentKey
        FROM places_common child
        JOIN misc ON (misc.id = child.id)
        FULL OUTER JOIN hierarchy h1 ON (child.id = h1.id)
        FULL OUTER JOIN relations_common rc ON (h1.name = rc.subjectcsid)
        FULL OUTER JOIN hierarchy h2 ON (rc.objectcsid = h2.name)
        FULL OUTER JOIN places_common parent ON (parent.id = h2.id)
        WHERE misc.lifecyclestate <> 'deleted'
        ORDER BY ParentPlace, Place
        """
    objects.execute(gethierarchy)
    return objects.fetchall()
def login_post():
    """Authenticate a user posted as JSON {username, password}.

    Returns a JSON response carrying isAuthenticated / username /
    errorMessage and, on success, marks the Flask session as logged in
    and sets a current_username cookie.

    SECURITY: the username is bound as a query parameter instead of
    being formatted into the SQL text (SQL-injection fix).
    NOTE(review): passwords are still compared in plain text against
    loginTable -- they should be hashed; confirm with the schema owner.
    """
    global db_username, db_password, db_database, db_hostname
    receivedData = json.loads(request.data.decode('utf-8'))
    _username = receivedData.get('username')
    _password = receivedData.get('password')
    myConnection = pgdb.connect(
        host=db_hostname, user=db_username,
        password=db_password, database=db_database
    )
    cursor = myConnection.cursor()
    cursor.execute("SELECT * from loginTable WHERE username = %s", [_username])
    isAuthenticated = False
    currentUser = ''
    errorMessage = ""
    for (_, username, password) in cursor:
        if _password == password:
            isAuthenticated = True
            session['login'] = True
            session['username'] = username
            currentUser = username
            resp = make_response(jsonify(isAuthenticated=isAuthenticated,
                                         username=currentUser,
                                         errorMessage=errorMessage))
            resp.set_cookie("current_username", username)
            myConnection.close()
            return resp
    myConnection.close()
    errorMessage = "User-or-password-wrong"
    return make_response(jsonify(isAuthenticated=isAuthenticated,
                                 username=currentUser,
                                 errorMessage=errorMessage))
def lateststatsforstatgroupbycounttype(dbsource, statgroup, statmetric, config):
    """Return the most recent statistics rows for a stat group/metric.

    Finds the latest run date for *statgroup* in utils.collectionstats,
    then returns (label, statvalue, statpercent, isoruntime, statmetric)
    rows from that run, largest statvalue first.

    All caller-supplied values are bound as query parameters rather
    than %-interpolated into the SQL text (SQL-injection fix).
    """
    pahmadb = pgdb.connect(database=config.get('connect', 'connect_string'))
    objects = pahmadb.cursor()
    objects.execute(timeoutcommand)
    latestisoruntime = """SELECT MAX(date_trunc('day',isoruntime))
        FROM utils.collectionstats
        WHERE statgroup = %s"""
    objects.execute(latestisoruntime, [str(statgroup)])
    latestruntime = objects.fetchone()[0]
    latestCounts = """
        SELECT label, statvalue, statpercent, isoruntime, statmetric
        FROM utils.collectionstats
        WHERE dbsource = %s AND statgroup = %s AND statmetric = %s AND isoruntime >= %s
        ORDER BY statvalue DESC"""
    objects.execute(latestCounts, [str(dbsource), str(statgroup),
                                   str(statmetric), str(latestruntime)])
    return objects.fetchall()
def get_con(s_name, auth_path=None, db=None):
    """For MySQL, one can specify a default database to connect to"""
    # default credential file lives next to this module
    if auth_path is None:
        auth_path = os.path.dirname(os.path.abspath(__file__)) + '/db.csv'
    t_db = pd.read_csv(util.format_path(auth_path))
    t_db.fillna('', inplace=True)
    t_db = t_db[t_db['ID'] == s_name]
    if len(t_db) == 0:
        util.error_msg('Database %s is not defined!' % s_name)
    one = t_db.iloc[0]
    s_db = db or one['DB']
    engine = one['TYPE']
    con = None
    if engine == 'MYSQL':
        import MySQLdb as mysql
        con = mysql.connect(one['HOST'], one['USR'], one['PWD'], s_db)
    elif engine == 'POSTGRES':
        # requires: module load postgresql/9.2.4 and LD_LIBRARY_PATH
        # pointing at the postgresql lib directory
        import pgdb
        con = pgdb.connect(one['CONNECT'])
    elif engine == 'ORACLE':
        import cx_Oracle as oracle
        con = oracle.connect(one['CONNECT'])
    else:
        util.error_msg('Unsupported database engine: %s' % one['TYPE'])
    return con
def set_connection(dbname, host, port, username, password):
    """Set a new connection object for this backup

    :param: dbname - Database name
            host - Hostname
            port - Port number
            username - Username
            password - Password

    :return: Conn - Connection to the database
             Cursor - Cursor to execute the query
             (None when the connection attempt fails)
    """
    logger.debug("Attempting to create a connection to the database")
    logger.debug(
        "Parameters :- Database name: {0}, hostname: {1}, port: {2}, user: {3}"
        .format(dbname, host, port, username))
    try:
        conn = connect(database=dbname,
                       host=host + ':' + str(port),
                       user=username,
                       password=password)
        cursor = conn.cursor()
    except DatabaseError as e:
        # log and return None -- callers must cope with a failed connect
        error_logger(e)
        return None
    # BUG FIX: the original never returned the documented (conn, cursor)
    # pair, so every caller received None even on success.
    return conn, cursor
def open_db_connection():
    '''
    Return opened db connection based on information in
    /etc/fred/pyfred.conf.
    '''
    # reasonable defaults, each overridden by the config when present
    settings = {
        'dbhost': '',
        'dbname': 'fred',
        'dbport': '5432',
        'dbuser': '******',
        'dbpassword': '',
    }
    config = ConfigParser.ConfigParser()
    config.read('/etc/fred/pyfred.conf')
    for key in settings:
        if config.has_option('General', key):
            settings[key] = config.get('General', key)
    # create connection to database
    return pgdb.connect(host=settings['dbhost'] + ":" + settings['dbport'],
                        database=settings['dbname'],
                        user=settings['dbuser'],
                        password=settings['dbpassword'])
def getchildinfo(museumNumber, config):
    """Return (objectnumber, child objectnumber, child csid, child id)
    for the first component of the given non-component object, or None
    when it has no components.

    museumNumber is bound as a query parameter rather than %-formatted
    into the SQL text (SQL-injection fix).
    """
    pahmadb = pgdb.connect(config.get('connect', 'connect_string'))
    objects = pahmadb.cursor()
    objects.execute(timeoutcommand)
    getchildinfo = """
        SELECT c1.objectnumber, c2.objectnumber AS child, h3.name AS childcsid, c2.id AS childid
        FROM collectionobjects_common c1
        JOIN collectionobjects_pahma cp1 ON (c1.id = cp1.id)
        JOIN hierarchy h1 ON (c1.id = h1.id)
        JOIN relations_common rc ON (h1.name=rc.subjectcsid AND rc.objectdocumenttype='CollectionObject')
        JOIN hierarchy h2 ON (rc.objectcsid = h2.name)
        JOIN collectionobjects_common c2 ON (h2.id = c2.id)
        JOIN hierarchy h3 ON (c2.id = h3.id)
        JOIN collectionobjects_pahma cp2 ON (c2.id = cp2.id)
        WHERE cp2.iscomponent = 'yes' AND cp1.iscomponent = 'no' AND c1.objectnumber = %s"""
    objects.execute(getchildinfo, [museumNumber])
    return objects.fetchone()
def addcomment (self, body, slug):
    """Post *body* as a comment on the article identified by *slug*.

    Redirects to /login when the visitor is not authenticated; renders
    an error page when the article does not exist or the database
    operation fails.
    """
    # Verify user is logged in.
    if (not pageutils.is_logged_in_p()):
        raise cherrypy.HTTPRedirect ("/login")
    # Remove any leading or trailing spaces from comment text.
    body = string.strip(body)
    user_id = pageutils.get_user_id()
    if (user_id == None):
        raise cherrypy.HTTPRedirect ("/login")
    dbconnection = None
    try:
        # Connect to the database and insert the values.
        dbconnection = pgdb.connect (database_connect_fields)
        dbcursor = dbconnection.cursor()
        dbcursor.execute ("SELECT * FROM articles WHERE slug=%s", [slug])
        results = dbcursor.fetchone()
        if (results == None):
            # close before bailing out (previously leaked the connection)
            dbconnection.close()
            return pageutils.generate_page ("Invalid Article Specified",
                                            "Unable to post comment.")
        article_id = results[0] # article_id is the first column in the table.
        dbcursor.execute ("INSERT INTO articles (author_id, body, display, refers_to, creation_date) " +
                          "VALUES (%s, %s, %s, %s, current_timestamp)",
                          [str(user_id), body, "1", str(article_id)])
        dbconnection.commit()
        # Close the database cursor and connection.
        dbcursor.close()
        dbconnection.close()
    except pgdb.Error:
        # Narrowed from a bare ``except:`` so programming errors are no
        # longer silently reported as SQL failures; also avoid leaking
        # the connection on the error path.
        if dbconnection is not None:
            dbconnection.close()
        return pageutils.generate_page ("Invalid SQL Query",
                                        "Unable to add comment.")
def connect(self):
    """Open and return a pgdb connection for this factory.

    The import is deferred so the PostgreSQL driver is loaded only if
    this factory is actually used.
    """
    import pgdb
    return pgdb.connect(host=self.host,
                        database=self.database,
                        user=self.user,
                        password=self.password)
def get_connection(secret_dict):
    """Gets a connection to Redshift DB from a secret dictionary

    This helper function tries to connect to the database grabbing
    connection info from the secret dictionary. If successful, it
    returns the connection, else None

    Args:
        secret_dict (dict): The Secret Dictionary

    Returns:
        Connection: The pgdb.Connection object if successful. None otherwise

    Raises:
        KeyError: If the secret json does not contain the expected keys
    """
    # Optional fields fall back to the Redshift defaults.
    port = int(secret_dict.get('port', 5439))
    dbname = secret_dict.get('dbname', "dev")
    # Try to obtain a connection to the db
    try:
        return pgdb.connect(host=secret_dict['host'],
                            user=secret_dict['username'],
                            password=secret_dict['password'],
                            database=dbname,
                            port=port,
                            connect_timeout=5)
    except pg.InternalError:
        return None
def open_db_connection():
    '''
    Return opened db connection based on information in
    /etc/fred/pyfred.conf.
    '''
    config = ConfigParser.ConfigParser()
    config.read('/etc/fred/pyfred.conf')

    def option(name, default):
        # read one option from [General], keeping the default when absent
        if config.has_option('General', name):
            return config.get('General', name)
        return default

    # reasonable defaults, overridden by the config file
    dbhost = option('dbhost', '')
    dbname = option('dbname', 'fred')
    dbport = option('dbport', '5432')
    dbuser = option('dbuser', '******')
    dbpassword = option('dbpassword', '')
    # create connection to database
    return pgdb.connect(host = dbhost +":"+ dbport, database = dbname,
                        user = dbuser, password = dbpassword)
def __init__(self):
    # NOTE(review): this block was damaged by credential redaction --
    # the "******" runs below have replaced real code between the
    # raw_input() calls and the pgdb.connect(**params) call (and the
    # menu-list assignment), so it no longer parses.  It is kept
    # verbatim pending recovery of the original from version control.
    #PG-connection setup
    print( "AUTHORS NOTE: If you submit faulty information here, I am not responsible for the consequences." )
    print "The idea is that you, the authorized database user, log in."
    print "Then the interface is available to employees whos should only be able to enter shipments as they are made."
    while True:
        try:
            params = { 'host': 'nestor2.csc.kth.se', 'user': pgdb.escape_string(raw_input("Username: "******"Password: "******"Record a shipment", "Show stock", "Show shipments", "Exit" ]
            self.cur = self.conn.cursor()
def _createConnection(self):
    """Create self.con, a pgdb connection built from self.cfg.

    Python 2 only (``print`` statements, ``raw_input``,
    ``except ImportError, err``).  When the driver import fails it
    interactively offers to install the distro package via yum/zypper.
    """
    print "[INFO] Database Type: " + self.dbType
    print "[INFO] Database Name: " + self.cfg['DATABASE_NAME']
    import socket,platform,subprocess
    host = self._getHost()
    if host == None:
        # fall back to this machine's hostname
        host = socket.gethostname()
    try:
        port = self.cfg['DATABASE_PORT']
    except KeyError:
        port = None
    if self.dbType == 'postgresql':
        try:
            import pgdb
        except ImportError, err:
            print "[WARN] ImportError %s" % err
            print "[INFO] Trying to install the package postgresql-python..."
            # NOTE(review): shadows the builtin ``input`` (harmless here)
            input = raw_input("Proceed [y/n]? ")
            if input != 'y':
                print "[INFO] Exiting..."
                sys.exit(0)
            dist, version, name = platform.dist()
            # pick the package manager by distribution
            if dist == 'redhat':
                p=subprocess.call(['yum','-y','install','postgresql90-python'])
            elif dist =='SuSE':
                p=subprocess.call(['zypper','-y','install','PyGreSQL'])
            import pgdb
        if port == None:
            # default PostgreSQL port
            port = '5432'
        print '[INFO] Database Host: ' + host + ':' + port
        self.con = pgdb.connect(user = self.cfg['DATABASE_USERNAME'],
                                password = self.cfg['DATABASE_PASSWORD'],
                                database = self.cfg['DATABASE_NAME'],
                                host = host + ':' + port)
def getDefaultCenter(self):
    """Return a default center point as [lat,lon]

    NOTE(review): the WKT parsed below is "POINT(x y)" i.e. "lon lat";
    confirm the documented [lat,lon] ordering against callers.
    """
    center = None
    if not self.db_args:
        # no database configured -- defer to the base class default
        return super(osm2pgsql_MapnikLayer, self).getDefaultCenter()
    else:
        host = self.db_args["host"]
        if "port" in self.db_args:
            host += ":" + str(self.db_args["port"])
        con = DBAPI.connect(user=self.db_args["user"], password=self.db_args["password"], host=host, database=self.db_args["database"])
        # Probe the point, line and polygon tables in turn until one
        # yields the centroid of its estimated extent (reprojected to
        # WGS84); failures are printed and rolled back, then the next
        # table is tried.
        for suffix in ["_point", "_line", "_polygon"]:
            try:
                cur = con.cursor()
                cur.execute("select ST_AsText(ST_centroid(transform(ST_SetSRID(ST_estimated_extent('%s','way'), Find_SRID('public','%s','way')),4326)))" % (self.mapName + suffix, self.mapName + suffix))
                # strip "POINT(" and ")" from the WKT, then parse the
                # two coordinates (Python 2: map() returns a list)
                center = map(float, cur.fetchall()[0][0][6:-1].split())
                break
            except Exception as error:
                print error
                con.rollback()
        if not center:
            # nothing usable in any table -- fall back to the origin
            center = [0.0, 0.0]
        con.close()
        return center
def hello():
    """Greet the provider name stored under key 'provider' in the kv table."""
    connection = pgdb.connect(
        host='database', user='******',
        password='******', database='testnet'
    )
    kv_cursor = connection.cursor()
    kv_cursor.execute("SELECT value FROM kv WHERE key='provider'")
    row = kv_cursor.fetchone()
    connection.close()
    return 'Hello ' + row[0] + '!'
def __init__(self, *args, **kwargs ):
    """Configure output filenames and open the result database.

    Reads file locations from the [files] section of self.mConfig and
    connects to either PostgreSQL (backend "pg") or SQLite (backend
    "sqlite"), recording per-backend SQL quirks (error class, NULL
    literal, string quoting, executemany support).
    """
    AddaModule.__init__( self, *args, **kwargs )
    self.mFilenameGraph = self.mConfig.get( "files", "output_graph")
    self.mFilenameIndex = self.mConfig.get( "files", "output_index")
    self.mFilenameTransfers = self.mConfig.get( "files", "output_fit_transfer" )
    self.mFilenameFit = self.mConfig.get( "files", "output_fit" )
    self.mFilenameNids = self.mConfig.get( "files", "output_nids" )
    self.mFilenameDomains = self.mConfig.get( "files", "output_families" )
    self.mDatabase = self.mConfig.get( "load", "database", "adda.db" )
    self.mBackend = self.mConfig.get( "load", "database", "sqlite" )
    # batch size used when guessing insert chunks
    self.mGuessSize = 1000
    self.mInsertQuick = False
    if self.mBackend == "pg":
        import pgdb
        # NOTE(review): ``options`` is not a parameter of this method --
        # it must be a module/global object; confirm it is defined where
        # this class is used (same for the sqlite branch below).
        self.dbhandle = pgdb.connect( options.psql_connection )
        self.mError = pgdb.DatabaseError
        self.mNull = "NULL"
        self.mStringValue = "'%s'"
        self.mInsertMany = False
    elif self.mBackend == "sqlite":
        import sqlite3
        self.dbhandle = sqlite3.connect( options.database )
        self.mError = sqlite3.OperationalError
        self.mNull = None # "NULL"
        self.mStringValue = "%s" # "'%s'"
        self.mInsertMany = True
def __init__(self, dsn, container):
    """Open a pgdb connection/cursor for *dsn* and reset run counters."""
    self.__container = container
    self.__explainService = ExplainService(dsn)
    # per-run bookkeeping
    self.__qID = 0
    self.__totalTime = 0
    # live connection + cursor used for every query
    self.__con = pgdb.connect(**dsn)
    self.__cur = self.__con.cursor()