def isDBPort(host, port, timeout=10):
    # determine if postgres port
    t = 2
    while t < timeout:
        try:
            DBAPI.connect(user='', host=host,
                          port=int(port) if port else None,
                          socket_timeout=t)
        except ProgrammingError:
            return True   # success, this is really a postgres socket, wants user name
        except InterfaceError:
            return False  # something is there but not postgres
        except socket.timeout:
            t = t + 2     # relax - try again with longer timeout
    return False

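A minimal usage sketch for the probe above, not part of the original; the host and port are placeholders, and it assumes the same imports the probe relies on (pg8000's DBAPI with its ProgrammingError/InterfaceError, plus socket).

# Hypothetical usage of isDBPort; '127.0.0.1' and 5432 are placeholder values.
if isDBPort('127.0.0.1', 5432, timeout=8):
    print('port 5432 looks like a PostgreSQL server')
else:
    print('no PostgreSQL server detected on port 5432')
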
def getConnection(self, db):
    if db in self._connections and self._connections[db]:
        return self._connections[db]['connection']
    connection_begin = time.time()
    connection = DBAPI.connect(
        host=self._host,
        port=int(self._port),
        database=str(db),
        user=self._username,
        password=self._password,
        socket_timeout=10,
        ssl=self._ssl)
    connection_latency = time.time() - connection_begin
    query_begin = time.time()
    cursor = connection.cursor()
    cursor.execute("SELECT 1")
    cursor.fetchall()
    cursor.close()
    query_latency = time.time() - query_begin
    self._connections[db] = dict(
        connection=connection,
        connection_latency=connection_latency,
        query_latency=query_latency,
    )
    return self._connections[db]['connection']

def postgres_connection(self, uri):
    return DBAPI.connect(
        host=str(uri.host()),
        port=uri.port().toInt()[0] if uri.port() != '' else 5432,
        database=str(uri.database()),
        user=str(uri.username()),
        password=str(uri.password()),
        ssl=(uri.sslMode() != QgsDataSourceURI.SSLdisable))

def testClosedConnection(self):
    warnings.simplefilter("ignore")
    my_db = DBAPI.connect(**db_connect)
    cursor = my_db.cursor()
    my_db.close()
    self.assertRaises(db.InterfaceError, cursor.execute,
                      "VALUES ('hw1'::text)")
    warnings.resetwarnings()

def connectDB(**db):
    try:
        conn = DBAPI.connect(**db)
        cursor = conn.cursor()
    except:
        exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
        sys.exit("Database connection failed! -> %s" % (exceptionValue))
    return cursor, conn

def get_cursor():
    global g_cursor, g_conn
    if g_cursor is None:
        conf = {str(key): str(val) for (key, val) in config.items('database')}
        conf['port'] = int(conf['port'])
        g_conn = DBAPI.connect(**conf)
        g_cursor = g_conn.cursor()
    return g_cursor

def __init__(self, modelXbrl, user, password, host, port, database):
    self.modelXbrl = modelXbrl
    self.disclosureSystem = modelXbrl.modelManager.disclosureSystem
    self.conn = DBAPI.connect(user=user, password=password, host=host,
                              port=int(port) if port else None,
                              database=database)
    self.tableColTypes = {}

def dbhelper():
    conn = DBAPI.connect(host="vdb1.it.jyu.fi", user="******",
                         password="******", database="hamou_db")
    cursor = conn.cursor()
    return cursor

def gp_conn(self):
    if not self._gp_conn:
        config = self.config
        self._gp_conn = DBAPI.connect(
            host=config.get('greenplum', 'pghost'),
            port=config.getint('greenplum', 'pgport'),
            user=config.get('greenplum', 'pguser'),
            database=config.get('greenplum', 'pgdatabase'))
    return self._gp_conn

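For illustration only, the config section read above might look like the sketch below; every value is a placeholder, not taken from the original source.

# Hypothetical contents of the file behind self.config (ConfigParser format):
#
# [greenplum]
# pghost = gp-master.example.com
# pgport = 5432
# pguser = gpadmin
# pgdatabase = analytics
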
def connect(**db):
    try:
        # get a connection, if a connect cannot be made an exception will be raised here
        conn = DBAPI.connect(**db)
        # conn.cursor will return a cursor object, you can use this cursor to perform queries
        cursor = conn.cursor()
    except:
        # Get the most recent exception
        exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
        # Exit the script and print an error telling what happened.
        sys.exit("Database connection failed! -> %s" % (exceptionValue))
    return cursor, conn

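A hypothetical call to the helper above; all connection parameters are placeholders and would normally come from configuration rather than being hard-coded.

# Sketch only: host, port, user, password and database are placeholder values.
cursor, conn = connect(host='localhost', port=5432, user='postgres',
                       password='secret', database='testdb')
cursor.execute("SELECT version()")
print(cursor.fetchone()[0])
conn.close()
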
def execu(execu_sql):
    conn = DBAPI.connect(host="vdb1.it.jyu.fi", user="******",
                         password="******", database="hamou_db")
    cursor = conn.cursor()
    try:
        cursor.execute(execu_sql)
        print "success once"
    except Exception, e:
        print "fails"

def __init__(self, ip, db, login, passwd):
    from pg8000 import DBAPI
    import pg8000.types
    # pg8000 doesn't support uuid
    pg8000.types.pg_types[2950] = {"bin_in": pg8000_uuid_in}
    # convert unknown type to string
    pg8000.types.pg_types[705] = {"bin_in": pg8000.types.varcharin}  # TEXT type
    self.pg8conn = DBAPI.connect(user=login, host=ip, database=db,
                                 password=passwd)

def main(*args):
    global image_dir
    conn = DBAPI.connect(host='1.2.3.4', port=12345, database='wikiloves',
                         user='******', password='******')
    cursor = conn.cursor()
    for entry in inda_generator():
        if entry['author'] == 'samat78':  # images imported from commons
            continue
        if entry['date'] >= datetime(2011, 10, 22):
            continue
        id = entry['desc_url'].split('/')[-1]
        (nu, renumber) = needs_update(id, entry, cursor)
        cnt = count(entry['id'], entry['license'], nu)
        if not nu:
            continue
        if not entry['id'] or not entry['license']:  # invalid entry
            status = 0
        else:
            status = 1
        urllib.urlretrieve(entry['img']['url'], image_dir + id + '.jpg')
        if nu == 'insert':
            try:
                num = find_next_free_num(entry['id'], cursor)
                cursor.execute(
                    'INSERT INTO inda(id, koh_id, num, desc_url, date, author_name, author_url, license, img_url, img_size, img_width, img_height, status) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)',
                    (id, entry['id'], num, entry['desc_url'], entry['date'],
                     entry['author'], entry['author_url'], entry['license'],
                     entry['img']['url'], entry['img']['size'],
                     entry['img']['width'], entry['img']['height'], status))
            except pg8000.errors.ProgrammingError:
                raise
        else:
            if renumber:
                num = find_next_free_num(entry['id'], cursor)
                cursor.execute(
                    'UPDATE inda SET koh_id = %s, num = %s, license = %s, img_size = %s, img_width = %s, img_height = %s, status = %s WHERE id = %s',
                    (entry['id'], num, entry['license'], entry['img']['size'],
                     entry['img']['width'], entry['img']['height'], status, id))
            else:
                cursor.execute(
                    'UPDATE inda SET koh_id = %s, license = %s, img_size = %s, img_width = %s, img_height = %s, status = %s WHERE id = %s',
                    (entry['id'], entry['license'], entry['img']['size'],
                     entry['img']['width'], entry['img']['height'], status, id))
    conn.commit()
    conn.close()
    print '%(i)d images, no id: %(no_id)d, no license: %(no_license)d, by: %(by)d, by-sa: %(by_sa)d, inserted: %(inserted)d, updated: %(updated)d' % cnt

def get_database_connection(unix_sock='/tmp/.s.PGSQL.5432', user='******',
                            password='', database='moocdb'):
    '''
    NOTE: right now (2013-11-29) localhost doesn't resolve properly so we need
    to point to the socket file address. hopefully in the future we can get
    the host working so that we don't have to specify this ugly file handle.
    '''
    db = DBAPI.connect(
        unix_sock=unix_sock,
        # host=host,
        user=user,
        password=password,
        database=database)
    return db

def connect():
    global conn
    global cursor
    try:
        # Create a connection to the database
        conn = DBAPI.connect(**db)
        # Create a cursor that will be used to execute queries
        cursor = conn.cursor()
    except:
        # Get the most recent exception
        exceptionType, exceptionValue, exceptionTraceback = sys.exc_info()
        # Exit the script/thread and print an error telling what happened.
        print "Database connection failed! -> %s" % (exceptionValue)
        sys.exit()

def __init__(self, dbName, user, passwd, host, port=5432):
    "Establish the connection and create the cursor"
    try:
        self.baseDonn = DBAPI.connect(host=host, port=port, database=dbName,
                                      user=user, password=passwd)
    except Exception as err:
        print("The database connection failed:\n"
              "Error detected: {}".format(err))
        self.echec = 1
    else:
        self.cursor = self.baseDonn.cursor()
        self.echec = 0

def __init__(self, dbName, user, passwd, host, port=5432):
    "Establish the connection - create the cursor"
    try:
        self.baseDonn = DBAPI.connect(host=host, port=port, database=dbName,
                                      user=user, password=passwd)
    except Exception as err:
        print('The database connection failed:\n'
              'Error detected:\n%s' % err)
        self.echec = 1
    else:
        self.cursor = self.baseDonn.cursor()  # create the cursor
        self.echec = 0

def main(*args, **kwargs):
    configSection = "Local database"
    Config = ConfigParser.ConfigParser()
    Config.read("../t4t_credentials.txt")
    cfg_username = Config.get(configSection, "username")
    cfg_password = Config.get(configSection, "password")
    cfg_database = Config.get(configSection, "database")
    cfg_server = Config.get(configSection, "server")
    if cfg_server == "" \
            or cfg_database == "" \
            or cfg_username == "" \
            or cfg_password == "":
        print "Could not load config file"
        sys.exit(0)
    if kwargs['verbosity'] <= DEBUG:
        print "[debug] main args", args
        print "[debug] main kwargs", kwargs
    conn = DBAPI.connect(host=cfg_server, database=cfg_database,
                         user=cfg_username, password=cfg_password)
    cursor = conn.cursor()
    if kwargs['reset-db']:
        print "[INFO] Reset DB, this cannot be undone - are you sure? y/n"
        inputToken = raw_input('> ')
        if inputToken == 'y':
            createTables(conn, cursor)
            print "done."
    if kwargs['query_max_id']:
        print get_max_id(conn, cursor)
        return
    loadSearchTerms(kwargs)
    start_id = get_max_id(conn, cursor)
    print start_id
    if start_id is not None:
        kwargs['start_id'] = int(start_id)
    rl = GetRateLimiter()
    rl.api._cache_timeout = 30
    geolondon(conn, rl, **kwargs)

def write_titles():
    infile = open('wlm_secession_id.txt', 'rt')
    outfile = codecs.open('wlm_secession_title.txt', 'wt', 'utf-8')
    conn = DBAPI.connect(host='1.2.3.4', port=12345, database='wikiloves',
                         user='******', password='******')
    cursor = conn.cursor()
    for id in infile:
        id = id.strip().partition('.')[0]
        cursor.execute(
            'SELECT i.koh_id, i.num, m.description FROM inda i JOIN monuments m ON i.koh_id = m.azonosito WHERE i.id = %s',
            (id, ))
        row = cursor.fetchall()[0]
        title = get_title(row)
        title = title[0].upper() + title[1:]
        outfile.write(title + '\n')

def connect(self):
    SQLResource.connect(self)
    self._connection = DBAPI.connect(
        host=self._server_address[0],
        port=self._server_address[1],
        socket_timeout=self._connect_timeout,
        database=self._database,
        user=self._username,
        password=self._password)
    try:
        self._cursor = self._connection.cursor()
        if not self._server_encoding:
            self._cursor.execute("SHOW SERVER_ENCODING")
            server_encoding = self._cursor.fetchall()[0][0].lower()
            self._server_encoding = pg_to_py_encodings.get(server_encoding,
                                                           server_encoding)
    except:
        self._connection.close()
        raise

def main(*args):
    global image_dir
    conn = DBAPI.connect(host='1.2.3.4', port=12345, database='wikiloves',
                         user='******', password='******')
    cursor = conn.cursor()
    i = 0
    for row in db_gen(cursor):
        i = i + 1
        img = image_dir + row['id'] + '.jpg'
        f = open(img, 'rb')
        try:
            exif = EXIF.process_file(f)
            for key in exif.keys():
                if 'exif datetime' in key.lower():
                    try:
                        row['date'] = datetime.strptime(
                            str(exif[key]), '%Y:%m:%d %H:%M:%S').date()
                        break
                    except ValueError:
                        pass
        except Exception:
            print 'exif processing for %s skipped because of errors' % img
        f.close()
        try:
            if process(img, row):
                # cursor.execute('UPDATE inda SET status = 2 WHERE id = %s', (row['id'],))
                # conn.commit()
                pass
        except upload.UploadException as e:  # dupe - needs patched upload.py
            if e.type != 'duplicate':
                raise
            pywikibot.output('%s is duplicate of %s' % (row['id'], e.param))
            # cursor.execute('UPDATE inda SET status = -2 WHERE id = %s', (row['id'],))
            # conn.commit()
        except Exception as e:
            raise
        # if i >= 3:
        #     break
    conn.commit()
    conn.close()

def connect_to_db(host_input, user_input, password_input, database_input,
                  input_observer=observer):
    global db, cache_thread, observer
    observer = input_observer
    try:
        db = DBAPI.connect(user=user_input, host=host_input, port=5432,
                           database=database_input, password=password_input)
    except Exception as e:
        print("#!DB-LOAD-EXCEPTION: " + str(e))
        traceback.print_exc()
        return False
    else:
        cache_thread = FillCache()
        return True

def main(*args, **kwargs):
    configSection = "Local database"
    Config = ConfigParser.ConfigParser()
    Config.read("t4t_credentials.txt")
    cfg_username = Config.get(configSection, "username")
    cfg_password = Config.get(configSection, "password")
    cfg_database = Config.get(configSection, "database")
    cfg_server = Config.get(configSection, "server")
    if cfg_server == "" \
            or cfg_database == "" \
            or cfg_username == "" \
            or cfg_password == "":
        print "Could not load config file"
        sys.exit(0)
    if kwargs['verbosity'] <= DEBUG:
        print "[debug] main args", args
        print "[debug] main kwargs", kwargs
    conn = DBAPI.connect(host=cfg_server, database=cfg_database,
                         user=cfg_username, password=cfg_password)
    cursor = conn.cursor()
    if kwargs['query_max_id']:
        print get_max_id(conn, cursor)
        return
    loadSearchTerms(kwargs)
    start_id = get_max_id(conn, cursor)
    print start_id
    if start_id is not None:
        kwargs['start_id'] = int(start_id)
    rl = GetRateLimiter()
    rl.api._cache_timeout = 30
    tweets(conn, rl, **kwargs)

def wait_for_db(self, db_name):
    uri = self.cloud_layer_uri(db_name, "", "")
    ok = False
    retries = 5
    while not ok and retries > 0:
        try:
            connection = DBAPI.connect(
                host=str(uri.host()),
                port=uri.port().toInt()[0],
                database=str(uri.database()),
                user=str(uri.username()),
                password=str(uri.password()),
                socket_timeout=3,  # 3s
                ssl=(uri.sslMode() != QgsDataSourceURI.SSLdisable))
            connection.close()
            ok = True
        except Exception:  # as err:
            retries -= 1
            if retries == 0:
                raise
            else:
                time.sleep(3)

def run_redshift_benchmark(opts):
    conn = DBAPI.connect(host=opts.redshift_host,
                         database=opts.redshift_database,
                         user=opts.redshift_username,
                         password=opts.redshift_password,
                         port=5439,
                         socket_timeout=6000)
    print >> stderr, "Connecting to Redshift..."
    cursor = conn.cursor()
    print >> stderr, "Connection succeeded..."
    times = []
    # Clean up old table if still exists
    try:
        cursor.execute(CLEAN_QUERY)
    except:
        pass
    for i in range(opts.num_trials):
        t0 = time.time()
        cursor.execute(QUERY_MAP[opts.query_num][2])
        times.append(time.time() - t0)
        cursor.execute(CLEAN_QUERY)
    return times

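A possible way to report the timings the benchmark returns, kept in the same Python 2 style as the snippet above; `opts` and `stderr` are assumed to be defined as in that benchmark, and this summary is not part of the original code.

# Sketch: summarize trial times from run_redshift_benchmark.
times = run_redshift_benchmark(opts)
print >> stderr, "trials: %d, min: %.2fs, median: %.2fs, max: %.2fs" % (
    len(times), min(times), sorted(times)[len(times) // 2], max(times))
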
def main():
    global inda_user, mail_id, delay
    inda = indafoto.Session(inda_user)
    conn = DBAPI.connect(host='1.2.3.4', port=12345, database='wikiloves',
                         user='******', password='******')
    cursor = conn.cursor()
    text = codecs.open('wlm_mail.txt', 'rt', 'utf-8').read()
    for (name, page) in db_gen(
            cursor,
            'SELECT DISTINCT inda.author_name, inda.author_url FROM inda JOIN inda_mail ON inda.author_url = inda_mail.author_url AND mail_id = %d WHERE status = 2 AND NOT sent' % mail_id):
        username = page[19:]
        if inda.sendMessage(username, text.format(user=name)):
            pywikibot.output('%s (%s) - OK' % (page, name))
            cursor.execute(
                'UPDATE inda_mail SET sent = true WHERE author_url = %s AND mail_id = %s',
                (page, mail_id))
            conn.commit()
        else:
            pywikibot.output('%s (%s) - error' % (page, name))
        time.sleep(delay)

from pg8000 import DBAPI db = DBAPI.connect(host="192.168.111.128", user="******", password="******", database="pg8000") cursor = db.cursor() cursor.execute( "CREATE TEMPORARY TABLE book (id SERIAL, title TEXT, author TEXT)") cursor.execute("INSERT INTO book (title, author) VALUES (%s, %s) RETURNING id", ("Ender's Game", "Orson Scott Card")) book_id, = cursor.fetchone() db.commit()
from pg8000 import DBAPI
from .connection_settings import db_connect
import time

db = DBAPI.connect(**db_connect)

tests = (
    ("(id / 100)::int2", 'int2'),
    ("id::int4", 'int4'),
    ("(id * 100)::int8", 'int8'),
    ("(id %% 2) = 0", 'bool'),
    ("N'Static text string'", 'txt'),
    ("id / 100::float4", 'float4'),
    ("id / 100::float8", 'float8'),
)

for txt, name in tests:
    query = "SELECT %s AS %s_column FROM (SELECT generate_series(1, 10000) AS id) AS tbl" % (
        txt, name)
    cursor = db.cursor()
    print("Beginning %s test..." % name)
    for i in range(1, 5):
        print("Attempt %s" % i)
        begin_time = time.time()
        cursor.execute(query)
        for row in cursor:
            pass
        end_time = time.time()
        print("Attempt %s - %s seconds." % (i, end_time - begin_time))

# bytea

"""Quick way to extract basic summary data """ import pg8000 from pg8000 import DBAPI import numpy as np conn = DBAPI.connect(host="egdb.lhd.nifs.ac.jp", user="******", password="", database="db1") cursor = conn.cursor() vars='nShotnumber,dDatacreationTime,MagneticField,MagneticAxis,Quadruple,GAMMA' varlist = vars.split(',') LHD = {} dim = 140000 varlist.remove('nShotnumber') LHD.update(dict(nShotnumber = -1 + np.zeros(dim, dtype = np.int32))) varlist.remove('dDatacreationTime') LHD.update(dict(dDatacreationTime = np.array(dim * [' ']))) for k in varlist: LHD.update({k: np.nan+np.zeros(dim, dtype=np.float32)}) sql=str('select {vars} from explog2 where nshotnumber between 117000 and 200000 order by nShotnumber' .format(vars=vars)) cursor.execute(sql)
#postcodeRegex = "(GIR 0AA)|(((A[BL]|B[ABDHLNRSTX]?|C[ABFHMORTVW]|D[ADEGHLNTY]|E[HNX]?|F[KY]|G[LUY]?|H[ADGPRSUX]|I[GMPV]|JE|K[ATWY]|L[ADELNSU]?|M[EKL]?|N[EGNPRW]?|O[LX]|P[AEHLOR]|R[GHM]|S[AEGKLMNOPRSTY]?|T[ADFNQRSW]|UB|W[ADFNRSV]|YO|ZE)[1-9]?[0-9]|((E|N|NW|SE|SW|W)1|EC[1-4]|WC[12])[A-HJKMNPR-Y]|(SW|W)([2-9]|[1-9][0-9])|EC[1-9][0-9]) [0-9][ABD-HJLNP-UW-Z]{2})"
#addressRegex = r"(\b(at|on)\s\w+\s(st(reet)?|r[(od)]d)[.,\b](station|market))"
addressRegex = r"((traffic)\s(in|near|into)\s((\w{4,})(\s(\w{10,}))?)[,.\s$]{1})"
currRegex = addressRegex

configSection = "Local database"
Config = ConfigParser.ConfigParser()
Config.read("../../t4t_credentials.txt")
cfg_username = Config.get(configSection, "username")
cfg_password = Config.get(configSection, "password")
cfg_database = Config.get(configSection, "database")
cfg_server = Config.get(configSection, "server")

conn = DBAPI.connect(host=cfg_server, database=cfg_database,
                     user=cfg_username, password=cfg_password)
cursor = conn.cursor()

query = "select uname, text from tweets where geolocation is null"
cursor.execute(query)
counter = 0
for row in cursor:
    uname = str(row[0])
    text = str(row[1])
    regexMatch = re.search(currRegex, text, re.IGNORECASE)
    if not regexMatch == None:
        #print text[:140]
        addr = regexMatch.group(4)
        lraddr = addr.lower()
        if ("traffic in london" in lraddr) or ("traffic in west london" in lraddr) or ("traffic in central london" in lraddr) or ("traffic in south london" in