def conexion(ubicacion):
    if ubicacion.lower() == 'pre-opendata-oracle':
        devolver = cx_Oracle.connect(OPENDATA_USR + "/" + OPENDATA_PASS + "@" + OPENDATA_CONEXION_BD_PRE)
    elif ubicacion.lower() == 'pro-opendata-oracle':
        devolver = cx_Oracle.connect(OPENDATA_USR + "/" + OPENDATA_PASS + "@" + OPENDATA_CONEXION_BD)
    elif ubicacion.lower() == 'des-opendata-oracle':
        devolver = cx_Oracle.connect(OPENDATA_USR + "/" + OPENDATA_PASS + "@" + OPENDATA_CONEXION_BD_DES)
    elif ubicacion.lower() == 'app1':  # storage.py connection for app1-ORACLE
        devolver = cx_Oracle.connect(AST_USR + "/" + AST_PASS + "@" + AST1_CONEXION_BD)
    elif ubicacion.lower() == 'app2':  # storage.py connection for app2-ORACLE
        devolver = cx_Oracle.connect(AST_USR + "/" + AST_PASS + "@" + AST2_CONEXION_BD)
    elif ubicacion.lower() == 'app6':  # storage.py connection for app6-ORACLE
        devolver = cx_Oracle.connect(AST_USR + "/" + AST_PASS + "@" + AST5_CONEXION_BD)
    elif ubicacion.lower() == 'app3':  # storage.py connection for app3 (trails database)
        devolver = psycopg2.connect(AST3_CONEXION_BD)
    elif ubicacion.lower() == 'opendata-postgre':
        devolver = psycopg2.connect(OPENDATA_POSTGRE_CONEXION_BD)
    elif ubicacion.lower() == 'opendata-campus':  # connection for the campus database
        devolver = psycopg2.connect(OPENDATA_CAMPUS_CONEXION_BD)
    elif ubicacion.lower() == 'app4':  # storage.py connection for app4 (SQL Server)
        devolver = pymssql.connect(host='bov-domus-01.aragon.local', user='******',
                                   password='******', database='BD_COLECCION_WEB')
    elif ubicacion.lower() == 'app5':  # MySQL connection for app5
        devolver = MySQLdb.connect(host='194.179.110.14', port=3306, user='******',
                                   passwd='0p3n-DATA', db='webiaf')
    else:
        print 'No connection available for location', ubicacion
        return None
    return devolver
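The if/elif ladder above grows with every backend; a dict of zero-argument factories keeps the dispatch to one lookup. A minimal sketch under the same module-level constants (`CONEXIONES` and `conexion_v2` are hypothetical names, Oracle entries elided):

CONEXIONES = {
    'opendata-postgre': lambda: psycopg2.connect(OPENDATA_POSTGRE_CONEXION_BD),
    'opendata-campus': lambda: psycopg2.connect(OPENDATA_CAMPUS_CONEXION_BD),
    'app3': lambda: psycopg2.connect(AST3_CONEXION_BD),
}

def conexion_v2(ubicacion):
    # Look the location up instead of walking an if/elif chain.
    factory = CONEXIONES.get(ubicacion.lower())
    if factory is None:
        print 'No connection available for location', ubicacion
        return None
    return factory()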
def edit_comment(comment_id):
    if request.method == 'POST':
        if request.form['action'] == 'delete':
            with dbapi2.connect(flask.current_app.config['dsn']) as connection:
                cursor = connection.cursor()
                query = """DELETE FROM POST WHERE (POSTID = %s)"""
                cursor.execute(query, [comment_id])
                connection.commit()
            return redirect(url_for('site.main_page'))
        else:
            return render_template('edit_comment.html')
    else:
        with dbapi2.connect(flask.current_app.config['dsn']) as connection:
            cursor = connection.cursor()
            query = """SELECT * FROM POST WHERE POSTID = %s"""
            cursor.execute(query, [comment_id])
            post = cursor.fetchall()
            connection.commit()
        return render_template('edit_comment.html', post=post)
def __getattribute__(self, name):
    threadName = threading.current_thread().name
    if name == "log" and "Thread-" in threadName:
        if threadName not in self.loggers:
            self.loggers[threadName] = logging.getLogger(
                "%s.Thread-%d" % (self.loggerPath, self.lastLoggerIndex))
            self.lastLoggerIndex += 1
        return self.loggers[threadName]
    elif name == "conn":
        if threadName not in self.dbConnections:
            # First try a local socket connection, then fall back to an
            # IP-based connection. That way, if the server is local, we get
            # the better performance of a local socket.
            try:
                self.dbConnections[threadName] = psycopg2.connect(
                    dbname=settings.DATABASE_DB_NAME,
                    user=settings.DATABASE_USER,
                    password=settings.DATABASE_PASS)
            except psycopg2.OperationalError:
                self.dbConnections[threadName] = psycopg2.connect(
                    host=settings.DATABASE_IP,
                    dbname=settings.DATABASE_DB_NAME,
                    user=settings.DATABASE_USER,
                    password=settings.DATABASE_PASS)
            # self.dbConnections[threadName].autocommit = True
        return self.dbConnections[threadName]
    else:
        return object.__getattribute__(self, name)
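A simpler route to the same one-connection-per-thread behavior is `threading.local()`, which gives each thread its own attribute namespace; a minimal sketch assuming the same `settings` module (`get_conn` is a hypothetical helper, not part of the original class):

import threading

import psycopg2

_tls = threading.local()

def get_conn():
    # Each thread sees its own `_tls.conn`; connect lazily on first use.
    if not hasattr(_tls, "conn"):
        _tls.conn = psycopg2.connect(
            dbname=settings.DATABASE_DB_NAME,
            user=settings.DATABASE_USER,
            password=settings.DATABASE_PASS)
    return _tls.conn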
def main():
    # Load connection params from the configuration file
    config = ConfigParser.ConfigParser()
    config.read(['consumer.conf', 'local_consumer.conf'])
    dbhost = config.get('Database', 'dbhost')
    dbname = config.get('Database', 'dbname')
    dbuser = config.get('Database', 'dbuser')
    dbpass = config.get('Database', 'dbpass')
    dbport = config.get('Database', 'dbport')
    redisdb = config.get('Redis', 'redishost')
    TERM_OUT = config.get('Consumer', 'term_out')

    # Handle DBs without password (the user expression was redacted in the
    # source; rebuilt here from the config value loaded above)
    if not dbpass:
        # Connect without password
        dbcon = psycopg2.connect("host=" + dbhost + " user=" + dbuser +
                                 " dbname=" + dbname + " port=" + dbport)
    else:
        dbcon = psycopg2.connect("host=" + dbhost + " user=" + dbuser +
                                 " password=" + dbpass + " dbname=" + dbname +
                                 " port=" + dbport)
    dbcon.autocommit = True
    curs = dbcon.cursor()

    sql = ("INSERT INTO market_data_emdrstats (status_type, status_count, message_timestamp) "
           "SELECT status_type, count(id), date_trunc('minute', now()) "
           "FROM market_data_emdrstatsworking GROUP BY status_type")
    curs.execute(sql)
    sql = "TRUNCATE market_data_emdrstatsworking"
    curs.execute(sql)
def main_page():
    if request.method == 'POST':
        if request.form['action'] == 'add':
            newRst = request.form['newRst']
            username = current_user.userName
            with dbapi2.connect(flask.current_app.config['dsn']) as connection:
                cursor = connection.cursor()
                query = """INSERT INTO RESTAURANT (NAME, USERNAME) VALUES (%s, %s)"""
                cursor.execute(query, (newRst, username))
                connection.commit()
            with dbapi2.connect(flask.current_app.config['dsn']) as connection:
                cursor = connection.cursor()
                # Parameterized to avoid SQL injection via the form input.
                query = """INSERT INTO RST_DETAILS (NAME, LOCATION, CATEGORY)
                           VALUES (%s, %s, %s)"""
                cursor.execute(query, (newRst, 'not provided yet.', 'not provided yet.'))
                connection.commit()
            return redirect(url_for('site.main_page'))
    if request.method == 'GET':
        with dbapi2.connect(flask.current_app.config['dsn']) as connection:
            cursor = connection.cursor()
            query = """SELECT * FROM RESTAURANT"""
            cursor.execute(query)
            names = cursor.fetchall()
            query = """SELECT * FROM USERS"""
            cursor.execute(query)
            users = cursor.fetchall()
            connection.commit()
        return render_template('mainpage.html', names=names, users=users, user=current_user)
def selectdb(self, db):
    if self.engine == 'mysqli':
        self.cur.execute('USE %s' % db)
    elif self.engine == 'pgsql':
        if self.cur:
            self.cur.close()
        if self.conn:
            self.conn.close()
        try:
            # psycopg2.
            self.conn = pgsql.connect(
                host=str(self.options['host']),
                port=int(self.options['port']),
                user=str(self.options['user']),
                password=str(self.options['passwd']))
        except Exception:
            # bpsql.
            self.conn = pgsql.connect(
                host=str(self.options['host']),
                port=int(self.options['port']),
                username=str(self.options['user']),
                password=str(self.options['passwd']),
                dbname='')
        self.cur = self.conn.cursor()
def check(self, instance):
    host = instance.get('host', '')
    port = instance.get('port', '')
    user = instance.get('username', '')
    passwd = instance.get('password', '')
    tags = instance.get('tags', [])
    key = '%s:%s' % (host, port)
    if key in self.dbs:
        db = self.dbs[key]
    elif host != '' and user != '':
        try:
            import psycopg2 as pg
            if host == 'localhost' and passwd == '':
                # Use ident method
                db = pg.connect("user=%s dbname=postgres" % user)
            elif port != '':
                db = pg.connect(host=host, port=port, user=user,
                                password=passwd, database='postgres')
            else:
                db = pg.connect(host=host, user=user, password=passwd,
                                database='postgres')
        except ImportError, e:
            self.log.exception("Cannot import psycopg2")
            return
        except Exception, e:
            # FIXME: catch only pg errors
            self.log.exception('PostgreSql connection error')
            return
def edit(id):
    select_stmt = ("SELECT q.name, d.name as department, q.query, q.description "
                   "FROM query q JOIN departments d ON q.department_id = d.id "
                   "WHERE q.id=%s;")
    update_stmt = "UPDATE query SET description=%s, query=%s WHERE id=%s;"
    delete_stmt = "DELETE FROM query WHERE id=%s;"

    if request.args.get('save', '').strip():
        description = request.args.get('description', '').strip()
        query = request.args.get('query', '').strip()
        with psycopg2.connect(dsn=local) as conn:
            with conn.cursor() as cursor:
                cursor.execute(update_stmt, (description, query, str(id)))
            conn.commit()
        return redirect(url_for('home'))
    elif request.args.get('delete', '').strip():
        with psycopg2.connect(dsn=local) as conn:
            with conn.cursor() as cursor:
                cursor.execute(delete_stmt, (str(id),))
            conn.commit()
        return redirect(url_for('home'))
    else:
        with RealDictConnection(dsn=local) as conn:
            with conn.cursor() as cursor:
                cursor.execute(select_stmt, (str(id),))
                res = cursor.fetchone()
        return render_template('edit.html', details=res)
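One caveat for the `with psycopg2.connect(...)` blocks used here and in several snippets below: psycopg2's connection context manager wraps a transaction (commit on success, rollback on exception) but does not close the connection on exit. A minimal sketch that closes it too, reusing the snippet's `local` DSN:

from contextlib import closing

import psycopg2

# closing() guarantees conn.close(); the inner `with conn` block still
# commits on success and rolls back on exception.
with closing(psycopg2.connect(dsn=local)) as conn:
    with conn:
        with conn.cursor() as cursor:
            cursor.execute("SELECT 1")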
def get_currents():
    '''Return dict of current values'''
    dbconn = psycopg2.connect(database='iem', host='iemdb', user='******')
    cursor = dbconn.cursor()
    dbconn2 = psycopg2.connect(database='isuag', host='iemdb', user='******')
    cursor2 = dbconn2.cursor()
    data = {}
    cursor.execute("""
        SELECT id, valid, tmpf, relh from current c
        JOIN stations t on (t.iemid = c.iemid)
        WHERE valid > now() - '3 hours'::interval and t.network = 'ISUSM'
    """)
    valid = None
    for row in cursor:
        data[row[0]] = {'tmpf': row[2], 'rh': row[3], 'valid': row[1],
                        'high': None}
        if valid is None:
            valid = row[1]
    # Go get daily values
    cursor2.execute("""SELECT station, tair_c_max from sm_daily
        where valid = %s""", (valid,))
    for row in cursor2:
        data[row[0]]['high'] = temperature(row[1], 'C').value('F')
    cursor.close()
    dbconn.close()
    return data
def updateMainDB():
    global g_connectorRead
    global g_connectorWrite

    g_connectorRead = psycopg2.connect(
        host='localhost', database="FBWatch", user="******", password="******")
    g_connectorWrite = psycopg2.connect(
        host='192.168.0.52', database="FBWatch", user="******", password="******")

    l_suffixList = ['COMM', 'LIKE']
    for l_suf in l_suffixList:
        l_table = 'TB_PRESENCE_' + l_suf
        l_dtField = 'DT_' + l_suf
        print('Sending', l_table, 'contents back to main server')
        sendBackTable(l_table, l_dtField)

    g_connectorRead.close()
    g_connectorWrite.close()
def child(ckt, tshash):
    cur = conn = None
    try:
        conn = psycopg2.connect("dbname='timeseries' user='******' host='localhost' password='******'")
        cur = conn.cursor()
        print "obtained psycopg2 connection and cursor for circuit: " + str(ckt)
        for (ts, value) in tshash.items():
            print "inserting (timestamp, watts)" + str(ts) + str(value) + "...."
            try:
                cur.execute("insert into watts_" + str(Global.logdir) + "_" + str(ckt) +
                            " values(%s,%s)", (ts, value,))
                conn.commit()
            except Exception, e:
                # On failure, roll back and rebuild the connection before
                # continuing with the next row.
                conn.rollback()
                if cur is not None:
                    cur.close()
                if conn is not None:
                    conn.close()
                conn = psycopg2.connect("dbname='timeseries' user='******' host='localhost' password='******'")
                cur = conn.cursor()
                print e
    except Exception, e:
        print "user_add DATABASE EXCEPTION!!!!"
        print e
        conn.rollback()
def addteam():
    if request.method == 'POST':
        if session['isValid'] == False:
            return "You are not authorized"
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            Name = request.form['Name']
            CountryID = request.form['selectedValue']
            query = """CREATE TABLE IF NOT EXISTS Team (
                Team_ID SERIAL PRIMARY KEY NOT NULL,
                Team_Name CHAR(50) NOT NULL,
                Team_CountryID INT REFERENCES Country (Country_ID)
                    ON DELETE CASCADE ON UPDATE CASCADE,
                Team_Total_Points INT DEFAULT 0
            );"""
            cursor.execute(query)
            try:
                queryWithFormat = """INSERT INTO Team (Team_Name, Team_CountryID)
                                     VALUES (%s, %s)"""
                cursor.execute(queryWithFormat, (Name, CountryID))
                connection.commit()
            except dbapi2.DatabaseError:
                connection.rollback()
                return "error happened"
        return redirect(url_for('teamlist'))
    with dbapi2.connect(app.config['dsn']) as connection:
        cursor = connection.cursor()
        statement = """SELECT Country_ID, Country_Name FROM Country ORDER BY Country_ID"""
        cursor.execute(statement)
        countries = []
        for Country_ID, Country_Name in cursor:
            country = Country(Country_ID, Country_Name)
            countries.append(country)
    return render_template('addteam.html', Countries=countries)
def addaccommodation():
    if session['isValid'] == False:
        return "You are not authorized"
    if request.method == 'POST':
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            Name = request.form['Name']
            AccommodationID = request.form['selectedValue']
            query = """CREATE TABLE IF NOT EXISTS Accommodation (
                Accommodation_ID SERIAL PRIMARY KEY NOT NULL,
                Accommodation_Name CHAR(50) NOT NULL,
                Accommodation_CityID INT REFERENCES City (City_ID)
                    ON DELETE CASCADE ON UPDATE CASCADE
            );"""
            cursor.execute(query)
            try:
                queryWithFormat = """INSERT INTO Accommodation (Accommodation_Name, Accommodation_CityID)
                                     VALUES (%s, %s)"""
                cursor.execute(queryWithFormat, (Name, AccommodationID))
            except dbapi2.DatabaseError:
                connection.rollback()
                return "error happened"
        return redirect(url_for('accommodationlist'))
    with dbapi2.connect(app.config['dsn']) as connection:
        cursor = connection.cursor()
        statement = """SELECT City_ID, City_Name FROM City ORDER BY City_ID"""
        cursor.execute(statement)
        cities = []
        for City_ID, City_Name in cursor:
            city = City(City_ID, City_Name)
            cities.append(city)
    return render_template('addaccommodation.html', Cities=cities)
def addplayer():
    if session['isValid'] == False:
        return "You are not authorized"
    if request.method == 'POST':
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            Name = request.form['Name']
            TeamID = request.form['selectedValue']
            query = """CREATE TABLE IF NOT EXISTS Player (
                Player_ID SERIAL PRIMARY KEY NOT NULL,
                Player_Name CHAR(50) NOT NULL,
                Player_TeamID INT REFERENCES Team (Team_ID)
            );"""
            cursor.execute(query)
            try:
                queryWithFormat = """INSERT INTO Player (Player_Name, Player_TeamID)
                                     VALUES (%s, %s)"""
                cursor.execute(queryWithFormat, (Name, TeamID))
            except dbapi2.DatabaseError:
                connection.rollback()
                return "error happened"
        return redirect(url_for('playerlist'))
    with dbapi2.connect(app.config['dsn']) as connection:
        cursor = connection.cursor()
        statement = """SELECT Team_ID, Team_Name FROM Team ORDER BY Team_ID"""
        cursor.execute(statement)
        teams = []
        for Team_ID, Team_Name in cursor:
            team = Team(Team_ID, Team_Name)
            teams.append(team)
    return render_template('addplayer.html', Teams=teams)
def addtournament():
    if request.method == 'POST':
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            Name = request.form['Name']
            City_ID = request.form['selectedValue']
            query = """CREATE TABLE IF NOT EXISTS Tournament (
                Tournament_ID SERIAL PRIMARY KEY NOT NULL,
                Tournament_Name CHAR(50) NOT NULL,
                Tournament_CityID INT REFERENCES City (City_ID)
                    ON DELETE CASCADE ON UPDATE CASCADE
            );"""
            cursor.execute(query)
            try:
                queryWithFormat = """INSERT INTO Tournament (Tournament_Name, Tournament_CityID)
                                     VALUES (%s, %s)"""
                cursor.execute(queryWithFormat, (Name, City_ID))
            except dbapi2.DatabaseError:
                connection.rollback()
                return "error happened"
        return redirect(url_for('tournamentlist'))
    with dbapi2.connect(app.config['dsn']) as connection:
        cursor = connection.cursor()
        statement = """SELECT City_ID, City_Name FROM City ORDER BY City_ID"""
        cursor.execute(statement)
        cities = []
        for City_ID, City_Name in cursor:
            city = City(City_ID, City_Name)
            cities.append(city)
    return render_template('addtournament.html', Cities=cities)
def createdb(dbname):
    '''Create a new database.

    :param dbname: database name string
    '''
    # does our database already exist?
    try:
        conn_string = "host='localhost' dbname='%s'" % (dbname)
        conn = psycopg2.connect(conn_string)
        print("%sError%s: database %s already exists" %
              (AnsiEscCode.RED, AnsiEscCode.ENDC, dbname))
    # database doesn't exist
    except psycopg2.DatabaseError as dbe:
        os.system("createdb %s" % dbname)
        conn_string = "host='localhost' dbname='%s'" % (dbname)
        conn = psycopg2.connect(conn_string)
        cursor = conn.cursor()
        cursor.execute('CREATE EXTENSION postgis;')
        cursor.execute('CREATE EXTENSION postgis_topology;')
        cursor.execute('COMMIT;')
        print("%s==>%s %s%s database created%s" %
              (AnsiEscCode.BLUE, AnsiEscCode.ENDC, AnsiEscCode.BOLD_TXT,
               dbname, AnsiEscCode.ENDC))
    except Exception as e:
        print("other error %s" % e)
def get_connection(self, key, host, port, user, password, dbname, use_cached=True):
    "Get and memoize connections to instances"
    if key in self.dbs and use_cached:
        return self.dbs[key]
    elif host != "" and user != "":
        try:
            import psycopg2 as pg
        except ImportError:
            raise ImportError("psycopg2 library cannot be imported. Please check the installation instruction on the Datadog Website.")

        if host == 'localhost' and password == '':
            # Use ident method
            connection = pg.connect("user=%s dbname=%s" % (user, dbname))
        elif port != '':
            connection = pg.connect(host=host, port=port, user=user,
                                    password=password, database=dbname)
        else:
            connection = pg.connect(host=host, user=user, password=password,
                                    database=dbname)
    else:
        if not host:
            raise CheckException("Please specify a Postgres host to connect to.")
        elif not user:
            raise CheckException("Please specify a user to connect to Postgres as.")

    try:
        connection.autocommit = True
    except AttributeError:
        # connection.autocommit was added in version 2.4.2
        from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)

    self.dbs[key] = connection
    return connection
def check(self, instance):
    host = instance.get('host', '')
    port = instance.get('port', '')
    user = instance.get('username', '')
    passwd = instance.get('password', '')
    tags = instance.get('tags', [])
    key = '%s:%s' % (host, port)
    if key in self.dbs:
        db = self.dbs[key]
    elif host != '' and user != '':
        import psycopg2 as pg
        if host == 'localhost' and passwd == '':
            # Use ident method
            db = pg.connect("user=%s dbname=postgres" % user)
        elif port != '':
            db = pg.connect(host=host, port=port, user=user,
                            password=passwd, database='postgres')
        else:
            db = pg.connect(host=host, user=user, password=passwd,
                            database='postgres')

    # Check version
    version = self._get_version(key, db)
    self.log.debug("Running check against version %s" % version)

    # Collect metrics
    self._collect_stats(db, tags)
def process(self, tup):
    rx = re.compile('\W+')
    word = rx.sub('', tup.values[0]).strip()

    # Increment the word count in Postgres via psycopg2.
    # Database name: Tcount; table name: Tweetwordcount.
    # You need to create both the database and the table in advance.
    conn = psycopg2.connect(database="tcount", user="******", password="******",
                            host="localhost", port="5432")
    cur = conn.cursor()
    try:
        # Parameterized to avoid SQL injection through the tweet text.
        cur.execute("INSERT INTO Tweetwordcount (word, count) VALUES (%s, 1)", (word,))
        conn.commit()
    except Exception as e:
        # The INSERT fails if the word already exists; reconnect and
        # increment its count instead.
        conn.close()
        conn = psycopg2.connect(database="tcount", user="******", password="******",
                                host="localhost", port="5432")
        cur = conn.cursor()
        cur.execute("UPDATE Tweetwordcount SET count = count + 1 WHERE word = %s", (word,))
        conn.commit()

    # Increment the local count
    self.counts[word] += 1
    self.emit([word, self.counts[word]])
    conn.close()

    # Log the count - just to see the topology running
    self.log('%s: %d' % (word, self.counts[word]))
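On PostgreSQL 9.5+ the insert-then-fall-back-to-update above can be a single atomic statement, which also removes the reconnect; a minimal sketch, assuming Tweetwordcount has a unique constraint on word:

# Assumes e.g.: ALTER TABLE Tweetwordcount ADD CONSTRAINT word_uniq UNIQUE (word);
cur.execute(
    "INSERT INTO Tweetwordcount (word, count) VALUES (%s, 1) "
    "ON CONFLICT (word) DO UPDATE SET count = Tweetwordcount.count + 1",
    (word,))
conn.commit()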
def test_pickle_connection_error(self):
    # segfaults on psycopg 2.5.1 - see ticket #170
    import pickle
    try:
        psycopg2.connect('dbname=nosuchdatabasemate')
    except psycopg2.Error, exc:
        e = exc
def __init__(self, uri):
    DBConnector.__init__(self, uri)

    self.host = uri.host() or os.environ.get("PGHOST")
    self.port = uri.port() or os.environ.get("PGPORT")
    username = uri.username() or os.environ.get("PGUSER") or os.environ.get("USER")
    password = uri.password() or os.environ.get("PGPASSWORD")

    try:
        self.connection = psycopg2.connect(self._connectionInfo().encode("utf-8"))
    except self.connection_error_types(), e:
        # Connecting with the stored credentials failed: prompt the user for
        # credentials up to three times before giving up.
        err = str(e)
        uri = self.uri()
        conninfo = uri.connectionInfo()
        for i in range(3):
            (ok, username, password) = QgsCredentials.instance().get(conninfo, username, password, err)
            if not ok:
                raise ConnectionError(e)
            if username:
                uri.setUsername(username)
            if password:
                uri.setPassword(password)
            try:
                self.connection = psycopg2.connect(uri.connectionInfo().encode("utf-8"))
                QgsCredentials.instance().put(conninfo, username, password)
                break
            except self.connection_error_types(), e:
                if i == 2:
                    raise ConnectionError(e)
                err = str(e)
def connect_to_database():
    if config.db_pwd:
        return psycopg2.connect("dbname={} user={} password={}"
                                .format(config.db_database, config.db_user, config.db_pwd))
    else:
        return psycopg2.connect("dbname={} user={}"
                                .format(config.db_database, config.db_user))
def connect():
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # local
    if 'WTFPrinceton' in BASE_DIR:
        conn = psycopg2.connect(
            database='d8qajk44a19ere',
            user='',
            password='',
            host='localhost',
            port='',
        )
        return conn
    else:
        # heroku
        urlparse.uses_netloc.append("postgres")
        url = urlparse.urlparse(os.environ["DATABASE_URL"])
        conn = psycopg2.connect(
            database=url.path[1:],
            user=url.username,
            password=url.password,
            host=url.hostname,
            port=url.port,
        )
        return conn
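Several snippets in this collection split DATABASE_URL by hand. With psycopg2 2.5+ built against libpq 9.2+, a postgres:// URI can usually be passed to connect() unchanged, so the manual parsing is optional:

import os

import psycopg2

# libpq understands connection URIs directly, so no urlparse step is needed.
conn = psycopg2.connect(os.environ["DATABASE_URL"])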
def race_join_page(key=None):
    if 'username' in session:
        name = session['username']
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            # Parameterized to avoid injecting the username into the SQL.
            cursor.execute("SELECT memberid FROM MEMBERS WHERE username = %s;", (name,))
            id = cursor.fetchone()
            connection.commit()
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            query = "INSERT INTO RACE_RESULTS (MEMBERID, RACEID) VALUES (%s, %s)"
            cursor.execute(query, (id, key))
            connection.commit()
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            cursor.execute("SELECT participant_count FROM RACE WHERE id = %s;", (key,))
            participant_count = cursor.fetchone()[0]
            participant_count = participant_count + 1
            connection.commit()
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            query = "UPDATE RACE SET participant_count = %s WHERE (id = %s)"
            cursor.execute(query, (participant_count, key))
            connection.commit()
        now = datetime.datetime.now()
        return redirect(url_for('race_page', key=key))
    else:
        race = app.store.get_race(key) if key is not None else None
        now = datetime.datetime.now()
        return redirect(url_for('race_page', key=key))
def test_success(self):
    # Case to test for a successful transformation of a module from
    # cnxml to html.
    ident, filename = 2, 'index.cnxml'  # m42955
    with psycopg2.connect(self.connection_string) as db_connection:
        with db_connection.cursor() as cursor:
            # delete module_ident 2 index.cnxml.html
            cursor.execute("DELETE FROM module_files WHERE module_ident = 2 "
                           "AND filename = 'index.cnxml.html'")

    self.call_target(ident)

    with psycopg2.connect(self.connection_string) as db_connection:
        with db_connection.cursor() as cursor:
            cursor.execute("SELECT file FROM files "
                           "  WHERE fileid = "
                           "    (SELECT fileid FROM module_files "
                           "     WHERE module_ident = %s "
                           "     AND filename = 'index.cnxml.html');",
                           (ident,))
            index_html = cursor.fetchone()[0][:]

    # We only need to test that the file got transformed and placed in the
    # database; the transform itself should be verified independently of
    # this code.
    self.assertTrue(index_html.find('<html') >= 0)
def create_database(self):
    try:
        with psycopg2.connect(
                host=self.host, port=self.port, database="postgres",
                user=self.user, password=self.password) as conn:
            # CREATE DATABASE cannot run inside a transaction block.
            conn.autocommit = True
            with conn.cursor() as curs:
                curs.execute("CREATE DATABASE {}".format("test"))
    except:
        # The database probably exists already; ignore the error.
        pass

    self.conn = psycopg2.connect(
        host=self.host, port=self.port, database="test",
        user=self.user, password=self.password)
    self.conn.autocommit = True
    self.curs = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    self.curs.execute(create_table_command.format(self.db_name))
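Here the database name is the literal "test", so the str.format is harmless; when the name is dynamic, identifiers cannot be sent as query parameters, but psycopg2.sql (available since 2.7) quotes them safely. A minimal sketch:

from psycopg2 import sql

# sql.Identifier quotes names that cannot be passed as %s parameters.
dbname = "test"
curs.execute(sql.SQL("CREATE DATABASE {}").format(sql.Identifier(dbname)))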
def cable_schools():
    for myState in ["AK","AL","AS","AZ","AR","CA","CO","CT","DC","DE","FL","GA","GU","HI","ID","IL",
                    "IN","IA","KS","KY","LA","ME","MD","MA","MI","MN","MS","MO","MP","MT","NE","NV","NH",
                    "NJ","NM","NY","NC","ND","OH","OK","OR","PA","PR","RI","SC","TN","TX","UT","VA",
                    "VI","VT","WA","WI","WV","WY"]:
        # The user portion of the DSN and the connect/cursor calls were
        # redacted in the source; rebuilt here on the same pattern with a
        # placeholder `myUser`.
        myConn = "dbname=" + db + " host=" + myHost + " port=" + myPort + " user=" + myUser
        connTest = psycopg2.connect(myConn)
        ctest = connTest.cursor()
        for shp in ["block", "road", "address"]:
            ctest.execute("select * from information_schema.tables where table_name=%s",
                          ("private_school_transtech_" + myState.lower() + "_" + shp,))
            if bool(ctest.rowcount):
                print myState
                print shp
                myConn = "dbname=" + db + " host=" + myHost + " port=" + myPort + " user=" + myUser
                conn = psycopg2.connect(myConn)
                c = conn.cursor()
                dropSQL = ("DROP TABLE IF EXISTS " + schema + ".private_school_unique_cable_" +
                           myState + "_" + shp + "; ")
                c.execute(dropSQL)
                theSQL = ("CREATE TABLE " + schema + ".private_school_unique_cable_" +
                          myState + "_" + shp + " AS ")
                theSQL = theSQL + "SELECT school_id, count(*) FROM " + schema + ".private_school_transtech_"
                theSQL = theSQL + myState + "_" + shp + " WHERE transtech = '40' or transtech = '41'"
                theSQL = theSQL + " GROUP BY school_id ORDER BY count desc;"
                print theSQL
                c.execute(theSQL)
                conn.commit()
                c.close()
        ctest.close()
def connect(self):
    if not all([
        hasattr(self, '_psqlDbIpAddr'),
        hasattr(self, '_psqlDbName'),
        hasattr(self, '_psqlUserName'),
        hasattr(self, '_psqlUserPass'),
    ]):
        try:
            # hook up login creds (overridden for tests)
            self._psqlDbIpAddr = settings.PSQL_IP
            self._psqlDbName = settings.PSQL_DB_NAME
            self._psqlUserName = settings.PSQL_USER
            self._psqlUserPass = settings.PSQL_PASS
        except:
            print("WARNING: DB Credentials not available. Is this a test environment?")

    try:
        # Prefer a local socket connection; fall back to TCP if that fails.
        self.conn = psycopg2.connect(dbname=self._psqlDbName,
                                     user=self._psqlUserName,
                                     password=self._psqlUserPass)
    except psycopg2.OperationalError:
        self.conn = psycopg2.connect(host=self._psqlDbIpAddr,
                                     dbname=self._psqlDbName,
                                     user=self._psqlUserName,
                                     password=self._psqlUserPass)
def test_blocking(self):
    statement = """
        UPDATE pgbench_branches
        SET bbalance = bbalance + 10
        WHERE bid = 1
    """
    blocking_conn = psycopg2.connect(
        database=self.dbname,
        cursor_factory=psycopg2.extras.NamedTupleCursor)
    blocking_cursor = blocking_conn.cursor()
    blocking_cursor.execute(statement)

    # `async_` is psycopg2's keyword-safe alias for `async`, which became a
    # reserved word in Python 3.7.
    async_conn = psycopg2.connect(
        database=self.dbname,
        cursor_factory=psycopg2.extras.NamedTupleCursor,
        async_=1)
    psycopg2.extras.wait_select(async_conn)
    async_cursor = async_conn.cursor()
    async_cursor.execute(statement)

    with PgExtras(dsn=self.dsn) as pg:
        results = pg.blocking()
    self.assertEqual(len(results), 1)
def __init__(self, dsn):
    pg_host = 'localhost'
    pg_port = 5432
    pg_user = '******'
    pg_pass = '******'
    pg_db = 'osmbuildings_miami'

    # Extent of Large Building Footprints dataset
    self.bbox = '25.23561, -80.87864, 25.97467, -80.11845'
    # Downtown MIA (overrides the extent above)
    self.bbox = '25.770098, -80.200582,25.780107,-80.185132'

    if dsn is not None:
        self.conn = psycopg2.connect(dsn)
    else:
        self.conn = psycopg2.connect(
            host=pg_host, port=pg_port, user=pg_user,
            password=pg_pass, dbname=pg_db)

    try:
        psycopg2.extras.register_hstore(self.conn)
    except:
        print 'Could not register hstore. Are you running it for the first time (no hstore data in DB)? You should be OK next time though.'

    self.cursor = self.conn.cursor()
import subprocess
import os
import time
import sys, getopt
import re
import psycopg2

station = 'yg'

#### email notification list
to_addr = ['*****@*****.**', '*****@*****.**']
#to_addr=['*****@*****.**',]
subject = "r4_auto_copy"

try:
    #print("try connect db")
    conn = psycopg2.connect("dbname='experiments' user='******' host='131.217.63.180' port ='5432'")
except Exception, e:
    #print("can't connect to postgresql server\n %s" %e)
    pass
else:
    #print("connected to postgresql db")
    cur = conn.cursor()

### get name of r4 just done (started day before)
sql = """select name from catalog_experiment where name LIKE 'R4%' AND schedate=current_date-INTERVAL '1 DAY';"""
cur.execute(sql)
query = cur.fetchone()
print query
exp = query[0].lower()
print exp

#### has usb been mounted?
def start_backup_job(self):
    """
    Make filelist in super class and tell Postgres
    that we start a backup now
    """
    bareosfd.DebugMessage(100, "start_backup_job in PostgresPlugin called")
    try:
        self.dbCon = psycopg2.connect(
            "dbname=%s user=%s %s" % (self.dbname, self.dbuser, self.dbOpts))
        self.dbCursor = self.dbCon.cursor()
        self.dbCursor.execute("SELECT current_setting('server_version_num')")
        self.pgVersion = int(self.dbCursor.fetchone()[0])
        # bareosfd.DebugMessage(
        #     1, "Connected to Postgres version %d\n" % self.pgVersion,
        # )
        ## WARNING: JobMessages cause fatal errors at this stage
        JobMessage(
            M_INFO,
            "Connected to Postgres version %d\n" % (self.pgVersion),
        )
    except:
        bareosfd.JobMessage(
            M_FATAL,
            "Could not connect to database %s, user %s\n" % (self.dbname, self.dbuser),
        )
        return bRC_Error

    if chr(self.level) == "F":
        # For Full we backup the Postgres data directory.
        # Restore object ROP comes later, after file backup is done.
        startDir = self.options["postgresDataDir"]
        self.files_to_backup.append(startDir)
        bareosfd.DebugMessage(100, "dataDir: %s\n" % self.options["postgresDataDir"])
    else:
        # If level is not Full, we only backup WAL files
        # and create a restore object ROP with timestamp information.
        startDir = self.options["walArchive"]
        self.files_to_backup.append("ROP")
        # get current Log Sequence Number (LSN)
        # PG8: not supported
        # PG9: pg_get_current_xlog_location
        # PG10: pg_current_wal_lsn
        pgMajorVersion = self.pgVersion // 10000
        if pgMajorVersion >= 10:
            getLsnStmt = "SELECT pg_current_wal_lsn()"
            switchLsnStmt = "SELECT pg_switch_wal()"
        elif pgMajorVersion >= 9:
            getLsnStmt = "SELECT pg_current_xlog_location()"
            switchLsnStmt = "SELECT pg_switch_xlog()"
        if pgMajorVersion < 9:
            bareosfd.JobMessage(
                M_INFO,
                "WAL switching not supported on Postgres Version < 9\n",
            )
        else:
            if self.execute_SQL(getLsnStmt):
                currentLSN = self.formatLSN(self.dbCursor.fetchone()[0])
                bareosfd.JobMessage(
                    M_INFO,
                    "Current LSN %s, last LSN: %s\n" % (currentLSN, self.lastLSN),
                )
            else:
                currentLSN = 0
                bareosfd.JobMessage(
                    M_WARNING,
                    "Could not get current LSN, last LSN was: %s\n" % self.lastLSN,
                )
            if currentLSN > self.lastLSN and self.switchWal:
                # Let Postgres write the latest transaction into a new WAL file now
                if not self.execute_SQL(switchLsnStmt):
                    bareosfd.JobMessage(
                        M_WARNING,
                        "Could not switch to next WAL segment\n",
                    )
                if self.execute_SQL(getLsnStmt):
                    currentLSN = self.formatLSN(self.dbCursor.fetchone()[0])
                    self.lastLSN = currentLSN
                    # wait some seconds to make sure WAL file gets written
                    time.sleep(10)
                else:
                    bareosfd.JobMessage(
                        M_WARNING,
                        "Could not read LSN after switching to new WAL segment\n",
                    )
            else:
                # Nothing has changed since last backup - only send ROP this time
                bareosfd.JobMessage(
                    M_INFO,
                    "Same LSN %s as last time - nothing to do\n" % currentLSN,
                )
                return bRC_OK

    # Gather files from startDir (Postgres data dir or walArchive for incr/diff jobs)
    for fileName in os.listdir(startDir):
        fullName = os.path.join(startDir, fileName)
        # We need a trailing '/' for directories
        if os.path.isdir(fullName) and not fullName.endswith("/"):
            fullName += "/"
        bareosfd.DebugMessage(100, "fullName: %s\n" % fullName)
        # Usually Bareos takes care about timestamps when doing incremental backups
        # but here we have to compare against the last BackupPostgres timestamp
        try:
            mTime = os.stat(fullName).st_mtime
        except Exception as e:
            bareosfd.JobMessage(
                M_ERROR,
                "Could not get stat-info for file %s: %s\n" % (fullName, e),
            )
            continue
        bareosfd.DebugMessage(
            150,
            "%s fullTime: %d mtime: %d\n" % (fullName, self.lastBackupStopTime, mTime),
        )
        if mTime > self.lastBackupStopTime + 1:
            bareosfd.DebugMessage(
                150,
                "file: %s, fullTime: %d mtime: %d\n" % (fullName, self.lastBackupStopTime, mTime),
            )
            self.files_to_backup.append(fullName)
            if os.path.isdir(fullName) and fileName not in self.ignoreSubdirs:
                for topdir, dirNames, fileNames in os.walk(fullName):
                    for fileName in fileNames:
                        self.files_to_backup.append(os.path.join(topdir, fileName))
                    for dirName in dirNames:
                        fullDirName = os.path.join(topdir, dirName) + "/"
                        self.files_to_backup.append(fullDirName)

    # If level is not Full, we are done here and set the new
    # lastBackupStopTime as reference for future jobs.
    # Will be written into the Restore Object.
    if not chr(self.level) == "F":
        self.lastBackupStopTime = int(time.time())
        return bRC_OK

    # For Full we check for a running job and tell Postgres that
    # we want to backup the DB files now.
    if os.path.exists(self.labelFileName):
        self.parseBackupLabelFile()
        bareosfd.JobMessage(
            M_FATAL,
            "Another Postgres Backup Operation is in progress (\"{}\" file exists). "
            "You may stop it using SELECT pg_stop_backup()\n".format(self.labelFileName),
        )
        return bRC_Error

    bareosfd.DebugMessage(100, "Send 'SELECT pg_start_backup' to Postgres\n")
    # We tell Postgres that we want to start to backup files now
    self.backupStartTime = datetime.datetime.now(
        tz=dateutil.tz.tzoffset(None, self.tzOffset))
    if not self.execute_SQL("SELECT pg_start_backup('%s');" % self.backupLabelString):
        bareosfd.JobMessage(M_FATAL, "pg_start_backup statement failed.")
        return bRC_Error

    results = self.dbCursor.fetchall()
    bareosfd.DebugMessage(150, "Start response: %s\n" % str(results))
    bareosfd.DebugMessage(150, "Adding label file %s to fileset\n" % self.labelFileName)
    self.files_to_backup.append(self.labelFileName)
    bareosfd.DebugMessage(150, "Filelist: %s\n" % (self.files_to_backup))
    self.PostgressFullBackupRunning = True
    return bRC_OK
def createDb():
    conn = db.connect(host="db", database="db", user="******",
                      password="******", port=5432)
    cur = conn.cursor()
    # test & create our two tables
    cur.execute(queries['Create_Play_Table'])
    cur.execute(queries['Create_User_Table'])
#!/usr/bin/env python
"""Test that we can connect to the database"""
import sys

import psycopg2

try:
    psycopg2.connect(
        "host='database' port=5432 user='******' password='******' dbname='aiida_db'"
    )
except Exception as error:  # pylint: disable=broad-except
    print("failed: ", error)
    sys.exit(1)

print("succeeded")
sys.exit(0)
def connect(self):
    logging.info('Connecting.')
    # An empty DSN makes libpq fall back to its environment variables
    # (PGHOST, PGUSER, PGDATABASE, ...) and defaults.
    self.conn = psycopg2.connect("")
def lambda_handler(event, context):
    today = datetime.datetime.now()
    mytimestamp = today.strftime("%Y%m%d%H%M%S")

    srcType = 'postgres'
    dbUsername = '******'
    srcPassword = '******'
    dbPort = 5432
    dbHost = 'pgtst1.cpmpdensv4tc.us-east-2.rds.amazonaws.com'
    dbName = "dvdrental"
    srcSchema = "public"
    tablekeyword = "%"
    rows = []

    filename = "tables.txt"
    bucket = "rdstoolbox"
    s3_path = ("rdstools_schema_snaps" + "/" + dbHost + "/" + dbName + "/" +
               srcSchema + "/" + mytimestamp + "/" + filename)
    lambda_path = "/tmp/" + srcSchema + mytimestamp + filename
    #encoded_string = string.encode("utf-8")

    query = "select datname,pid,usename,application_name,state,wait_event from pg_stat_activity"
    #this is tested fine#srcTableListQuery = "select tablename from pg_tables where upper(schemaname) like upper('" + srcSchema + "') and upper(tablename) like upper('" + tablekeyword + "') order by tablename"
    srcTableListQuery = ("select json_agg(t) from (select schemaname,tablename,tableowner "
                         "from pg_catalog.pg_tables where schemaname='public' "
                         "order by tablename) as t")

    print('Checking connection ')
    if srcType == 'oracle':
        srcConnStr = dbUsername + '/' + srcPassword + '@' + dbHost + ":" + str(dbPort) + "/" + dbName
    if srcType == 'mysql':
        srcConnStr = {
            'user': dbUsername,
            'password': srcPassword,
            'host': dbHost,
            'database': dbName,
            'raise_on_warnings': True,
            'use_pure': False,
        }
    if srcType == 'postgres':
        # The credential expressions were redacted in the source; rebuilt
        # here from the variables defined above.
        srcConnStr = ("user=" + dbUsername + " password=" + srcPassword +
                      " host=" + dbHost + " port=" + str(dbPort) + " dbname=" + dbName)

    if srcType == "postgres":
        try:
            print("connection string:" + srcConnStr)
            conn = psycopg2.connect(srcConnStr)
            print("SUCCESS")
            with conn.cursor() as cur:
                cur.execute(srcTableListQuery)
                #cur.copy_expert(srcTableListQuery)
                for row in cur:
                    rows.append(row)
                print(json.dumps(rows))
                try:
                    print("Now writing to file")
                    with open(lambda_path, 'w+') as file:
                        file.write(json.dumps(rows))
                except:
                    print("Error writing to file")
                try:
                    print("Now writing to S3")
                    s3_client = boto3.client('s3')
                    s3_client.upload_file(lambda_path, bucket, s3_path)
                    print("Done writing to S3")
                except ClientError as error:
                    print("ERROR:Something went wrong: {} ".format(error))
        except psycopg2.Error as error:
            print("ERROR:Something went wrong: {} ".format(error))
            print(error.pgerror)

    return {
        'statusCode': 200,
        #'body': json.dumps('Hello from Lambda!')
        'body': json.dumps(rows)
    }
def Register(request):
    from configparser import ConfigParser
    import os

    cfg = ConfigParser()
    file_path = os.path.join(os.path.abspath('.'), 'config.ini')
    if not os.path.exists(file_path):
        raise FileNotFoundError("config file does not exist")
    cfg.read(file_path)
    pghost = cfg.get('api', 'pghost')
    pgport = cfg.get('api', 'pgport')
    pguser = cfg.get('api', 'pguser')
    pgpassword = cfg.get('api', 'pgpassword')
    pgdatabase = cfg.get('api', 'pgdatabase')
    iotdbIp = cfg.get('api', 'iotdbIp')
    iotdbUser = cfg.get('api', 'iotdbUser')
    iotdbPassword = cfg.get('api', 'iotdbPassword')

    #iotdb_conn = JDBC.connect('org.apache.iotdb.jdbc.IoTDBDriver', "jdbc:iotdb://192.168.3.31:6667/", ['root', 'root'], 'iotdb-jdbc-0.9.0-SNAPSHOT-jar-with-dependencies.jar')
    iotdb_conn = get_jdbc_connection(iotdbIp, iotdbUser, iotdbPassword)
    iotdb_curs = iotdb_conn.cursor()

    # conn = psycopg2.connect(host = '172.16.50.7', port = 5432, user = '******', password = '******', database='protodw')
    conn = psycopg2.connect(host=pghost, port=pgport, user=pguser,
                            password=pgpassword, database=pgdatabase)
    cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    print(request.body)
    body = json.loads(str(request.body, encoding='utf8'))
    if not body['toidList']:
        print('No toidList!')
        cursor.close()
        conn.close()
        iotdb_curs.close()
        iotdb_conn.close()
        return HttpResponse('No toidList!')
    toidList = body['toidList']

    terminalList = []
    coidList = []
    roidList = []
    carList = []
    errors = []

    for toid in toidList:
        # Parameterized; the oids come from the request body.
        cursor.execute("SELECT * FROM plt_cus_terminal WHERE plt_oid = %s", (toid,))
        terminal_qr = cursor.fetchall()
        terminalList.append(terminal_qr[0])
        if terminal_qr[0]["plt_carid"] not in coidList:
            coidList.append(terminal_qr[0]["plt_carid"])

    for coid in coidList:
        cursor.execute("SELECT * FROM plt_cus_car WHERE plt_oid = %s", (coid,))
        car_qr = cursor.fetchall()
        carList.append(car_qr[0])
        if car_qr[0]["plt_railline"] not in roidList:
            roidList.append(car_qr[0]["plt_railline"])

    for roid in roidList:
        cursor.execute("SELECT * FROM plt_tsm_railline WHERE plt_oid = %s", (roid,))
        qr = cursor.fetchall()
        line_id = qr[0]["plt_lineid"]
        cursor.execute("SELECT * FROM plt_tsm_protocol WHERE plt_oid = %s",
                       (qr[0]["plt_protoid"],))
        protocol_qr = cursor.fetchall()
        work_condition = []
        for protocol in protocol_qr:
            poid = protocol["plt_oid"]
            cursor.execute(
                "SELECT plt_tsm_template.* FROM ((plt_tsm_protocol "
                "INNER JOIN plt_tsm_r_pro2temp ON plt_tsm_protocol.plt_oid = plt_tsm_r_pro2temp.plt_leftoid) "
                "INNER JOIN plt_tsm_template ON plt_tsm_r_pro2temp.plt_rightoid = plt_tsm_template.plt_oid) "
                "WHERE plt_tsm_protocol.plt_oid = %s", (poid,))
            template_qr = cursor.fetchall()
            for template in template_qr:
                toid = template["plt_oid"]
                cursor.execute(
                    "SELECT plt_tsm_templatepara.* FROM ((plt_tsm_template "
                    "INNER JOIN plt_tsm_r_tem2tempara ON plt_tsm_template.plt_oid = plt_tsm_r_tem2tempara.plt_leftoid) "
                    "INNER JOIN plt_tsm_templatepara ON plt_tsm_r_tem2tempara.plt_rightoid = plt_tsm_templatepara.plt_oid) "
                    "WHERE plt_tsm_template.plt_oid = %s", (toid,))
                tempara_qr = cursor.fetchall()
                for tempara in tempara_qr:
                    if tempara["plt_paratype"] != "工况参数":  # "operating-condition parameter"
                        continue
                    name = tempara["plt_paraid"]
                    type = tempara["plt_datatype"]
                    iotdb_type = ""
                    iotdb_encoding = ""
                    # Map the platform data type to an IoTDB type/encoding pair.
                    if type == "Int":
                        iotdb_type = "INT32"
                        iotdb_encoding = "RLE"
                    elif type == "Long":
                        iotdb_type = "INT64"
                        iotdb_encoding = "RLE"
                    elif type == "Float":
                        iotdb_type = "FLOAT"
                        iotdb_encoding = "GORILLA"
                    elif type == "Double":
                        iotdb_type = "DOUBLE"
                        iotdb_encoding = "GORILLA"
                    elif type == "String":
                        iotdb_type = "TEXT"
                        iotdb_encoding = "PLAIN"
                    elif type == "Boolean":
                        iotdb_type = "BOOLEAN"
                        iotdb_encoding = "RLE"
                    work_condition.append((name, iotdb_type, iotdb_encoding))

        for car in carList:
            if car["plt_railline"] != roid:
                continue
            car_id = car["plt_carid"]
            coid = car["plt_oid"]
            for terminal in terminalList:
                if terminal["plt_carid"] != coid:
                    continue
                terminal_id = terminal["plt_terminalid"]
                storage_group = "root." + line_id + "." + car_id + "." + terminal_id
                iotdb_sql = "set storage group to " + storage_group
                try:
                    iotdb_curs.execute(iotdb_sql)
                except Exception as e:
                    if str(e) != 'java.sql.SQLException: Method not supported':
                        errors.append(str(e))
                try:
                    iotdb_sql = ("create timeseries " + storage_group +
                                 ".OriginalPackage with datatype=TEXT,encoding=PLAIN")
                    iotdb_curs.execute(iotdb_sql)
                except Exception as e:
                    if str(e) != 'java.sql.SQLException: Method not supported':
                        errors.append(str(e))
                for wc in work_condition:
                    try:
                        iotdb_sql = ("create timeseries " + storage_group + "." + wc[0] +
                                     " with datatype=" + wc[1] + ",encoding=" + wc[2])
                        iotdb_curs.execute(iotdb_sql)
                    except Exception as e:
                        if str(e) != 'java.sql.SQLException: Method not supported':
                            errors.append(str(e))

    iotdb_curs.close()
    iotdb_conn.close()
    cursor.close()
    conn.close()
    return HttpResponse(errors)
def database():
    return psycopg2.connect(database=Config.config["database"])
def __init__(self):
    self.conn = psycopg2.connect(host=PG_HOSTNAME, user=PG_USERNAME,
                                 password=PG_PASSWORD, dbname=PG_DATABASE)
    self.cur = self.conn.cursor()
import psycopg2
import os
import urllib.parse as up
from dotenv import load_dotenv

load_dotenv()

up.uses_netloc.append("postgres")
url = up.urlparse(os.environ["DATABASE_URL"])

labs_conn = psycopg2.connect(database=url.path[1:],
                             user=url.username,
                             password=url.password,
                             host=url.hostname,
                             port=url.port)
labs_curs = labs_conn.cursor()

# ?-- Should we create our own DB id? --?

def create_prices_raw():
    create_table_Q = """
        CREATE TABLE IF NOT EXISTS prices_raw (
            id_sauti INTEGER,
            source VARCHAR(200),
            country VARCHAR(50),
            market VARCHAR(25),
            product_cat VARCHAR(50),
            product_agg VARCHAR(50),
            product VARCHAR(50),
def to_neo4j(csv_edges, table='patents'):
    """
    Code and Cypher commands needed to upload the relationship data to neo4j.

    :param csv_edges: The string that contains the edges
    :param table: The string that dictates which table will be used to get node information
    :return:
    """
    #embed()
    # Set-Up Patents
    driver = GraphDatabase.driver(os.getenv("NEO4J_URI"),
                                  auth=("neo4j", os.getenv("NEO4J_PASSWORD")))
    session = driver.session()

    # Add Index and Constraints
    # query = '''CREATE INDEX ON :Patent(patent_number)'''
    # session.run(query)
    query = '''CREATE CONSTRAINT ON (p:Patent) ASSERT p.patent_number IS UNIQUE'''
    session.run(query)
    session.sync()

    # Get all relevant new patent numbers
    new_numbers = set(["'{}'".format(x.split(',')[0]) for x in csv_edges.split('\n') if x != ''])

    # Get Nodes from postgres to csv
    HOST = os.getenv("POSTGRES_HOST")
    USER = os.getenv("POSTGRES_USER")
    PASS = os.getenv("POSTGRES_PASSWORD")
    conn = psycopg2.connect(host=HOST, dbname="patent_data", user=USER, password=PASS)

    # Upload data
    cur = conn.cursor()
    query = "Select patent_number, title, owner, abstract From %s where patent_number in (%s)" % (
        table, ', '.join(new_numbers))
    with open('/home/ubuntu/tmp_nodes.txt', 'w') as f:
        # with open('tmp_nodes.txt', 'w') as f:
        cur.copy_expert("copy ({}) to stdout with csv header".format(query), f)

    patents_from_db = []
    data = {'patents': []}
    lines = open('/home/ubuntu/tmp_nodes.txt').readlines()
    for line in lines[1:]:
        line = line.strip('\n').split(',')
        if len(line) < 4:
            print(line)
            continue
        patents_from_db.append(line[0])
        data['patents'].append({'patent_number': line[0], 'title': line[1],
                                'owner': line[2], 'abstract': line[3]})

    # load node data
    query = '''
    WITH {data} as q
    UNWIND q.patents AS data
    MERGE (p: Patent {patent_number: data.patent_number })
    ON CREATE SET p = {patent_number: data.patent_number, title: data.title,
                       owner: data.owner, abstract: data.abstract}
    ON MATCH SET p += {patent_number: data.patent_number, title: data.title,
                       owner: data.owner, abstract: data.abstract}'''
    session.run(query, parameters={'data': data})

    # Set up Patent Relationships
    data = {'links': []}
    lines = csv_edges.split('\n')
    for line in lines:
        line = line.strip().split('\t')
        # limit edges that do not exist in DB
        if line[0] not in patents_from_db:
            continue
        if line[1] not in patents_from_db:
            continue
        data['links'].append({'citation': line[1], 'patent_number': line[0]})

    query = '''
    WITH {data} as q
    UNWIND q.links AS data
    MERGE (f: Patent {patent_number: data.patent_number})
    MERGE (t: Patent {patent_number: data.citation})
    MERGE (f)-[:CITES]->(t)
    '''
    session.run(query, parameters={'data': data})
    session.sync()
    os.remove('/home/ubuntu/tmp_nodes.txt')
import os

from dotenv import load_dotenv
import psycopg2

load_dotenv()

DB_NAME = os.getenv('DB_NAME')
DB_USER = os.getenv('DB_USER')
DB_PASSWORD = os.getenv('DB_PASSWORD')
DB_HOST = os.getenv('DB_HOST')

conn = psycopg2.connect(dbname=DB_NAME, user=DB_USER,
                        password=DB_PASSWORD, host=DB_HOST)
cur = conn.cursor()
cur.execute('SELECT * from test_table;')
results = cur.fetchall()
print(results)
import pandas as pd
import numpy as np
import psycopg2 as pg
from psycopg2 import Error
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
import json
import requests
from sklearn.metrics.pairwise import cosine_similarity

client_credentials_manager = SpotifyClientCredentials(
    client_id=spotify_credentials['client_id'],
    client_secret=spotify_credentials['client_secret'])
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)

#=============================== SQL Utils ====================================#

conn = pg.connect(database=sql_credentials['database'],
                  user=sql_credentials['user'],
                  password=sql_credentials['password'],
                  host=sql_credentials['host'])

def run_query(q, conn):
    '''a function that takes a SQL query as an argument
    and returns a pandas dataframe of that query'''
    with conn:
        try:
            cur = conn.cursor()
            cur.execute(q)
            return pd.read_sql(q, conn)
        except (Exception, pg.Error) as e:
            print(e)
        try:
            cur.close()
import psycopg2
import psycopg2.extensions
import select

conn = psycopg2.connect("dbname=postgres user=samba")
conn.autocommit = True
cursor = conn.cursor()

cursor.execute("SELECT create_queue('orderdata')")

# some test data
cursor.execute("INSERT INTO queue_orderdata VALUES ('order1')")
cursor.execute("INSERT INTO queue_orderdata VALUES ('order2')")

# prod code
def main():
    while True:
        entry = read_queue("orderdata", cursor)
        print("Received: " + entry)

# library code
# TODO currently after we receive an event we read the DB twice:
# - once to read the entry
# - second time when we re-enter the loop
# -> we can't rely on that event = entry, NOTIFY can be called manually without an INSERT
# -> use a generator to make this nicer
# or maybe this is ok for robustness??
def read_queue(queue_name, cursor):
    cursor.execute(f"LISTEN queue_{queue_name};")
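read_queue is cut off above; for reference, the documented psycopg2 pattern for waiting on a NOTIFY selects on the connection's socket and drains conn.notifies. A minimal sketch (`wait_for_notify` is a hypothetical helper, not necessarily what the original did next):

def wait_for_notify(conn, timeout=5):
    # Block until the server sends a notification or the timeout elapses,
    # then return whatever notifications are queued on the connection.
    if select.select([conn], [], [], timeout) == ([], [], []):
        return []
    conn.poll()
    notifications = []
    while conn.notifies:
        notifications.append(conn.notifies.pop(0))
    return notifications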
with open(config_file) as json_data_file:
    database = json.load(json_data_file)["DATABASE"]
print(database["user"])

url = "https://www.studentska-prehrana.si/restaurant/"
uClient = uReq(url)
page_html = uClient.read()
uClient.close()
page_soup = soup(page_html, "html.parser")
restavracije = page_soup.findAll('div', attrs={'class': 'restaurant-row'})

try:
    # The credential fields were redacted in the source; rebuilt on the same
    # pattern from the loaded config.
    connect_str = (f"dbname='{database['db_name']}' user='{database['user']}' "
                   f"host='{database['host']}' password='{database['password']}'")
    conn = psycopg2.connect(connect_str)
    cursor = conn.cursor()
    query = """INSERT INTO restaurants_sp
               (name, sp_id, street, zip, city, students_meal_price, latitude, longitude)
               VALUES (%s, %s, %s, %s, %s, %s, %s, %s);"""
    tag_query = """INSERT INTO restaurant_tags (restaurant_sp_id, name) VALUES (%s, %s);"""
except Exception as e:
    print(e)

vsi_atributi = []
from random import *
import pygame
import psycopg2 as p
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

#wait for db
#sleep(30)

con = p.connect(
    "dbname='python_game_data' user='******' host='192.168.99.100' password='******'"
)
cur = con.cursor()

'''
#Creation Database and Tables
con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cur = con.cursor()
cur.execute("CREATE DATABASE python_game_data;")
con = p.connect("dbname='python_game_data' user='******' host='192.168.99.100' password='******'")
cur = con.cursor()
cur.execute("CREATE TABLE scores(id SERIAL PRIMARY KEY, score INTEGER);")
con.commit()
cur.execute("CREATE TABLE game_text(id SERIAL PRIMARY KEY, text_name VARCHAR(255) UNIQUE NOT NULL, text_content VARCHAR(255) NOT NULL);")
con.commit()
cur.execute("INSERT INTO game_text (text_name, text_content) values ('description', 'Vous devez affronter le goblin et obtenir un maximum de points, bonne chance !');")
con.commit()
def sync_db(source, dest):
    # FusionPBX Database Parameters
    pbx_id = source[0]
    pbx_host = source[1]
    if ':' in source[2]:
        fpbx_hostname = source[2].split(':')[0]
        fpbx_port = source[2].split(':')[1]
    else:
        fpbx_hostname = source[2]
        fpbx_port = 5432
    fpbx_username = source[3]
    fpbx_password = source[4]
    pbx_domain_list = source[5]
    pbx_attr_list = source[6]
    pbx_type = source[8]
    fpbx_database = 'fusionpbx'

    # Kamailio Database Parameters
    kam_hostname = dest['hostname']
    kam_username = dest['username']
    kam_password = dest['password']
    kam_database = dest['database']

    domain_id_list = []
    attr_id_list = []

    fpbx_conn = None
    fpbx_curs = None
    kam_conn = None
    kam_curs = None

    try:
        # Get a connection to Kamailio Server DB
        kam_conn = MySQLdb.connect(host=kam_hostname, user=kam_username,
                                   passwd=kam_password, db=kam_database)
        kam_curs = kam_conn.cursor()

        # Delete existing domains for the pbx
        pbx_domain_list_str = ''.join(str(e) for e in pbx_domain_list)
        if len(pbx_domain_list_str) > 0:
            query = "delete from domain where id in ({})".format(pbx_domain_list_str)
            kam_curs.execute(query)
            pbx_domain_list = ''

        # Try connecting to the PostgreSQL database, using a trust relationship first
        fpbx_conn = psycopg2.connect(dbname=fpbx_database, user=fpbx_username,
                                     host=fpbx_hostname, port=fpbx_port,
                                     password=fpbx_password)
        if fpbx_conn is not None:
            print("Connection to FusionPBX:{} database was successful".format(fpbx_hostname))
            fpbx_curs = fpbx_conn.cursor()
            fpbx_curs.execute(
                """select domain_name from v_domains where domain_enabled='true'""")
            rows = fpbx_curs.fetchall()
            if rows is not None:
                counter = 0
                domain_name_str = ""
                for row in rows:
                    kam_curs.execute(
                        """insert ignore into domain (id,domain,did,last_modified)
                           values (null,%s,%s,NOW())""", (row[0], row[0]))
                    if kam_curs.rowcount > 0:
                        kam_curs.execute(
                            """SELECT AUTO_INCREMENT FROM INFORMATION_SCHEMA.TABLES
                               WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME IN('domain')
                               ORDER BY FIND_IN_SET(TABLE_NAME, 'domain')""")
                        rows_left = kam_curs.fetchall()
                        domain_id_list.append(str(rows_left[0][0] - 1))

                    # Delete all domain_attrs for the domain first
                    kam_curs.execute("""delete from domain_attrs where did=%s""", [row[0]])
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'pbx_ip',2,%s,NOW())""", (row[0], pbx_host))
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'pbx_type',2,%s,NOW())""", (row[0], pbx_type))
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'created_by',2,%s,NOW())""", (row[0], pbx_id))
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'dispatcher_set_id',2,%s,NOW())""", (row[0], pbx_id))
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'dispatcher_reg_alg',2,%s,NOW())""", (row[0], 4))
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'domain_auth',2,%s,NOW())""", (row[0], 'passthru'))
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'pbx_list',2,%s,NOW())""", (row[0], pbx_id))
                    kam_curs.execute(
                        """insert ignore into domain_attrs (id,did,name,type,value,last_modified)
                           values (null,%s,'description',2,%s,NOW())""", (row[0], 'notes:'))
                    counter = counter + 1
                    domain_name_str += row[0]

                # Convert to a string separated by commas
                domain_id_list = ','.join(domain_id_list)
                if not pbx_domain_list:
                    # if empty string then this is the first set of domains
                    pbx_domain_list = domain_id_list
                else:
                    # adding to an existing list of domains
                    pbx_domain_list = pbx_domain_list + "," + domain_id_list

                print("[sync_db] String of domains: {}".format(domain_name_str))
                # Create Hash of the string
                domain_name_str_hash = hashlib.md5(domain_name_str.encode('utf-8')).hexdigest()
                print("[sync_db] Hashed String of domains: {}".format(domain_name_str_hash))

                kam_curs.execute(
                    """update dsip_multidomain_mapping set domain_list=%s, domain_list_hash=%s,
                       syncstatus=1, lastsync=NOW(), syncerror=''
                       where pbx_id=%s""",
                    (pbx_domain_list, domain_name_str_hash, pbx_id))
                kam_conn.commit()
    except Exception as ex:
        error = str(ex)
        try:
            kam_conn.rollback()
            kam_curs.execute(
                "update dsip_multidomain_mapping set syncstatus=4, lastsync=NOW(), syncerror='{}'"
                .format(error))
            kam_conn.commit()
        except:
            pass
        raise ex
    finally:
        if fpbx_curs is not None:
            fpbx_curs.close()
        if fpbx_conn is not None:
            fpbx_conn.close()
        if kam_curs is not None:
            kam_curs.close()
        if kam_conn is not None:
            kam_conn.close()
from flask import Flask, jsonify
import psycopg2
from psycopg2.extras import RealDictCursor
import json
import datetime
from flask_cors import CORS

conn = psycopg2.connect(
    user="******",
    password="******",
    host="ec2-174-129-33-107.compute-1.amazonaws.com",
    port="5432",
    database="dc4ihr1is83988")
cur = conn.cursor(cursor_factory=RealDictCursor)

def myconverter(o):
    if isinstance(o, datetime.datetime):
        return o.__str__()

## Flask setup
app = Flask(__name__)
CORS(app)

@app.route("/")
def welcome():
    return "Home Page of DB"
#!/usr/bin/env python
import os
import feedparser
import psycopg2
import urlparse

urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])

with psycopg2.connect(database=url.path[1:],
                      user=url.username,
                      password=url.password,
                      host=url.hostname,
                      port=url.port) as dbconnect:
    cur = dbconnect.cursor()

url = (
    'http://www.kulr8.com/category/262588/local?clienttype=rss',
    'http://www.ktvq.com/category/288947/news?clienttype=rss',
    'http://www.kbzk.com/category/289025/news?clienttype=rss',
    'http://www.kxlf.com/category/288921/news?clienttype=rss',
    'http://www.nbcmontana.com/15193794?format=rss_2.0&view=feed',
    'http://www.abcfoxmontana.com/category/262488/rss-feeds?clienttype=rss',
    'http://www.krtv.com/category/288973/news?clienttype=rss',
    'http://www.kxlh.com/category/288908/news?clienttype=rss',
    'http://www.kpax.com/category/288999/news?clienttype=rss',
    'http://www.kfbb.com/category/262741/rss-feeds?clienttype=rss',
    'http://www.ktvh.com/category/news/feed/',
    'http://newstalk955.com/category/local-news-2/feed/',
    'http://newstalkkgvo.com/feed/',
    'http://klyq.com/category/local-news/feed/',
    'http://kmmsam.com/feed/',
    'http://www.belgrade-news.com/search/?q=&t=article&l=25&d=&d1=&d2=&s=start_time&sd=desc&c[]=news*&f=rss',
    else:
        create_table_str = "CREATE TABLE " + table_name + ''' (GISJOIN varchar (50),
            YEAR int, TRACTA int, COUNTY varchar(20), STATE varchar(10),
            Total_Pop float8, PCT_WHITE float8, PCT_BLACK float8, PCT_OTHER float8,
            TOTAL_UNITS float8, PCT_OCCUPIED float8, PCT_VACANT float8,
            PCT_OWN_OCC float8, PCT_RENT_OCC float8, PRIMARY KEY (GISJOIN));'''
        copy_str = "COPY " + table_name + '''(GISJOIN, year, TRACTA, COUNTY, STATE,
            Total_Pop, PCT_WHITE, PCT_BLACK, PCT_OTHER, TOTAL_UNITS,
            PCT_OCCUPIED, PCT_VACANT, PCT_OWN_OCC, PCT_RENT_OCC)'''
        from_str = (" FROM " + "'" + FILE_PATH + str(year) +
                    "_census_data.csv' WITH DELIMITER AS ',' CSV HEADER NULL AS '';")
    return create_table_str, copy_str + from_str

conn = psycopg2.connect(database=DB_NAME, user=DB_USER, password=DB_PASS,
                        host=DB_HOST, port=DB_PORT)
c = conn.cursor()

# read census data and shapefiles into postgres for each year of census data
year_list = [1990, 2000, 2010]
old_year_list = [1940, 1950, 1960, 1970, 1980]
for year in year_list:
    table, copy = census_to_sql(year)
    c.execute(table)
    c.execute(copy)
    shp_read = ("shp2pgsql -s 102003:4326 {}IL_block_{}.shp public.census_{}_shp | psql -d {}"
                .format(FILE_PATH, year, year, DB_NAME))
    os.system(shp_read)
for year in old_year_list:
    table_old, copy_old = old_census_to_sql(year)
from telebot import types
import telebot
import re
import psycopg2
from datetime import datetime as dt
import json
from random import randint
import random
import os
import ast

#-------------------------------------------------------------
API_TOKEN = os.environ['TOKEN']
bot = telebot.TeleBot(API_TOKEN)
#-------------------------------------------------------------
DB = os.environ['DATABASE_URL']
connect = psycopg2.connect(DB)
#-------------------------------------------------------------
path = 'files/metronames.json'
data = []
with open(path) as f:
    for line in f:
        data.append(json.loads(line))
#-------------------------------------------------------------

@bot.message_handler(commands=['start', 'Сменить город'])  # /start command
def start(message):
    kb_start = types.ReplyKeyboardMarkup(resize_keyboard=True)
    btn_a = types.KeyboardButton('Сменить город')
    btn_pic = types.KeyboardButton('Показать пример')
    # btn_lnk  # link to my Instagram
def sync_needed(source, dest):
    # FusionPBX Database Parameters
    pbx_id = source[0]
    pbx_host = source[1]
    if ':' in source[2]:
        fpbx_hostname = source[2].split(':')[0]
        fpbx_port = source[2].split(':')[1]
    else:
        fpbx_hostname = source[2]
        fpbx_port = 5432
    fpbx_username = source[3]
    fpbx_password = source[4]
    pbx_domain_list = source[5]
    pbx_domain_list_hash = source[6]
    pbx_attr_list = source[7]
    pbx_type = source[8]
    fpbx_database = 'fusionpbx'

    # Kamailio Database Parameters
    kam_hostname = dest['hostname']
    kam_username = dest['username']
    kam_password = dest['password']
    kam_database = dest['database']

    domain_id_list = []
    attr_id_list = []
    need_sync = True

    fpbx_conn = None
    fpbx_curs = None
    kam_conn = None
    kam_curs = None

    # Try connecting to the databases
    try:
        # Get a connection to Kamailio Server DB
        kam_conn = MySQLdb.connect(host=kam_hostname, user=kam_username,
                                   passwd=kam_password, db=kam_database)
        kam_curs = kam_conn.cursor()
        if kam_curs is not None:
            print("[sync_needed] Connection to Kamailio DB:{} database was successful"
                  .format(kam_hostname))

        # Get a connection to the FusionPBX Server
        fpbx_conn = psycopg2.connect(dbname=fpbx_database, user=fpbx_username,
                                     host=fpbx_hostname, port=fpbx_port,
                                     password=fpbx_password)
        if fpbx_conn is not None:
            print("[sync_needed] Connection to FusionPBX:{} database was successful"
                  .format(fpbx_hostname))
            fpbx_curs = fpbx_conn.cursor()
            fpbx_curs.execute(
                """select domain_name from v_domains where domain_enabled='true'""")
            rows = fpbx_curs.fetchall()
            if rows is not None:
                # Build a string that contains all of the domains
                domain_name_str = ""
                for row in rows:
                    domain_name_str += row[0]
                print("[sync_needed] String of domains: {}".format(domain_name_str))
                # Create Hash of the string
                domain_name_str_hash = hashlib.md5(
                    domain_name_str.encode('utf-8')).hexdigest()
                print("[sync_needed] Hashed String of domains: {}".format(domain_name_str_hash))
                if domain_name_str_hash == pbx_domain_list_hash:
                    # Sync not needed. Update syncstatus=2 to denote that no
                    # domain change was detected.
                    kam_curs.execute(
                        """update dsip_multidomain_mapping set syncstatus=2, lastsync=NOW()""")
                    kam_conn.commit()
                    need_sync = False
            else:
                # No domains yet, so no need to sync
                kam_curs.execute(
                    """update dsip_multidomain_mapping set syncstatus=3, lastsync=NOW()""")
                kam_conn.commit()
                need_sync = False
        return need_sync
    except Exception as e:
        error = str(e)
        print(error)
        try:
            kam_conn.rollback()
            kam_curs.execute(
                "update dsip_multidomain_mapping set syncstatus=4, lastsync=NOW(), syncerror='{}'"
                .format(error))
            kam_conn.commit()
        except:
            pass
    finally:
        if fpbx_curs is not None:
            fpbx_curs.close()
        if fpbx_conn is not None:
            fpbx_conn.close()
        if kam_curs is not None:
            kam_curs.close()
        if kam_conn is not None:
            kam_conn.close()
cur.execute("INSERT INTO question (question, answers, answer_index, answer_descriptions, overall_description) VALUES (%s, %s, %s, %s, %s)", (q3, [a31, a32, a33], i3, [e31, e32, e33], e3)) cur.execute("INSERT INTO question (question, answers, answer_index, answer_descriptions, overall_description) VALUES (%s, %s, %s, %s, %s)", (q4, [a41, a42, a43], i4, [e41, e42, e43], e4)) cur.execute("INSERT INTO question (question, answers, answer_index, answer_descriptions, overall_description) VALUES (%s, %s, %s, %s, %s)", (q5, [a51, a52, a53], i5, [e51, e52, e53], e5)) cur.execute("INSERT INTO question (question, answers, answer_index, answer_descriptions, overall_description) VALUES (%s, %s, %s, %s, %s)", (q6, [a61, a62, a63], i6, [e61, e62, e63], e6)) cur.execute("INSERT INTO question (question, answers, answer_index, answer_descriptions, overall_description) VALUES (%s, %s, %s, %s, %s)", (q7, [a71, a72, a73], i7, [e71, e72, e73], e7)) return {"q1": {"text": q1, "answers": [a11, a12, a13], "explanations": [e11, e12, e13], "explanation": e1, 'answer_index': i1}, "q2": {"text": q2, "answers": [a21, a22, a23], "explanations": [e21, e22, e23], "explanation": e2, 'answer_index': i2}, "q3": {"text": q3, "answers": [a31, a32, a33], "explanations": [e31, e32, e33], "explanation": e3, 'answer_index': i3}, "q4": {"text": q4, "answers": [a41, a42, a43], "explanations": [e41, e42, e43], "explanation": e4, 'answer_index': i4}, "q5": {"text": q5, "answers": [a51, a52, a53], "explanations": [e51, e52, e53], "explanation": e5, 'answer_index': i5}, "q6": {"text": q6, "answers": [a61, a62, a63], "explanations": [e61, e62, e63], "explanation": e6, 'answer_index': i6}, "q7": {"text": q7, "answers": [a71, a72, a73], "explanations": [e71, e72, e73], "explanation": e7, 'answer_index': i7}} # connect to the database manually if run as main, else use the given connection if __name__ == "__main__": try: url = urlparse(os.environ["DATABASE_URL"]) except: url = urlparse("postgres://*****:*****@ec2-54-243-239-66.compute-1.amazonaws.com:5432/den0hekga678pn") db_conn = psycopg2.connect( database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port, cursor_factory=psycopg2.extras.RealDictCursor ) cur = db_conn.cursor() generate(cur) db_conn.commit()
import os
import sys

import psycopg2

current_working_directory = os.getcwd()
# print('current working directory: {}'.format(current_working_directory))

# import the settings.py file to get database information
sys.path.append(current_working_directory)
import settings

# agent & DB interface parameters
db_host = settings.DATABASES['default']['HOST']
db_port = settings.DATABASES['default']['PORT']
db_database = settings.DATABASES['default']['NAME']
db_user = settings.DATABASES['default']['USER']
db_password = settings.DATABASES['default']['PASSWORD']
db_table_application_registered = settings.DATABASES['default']['TABLE_application_registered']

try:
    con = psycopg2.connect(host=db_host, port=db_port, database=db_database,
                           user=db_user, password=db_password)
    cur = con.cursor()  # open a cursor to perform database operations
    print("APP Installer >> connects to the database name {} successfully".format(db_database))
except psycopg2.OperationalError:
    print("APP Installer >> ERROR: {} fails to connect to the database name {}".format(app_name, db_database))
    sys.exit(1)  # without a connection, the cursor below would be undefined

cur.execute("SELECT executable FROM " + db_table_application_registered + " WHERE app_name=%s", (app_name,))
if cur.rowcount != 0:
    # app has already been installed and registered
    print("APP Installer >> the APP name {} exists, this process will re-install the APP".format(app_name))
    cur.execute("DELETE FROM " + db_table_application_registered + " WHERE app_name=%s", (app_name,))
else:
    # go ahead and add this app to the database
    pass

# print(app_folder)
print('APP Installer >> installing APP name {}, in folder {}, with exec {} ...'.format(app_name, app_folder, app_exec))
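# The queries above splice the table name into the SQL string by concatenation.
# That is workable for a trusted settings value, but psycopg2 ships a
# composition API that quotes identifiers safely. A minimal sketch of the same
# SELECT; find_executable is a hypothetical helper, not from the original:
from psycopg2 import sql

def find_executable(cur, table, app_name):
    query = sql.SQL("SELECT executable FROM {} WHERE app_name = %s").format(
        sql.Identifier(table))  # identifier is quoted; the value stays a parameter
    cur.execute(query, (app_name,))
    return cur.fetchone()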
import psycopg2
import time
import random
from sklearn import tree

# establish the connection to the database
conn = psycopg2.connect(database="CSI4142", user='******', password='******',
                        host='lileyao1998.synology.me', port='15432')
cursor = conn.cursor()

### Load dataset
cursor.execute(
    '''SELECT m.grocery_pharmacy, m.parks, m.transit_stations,
              m.retail_recreation, m.residential, m.workplaces,
              CASE s.keyword1
                  WHEN 'Protect' THEN 25
                  WHEN 'Restrict' THEN 50
                  WHEN 'Control' THEN 75
                  WHEN 'Stay-at-home' THEN 100
                  ELSE 0
              END,
              w.daily_high_temperature, w.daily_low_temperature,
              CASE c.gender WHEN 'M' THEN 1 ELSE 0 END,
              c.age as label
       FROM covid_fact_table f, mobility m, specialmeasures s, weather w, positivecase c
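# A sketch of how rows shaped like the SELECT above would feed the imported
# sklearn tree module: every column but the last is a numeric feature (the CASE
# expressions already encode the categorical values) and the trailing c.age
# column is the label. The query's join conditions are cut off above, so this
# assumes it completes and returns such rows.
from sklearn import tree

def fit_tree(cursor):
    rows = cursor.fetchall()
    X = [row[:-1] for row in rows]   # mobility, measures, weather, gender
    y = [row[-1] for row in rows]    # age label
    clf = tree.DecisionTreeClassifier()
    clf.fit(X, y)
    return clf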
#!/usr/bin/python
# -*- coding: utf-8 -*-

import psycopg2
import sys

con = None
try:
    con = psycopg2.connect(
        "host='localhost' dbname='postgres' user='******' password='******'")
    cur = con.cursor()
    # cur.execute("CREATE TABLE Ups(Id INTEGER PRIMARY KEY, Name VARCHAR(20), Price INT)")
    cur.execute(
        "INSERT INTO \"UPS_ups\" VALUES( DEFAULT, 1, 1, 'food', 'no description', 5, 1,1,1, 'open', 'ncnc12345')")
    # cur.execute("INSERT INTO Products VALUES(2,'Sugar',7)")
    # cur.execute("INSERT INTO Products VALUES(3,'Coffee',3)")
    # cur.execute("INSERT INTO Products VALUES(4,'Bread',5)")
    # cur.execute("INSERT INTO Products VALUES(5,'Oranges',3)")
    con.commit()
except psycopg2.DatabaseError as e:
    if con:
        con.rollback()
    print('Error %s' % e)
    sys.exit(1)
finally:
    if con:
        con.close()
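# The same commit-or-rollback dance using psycopg2's context-manager support:
# `with connection:` wraps a transaction (commit on success, rollback on
# exception) and `with connection.cursor()` closes the cursor. Note that the
# with block does NOT close the connection itself, hence the finally. A sketch
# reusing the masked connection string from above:
import psycopg2

con = psycopg2.connect("host='localhost' dbname='postgres' user='******' password='******'")
try:
    with con:
        with con.cursor() as cur:
            cur.execute("SELECT 1")  # any statement; rolled back on exception
finally:
    con.close()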
    return _cursor.fetchall()


def get_last_irrigation():
    # Just test getting the last as we don't have irrigation yet
    _cursor.execute("SELECT MAX(timestamp) FROM " + table_name + " WHERE irrigation = false;")
    return _cursor.fetchall()


def close_connection():
    _cursor.close()
    _connection.close()


dbname = os.environ["DATABASE_NAME"]
user = os.environ["DATABASE_USER"]
password = os.environ["DATABASE_PASSWORD"]
host = os.environ["DATABASE_HOST"]
port = os.environ["DATABASE_PORT"]

# private module-level connection shared by the functions above
_connection = psycopg2.connect(
    dbname=dbname,
    user=user,
    password=password,
    host=host,
    port=port
)
_cursor = _connection.cursor()
table_name = 'plant_properties'

_create_table()
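# How a caller would use this module-level connection; plant_db is an assumed
# module name for the file above (the snippet is cut off and _create_table is
# defined elsewhere in it):
import plant_db

last = plant_db.get_last_irrigation()  # [(None,)] until an irrigation row exists
print(last)
plant_db.close_connection()  # release the module-wide cursor and connection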
from flask import Flask, render_template, request
import requests
# import cgi, cgitb
import json
import APICalls as tmdb
import psycopg2 as db

try:
    connection = db.connect(user="******",
                            password="******",
                            host="127.0.0.1",
                            port="5432",
                            database="postgres")
    cursor = connection.cursor()
    print(connection.get_dsn_parameters(), "\n")

    # Print PostgreSQL version
    cursor.execute("SELECT version();")
    record = cursor.fetchone()
    print("You are connected to - ", record, "\n")
    '''
    cursor.execute("select * from userdata;")
    record = cursor.fetchone()
    print("Records : ", record, "\n")
    record = cursor.fetchone()
    print("Records : ", record, "\n")
    '''
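# Besides SELECT version(), psycopg2 exposes the server version directly on
# the connection object as an integer (e.g. 120005 for PostgreSQL 12.5), which
# is easier to compare programmatically. A sketch reusing the masked
# connection parameters from above:
import psycopg2

conn = psycopg2.connect(user="******", password="******",
                        host="127.0.0.1", port="5432", database="postgres")
print(conn.server_version)          # e.g. 120005
if conn.server_version >= 100000:   # PostgreSQL 10.x or newer
    print("declarative partitioning available")
conn.close()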
import sys

import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

conn = psycopg2.connect(database="tcount", user="******", password="******",
                        host="localhost", port="5432")
cur = conn.cursor()

nums = sys.argv[1].split(",")
# Pass the bounds as query parameters instead of %-formatting them into the
# SQL string, which left the query open to injection from argv.
cur.execute("SELECT word, count FROM tweetwordcount WHERE count >= %s AND count <= %s",
            (int(nums[0]), int(nums[1])))
records = cur.fetchall()

if len(records) == 0:
    sys.exit(1)

for word, count in records:
    print("%s: %d" % (word, count))
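# ISOLATION_LEVEL_AUTOCOMMIT is imported above but never applied. This is how
# it is normally used: each statement commits immediately, which is required
# for commands like CREATE DATABASE that cannot run inside a transaction.
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

conn = psycopg2.connect(database="tcount", user="******", password="******",
                        host="localhost", port="5432")
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute("SELECT 1")  # no explicit conn.commit() needed in this mode
conn.close()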
#################################################################################################
import luigi
import psycopg2

dbname = 'dev'
port = ''
user = ''
password = ''
aws_access_key_id = ''
aws_secret_access_key = ''
host = 'redshift-cluster-1.******.us-west-2.redshift.amazonaws.com'
# IAM role credentials string for COPY/UNLOAD (myRedshiftRole);
# not actually a host, despite the variable name
host1 = 'aws_iam_role=arn:aws:iam::123456789:role/RedshiftCopyUnload'

# Amazon Redshift connect string
conn_string = "dbname='dev' port='' user='' password='' " \
              "host='training007.*******.us-west-2.redshift.amazonaws.com'"
con = psycopg2.connect(conn_string)

to_table = 'tweets_{}_{}_{}_{}'.format(hastag, Year, Month, Day)
print('Created table name is %s' % to_table)
fn = 's3://kaggletest1/{}/{}/{}/{}/weekly_tracks.tsv'.format(hastag, Year, Month, Day)
# fn = 's3://kaggletest1/weekly_tracks.tsv'
delim = '|'

sql0 = """DROP TABLE IF EXISTS %s""" % (to_table)
sql1 = """CREATE table %s(
              USERID numeric(38,0),
              TWEETTIME timestamp,
              TWEET text);""" % (to_table)
sql2 = """COPY %s FROM '%s' credentials '%s' delimiter '%s' region 'us-west-2';""" % (
    to_table, fn, host1, delim)
################################################################################################


class connect2redshift(luigi.Task):
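# A sketch of running the three statements built above in order against the
# open Redshift connection (the luigi task body is cut off, so this stands
# apart from it). Redshift's COPY pulls the S3 file server-side, so the client
# only issues SQL and commits:
cur = con.cursor()
for stmt in (sql0, sql1, sql2):  # drop, create, then bulk-load from S3
    cur.execute(stmt)
con.commit()
cur.close()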