def connect(self):
    # print(g_dionaea.config()['modules']['python']['surfids'])
    self.dbh = pg_driver.connect(
        user=g_dionaea.config()['modules']['python']['surfids']['username'],
        password=g_dionaea.config()['modules']['python']['surfids']['password'],
        database=g_dionaea.config()['modules']['python']['surfids']['dbname'],
        host=g_dionaea.config()['modules']['python']['surfids']['host'],
        port=g_dionaea.config()['modules']['python']['surfids']['port'])
    self.stmt_sensor_type = self.dbh.prepare(
        "SELECT surfids3_type_from_name('dionaea')")
    self.sensor_type = self.stmt_sensor_type()[0][0]
    logger.debug("surfids sensor type %i" % self.sensor_type)
    self.stmt_attack_add = self.dbh.prepare(
        "SELECT surfids3_attack_add($1, $2::text::inet, $3, $4::text::inet, $5, $6, $7)")
    self.stmt_detail_add = self.dbh.prepare(
        "SELECT surfids3_detail_add($1, $2::text::inet, $3, $4)")
    self.stmt_detail_add_offer = self.dbh.prepare(
        "SELECT surfids3_detail_add_offer($1::text::inet, $2::text::inet, $3, $4)")
    self.stmt_detail_add_download = self.dbh.prepare(
        "SELECT surfids3_detail_add_download($1::text::inet, $2::text::inet, $3, $4, $5)")
    self.stmt_attack_update_severity = self.dbh.prepare(
        "SELECT surfids3_attack_update_severity($1, $2)")
def setup_db():
    db = pg_driver.connect(user='******', password='******',
                           host='localhost', port=5432, database='face')
    db.execute("create extension if not exists cube;")
    db.execute("drop table if exists vectors")
    db.execute("create table vectors (id serial, file varchar, vec_low cube, vec_high cube);")
    db.execute("create index vectors_vec_idx on vectors (vec_low, vec_high);")
def __init__(self, host, port, database, user, password):
    try:
        self.db = pg_driver.connect(
            user=user,
            password=password,
            host=host,
            port=port,
            database=database
        )
    except Exception as e:
        print(e)
def __enter__(self):
    self._db = pg_driver.connect(user=_config.pgUser,
                                 password=_config.pgPassword,
                                 host=_config.pgHost,
                                 port=_config.pgPort,
                                 database=_config.pgDB)
    self._installations_query = self._db.prepare(
        "SELECT * FROM GIOS.ALL_COMPARABLE_INSTALLATIONS "
        "WHERE STATION = $1 AND TIMESTAMP = $2")
    self._installations_batch = self._db.prepare(
        "SELECT * FROM GIOS.ALL_COMPARABLE_INSTALLATIONS "
        "WHERE STATION = $1 AND TIMESTAMP = ANY($2)")
    return self
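# Illustrative usage sketch only (not part of the original snippet): py-postgresql
# prepared statements are callable, so the objects created in __enter__ above would
# typically be invoked like this. The context-manager target name and the argument
# names below are hypothetical.
#
#   with InstallationsSource() as src:
#       row = src._installations_query(station, timestamp)
#       rows = src._installations_batch(station, [ts1, ts2, ts3])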
def startup_tests():
    # Check sys.argv for id_to_album
    credentials = getCreds()
    try:
        db = pg_driver.connect(user=credentials['db_user'],
                               password=credentials['db_password'],
                               host='localhost',
                               port=5432,
                               database=credentials['db_name'])
    except Exception as e:
        print("Error: cannot connect to database\n")
        print(e, file=sys.stderr)
        exit(1)
    print("Zarvox database is online")
    return db
def get_ticker_list(exchange):
    try:
        db = pg_driver.connect(user=username, password=passwd, host=server,
                               port=portval, database=dbname)
        ps = db.prepare(
            'INSERT INTO google_finance.tickers (ticker, exchange, company_name) VALUES($1, $2, $3)')
        # Try to select all NASDAQ company data from a single page
        path = 'http://www.google.com/finance?q=[%28exchange+%3D%3D+%22{:s}%22%29]&restype=company&noIL=1&num=20000&ei=ul-rVYCFKI2cUvy-ibgC' \
            .format(exchange)
        # urllib.urlopen is Python 2 only; urllib.request.urlopen is assumed here
        # (requires 'import urllib.request' at module level)
        content = urllib.request.urlopen(path).read().decode('utf-8')
        doc = document_fromstring(content)
        for div in doc.cssselect('table'):
            if div.get('class') == 'gf-table company_results':
                for tr in div.cssselect('tr'):
                    if tr.get('class') == 'snippet':
                        ticker = []
                        company = []
                        exchange = []
                        for td in tr.cssselect('td'):
                            if td.get('class') == 'localName nwp':
                                company = td.text_content().strip()
                            elif td.get('class') == 'exch':
                                exchange = td.text_content().strip()
                            elif td.get('class') == 'symbol':
                                ticker = td.text_content().strip()
                        if ticker != []:
                            try:
                                print('Inserting values ({:s}, {:s}, {:s})'.format(
                                    ticker, exchange, company))
                                ps(ticker, exchange, company)
                            except Exception as e:
                                print("Data for ticker %s could not be inserted, reason: %s"
                                      % (ticker, format(e)))
        db.close()
    except exceptions.UniqueError as e:
        pass
    except exceptions.Exception as e:
        print("Connection could not be established")
def __init__(self):
    table_exist = False
    try:
        self.db = pg_driver.connect(user='******', password='******',
                                    host='localhost', port=5432)
        for table_name, in self.db.prepare(
                "SELECT table_name FROM information_schema.tables"):
            print(table_name)
            if table_name == "fuel_consumption":
                table_exist = True
                break
        if not table_exist:
            self.db.execute(
                "CREATE TABLE FUEL_CONSUMPTION (id SERIAL PRIMARY KEY, date DATE, length FLOAT, coef FLOAT)")
            print("TABLE CREATED")
        self.insert = self.db.prepare(
            "INSERT INTO FUEL_CONSUMPTION (date, length, coef) VALUES ($1, $2, $3)")
        self.select_all = self.db.prepare("SELECT * FROM FUEL_CONSUMPTION")
        self.sum_length = self.db.prepare(
            "SELECT SUM(length) FROM FUEL_CONSUMPTION")
        self.sum_coef = self.db.prepare(
            "SELECT SUM(coef) FROM FUEL_CONSUMPTION")
        self.period = self.db.prepare(
            "SELECT * FROM FUEL_CONSUMPTION WHERE date > $1 AND date < $2")
        self.sum_period_length = self.db.prepare(
            "SELECT SUM(length) FROM FUEL_CONSUMPTION WHERE date > $1 AND date < $2")
        self.sum_period_coef = self.db.prepare(
            "SELECT SUM(coef) FROM FUEL_CONSUMPTION WHERE date > $1 AND date < $2")
        self.date = self.db.prepare(
            "SELECT * FROM FUEL_CONSUMPTION WHERE date = $1")
    except:
        print("no db connection")
        exit()
def load_ticker_list(exchange, db):
    try:
        if not db:
            db = pg_driver.connect(user=username, password=passwd, host=server,
                                   port=portval, database=dbname)
        ps = db.prepare(
            "SELECT ticker FROM google_finance.tickers where exchange='%s'" % (exchange))
        tickers = [r[0] for r in ps()]
        ps_parsed = db.prepare("SELECT ticker FROM google_finance.parsed")
        parsed = [r[0] for r in ps_parsed()]
        unchecked = [x for x in tickers if x not in set(parsed)]
        return unchecked
    except exceptions.Exception as e:
        print("Connection could not be established, error: %s" % (format(e)))
        return None
import re
import random
import logging
import socket
import base64
from postgresql import driver
import postgresql
import json

logging.basicConfig(level=logging.DEBUG)

# Crawler for Credit Guangxi (信用广西) detail pages; newly added fifteen-digit
# registration codes.
DB = driver.connect(host='106.75.145.80',
                    user='******',
                    password='******',
                    database='cra1',
                    port=9988)


# Reconnect when the database connection has been dropped
def conn_sql():
    global DB
    if DB.closed:
        DB = postgresql.driver.connect(host='106.75.145.80',
                                       user='******',
                                       password='******',
                                       database='cra1',
                                       port=9988)
    return DB
parser.add_option("--port", action="store", dest="port", help="port for database") (options, args) = parser.parse_args() validateOption("database", options.database) validateOption("schema", options.schema) validateOption("user", options.user) validateOption("password", options.password) validateOption("host", options.host) validateOption("port", options.port) db = pg_driver.connect( \ user = options.user, \ password = options.password, \ host = options.host, \ database = options.database, \ port = options.port \ ) statement = "DELETE from dba.tbl_ro_to_anzsrcfor" print(statement) db.execute(statement) counter = constructCounter() unresolvedFile = openFile("Unresolved.txt", 'w') insertSelectFile = openFile("InsertRegistryObjectDivision.txt", 'w') insertSelectFile.write("INSERT INTO dba.tbl_ro_to_anzsrcfor (ro_to_anzsrcfor_id, registry_object_key, anzsrcfor_division, anzsrcfor_value) VALUES\n") statement = ("SELECT registry_object_key, value from %s.vm_collections_with_anzsrc_for" % options.schema)
def get_connection():
    return pg_driver.connect(**PgConfig.__config__)
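# Illustrative sketch only (not from the original source): get_connection() above
# assumes PgConfig.__config__ is a mapping of py-postgresql connect() keyword
# arguments; the values shown here are hypothetical placeholders.
#
#   class PgConfig:
#       __config__ = {'user': 'app', 'password': 'secret', 'host': 'localhost',
#                     'port': 5432, 'database': 'app_db'}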
import postgresql.driver as pg_driver
import datetime
from EltechAssistant import Prettyfier

db = pg_driver.connect(user='******', password='', host='localhost',
                       database='postgres', port=5432)
login = '******'
password = '******'


class FindInDataBase:
    @staticmethod
    def access(text):
        data = db.query("SELECT name FROM public.students WHERE telegramid LIKE "
                        + "'" + str(text) + "'")
        return data if data else 0

    @staticmethod
    def write_telegramid(phone_number, telegramid):
        db.query("UPDATE Students SET TelegramID = " + str(telegramid) +
                 " WHERE students.phonenumber LIKE " + "'" + str(phone_number) + "'")
        return 0

    @staticmethod
    def find_student_in_group_list(text):
        data = db.query(
import postgresql.exceptions
import postgresql.driver as pg_driver
import const

user = const.user
password = const.password
host = const.host
db_family = const.db_family
db_measurements = const.db_measurements

try:
    db = pg_driver.connect(user=user, password=password, host=host,
                           port=5432, database=db_measurements)
    print("Connected to postgres successfully!")
except postgresql.exceptions.ClientCannotConnectError:
    print('Cannot connect! Check your internet connection and psql server status.')
    exit(-1)

for i in range(8):
    psdrop = db.prepare('drop table n000' + str(i) + ';')
    print(psdrop())
    pscreate = db.prepare('create table n000' + str(i) + '(time integer,temperature float);')
    print(pscreate())
def __init__(self):
    self.conn = pg_driver.connect(**CREDENTIALS)
def application(environment, start_response):
    import os, sys, cgi, csv
    import postgresql.driver as pg_driver
    try:
        db = pg_driver.connect(host='localhost', user='******', password='******',
                               database='omnivore', port=5432)
    except:
        page = """Can not access database"""
    from webob import Request, Response
    #request = Request(environ)
    #params = request.params
    #post = request.POST
    from http.cookies import SimpleCookie
    cookie = SimpleCookie()
    login_form = """
    <!doctype html>
    <html>
    <head>
    <title> Login </title>
    <script src="../bootstrap/js/bootstrap-3.3.4.min.js"></script>
    <link rel="stylesheet" href="../bootstrap/v3/css/bootstrap-3.3.4.min.css">
    </head>
    <body>
    <h1>Google QC Report</h1>
    <p>Ban chua dang nhap dung username hoac password. Ban can phai dang nhap lai</p>
    <div class="modal-dialog">
      <div class="modal-content">
        <div class="modal-header">
          <h4 class="modal-title">Login</h4>
        </div>
        <div class="modal-body">
          <form class="form-horizontal" role="form" action = '../omnivore/demo/login3.py' method='post'>
            <div class="form-group">
              <label for="inputEmail1" class="col-lg-4 control-label">User name</label>
              <div class="col-lg-5">
                <input type="text" class="form-control" id="inputuser1" name='username' placeholder="user name">
              </div>
            </div>
            <div class="form-group">
              <label for="inputPassword1" class="col-lg-4 control-label">Password</label>
              <div class="col-lg-5">
                <input type="password" class="form-control" id="inputPassword1" name='password' placeholder="Password">
              </div>
            </div>
            <div class="form-group">
              <div class="col-lg-offset-4 col-lg-5">
                <div class="checkbox">
                  <label>
                    <input type="checkbox"> Remember me
                  </label>
                </div>
              </div>
            </div>
            <div class="form-group">
              <div class="col-lg-offset-4 col-lg-5">
                <button type="submit" class="btn btn-default">Sign in</button>
              </div>
            </div>
          </form>
        </div>
        <div class="modal-footer">
        </div>
      </div><!-- /.modal-content -->
    </div><!-- /.modal-dialog -->
    </body>
    </html>"""
    if not 'HTTP_COOKIE' in environment:
        page = login_form
    else:
        cookie.load(environment['HTTP_COOKIE'])
        if not 'username' in cookie:
            page = login_form
        else:
            username = cookie['username'].value
            password = cookie['password'].value
            # NOTE: the two '%s' placeholders are assumed; they were masked in the source
            ps = db.prepare(
                """Select username,account_password,account_level,gmail from account
                   where username= '%s' and account_password = '%s' """
                % (username.replace("'", "''"), password.replace("'", "''")))
            if len(ps()) == 0:
                page = login_form
            else:
                if ps()[0][2] == 2:
                    page = ""
                    if environment['REQUEST_METHOD'] == 'POST':
                        # cgi.FieldStorage takes the WSGI environ via the 'environ' keyword
                        post = cgi.FieldStorage(fp=environment['wsgi.input'],
                                                environ=environment,
                                                keep_blank_values=True)
                        table = post.getvalue('table')
                        page1 = ''
                        if 'file' in post:
                            filefield = post['file']
                            if not isinstance(filefield, list):
                                filefield = [filefield]
                            for fileitem in filefield:
                                # #account = request.headers["account"]
                                # #time = request.headers["time"]
                                if fileitem.filename:
                                    # strip leading path from file name to avoid directory traversal attacks
                                    fn = os.path.basename(fileitem.filename)
                                    open('/tmp/file_upload/' + fn, 'wb').write(fileitem.file.read())
                                    #page += 'File was uploaded %s roi sao %s'%(fn,table)
                                    from datetime import datetime
                                    with open('/tmp/file_upload/' + fn, 'r', encoding='utf-8') as csvfile:
                                        spamreader = list(csv.reader(csvfile, delimiter=';', quotechar='"'))[1][8]
                                        years = int(datetime.strptime(spamreader, '%a %b %d %Y %H:%M:%S').year)
                                        months = int(datetime.strptime(spamreader, '%a %b %d %Y %H:%M:%S').month)
                                        days = int(datetime.strptime(spamreader, '%a %b %d %Y %H:%M:%S').day)
                                    db.execute(
                                        """create table if not exists omnivore_%s_%s_%s (
                                           id serial8 primary key, no text,agent text, module text,domain text,title text,link text,
                                           quest text, answer text, time timestamp, process_time numeric,img_id text,
                                           update_time timestamp default now() )""" % (years, months, days))
                                    db = pg_driver.connect(host='localhost', user='******', password='******',
                                                           database='omnivore', port=5432)
                                    try:
                                        db.execute(
                                            """copy omnivore_%s_%s_%s(no,agent, module,domain,title,link, quest, answer, time, process_time,img_id) from '/tmp/file_upload/%s' delimiter ';' CSV HEADER escape '\\' quote '"' """
                                            % (years, months, days, fn))
                                        #db.execute("""update qc_project_csv%s%s set status =null ,price =null,currency=null,condition=null,availability=null where status ='' or price ='' or currency='' or condition ='' or availability=''"""%(year,month))
                                        page1 += 'The file "' + fn + '" was uploaded and import successfully! <br />'
                                        #db.execute("""insert into log_import_csv(filename,agent) values('%s','%s')"""%(fn,username))
                                        #page += "Upload file sucessfull"
                                        # # delete the file that was just uploaded
                                        # try:
                                        #     os.remove('c:/wsgi_app/file_upload/' + fn)
                                        # except OSError:
                                        #     pass
                                    except IOError as err:
                                        page1 += "I/O error: {0}".format(err)
                                    except ValueError:
                                        page1 += "Could not import data file csv to database"
                                        raise
                            page += """
                            <html>
                            <head>
                            <title>IU Webmaster redirect</title>
                            <META http-equiv="refresh" content="25;URL=upload_files_monivore_g">
                            </head>
                            <body bgcolor="#ffffff">
                            <center>%s You will be redirected to the new location automatically in 25 seconds.
                            If you can not wait , click <a href="upload_files_monivore_g"> Upload file csv</a>
                            </center>
                            </body>
                            </html>
                            """ % page1
                            cur.execute(
                                "update omnivore_%s_%s_%s set answer='' where answer='unselected'"
                                % (years, months, days))
                    else:
                        page = u"""
                        <html>
                        <head><title>Upload</title></head>
                        <body>
                        <h1>Upload file csv cua bo dem beta Q . Khong can dat ten bang </h1>
                        <p>
                        <form name="test" method="post" action="" enctype="multipart/form-data">
                        Chon file (co the chon nhieu file mot luc): <input type="file" name="file" multiple/><br />
                        <!--Table :<input type="text" name="table" value='' required/> (om_2015_5_7 : bang csv 2015-5-7 , vi du minh hoa, ban co the lay ten bang bat ky) <br />
                        --><input type="submit" name="submit" value="Submit" />
                        </form>
                        <p>Note: files with the same name with overwrite any existing files.</p
                        <h1> Upload file csv cho bo dem beta G tro len <a href='upload_files_monivore_g.py'>Tai day</a></h1>
                        </body>
                        </html>
                        """
                else:
                    page = login_form
    db.close()
    response = Response(body=page, content_type="text/html", charset="utf8", status="200 OK")
    return response(environment, start_response)
    return st


def solve(s):
    """Choose a visitor"""
    v = visitorType
    st = ""
    c = 0
    for i in v.nameList:
        st += i + ': '
        st += switchGenre(s, c) + '\r\n'
        c += 1
    return st


db = pg_driver.connect(user='******', password='******', host='localhost',
                       database='postgres', port=5432)


def importDB():
    """Import the visitors and music DB"""
    for name, danceType in db.prepare("SELECT * FROM visitors;"):
        visitor = visitorType(name, replaceMusic(danceType))
        visitor.addName()
        visitor.addDanceType()
    for nameSong, genre in db.prepare("SELECT * FROM playlist;"):
        song = playlist(nameSong, genre)
        song.addName()
        song.addGenreList()


importDB()
dest="port", help="port for database") (options, args) = parser.parse_args() validateOption("database", options.database) validateOption("schema", options.schema) validateOption("user", options.user) validateOption("password", options.password) validateOption("host", options.host) validateOption("port", options.port) db = pg_driver.connect( \ user = options.user, \ password = options.password, \ host = options.host, \ database = options.database, \ port = options.port \ ) statement = "DELETE from dba.tbl_ro_to_anzsrcfor" print(statement) db.execute(statement) counter = constructCounter() unresolvedFile = openFile("Unresolved.txt", 'w') insertSelectFile = openFile("InsertRegistryObjectDivision.txt", 'w') insertSelectFile.write( "INSERT INTO dba.tbl_ro_to_anzsrcfor (ro_to_anzsrcfor_id, registry_object_key, anzsrcfor_division, anzsrcfor_value) VALUES\n" )
p.add_option('-p', '--database-password',
             dest='database_password',
             help='the database users password',
             type="string",
             action="store")
p.add_option('-f', '--sqlite-file',
             dest='sqlite_file',
             help='path to sqlite db',
             type="string",
             action="store")
(options, args) = p.parse_args()

if len(args) == 0:
    print("use {} as args".format(' '.join(cando.keys())))

db = {}
db['sqlite'] = {}
db['sqlite']['dbh'] = sqlite3.connect(options.sqlite_file)
db['sqlite']['cursor'] = db['sqlite']['dbh'].cursor()
db['pg'] = {}
db['pg']['dbh'] = pg_driver.connect(
    user=options.database_user,
    password=options.database_password,
    database=options.database,
    host=options.database_host,
    port=5432)

for i in args:
    if i in cando:
        copy(i, db['sqlite']['cursor'], db['pg']['dbh'], cando[i][0], cando[i][1])
#        db['pg']['dbh'].commit()
def connect_to_db(self):
    """Connects to the database"""
    if self.db_type == DB_MYSQL:
        try:
            import pymysql
        except ImportError as _err:
            # TODO: Add python3-mysql when available or consider mysql-connector when available for python3
            raise Exception(import_error_to_help(_module="pymysql", _err_obj=_err,
                                                 _pip_package="pymysql3",
                                                 _apt_package=None,
                                                 _win_package=None))
        _connection = pymysql.connect(host=self.server,
                                      db=self.databasename,
                                      user=self.username,
                                      passwd=self.password)
    elif self.db_type == DB_POSTGRESQL:
        try:
            import postgresql.driver as pg_driver
        except ImportError as _err:
            raise Exception(import_error_to_help(_module="postgresql", _err_obj=_err,
                                                 _pip_package="py-postgresql",
                                                 _apt_package="python3-postgresql",
                                                 _win_package=None,
                                                 _import_comment="2014-04-16: If using apt-get on Debian, " +
                                                                 "check so version is > 1.0.3-2" +
                                                                 " as there is a severe bug in the 1.0.2 version. " +
                                                                 "See https://bugs.debian.org/cgi-bin/bugreport" +
                                                                 ".cgi?bug=724597"))
        if self.port is None or self.port == "" or self.port == 0:
            _port = 5432
        else:
            _port = self.port
        _connection = pg_driver.connect(host=self.server,
                                        database=self.databasename,
                                        user=self.username,
                                        password=self.password,
                                        port=_port)
    elif self.db_type in [DB_SQLSERVER, DB_DB2]:
        _connection_string = None
        try:
            import pyodbc
        except ImportError as _err:
            raise Exception(import_error_to_help(_module="pyodbc", _err_obj=_err,
                                                 _pip_package="pyodbc",
                                                 _apt_package=None,
                                                 _win_package=None,
                                                 _import_comment="(For Linux) 2014-04-16: " +
                                                                 "No apt package (python3-pyodbc)" +
                                                                 " available at this time."))
        import platform
        if self.db_type == DB_SQLSERVER:
            if platform.system().lower() in ["linux", "darwin"]:
                # TODO: Set a reasonable timeout
                _connection_string = "DRIVER={FreeTDS};SERVER=" + self.server + ";DATABASE=" + \
                                     self.databasename + ";TDS_VERSION=8.0;UID=" + self.username + ";PWD=" + \
                                     self.password + ";PORT=" + str(self.port) + ";Trusted_Connection=no;"
            elif platform.system().lower() == 'windows':
                _connection_string = "Driver={SQL Server};Server=" + self.server + ";DATABASE=" + \
                                     self.databasename + ";UID=" + self.username + ";PWD=" + self.password + \
                                     ";PORT=" + str(self.port) + ";Trusted_Connection=no"
            else:
                raise Exception("connect_to_db: SQL Server connections on " + platform.system() +
                                " not supported yet.")
        elif self.db_type == DB_DB2:
            if platform.system().lower() in ["linux", "darwin"]:
                drivername = "DB2"
            elif platform.system().lower() == 'windows':
                drivername = "{IBM DATA SERVER DRIVER for ODBC - C:/PROGRA~1/IBM}"
            else:
                raise Exception("connect_to_db: DB2 connections on " + platform.system() +
                                " not supported yet.")
            # DSN-less? {IBM DB2 ODBC DRIVER} ?? http://www.webmasterworld.com/forum88/4434.htm
            _connection_string = "Driver=" + drivername + ";Database=" + self.databasename + ";hostname=" + \
                                 self.server + ";port=" + str(self.port) + ";protocol=TCPIP; uid=" + \
                                 self.username + "; pwd=" + self.password
            # _connection_string = "DSN=" + self.server + ";UID=" + self.username + ";PWD=" + self.password
        print("Connect to database using connection string: " + _connection_string)
        _connection = pyodbc.connect(_connection_string, autocommit=self.autocommit)
    elif self.db_type == DB_ORACLE:
        try:
            import cx_Oracle
        except ImportError as _err:
            raise Exception(import_error_to_help(_module="cx_Oracle", _err_obj=_err,
                                                 _pip_package="cx_Oracle",
                                                 _apt_package=None,
                                                 _win_package="Download and install binary .msi package from " +
                                                              "http://cx-oracle.sourceforge.net/ and install.",
                                                 _import_comment="(Linux) 2014-04-16: No python3-pyodbc available" +
                                                                 " at build time."))
        _connection_string = self.username + '/' + self.password + '@' + self.server + ':' + \
                             str(self.port) + '/' + self.instance
        print("Connect to database using connection string: " + _connection_string)
        _connection = cx_Oracle.connect(_connection_string)
        _connection.autocommit = self.autocommit
    elif self.db_type == DB_SQLITE:
        try:
            import sqlite3
        except ImportError as _err:
            raise Exception("Error importing SQLite3, which is built-in into Python, check your Python "
                            "installation. Error: " + str(_err))
        _connection = sqlite3.connect(self.databasename)
    else:
        raise Exception("connect_to_db: Invalid database type.")

    self.connection = _connection
    if self.on_connect:
        self.on_connect()
    self.connected = True
    return _connection
from bs4 import BeautifulSoup
import time
import re
import random
import logging
import socket
import base64
from postgresql import driver
import postgresql
import json

logging.basicConfig(level=logging.DEBUG)

# Guangxi ID scraping
# NOTE: connection details were blanked in the original and the port value was missing;
# port 9988 is assumed here to match the reconnect helper below.
DB = driver.connect(host='', user='******', password='', database='', port=9988)


# Reconnect when the database connection has been dropped
def conn_sql():
    global DB
    if DB.closed:
        DB = postgresql.driver.connect(host='106.75.145.80',
                                       user='******',
                                       password='******',
                                       database='cra1',
                                       port=9988)
    return DB


# Generate a random number range
def rangenum():
    little = random.randint(1, 1900000)
    more = little + 1000
    return little, more
def application(environ, start_response):
    import os, sys, cgi
    import postgresql.driver as pg_driver
    from webob import Request, Response

    db = pg_driver.connect(host='localhost', user='******', password='******',
                           database='omnivore', port=5432)
    #request = Request(environ)
    #params = request.params
    #post = request.POST
    page = ""
    if environ['REQUEST_METHOD'] == 'POST':
        post = cgi.FieldStorage(fp=environ['wsgi.input'],
                                environ=environ,
                                keep_blank_values=True)
        table = post.getvalue('table')
        if 'file' in post:
            filefield = post['file']
            if not isinstance(filefield, list):
                filefield = [filefield]
            for fileitem in filefield:
                # #account = request.headers["account"]
                # #time = request.headers["time"]
                if fileitem.filename:
                    # strip leading path from file name to avoid directory traversal attacks
                    fn = os.path.basename(fileitem.filename)
                    open('/tmp/file_upload/' + fn, 'wb').write(fileitem.file.read())
                    #page += 'File was uploaded %s roi sao %s'%(fn,table)
                    db.execute("""create table if not exists %s (
                        id serial8 primary key, no text,agent text, module text,domain text,title text,link text,
                        quest text, answer text, time timestamp, process_time numeric,img_id text,
                        update_time timestamp default now() )""" % (table))
                    try:
                        #db.execute("""copy %s(no,agent, module,domain,title,link, quest, answer, time, process_time,img_id) from '/usr/local/www/apache24/wsgi-scripts/file_upload/%s' delimiter ';' CSV HEADER quote '"' """%(table,fn))
                        db.execute("""copy %s(no,agent, module,domain,title,link, quest, answer, time, process_time,img_id) from '/tmp/file_upload/%s' delimiter ';' CSV HEADER escape '\\' quote '"' """ % (table, fn))
                        #db.execute("""update qc_project_csv%s%s set status =null ,price =null,currency=null,condition=null,availability=null where status ='' or price ='' or currency='' or condition ='' or availability=''"""%(year,month))
                        page += 'The file "' + fn + '" was uploaded and import successfully! <br />'
                        #db.execute("""insert into log_import_csv(filename,agent) values('%s','%s')"""%(fn,username))
                        #page += "Upload file sucessfull"
                        # # delete the file that was just uploaded
                        # try:
                        #     os.remove('c:/wsgi_app/file_upload/' + fn)
                        # except OSError:
                        #     pass
                    except IOError as err:
                        page += "I/O error: {0}".format(err)
                    except ValueError:
                        page += "Could not import data file csv to database"
                        raise
            #db.execute("update omnivore_%s_%s_%s set answer='' where answer='unselected'"%(year,month,day))
    else:
        page = u"""
        <html>
        <head><title>Upload Q</title></head>
        <body>
        <h1>Upload file csv cho bo dem beta Q </h1>
        <h1>Chu y :upload file csv cho bo dem beta AC <a href='https://172.16.29.6/wsgi/upload_files_omnivore_z.py'>Tai day</a></h1>
        <form name="test" method="post" action="" enctype="multipart/form-data">
        File: <input type="file" name="file" multiple/><br />
        Table :<input type="text" name="table" value='' required/> (omnivore_nam_thang_ngay omnivore_2015_5_7 : bang csv 2015-5-7 ) <br />
        <input type="submit" name="submit" value="Submit" />
        </form>
        <p>Note: files with the same name with overwrite any existing files.</p>
        </body>
        </html>
        """
    db.close()
    response = Response(body=page, content_type="text/html", charset="utf8", status="200 OK")
    return response(environ, start_response)
import pdb
from urllib.request import urlopen
import postgresql.driver as pg_driver
from bs4 import BeautifulSoup
from dateutil.parser import parse
from datetime import datetime
from elasticsearch import Elasticsearch

# Ex: search.search(index='events', q='poet')
# Ex: search.get(index='events', doc_type='event', id=99)
search = Elasticsearch([{'host': '192.168.99.100', 'port': 9200}])

db = pg_driver.connect(
    user='******',
    password='******',
    host='localhost',
    port='5432',
    database="worldhistory"
)


def createTables(db):
    db.execute("DROP TABLE IF EXISTS timelines;")
    db.execute("CREATE TABLE timelines (event_date VARCHAR(64), id INTEGER, event_description VARCHAR(2048));")


# If date represents an invalid date then None is returned
def getDateKey(date):
    try:
        dateObj = parse(date)
        return dateObj.isoformat()
    except TypeError:
        return None
def scrape():
    def insert_data(ps, ticker, type, period, div):
        results = parse_table(div)
        for date in results:
            data = json.dumps(results[date], indent=1, sort_keys=True)
            if db and type and ticker and data:
                try:
                    print('Inserting values ({:s}, {:s}, {:s}, {:b})'.format(
                        type, date, ticker, period))
                    ps(type, parse(date), ticker, data, period)
                except exceptions.Exception as e:
                    print("Connection could not be established, error: %s" % (format(e)))
                except Exception as e:
                    print("Data for {ticker: %s, type: %s, date: %s, period: %s} could not be inserted, reason: %s"
                          % (ticker, type, date, period, format(e)))

    def rotate_proxy(s, ind):
        ind = 0 if ind == len(proxy_list) - 1 else ind + 1
        s.proxies = {"http": proxy_list[ind]}

    db = pg_driver.connect(user=username, password=passwd, host=server,
                           port=portval, database=dbname)
    ps = db.prepare(
        'INSERT INTO google_finance.financial_data ("type", issue_date, ticker, "data", annual) VALUES($1, $2, $3, $4, $5)')
    ps_parsed = db.prepare(
        'INSERT INTO google_finance.parsed(ticker) VALUES ($1)')
    exchanges = [r[0] for r in db.query(
        'select distinct exchange from google_finance.tickers')]
    if exchanges is None:
        return

    s = requests.Session()
    ind = 0
    s.proxies = {"http": proxy_list[ind]}
    try:
        for exchange in exchanges:
            # tickers = get_ticker_list(exchange)
            tickers = load_ticker_list(exchange, db)
            if tickers is None:
                return
            counter = 0
            for ticker in tickers:
                # Check if have processed this ticker
                checked = False
                while not checked:
                    path = 'https://www.google.com/finance?q={:s}%3A{:s}&fstype=ii&ei=1Q7qVfGwKcyisAHQn5DADw' \
                        .format(exchange, ticker)
                    content = s.get(path).text
                    if content:
                        doc = document_fromstring(content)
                        # Check if main menu element exists - otherwise, Google captcha could have been called
                        sel = doc.cssselect('div#gf-nav')
                        if sel:
                            checked = True
                            print(ticker)
                            for div in doc.cssselect('div'):
                                if div.get('id') == 'incinterimdiv':
                                    insert_data(ps, ticker, 'income_statement', False, div)
                                elif div.get('id') == 'incannualdiv':
                                    insert_data(ps, ticker, 'income_statement', True, div)
                                elif div.get('id') == 'balinterimdiv':
                                    insert_data(ps, ticker, 'balance', False, div)
                                elif div.get('id') == 'balannualdiv':
                                    insert_data(ps, ticker, 'balance', True, div)
                                elif div.get('id') == 'casinterimdiv':
                                    insert_data(ps, ticker, 'cash_flow_statement', False, div)
                                elif div.get('id') == 'casannualdiv':
                                    insert_data(ps, ticker, 'cash_flow_statement', True, div)
                            # We do not want to check this ticker several times, if we would need to restart,
                            # so we insert its value into 'parsed' file
                            ps_parsed(ticker)
                            counter += 1
                            if counter % 1000 == 0:
                                sleep(600)
                                rotate_proxy(s, ind)
                        else:
                            # Google identified a robot - postpone processing for some longer time
                            print('Page does not contain financials information and menu. Pausing for 30 min... ')
                            sleep(1800)
                            rotate_proxy(s, ind)
    except URLError as e:
        pass
    except HTTPError as e:
        if e.code == 503:
            rotate_proxy(s, ind)
    finally:
        db.close()
import postgresql.driver as pg_driver

db = pg_driver.connect(
    user='******',
    password='******',
    host='127.0.0.1',
    database='paveldb',
    port=5432
)

for Films in db.prepare("SELECT films FROM public.films "):
    print(Films)
"--database-password", dest="database_password", help="the database users password", type="string", action="store", ) p.add_option("-f", "--sqlite-file", dest="sqlite_file", help="path to sqlite db", type="string", action="store") (options, args) = p.parse_args() if len(args) == 0: print("use {} as args".format(" ".join(cando.keys()))) db = {} db["sqlite"] = {} db["sqlite"]["dbh"] = sqlite3.connect(options.sqlite_file) db["sqlite"]["cursor"] = db["sqlite"]["dbh"].cursor() db["pg"] = {} db["pg"]["dbh"] = pg_driver.connect( user=options.database_user, password=options.database_password, database=options.database, host=options.database_host, port=5432, ) for i in args: if i in cando: copy(i, db["sqlite"]["cursor"], db["pg"]["dbh"], cando[i][0], cando[i][1]) # db['pg']['dbh'].commit()
from flask import Flask, request, render_template
import json
import postgresql.driver as pg_driver

app = Flask(__name__)
app.debug = True
app.config.from_pyfile('map.cfg')

db = None
db = pg_driver.connect(
    database=app.config['APP_NAME'],
    user=app.config['PG_DB_USERNAME'],
    password=app.config['PG_DB_PASSWORD'],
    host=app.config['PG_DB_HOST'],
    port=app.config['PG_DB_PORT']
)


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/wfs/')
def wfs():
    return render_template('wfs.html')


# return all parks:
@app.route("/layers/")
def layers():
    # list all tables registered in geometry_columns view
    result = db.prepare(
def connect(self):
    self.db = pg_driver.connect(user=self.user,
                                password=self.password,
                                host=self.host,
                                port=self.port,
                                database=self.database)
from argparse import ArgumentParser
from configparser import SafeConfigParser as ConfigParser
from postgresql import driver

parser = ArgumentParser()
parser.add_argument('-w', action='store_true')
parser.add_argument('configfile')
args = parser.parse_args()

config = ConfigParser()
config.readfp(open(args.configfile))
srvcfg = dict(config.items('server'))
datacfg = dict(config.items('datasource'))

data = driver.connect(**datacfg)
try:
    if args.w:
        from wserver.worldserver import WorldServer
        server = WorldServer(data)
    else:
        from wserver.realmserver import RealmServer
        server = RealmServer(data)
    server.start(
        host=srvcfg['host'],
        cport=int(srvcfg['control_port']),
        nport=int(srvcfg['notify_port']),
        backlog=int(srvcfg['backlog']),
        ntimeout=float(srvcfg['notify_timeout']),