def __init__(self, size, dsn=None):
    if dsn:
        self.db_pool = pool.ThreadedConnectionPool(
            1, size,
            dbname=dsn["database"],
            user=dsn["user"],
            host=dsn["host"],
            port=dsn["port"])
    else:
        cfg = Config()
        self.db_pool = pool.ThreadedConnectionPool(
            1, size,
            dbname=cfg.db_name,
            user=cfg.db_user,
            password=cfg.db_pass,
            host=cfg.db_host,
            port=cfg.db_port)
def __init__(self, conn_settings):
    try:
        self.__db_name = conn_settings['db_name']
        self.__db_user = conn_settings['db_user']
        self.__db_password = conn_settings['db_password']
        self.__db_host = conn_settings['db_host']
        self.__db_port = conn_settings['db_port']
        self.start_year = conn_settings['start_year']
        self.end_year = conn_settings['end_year']
        # One pooled connection per year in the requested range.
        self.max_conns = self.end_year - self.start_year
    except KeyError:
        print("Please define 'db_name', 'db_user', 'db_password', "
              "'db_host', 'db_port', 'start_year', 'end_year'")
        exit(1)
    try:
        self.conns = pool.ThreadedConnectionPool(
            0, self.max_conns,
            database=self.__db_name.lower(),
            user=self.__db_user,
            password=self.__db_password,
            host=self.__db_host,
            port=self.__db_port)
    except psycopg2.Error:
        raise Exception(
            "Did you define database parameters in config and run make?")
def createBlueprint(config):
    cpool = pool.ThreadedConnectionPool(**config['connection_pool'])
    if not cpool:
        raise ValueError('Unable to create a connection pool.')

    book_repo = BookRepo(cpool)
    book_routes = BookRoutes(book_repo)

    blueprint = Blueprint('books_api', __name__)
    blueprint.add_url_rule('/books', view_func=book_routes.get_books, methods=['GET'])
    blueprint.add_url_rule('/books', view_func=book_routes.create_book, methods=['POST'])
    blueprint.add_url_rule('/books/<int:id>', view_func=book_routes.get_book, methods=['GET'])
    blueprint.add_url_rule('/books/<int:id>', view_func=book_routes.update_book, methods=['PATCH'])
    blueprint.add_url_rule('/books/<int:id>/update', view_func=book_routes.update_book, methods=['POST'])
    blueprint.add_url_rule('/books/<int:id>', view_func=book_routes.delete_book, methods=['DELETE'])
    blueprint.add_url_rule('/books/<int:id>/delete', view_func=book_routes.delete_book, methods=['POST'])
    return blueprint
def __init__(self, config):
    c = {}
    if config.dbname:
        c['dbname'] = config.dbname
    if config.user:
        c['user'] = config.user
    if config.password:
        c['password'] = config.password
    if config.host:
        c['host'] = config.host
    if config.port:
        c['port'] = config.port
    if config.socket:
        c['query'] = {'unix_socket': config.socket}
    if config.sslrootcert and config.sslcert and config.sslkey:
        c['sslmode'] = 'verify-ca'
        c['sslrootcert'] = config.sslrootcert
        c['sslcert'] = config.sslcert
        c['sslkey'] = config.sslkey
    c['cursor_factory'] = cursor_factory
    self.pool = pool.ThreadedConnectionPool(1, 10, **c)
def initiateConnectionToDatabase(seed_urls):
    # Initiate the connection pool to the crawl database.
    try:
        db_pool = pool.ThreadedConnectionPool(
            3, 10,
            user=DB_['username'],
            password=DB_['password'],
            database=DB_['db_name'],
            host=DB_['host'],
            port=DB_['port'])
        if db_pool:
            print('PostgreSQL connection pool is successfully created!')
        else:
            print("PostgreSQL connection pool couldn't be established!")
    except Exception as error:
        print(error)

    db_connection = db_pool.getconn()
    cur = db_connection.cursor()

    # Check whether the frontier is empty.
    sql_query = "SELECT * FROM crawldb.page LIMIT 1"
    cur.execute(sql_query)
    current_page = cur.fetchone()
    return [current_page, db_connection]
def _create_connection_pool(self, conn_params):
    ''' Helper to initialize the connection pool. '''
    connection_pools_lock.acquire()
    try:
        # One more read to prevent a read/write race condition (We do this
        # here to avoid the overhead of locking each time we get a connection.)
        if (self.alias not in connection_pools
                or connection_pools[self.alias]['settings'] != self.settings_dict):
            logger.info("Creating connection pool for db alias %s" % self.alias)
            logger.info("  using MIN_CONNS = %s, MAX_CONNS = %s, TEST_ON_BORROW = %s"
                        % (self._min_conns, self._max_conns, self._test_on_borrow))
            from psycopg2 import pool
            connection_pools[self.alias] = {
                'pool': pool.ThreadedConnectionPool(
                    self._min_conns, self._max_conns, **conn_params),
                'settings': dict(self.settings_dict),
            }
    finally:
        connection_pools_lock.release()
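# A companion sketch, not part of the original snippet: how a connection might be
# borrowed from the shared pool created above and validated when TEST_ON_BORROW is
# enabled. The method name `_get_pooled_connection` is an assumption for illustration.
def _get_pooled_connection(self):
    conn = connection_pools[self.alias]['pool'].getconn()
    if self._test_on_borrow:
        try:
            # Validate the borrowed connection with a trivial query.
            with conn.cursor() as cur:
                cur.execute("SELECT 1")
        except Exception:
            # Discard the broken connection instead of returning it to the pool.
            connection_pools[self.alias]['pool'].putconn(conn, close=True)
            raise
    return conn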
def book_repo(self):
    cpool = pool.ThreadedConnectionPool(
        **config_qa.books_api['connection_pool'])
    if not cpool:
        raise ValueError('Unable to create a connection pool.')
    return BookRepo(cpool)
def __init__(self): print("INJECTED") self.__pool = pool.ThreadedConnectionPool(1, 20, host=self.__dbConfig["dbhost"], port=self.__dbConfig["dbport"], database=self.__dbConfig["dbname"], user=self.__dbConfig["dbuser"], password=self.__dbConfig["dbpassword"])
def _create_connection_pool(self):
    self.connection_pool = pool.ThreadedConnectionPool(
        self.min_connection, self.max_connection,
        user=self.user,
        password=self.password,
        host=self.host,
        port=self.port,
        database=self.database)
def __init__(self, db_host="120.27.162.201"):
    self.pool = pool.ThreadedConnectionPool(
        minconn=self.min_connections,
        maxconn=self.max_connections,
        database=self.db_name,
        user=self.db_user,
        host=db_host,
        password=self.db_password,
    )
def __init__(self):
    try:
        self.con = pool.ThreadedConnectionPool(
            5, 100,
            database='db_v1',
            user='******',
            password='******')
    except Exception:
        print("I am unable to connect to the database")
def initDbPool(dbname):
    (database, db_user, db_pwd, db_host, db_port) = getConf(dbname)
    dbpool = pool.ThreadedConnectionPool(
        minconn=45, maxconn=45,
        database=database,
        user=db_user,
        password=db_pwd,
        host=db_host,
        port=db_port)
    return dbpool
def start(self):
    print("Starting connection pool to database")
    self.connection_pool = pool.ThreadedConnectionPool(
        minconn=self.min_connections,
        maxconn=self.max_connections,
        host=self.host,
        port=self.port,
        database=self.database_name,
        user=self.user,
        password=self.password)
    print("Created connection pool to database")
def _create_connection_pool(self, n_connections: int) -> pool.ThreadedConnectionPool:
    """Create a thread-safe connection pool."""
    return pool.ThreadedConnectionPool(
        0, n_connections,
        user=self._user,
        password=self._password,
        host=self.host,
        port=self.port,
        dbname=self.dbname,
    )
class PostgresConnectorUsePool:
    project_database_connection_pool = pool.ThreadedConnectionPool(
        minconn=1,
        maxconn=5,
        host=config.project_database_host,
        port=config.project_database_port,
        database=config.project_database_name,
        user=config.project_database_user,
        password=config.project_database_password,
    )
    daily_price_stock_database_connection_pool = pool.ThreadedConnectionPool(
        minconn=5,
        maxconn=50,
        host=config.daily_price_stock_database_host,
        port=config.daily_price_stock_database_port,
        database=config.daily_price_stock_database_name,
        user=config.daily_price_stock_database_user,
        password=config.daily_price_stock_database_password,
    )
def __init__(self):
    connection_string = str(
        get_env_variable("RDS_DWH_CONNECTION_STRING", exception_on_failure=True))
    try:
        self.conn_pool = pool.ThreadedConnectionPool(
            minconn=15, maxconn=100, dsn=connection_string)
    except Exception as ex:
        # self.LOGGER.error("Can't create connection pool: {}".format(str(ex)))
        print("Can't create connection pool: {}".format(ex))
def initDbPool(dbname, minc=1, maxc=1):
    (database, db_user, db_pwd, db_host, db_port) = getConf(dbname)
    try:
        dbpool = pool.ThreadedConnectionPool(
            minconn=minc, maxconn=maxc,
            database=database,
            user=db_user,
            password=db_pwd,
            host=db_host,
            port=db_port)
        return dbpool
    except Exception as e:
        logging.error('Failed to obtain the database connection pool: %s' % e)
def initConnection():
    db_user = os.environ.get('CLOUD_SQL_USERNAME')
    db_password = os.environ.get('CLOUD_SQL_PASSWORD')
    db_name = os.environ.get('CLOUD_SQL_DATABASE_NAME')
    db_connection_name = os.environ.get('CLOUD_SQL_CONNECTION_NAME')
    host = '/cloudsql/{}'.format(db_connection_name)

    db_config = {
        'user': db_user,
        'password': db_password,
        'database': db_name,
        'host': host
    }
    connpool = pool.ThreadedConnectionPool(minconn=1, maxconn=10, **db_config)
    return connpool
def runQuery(query, params=None):
    global pool
    if not pool:
        pool = pgpool.ThreadedConnectionPool(
            1, 3,
            "dbname=dbuser user=dbuser password=YeiCoo0dujih host=127.0.0.1 port=15432",
        )
    conn = pool.getconn()
    try:
        cur = conn.cursor()
        cur.execute(query, params)
        result = cur.fetchall()
    finally:
        # Always return the connection to the pool, even if the query fails.
        pool.putconn(conn)
    return result
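# A sketch, not from the original snippet, of the same borrow/return pattern wrapped
# in a context manager so callers cannot forget putconn(); it assumes the module-level
# `pool` created by runQuery() above already exists.
from contextlib import contextmanager

@contextmanager
def pooled_connection():
    conn = pool.getconn()
    try:
        yield conn
    finally:
        # Return the connection to the pool even if the caller's query raised.
        pool.putconn(conn)

# Usage sketch:
# with pooled_connection() as conn:
#     with conn.cursor() as cur:
#         cur.execute("SELECT 1")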
def setup_pool():
    global pool
    # Each of these is expected to appear on a separate line of the login file.
    with open('htc_login.txt') as f:
        host = f.readline().rstrip()
        port = f.readline().rstrip()
        db = f.readline().rstrip()
        user = f.readline().rstrip()
        pw = f.readline().rstrip()
    pool = pgp.ThreadedConnectionPool(
        20, 100,
        host=host,
        port=port,
        database=db,
        user=user,
        password=pw)
def _create_connection_pool(self, min_conn, max_conn, *args, **kwargs):
    """
    min_conn: minimum number of connections
    max_conn: maximum number of connections

    Kwargs::
        database : database name
        user     : user of the database you want to connect as
        password : password for the database
        host     : IP address such as 127.0.0.1
        port     : e.g. 5433
    """
    try:
        return pool.ThreadedConnectionPool(min_conn, max_conn, *args, **kwargs)
    except Exception as e:
        print('ERROR in create_connection_pool ', e)
        return False
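# A short usage sketch (an assumption, not part of the original code) for the helper
# above; `db` stands in for an instance of the surrounding class, and the connection
# parameters are placeholders.
conn_pool = db._create_connection_pool(
    1, 10,
    database='mydb', user='postgres', password='secret',
    host='127.0.0.1', port=5433)
if not conn_pool:
    # The helper returns False when pool creation fails.
    raise RuntimeError('Connection pool could not be created')
conn = conn_pool.getconn()
try:
    with conn.cursor() as cur:
        cur.execute('SELECT version()')
        print(cur.fetchone())
finally:
    conn_pool.putconn(conn)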
def __init__(self):
    if DBUtils.__instance is not None:
        raise Exception("This is a singleton class")
    else:
        logger.info(
            "Initializing connection pool for database connection, "
            "should happen only once during startup, with {}".format(
                constants.fetch_constant("host")))
        self.sales_pool = pool.ThreadedConnectionPool(
            constants.fetch_constant("min_pool"),
            constants.fetch_constant("max_pool"),
            host=constants.fetch_constant("host"),
            user="******",
            password=constants.fetch_constant("password"),
            port="5432",
            database=constants.fetch_constant("db_name"))
        logger.info("Made {} max connections".format(self.sales_pool.maxconn))
        DBUtils.__instance = self
def __init__(self): """ Virtually private constructor. """ if MyDBUtil.__instance is not None: raise Exception("This class is a singleton!") else: logging.info( "Initializing connection pool for database connection, should happen only once during startup. with {}".format( constant.DB_HOST)) global sales_pool sales_pool = pool.ThreadedConnectionPool(constant.DB_MIN_POOL, constant.DB_MAX_POOL, user=constant.DB_USER, password=constant.DB_PASSWORD, host=constant.DB_HOST, port=constant.DB_PORT, database="sales") logging.info("made " + str(sales_pool.maxconn) + " maximum connections") MyDBUtil.__instance = self
def load_and_insert(data_config, con_config, db_config, tbl_config):
    '''
    From the configs, collect the paths for the data files and establish a
    threaded connection pool for psycopg2 to insert data into Postgres.
    Loop through each data file, then loop through its rows and insert
    them in batches.
    '''
    datapaths = [
        data_config['datapath'] + d for d in listdir(data_config['datapath'])
    ]
    # Use pooling to multi-thread connections to the db: at least 5, up to 20 threads.
    threadpool = pool.ThreadedConnectionPool(
        5, 20,
        database=db_config['dbname'],
        port=con_config['port'],
        user=con_config['user'],
        password=con_config['password'],
        host=con_config['host'])

    def clean(batch):
        # Replace apostrophes with backticks so the values can be inserted
        # into the Postgres table without quoting issues.
        return [[x.replace("'", "`") for x in row] for row in batch]

    for d in range(len(datapaths)):  # Loop through the data directory
        f = gzip.open(datapaths[d], "rt", encoding='utf8')  # Open gzipped data file
        reader = csv.reader(f)  # Create a reader
        next(reader)  # Skip the header row
        rows = []  # Batch of rows to insert
        for row in reader:  # Iterate over all rows
            rows.append(row)
            if len(rows) >= 100000:  # Insert in batches of 100,000 rows
                insertrows(con_config, db_config, tbl_config, threadpool, clean(rows), d)
                rows = []  # Reset the batch
        if rows:  # Send the rest off if fewer than 100,000 rows remain
            insertrows(con_config, db_config, tbl_config, threadpool, clean(rows), d)
        f.close()
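# The loader above calls an `insertrows` helper that is not shown; the sketch below is
# an assumption of what it might look like: borrow a connection from the shared pool,
# insert the batch, commit, and return the connection. The table name key
# `tbl_config['name']` is hypothetical, and con_config/db_config/d are unused here.
def insertrows(con_config, db_config, tbl_config, threadpool, rows, d):
    conn = threadpool.getconn()
    try:
        with conn.cursor() as cur:
            # Build a single multi-row VALUES list with properly escaped values.
            values = ",".join(
                cur.mogrify("(" + ",".join(["%s"] * len(row)) + ")", row).decode("utf8")
                for row in rows)
            cur.execute("INSERT INTO {} VALUES {}".format(tbl_config['name'], values))
        conn.commit()
    finally:
        threadpool.putconn(conn)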
def connect(self, instance):
    conf = instance['conf']
    _max_conn = int(conf.get('max_conn', MAX_CONN))
    _timeout = int(conf.get('timeout', TIMEOUT_CONN))
    try:
        _conn_pool = pool.ThreadedConnectionPool(
            MIN_CONN, _max_conn,
            host=conf['host'],
            port=conf['port'],
            dbname=conf['db'],
            user=conf['user'],
            password=conf['passwd'],
            connect_timeout=_timeout)
        instance['conn_pool'] = _conn_pool
        Log.trace(
            '>>> Successfully connected to POSTGRES: {}, {}:{}'.format(
                instance['server'], conf['host'], conf['port']))
    except psycopg2.OperationalError as e:
        Log.error('>> PGSQL ERROR {} {}'.format(conf.get('server'), e))
def __init__(self, minimum_connections, max_connections):
    # The pool is created below; None signals that no pool exists yet.
    self.connection_pool = None
    try:
        # Read connection parameters from the config file.
        params = config()
        self.connection_pool = pool.ThreadedConnectionPool(
            minimum_connections, max_connections,
            user=params.get('user'),
            password=params.get('password'),
            host=params.get('host'),
            database=params.get('database'))
        if self.connection_pool:
            print('[DATABASE] Connection successfully established')
    except (Exception, psycopg2.DatabaseError) as error:
        print("[ERROR WHILE ESTABLISHING CONNECTION TO DATABASE]", error)
class KslabAuthenticator(Authenticator):
    passwords = Dict(
        config=True,
        help="""dict of username:password for authentication"""
    )
    pg_pool = pool.ThreadedConnectionPool(
        5, 15,
        user=POSTGRES_USER,
        password=POSTGRES_PASSWORD,
        host=POSTGRES_HOST,
        port=POSTGRES_PORT,
        database='jupyterhub')

    @gen.coroutine
    def authenticate(self, handler, data):
        conn = self.pg_pool.getconn()
        try:
            if conn:
                cur = conn.cursor()
                cur.execute(
                    'SELECT * FROM user_passwords WHERE name = %s AND password = %s',
                    (data['username'], data['password']))
                result = cur.fetchone()
                cur.close()
                if result:
                    return data['username']
                else:
                    # self.log.warning(handler.request.remote_ip)
                    self.log.warning("Failed login for %s (@%s)",
                                     (data or {}).get('username', 'unknown user'),
                                     handler.request.remote_ip)
        finally:
            self.pg_pool.putconn(conn)
        return None

    @gen.coroutine
    def pre_spawn_start(self, user, spawner):
        """Pass upstream_token to spawner via environment variable"""
        auth_state = yield user.get_auth_state()
        if not auth_state:
            # auth_state not enabled
            return
        spawner.environment['UPSTREAM_TOKEN'] = auth_state['upstream_token']
def __init__(self, **args):
    super(PGConnector, self).__init__('postgresql', **args)
    if not self.closed:
        if 'host' not in self.__dict__ or not self.host:
            raise TypeError(
                'PostgreSQL: the host has not been set in config file or parameters.')
        if 'database' not in self.__dict__ or not self.database:
            raise TypeError(
                'PostgreSQL: the database has not been set in config file or parameters.')
        if 'port' not in self.__dict__ or not self.port:
            self.port = 5432
        if 'thread' not in self.__dict__ or not self.thread:
            self.thread = 1
        if 'user' not in self.__dict__ or not self.user:
            raise TypeError(
                'PostgreSQL: the user has not been set in config file or parameters.')
        if 'password' not in self.__dict__ or not self.password:
            raise TypeError(
                'PostgreSQL: the password has not been set in config file or parameters.')
        try:
            self.__handle = pool.ThreadedConnectionPool(
                1, self.thread,
                dbname=self.database,
                user=self.user,
                password=self.password,
                host=self.host,
                port=self.port,
                cursor_factory=extras.RealDictCursor)
        except Exception as e:
            raise e
from apistar import App, Route
from psycopg2 import pool
import logging

DB_CONFIG = {
    'host': '127.0.0.1',
    'user': '******',
    'password': '******',
    'port': '5432',
    'database': 'test'
}

connection_pool = pool.ThreadedConnectionPool(10, 80, **DB_CONFIG)
conn = connection_pool.getconn()


def db2(name=None):
    with conn.cursor() as cur:
        cur.execute('SELECT salary,address,age,id,name FROM test.company')
        results = cur.fetchall()
    return {'posts': jsonify(results)}


def jsonify(records):
    """ Parse a database record response into JSON-serializable dicts """
    list_return = []
    list_keys = ['salary', 'address', 'age', 'id', 'name']
    for r in records:
        # Pair each column value with its key name.
        list_return.append(dict(zip(list_keys, r)))
    return list_return
'''
Test script for the psycopg2 database connection pool.
'''


def getConf(dbname='postgres'):
    from configparser import ConfigParser
    conFile = r"./database.ini"
    conFig = ConfigParser()
    conFig.read(conFile)
    return (conFig.get(dbname, 'database'),
            conFig.get(dbname, 'user'),
            conFig.get(dbname, 'password'),
            conFig.get(dbname, 'host'),
            conFig.get(dbname, 'port'))


dbname = 'hisdb'
(database, db_user, db_pwd, db_host, db_port) = getConf(dbname)
dbpool = pool.ThreadedConnectionPool(
    minconn=4,
    maxconn=100,
    database=database,
    user=db_user,
    password=db_pwd,
    host=db_host,
    port=db_port
)

conn = dbpool.getconn()
cur = conn.cursor()
sql = "select * from dis.tc_table_space_tj_20170707_d limit 10;"
cur.execute(sql)
print(cur.fetchall())
cur.close()
conn.commit()
dbpool.putconn(conn)
dbpool.closeall()