def test_unstartedClose(self):
    """
    If L{ConnectionPool.close} is called without L{ConnectionPool.start}
    having been called, the pool's startup event is cancelled.
    """
    reactor = EventReactor(False)
    pool = ConnectionPool('twisted.test.test_adbapi', cp_reactor=reactor)
    # There should be a startup trigger waiting.
    # assertEquals is a deprecated alias; use assertEqual, consistent with
    # test_startedClose.
    self.assertEqual(reactor.triggers,
                     [('after', 'startup', pool._start)])
    pool.close()
    # But not anymore.
    self.assertFalse(reactor.triggers)
def test_startedClose(self):
    """
    If L{ConnectionPool.close} is called after it has been started, but
    not by its shutdown trigger, the shutdown trigger is cancelled.
    """
    reactor = EventReactor(True)
    pool = ConnectionPool('twisted.test.test_adbapi', cp_reactor=reactor)
    # A started pool must have registered exactly one shutdown trigger.
    expected = [('during', 'shutdown', pool.finalClose)]
    self.assertEqual(reactor.triggers, expected)
    # Closing the pool cancels the pending trigger.
    pool.close()
    self.assertFalse(reactor.triggers)
def runserver(dbpath, host, port, logpath):
    """Build the sqlite-backed application and run it on host:port.

    When *logpath* is given, requests are logged to that file (opened in
    append mode and owned by the application from here on).
    """
    pool = ConnectionPool('sqlite3', dbpath, check_same_thread=False)
    app = Application(pool)
    print('Database: %s' % (dbpath))
    logfile = None
    if logpath:
        logfile = open(logpath, 'a')
        print('Log File: %s' % (logpath))
    print('Host: %s\nPort: %d\n' % (host, port))
    app.run(host, port, logfile)
def __init__(self, stats):
    """Open the MySQL connection pool and subscribe to spider_closed."""
    self.stats = stats
    # Pool configured from the global SETTINGS dict; DictCursor makes
    # rows come back as dicts.
    self.dbpool = ConnectionPool(
        'MySQLdb',
        host=SETTINGS['DB_HOST'],
        user=SETTINGS['DB_USER'],
        passwd=SETTINGS['DB_PASSWD'],
        port=SETTINGS['DB_PORT'],
        db=SETTINGS['DB_DB'],
        charset='utf8',
        use_unicode=True,
        cursorclass=MySQLdb.cursors.DictCursor,
    )
    dispatcher.connect(self.spider_closed, signals.spider_closed)
def __init__(self, log, db_location):
    """Create the sqlite-backed store and bring the schema up to date.

    Args:
        log: logger object kept on the instance.
        db_location: filesystem path of the sqlite database file.
    """
    self.log = log
    # Renamed from `type`, which shadowed the builtin of the same name.
    db_type = "sqlite"
    self.coordinator = None
    self.histcollector = None
    self._db_location = db_location
    # Note: cp_max=1 is required otherwise undefined behaviour could occur
    # when using yield icw subsequent runQuery or runOperation statements
    if db_type == "sqlite":
        self.dbpool = ConnectionPool("sqlite3", db_location,
                                     check_same_thread=False, cp_max=1)
    # Check database schema version and upgrade when required
    self.updatedb('0.2')
def connect(self):
    """
    Setup our database connection. Throws if cannot connect.
    """
    print("[DatabaseQuery] Connecting to database"
          "\n -> database:'%s' user:'******' host:'%s'"
          % (self.postgres_database, self.postgres_user, self.postgres_host))
    # get a connection, if a connect cannot be made an exception will be raised here
    self.dbConnection = ConnectionPool('psycopg2',
                                       database=self.postgres_database,
                                       user=self.postgres_user,
                                       host=self.postgres_host,
                                       password=self.postgres_password)
    # Fixed typo in the status message ("sucsessful" -> "successful").
    print("[DatabaseQuery] Database connection successful")
def _db(self) -> ConnectionPool:
    """Return the shared MySQL pool, creating it lazily on first access."""
    if self._state.db is None:
        # First access: build the pool and cache it on the shared state.
        pool = ConnectionPool(
            "pymysql",
            host=self.hostName,
            port=self.hostPort,
            database=self.database,
            user=self.username,
            password=self.password,
            cursorclass=Cursor,
            cp_reconnect=True,
        )
        # self._upgradeSchema(pool)
        self._state.db = pool
    return self._state.db
def setup(cls, settings):
    """Configure the class-level PostgreSQL connection pool from settings.

    Reads the "postgresql_settings" entry; when present, builds a psycopg2
    ConnectionPool and selects PostgresDatabase as the preferred backend.
    """
    conf = settings.get("postgresql_settings")
    if conf:
        postgres_connection_settings = dict(
            host=conf.host,
            port=conf.port,
            database=conf.database,
            user=conf.username,
            password=conf.password,
            cp_min=1,
            cp_max=conf.poolsize,
            cp_reconnect=True,
            cp_noisy=settings['debug'],
            connection_factory=NamedTupleConnection)
        pg_cpool = ConnectionPool("psycopg2", **postgres_connection_settings)
        cls.postgresql = pg_cpool
        # `print pg_cpool` was Python 2 print-statement syntax — a
        # SyntaxError under Python 3; converted to the function form.
        print(pg_cpool)
        cls.preferred_db_class = PostgresDatabase
#!/usr/bin/env python3 import os from fzone import Repo ROOT = os.path.dirname(__file__) repo = Repo(os.path.join(ROOT, 'client')) from twisted.enterprise.adbapi import ConnectionPool dbpool = ConnectionPool("sqlite3", repo.index(), check_same_thread=False) from twisted.conch.ssh.transport import SSHClientTransport from twisted.internet.defer import succeed from fzone.ssh import FZoneConnection class FZoneClientConnection(FZoneConnection): def serviceStarted(self): super().serviceStarted() self.pull() class FZoneClientTransport(SSHClientTransport): def verifyHostKey(self, hostKey, fingerprint): return succeed(True) def connectionSecure(self): self.requestService(FZoneClientConnection()) from twisted.internet.protocol import ClientFactory
def open(self):
    """
    Access the underlying database, initializing it on first call.

    Creates the adbapi pool, verifies the CALDAV schema table, and either
    initializes, upgrades, or rebuilds the schema as needed.  This is a
    generator of Deferreds (inlineCallbacks style) — callers get a Deferred.

    @return: a db2 connection object for this index's underlying data store.
    """
    if not self.initialized:
        self.pool = ConnectionPool(self.dbapiName, *self.dbapiArgs,
                                   **self.dbapikwargs)
        # sqlite3 is not thread safe which means we have to close the
        # sqlite3 connections in the same thread that opened them. We need
        # a special thread pool class that has a thread worker function
        # that does a close when a thread is closed.
        if self.dbapiName == "sqlite3":
            self.pool.threadpool.stop()
            self.pool.threadpool = ConnectionClosingThreadPool(1, 1)
            self.pool.threadpool.start()
            self.pool.threadpool.pool = self.pool

        #
        # Set up the schema
        #
        # Create CALDAV table if needed
        try:
            test = (yield self._test_schema_table())
            if test:
                # Schema table exists: compare stored version/type against
                # what this code expects.
                version = (yield self._db_value_for_sql(
                    "select VALUE from CALDAV where KEY = 'SCHEMA_VERSION'"
                ))
                dbtype = (yield self._db_value_for_sql(
                    "select VALUE from CALDAV where KEY = 'TYPE'"))
                if (version != self._db_version()) or (dbtype != self._db_type()):
                    if dbtype != self._db_type():
                        log.error(
                            "Database {db} has different type ({t1} vs. {t2})",
                            db=self.dbID, t1=dbtype, t2=self._db_type())

                        # Delete this index and start over
                        yield self._db_remove()
                        yield self._db_init()

                    elif version != self._db_version():
                        log.error(
                            "Database {db} has different schema (v.{v1} vs. v.{v2})",
                            db=self.dbID, v1=version, v2=self._db_version())

                        # Upgrade the DB
                        yield self._db_upgrade(version)

            else:
                # No schema table yet: create everything from scratch.
                yield self._db_init()
            self.initialized = True
        except:
            # Clean up upon error so we don't end up leaking threads
            # (bare except is intentional here: close the pool for ANY
            # failure, then re-raise unchanged).
            self.pool.close()
            self.pool = None
            raise
def connection_pool(self):
    """Lazily build and memoize the adbapi ConnectionPool for this database."""
    if not self._connection:
        pool = ConnectionPool(self.module_name, self.db_name,
                              *self.db_args, **self.db_kwargs)
        self._connection = pool
    return self._connection
def connect(self):
    """Open the adbapi pool for the configured engine, database and user."""
    self.db_pool = ConnectionPool(
        self.db_engine,
        database=self.database,
        user=self.user,
    )
# NOTE(review): this fragment begins mid-call — the opening
# `parser.add_argument(` for --config lies outside this chunk.
    '--config', help='Configuration file', default='config.json')
parser.add_argument('-d', '--database', help='Database file',
                    default='forkingdongles.db')
args = parser.parse_args()

app = Application('ForkingDongles')
# Fall back to a default IRC configuration when the config file is absent.
config = JSONConfig(args.config, default={
    'core': {
        'host': 'irc.example.com',
        'port': 6667,
        'ssl': False,
        'nickname': 'ForkingDongles',
        'channels': []
    }
})
db = ConnectionPool('sqlite3', args.database)
log.startLogging(sys.stderr)
# Endpoint string scheme (ssl vs tcp) is chosen from config.
uri = '{}:{}:{}'.format('ssl' if config['core']['ssl'] else 'tcp',
                        config['core']['host'], config['core']['port'])
endpoint = endpoints.clientFromString(reactor, uri)
factory = ForkingDonglesFactory(config, db)
service = ClientService(endpoint, factory)
service.setServiceParent(app)
service.startService()
reactor.run()
def dbPool(dbdef):
    """Build a ConnectionPool from a driver-definition object.

    *dbdef* supplies the DB-API driver name (``dbdef.driver``) and the
    keyword arguments for the pool (``dbdef.kwargs``).
    """
    return ConnectionPool(dbdef.driver, **dbdef.kwargs)
def __init__(self, settings, **kwargs):
    """Connect to database in the pool.

    Dispatches on ``settings['drivername']`` (sqlite / pgsql / mysql /
    firebird), builds the matching adbapi pool, and sets per-backend SQL
    templates, parameter style and identifier quoting.

    Raises:
        NotConfigured: if *settings* is not a dict.
    """
    if not isinstance(settings, dict):
        raise NotConfigured('No database connection settings found.')
    self.settings = settings
    self.stats = kwargs.get('stats')
    self.debug = kwargs.get('debug', False)
    # Backend-neutral defaults; overridden per driver below.
    self.paramstyle = ':'
    self.identifier = '"'  # default to ANSI quoting
    # Template queries shared by all backends; $-placeholders are expanded
    # elsewhere.
    self.queries = {
        'select': "SELECT $fields FROM $table:esc WHERE $indices:and",  # select on UniqueFields
        'selectall': "SELECT $fields FROM $table:esc",
        'selectone': "SELECT $fields FROM $table:esc WHERE $indices:and LIMIT 1",  # if backend supports LIMIT
        # 'delete': "DELETE FROM $table:esc WHERE $indices:and", # match on UniqueFields
        'deleteme': "DELETE FROM $table:esc WHERE $fields_values:and",  # exact item match
    }
    self.dbapi = None
    if self.settings.get('drivername') == 'sqlite':
        self.dbapi = __import__('sqlite3', fromlist=[''])
        self.__dbpool = ConnectionPool(
            'sqlite3',
            self.settings.get('database', ':memory:'),
            # apparently the connection pool / thread pool does not do the
            # teardown in the same thread
            # https://twistedmatrix.com/trac/ticket/3629
            # therefore throwing errors on finalClose at reactor shutdown
            # TODO: should be able to work around that?
            check_same_thread=False,  # SQLite must be compiled threadsafe to use this
            # limit connection pool to one thread to avoid "database is locked" errors
            #cp_max=1,
            # - or raise the database timeout sufficiently
            timeout=300,
        )
        # alternative escaping parameter
        #self.paramstyle = '?'
        #self.paramstyle = ':'
        #self.paramstyle = '$'
        # default statements for sqlite
        self.queries.update({
            'insert': "INSERT INTO $table:esc SET $fields_values",
            'upsert': "INSERT OR REPLACE INTO $table:esc ($fields) VALUES ($values)",
            'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
        })
    elif self.settings.get('drivername') == 'pgsql':
        self.dbapi = __import__('psycopg2', fromlist=[''])
        #from psycopg2.extras import DictCursor
        self.__dbpool = ConnectionPool(
            'psycopg2',
            database=self.settings.get('database'),
            user=self.settings.get('username'),
            password=self.settings.get('password', None),
            host=self.settings.get('host', None),  # default to unix socket
            port=self.settings.get('port', '5432'),
            # cursor_factory = DictCursor,
        )
        self.paramstyle = '%s'
        # default statements for postgres
        self.queries.update({
            'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
            'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
        })
    elif self.settings.get('drivername') == 'mysql':
        self.dbapi = __import__('MySQLdb', fromlist=[''])
        from MySQLdb import cursors
        # Reconnecting variant: MySQL connections drop on idle timeouts.
        self.__dbpool = ReconnectingConnectionPool(
            'MySQLdb',
            db=self.settings.get('database'),
            user=self.settings.get('username'),
            passwd=self.settings.get('password', None),
            host=self.settings.get('host', 'localhost'),  # should default to unix socket
            port=self.settings.get('port', 3306),
            cursorclass=cursors.DictCursor,
            charset='utf8',
            use_unicode=True,
            # connpool settings
            cp_reconnect=True,
            #cp_noisy = True,
            #cp_min = 1,
            #cp_max = 1,
        )
        self.paramstyle = '%s'
        self.identifier = '`'  # MySQL quoting
        # default statements for mysql
        self.queries.update({
            'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
            # 'upsert': "REPLACE INTO $table ($fields) VALUES ($values)",
            'upsert': "INSERT INTO $table:esc SET $fields_values ON DUPLICATE KEY UPDATE $fields_values",
            'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
        })
    elif self.settings.get('drivername') == 'firebird':
        # untested
        self.dbapi = __import__('fdb', fromlist=[''])
        self.__dbpool = ConnectionPool(
            'fdb',
            database=self.settings.get('database'),
            user=self.settings.get('username'),
            password=self.settings.get('password', None),
            host=self.settings.get('host', None),  # default to unix socket
            port=self.settings.get('port', 3050),
            #dialect = 1, # necessary for all dialect 1 databases
            charset='UTF8',  # specify a character set for the connection
        )
        self.paramstyle = '?'
        self.queries.update({
            'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
            'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
        })
    # Caller-supplied templates always win over the per-backend defaults.
    self.queries.update(kwargs.get('queries', {}))
def makePool(self, **newkw):
    """Create a connection pool with additional keyword arguments.

    Keywords in *newkw* override the stored pool keywords; the stored
    mapping itself is left untouched.
    """
    args, kw = self.getPoolArgs()
    merged = dict(kw)
    merged.update(newkw)
    return ConnectionPool(*args, **merged)
# coding=utf8 from twisted.internet import reactor, defer from twisted.enterprise.adbapi import ConnectionPool import psycopg2 import psycopg2.extras #from eventlet.twistedutil import block_on import time dbpool = ConnectionPool("psycopg2", host="47.93.5.189", user="******", password="******", database="mzhan", cursor_factory=psycopg2.extras.DictCursor) import pymysql db = pymysql.connect( host="47.93.5.189", user="******", password="******", database="spark", charset="utf8" ) cursor = db.cursor() def _getData(txn, user): txn.execute("select * from mzhan_user ") result = txn.fetchall() if result:
def __init__(self, config, custom_reactor, tracer=opentracing.tracer):
    """
    Object that holds state for the entirety of a Sygnal instance.

    Args:
        config (dict): Configuration for this Sygnal
        custom_reactor: a Twisted Reactor to use.
        tracer (optional): an OpenTracing tracer. The default is the no-op
            tracer.
    """
    self.config = config
    self.reactor = custom_reactor
    self.pushkins = {}
    self.tracer = tracer

    logging_dict_config = config["log"]["setup"]
    logging.config.dictConfig(logging_dict_config)

    logger.debug("Started logging")

    # Route Twisted's log output through the stdlib logging setup above.
    observer = twisted_log.PythonLoggingObserver()
    observer.start()

    # Old format db config
    if config.get("db") is not None:
        logger.warning(
            "Config includes the legacy 'db' option, please migrate"
            " to 'database' instead")
        config["database"] = {
            "name": "sqlite3",
            "args": {
                "dbfile": config["db"]["dbfile"]
            },
        }
    elif config.get("database") is None:
        # No database configured at all: default to a local sqlite file.
        config["database"] = {
            "name": "sqlite3",
            "args": {
                "dbfile": "sygnal.db"
            },
        }

    sentrycfg = config["metrics"]["sentry"]
    if sentrycfg["enabled"] is True:
        import sentry_sdk
        logger.info("Initialising Sentry")
        sentry_sdk.init(sentrycfg["dsn"])

    promcfg = config["metrics"]["prometheus"]
    if promcfg["enabled"] is True:
        prom_addr = promcfg["address"]
        prom_port = int(promcfg["port"])
        logger.info("Starting Prometheus Server on %s port %d",
                    prom_addr, prom_port)

        prometheus_client.start_http_server(port=prom_port,
                                            addr=prom_addr or "")

    tracecfg = config["metrics"]["opentracing"]
    if tracecfg["enabled"] is True:
        if tracecfg["implementation"] == "jaeger":
            try:
                import jaeger_client
                jaeger_cfg = jaeger_client.Config(
                    config=tracecfg["jaeger"],
                    service_name=tracecfg["service_name"],
                    scope_manager=AsyncioScopeManager(),
                )
                self.tracer = jaeger_cfg.initialize_tracer()
                logger.info("Enabled OpenTracing support with Jaeger")
            except ModuleNotFoundError:
                logger.critical(
                    "You have asked for OpenTracing with Jaeger but do not have"
                    " the Python package 'jaeger_client' installed.")
                raise
        else:
            # BUG FIX: the key checked above is "implementation"; the
            # original read tracecfg["impl"] here, which raised KeyError
            # instead of logging the unrecognized value.
            logger.error("Unknown OpenTracing implementation: %s.",
                         tracecfg["implementation"])
            sys.exit(1)

    db_name = config["database"]["name"]

    if db_name == "psycopg2":
        logger.info("Using postgresql database")
        self.database_engine = "postgresql"
        self.database = ConnectionPool(
            "psycopg2",
            cp_reactor=self.reactor,
            **config["database"].get("args"),
        )
    elif db_name == "sqlite3":
        logger.info("Using sqlite database")
        self.database_engine = "sqlite"
        self.database = ConnectionPool(
            "sqlite3",
            config["database"]["args"]["dbfile"],
            cp_reactor=self.reactor,
            cp_min=1,
            cp_max=1,
            check_same_thread=False,
        )
    else:
        raise Exception("Unsupported database 'name'")