def test_listen_targets_per_subclass(self):
    """test that listen() called on a subclass remains specific to
    that subclass."""

    # collects the names of listeners in the order they fire
    canary = []

    def listen_one(*args):
        canary.append("listen_one")

    def listen_two(*args):
        canary.append("listen_two")

    def listen_three(*args):
        canary.append("listen_three")

    # one listener on the Pool base class, one on each of two
    # distinct subclasses
    event.listen(pool.Pool, 'connect', listen_one)
    event.listen(pool.QueuePool, 'connect', listen_two)
    event.listen(pool.SingletonThreadPool, 'connect', listen_three)

    p1 = pool.QueuePool(creator=MockDBAPI().connect)
    p2 = pool.SingletonThreadPool(creator=MockDBAPI().connect)

    # the base-class listener is present on both subclasses; each
    # subclass-level listener appears only on its own class
    assert listen_one in p1.dispatch.connect
    assert listen_two in p1.dispatch.connect
    assert listen_three not in p1.dispatch.connect
    assert listen_one in p2.dispatch.connect
    assert listen_two not in p2.dispatch.connect
    assert listen_three in p2.dispatch.connect

    # firing 'connect' on each pool invokes only that pool's
    # listeners, base-class listener first
    p1.connect()
    eq_(canary, ["listen_one", "listen_two"])
    p2.connect()
    eq_(canary, ["listen_one", "listen_two", "listen_one",
                 "listen_three"])
def _do_testthreadlocal(self, useclose=False):
    """Exercise use_threadlocal behavior for QueuePool and
    SingletonThreadPool: repeated connect() in one thread returns the
    same checked-out connection, while unique_connection() does not.

    Runs twice via the caller: useclose=True returns connections
    explicitly with close(); useclose=False drops the reference and
    relies on garbage collection (hence the lazy_gc() calls).
    """
    dbapi = MockDBAPI()
    for p in pool.QueuePool(creator=dbapi.connect,
                            pool_size=3, max_overflow=-1,
                            use_threadlocal=True), \
            pool.SingletonThreadPool(creator=dbapi.connect,
                                     use_threadlocal=True):
        c1 = p.connect()
        c2 = p.connect()
        # threadlocal: second connect() in the same thread returns
        # the same connection record
        self.assert_(c1 is c2)
        c3 = p.unique_connection()
        # unique_connection() bypasses the threadlocal slot
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None
        c2 = p.connect()
        self.assert_(c1 is c2)
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None
            # force collection of the dropped reference before
            # continuing
            lazy_gc()

        if useclose:
            c1 = p.connect()
            c2 = p.connect()
            c3 = p.connect()
            c3.close()
            c2.close()
            # c1 is still checked out; its underlying DBAPI
            # connection must remain attached
            self.assert_(c1.connection is not None)
            c1.close()

        c1 = c2 = c3 = None

        # extra tests with QueuePool to ensure connections get
        # __del__()ed when dereferenced
        if isinstance(p, pool.QueuePool):
            lazy_gc()
            self.assert_(p.checkedout() == 0)
            c1 = p.connect()
            c2 = p.connect()
            if useclose:
                c2.close()
                c1.close()
            else:
                c2 = None
                c1 = None
                lazy_gc()
            self.assert_(p.checkedout() == 0)
def _test_cleanup(self, strong_refs):
    """test that the pool's connections are OK after cleanup() has
    been called.

    strong_refs=True additionally holds hard references to every
    DBAPI connection handed out, then verifies that only pool_size
    of them are still open at the end.
    """
    dbapi = MockDBAPI()

    lock = threading.Lock()

    def creator():
        # the mock iterator isn't threadsafe...
        with lock:
            return dbapi.connect()

    p = pool.SingletonThreadPool(creator=creator, pool_size=3)

    if strong_refs:
        # keep every raw DBAPI connection alive so close-tracking
        # can be inspected afterwards
        sr = set()

        def _conn():
            c = p.connect()
            sr.add(c.connection)
            return c
    else:
        def _conn():
            return p.connect()

    def checkout():
        for x in range(10):
            c = _conn()
            assert c
            c.cursor()
            c.close()
            time.sleep(.1)

    threads = []
    for i in range(10):
        th = threading.Thread(target=checkout)
        th.start()
        threads.append(th)
    for th in threads:
        th.join(join_timeout)

    # SingletonThreadPool cleans itself down to pool_size entries
    assert len(p._all_conns) == 3

    if strong_refs:
        # only the pool_size retained connections should still be
        # open; everything else was closed by cleanup
        still_opened = len([c for c in sr if not c.close.call_count])
        eq_(still_opened, 3)
def _do_testthreadlocal(self, useclose=False):
    """Exercise use_threadlocal behavior for QueuePool and
    SingletonThreadPool: repeated connect() in one thread returns the
    same connection, while unique_connection() does not.

    useclose=True returns connections via close(); useclose=False
    drops references and relies on __del__/GC.
    """
    for p in (
        pool.QueuePool(creator = lambda: mock_dbapi.connect('foo.db'),
                       pool_size = 3, max_overflow = -1,
                       use_threadlocal = True),
        pool.SingletonThreadPool(creator = lambda: mock_dbapi.connect('foo.db'),
                                 use_threadlocal = True)
    ):
        c1 = p.connect()
        c2 = p.connect()
        # threadlocal: same thread gets the same connection back
        self.assert_(c1 is c2)
        c3 = p.unique_connection()
        # unique_connection() bypasses the threadlocal slot
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None
        c2 = p.connect()
        self.assert_(c1 is c2)
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None

        if useclose:
            c1 = p.connect()
            c2 = p.connect()
            c3 = p.connect()
            c3.close()
            c2.close()
            # c1 is still checked out; its DBAPI connection must
            # remain attached
            self.assert_(c1.connection is not None)
            c1.close()

        c1 = c2 = c3 = None

        # extra tests with QueuePool to insure connections get __del__()ed when dereferenced
        if isinstance(p, pool.QueuePool):
            self.assert_(p.checkedout() == 0)
            c1 = p.connect()
            c2 = p.connect()
            if useclose:
                c2.close()
                c1.close()
            else:
                c2 = None
                c1 = None
            self.assert_(p.checkedout() == 0)
def getPool(dbtype, params, poolParams):
    """Get connection pool. Defines getConnection method based on db type

    Builds the backend-specific creator callable and wraps it in a
    SQLAlchemy pool.  SQLite uses SingletonThreadPool (its connections
    are tied to the creating thread); the other backends use QueuePool.

    Input: (string) database type, (dict) connect() parameters,
        (dict) pool constructor parameters.
    Output: (class) pool instance
    Raises: ValueError if the database type is not one of the known
        T_* constants.
    """
    if dbtype == T_POSTGRESQL:
        funct = getPostgresConnection(**params)
        return pool.QueuePool(creator=funct, **poolParams)
    elif dbtype == T_MYSQL:
        funct = getMysqlConnection(**params)
        return pool.QueuePool(creator=funct, **poolParams)
    elif dbtype == T_ORACLE:
        funct = getOracleConnection(**params)
        return pool.QueuePool(creator=funct, **poolParams)
    elif dbtype == T_SQLITE:
        funct = getSqliteConnection(**params)
        return pool.SingletonThreadPool(creator=funct, **poolParams)
    # previously an unrecognized dbtype fell through and silently
    # returned None, surfacing later as an opaque AttributeError at
    # the call site; fail fast with a clear message instead.
    raise ValueError("unsupported database type: %r" % (dbtype,))
def test_cleanup(self):
    """test that the pool's connections are OK after cleanup() has
    been called."""
    p = pool.SingletonThreadPool(creator=mock_dbapi.connect, pool_size=3)

    def checkout():
        # check connections in and out repeatedly; the pool's
        # cleanup should keep at most pool_size entries alive
        # range() instead of xrange(): identical behavior here and
        # also valid on Python 3, where xrange raises NameError
        for x in range(10):
            c = p.connect()
            assert c
            c.cursor()
            c.close()
            time.sleep(.1)

    threads = []
    for i in range(10):
        th = threading.Thread(target=checkout)
        th.start()
        threads.append(th)
    for th in threads:
        th.join()

    # after all workers finish, exactly pool_size connections remain
    assert len(p._all_conns) == 3
def _test_cleanup(self, strong_refs):
    """test that the pool's connections are OK after cleanup() has
    been called.

    strong_refs=True additionally holds hard references to every
    DBAPI connection handed out, then verifies that only pool_size
    of them remain open at the end.
    """
    p = pool.SingletonThreadPool(creator=mock_dbapi.connect, pool_size=3)

    if strong_refs:
        # keep raw DBAPI connections alive so their open/closed
        # state can be inspected afterwards
        sr = set()

        def _conn():
            c = p.connect()
            sr.add(c.connection)
            return c
    else:
        def _conn():
            return p.connect()

    def checkout():
        # range() instead of xrange(): identical behavior here and
        # also valid on Python 3, where xrange raises NameError
        for x in range(10):
            c = _conn()
            assert c
            c.cursor()
            c.close()
            time.sleep(.1)

    threads = []
    for i in range(10):
        th = threading.Thread(target=checkout)
        th.start()
        threads.append(th)
    for th in threads:
        th.join()

    # the pool cleans itself down to pool_size entries
    assert len(p._all_conns) == 3

    if strong_refs:
        # only the retained pool_size connections should still be open
        still_opened = len([c for c in sr if not c.closed])
        eq_(still_opened, 3)
import sqlite


def getconn_pg():
    # Creator callable for the PostgreSQL pool.
    # NOTE(review): psycopg2.connect() conventionally takes 'user=',
    # not 'username=' -- verify this keyword against the psycopg2
    # version in use.
    c = psycopg2.connect(database='mydb', username='******', password='******')
    return c


def getconn_sl():
    # Creator callable for the SQLite pool.
    c = sqlite.connect(filename='devdata.sqlite')
    return c


pool_pg = pool.QueuePool(getconn_pg, use_threadlocal=True)

# SQLite requires use of the SingletonThreadPool
pool_sl = pool.SingletonThreadPool(getconn_sl)

""" Some of the various pool types that are available in the sqlalchemy.pool module are: AssertionPool Allows only one connection to be checked out at a time and raises an AssertionEr ror when this constraint is violated. NullPool Does no pooling; instead, actually opens and closes the underlying DB-API con- nection on each check out/check in of a connection. QueuePool Maintains a fixed-size connection pool. This is the default connection pool class used for non-sqlite connections.