def test_conn_reusable(self):
    """After the DBAPI dies, the connection is invalidated (not closed)
    and transparently reconnects on the next execute."""
    conn = db.connect()
    conn.execute(select([1]))
    assert len(dbapi.connections) == 1

    # kill the backend; the next execute must raise
    dbapi.shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError:
        pass

    # the failure invalidates the connection rather than closing it
    assert not conn.closed
    assert conn.invalidated

    # ensure all connections closed (pool was recycled)
    gc_collect()
    assert len(dbapi.connections) == 0

    # test reconnects
    conn.execute(select([1]))
    assert not conn.invalidated
    assert len(dbapi.connections) == 1
def test_autoflush_expressions(self):
    """An expression that depends on object state is evaluated after
    the session autoflushes (the lambda inside strategies.py
    lazy_clause)."""
    mapper(User, users, properties={
        'addresses': relationship(Address, backref="user")})
    mapper(Address, addresses)
    sess = create_session(autoflush=True, autocommit=False)

    u = User(name='ed', addresses=[Address(email_address='foo')])
    sess.add(u)
    eq_(
        sess.query(Address).filter(Address.user == u).one(),
        Address(email_address='foo'))

    # still works after "u" is garbage collected
    sess.commit()
    sess.close()
    u = sess.query(User).get(u.id)
    q = sess.query(Address).filter(Address.user == u)
    del u
    gc_collect()
    eq_(q.one(), Address(email_address='foo'))
def profile(*args): gc_collect() samples = [0 for x in range(0, 50)] for x in range(0, 50): func(*args) gc_collect() samples[x] = len(gc.get_objects()) print "sample gc sizes:", samples assert len(_sessions) == 0 for x in samples[-4:]: if x != samples[-5]: flatline = False break else: flatline = True # object count is bigger than when it started if not flatline and samples[-1] > samples[0]: for x in samples[1:-2]: # see if a spike bigger than the endpoint exists if x > samples[-1]: break else: assert False, repr(samples) + " " + repr(flatline)
def all(): setup() try: t, t2 = 0, 0 def usage(label): now = resource.getrusage(resource.RUSAGE_SELF) print "%s: %0.3fs real, %0.3fs user, %0.3fs sys" % ( label, t2 - t, now.ru_utime - usage.last.ru_utime, now.ru_stime - usage.last.ru_stime) usage.snap(now) usage.snap = lambda stats=None: setattr( usage, 'last', stats or resource.getrusage(resource.RUSAGE_SELF)) session = create_session() gc_collect() usage.snap() t = time.clock() people = orm_select(session) t2 = time.clock() usage('load objects') gc_collect() usage.snap() t = time.clock() update_and_flush(session, people) t2 = time.clock() usage('update and flush') finally: metadata.drop_all()
def test_weakref_with_cycles_o2m(self):
    """Backref cycles do not defeat the weak identity map: clean
    objects are collected, dirty ones stay pinned until commit."""
    s = sessionmaker()()
    mapper(User, users, properties={
        "addresses": relation(Address, backref="user")
    })
    mapper(Address, addresses)
    s.add(User(name="ed", addresses=[Address(email_address="ed1")]))
    s.commit()

    user = s.query(User).options(eagerload(User.addresses)).one()
    user.addresses[0].user  # lazyload
    eq_(user, User(name="ed", addresses=[Address(email_address="ed1")]))

    # unmodified objects fall out of the identity map once dereferenced
    del user
    gc_collect()
    assert len(s.identity_map) == 0

    user = s.query(User).options(eagerload(User.addresses)).one()
    user.addresses[0].email_address = 'ed2'
    user.addresses[0].user  # lazyload
    del user
    gc_collect()
    # the dirty Address and its User remain strongly referenced
    assert len(s.identity_map) == 2

    s.commit()
    user = s.query(User).options(eagerload(User.addresses)).one()
    eq_(user, User(name="ed", addresses=[Address(email_address="ed2")]))
def test_weakref_with_cycles_o2m(self):
    """Weak identity map behavior in the presence of a backref cycle:
    clean instances are garbage collected, modified ones are not."""
    s = sessionmaker()()
    mapper(User, users, properties={
        "addresses": relationship(Address, backref="user")
    })
    mapper(Address, addresses)
    s.add(User(name="ed", addresses=[Address(email_address="ed1")]))
    s.commit()

    user = s.query(User).options(joinedload(User.addresses)).one()
    user.addresses[0].user  # lazyload
    eq_(user, User(name="ed", addresses=[Address(email_address="ed1")]))

    # no pending changes -> everything is collectable
    del user
    gc_collect()
    assert len(s.identity_map) == 0

    user = s.query(User).options(joinedload(User.addresses)).one()
    user.addresses[0].email_address = 'ed2'
    user.addresses[0].user  # lazyload
    del user
    gc_collect()
    # dirty Address plus its User stay in the map
    assert len(s.identity_map) == 2

    s.commit()
    user = s.query(User).options(joinedload(User.addresses)).one()
    eq_(user, User(name="ed", addresses=[Address(email_address="ed2")]))
def go():
    # twelve Foo objects, each wrapping a distinct one-entry dict
    obj = [Foo({key: 1}) for key in 'abcdefghijkl']
    session.add_all(obj)
    session.commit()
    testing.eq_(len(session.identity_map._mutable_attrs), 12)
    testing.eq_(len(session.identity_map), 12)
    # drop the only strong references; the map should empty out
    obj = None
    gc_collect()
    testing.eq_(len(session.identity_map._mutable_attrs), 0)
    testing.eq_(len(session.identity_map), 0)
def _test_overflow(self, thread_count, max_overflow):
    """Hammer a QueuePool from many threads and verify overflow never
    exceeds ``max_overflow``; afterwards no pool refs may linger."""
    gc_collect()

    def creator():
        time.sleep(.05)
        return mock_dbapi.connect()

    p = pool.QueuePool(creator=creator, pool_size=3, timeout=2,
                       max_overflow=max_overflow)
    peaks = []

    def whammy():
        for i in range(10):
            try:
                con = p.connect()
                time.sleep(.005)
                peaks.append(p.overflow())
                con.close()
                del con
            except tsa.exc.TimeoutError:
                pass

    threads = []
    for i in xrange(thread_count):
        th = threading.Thread(target=whammy)
        th.start()
        threads.append(th)
    for th in threads:
        th.join()

    self.assert_(max(peaks) <= max_overflow)

    lazy_gc()
    assert not pool._refs
def test_weakref_with_cycles_o2o(self):
    """Same as the o2m version but with a scalar (uselist=False)
    backref cycle."""
    s = sessionmaker()()
    mapper(User, users, properties={
        "address": relationship(Address, backref="user", uselist=False)
    })
    mapper(Address, addresses)
    s.add(User(name="ed", address=Address(email_address="ed1")))
    s.commit()

    user = s.query(User).options(joinedload(User.address)).one()
    user.address.user
    eq_(user, User(name="ed", address=Address(email_address="ed1")))

    # clean objects are weakly held
    del user
    gc_collect()
    assert len(s.identity_map) == 0

    user = s.query(User).options(joinedload(User.address)).one()
    user.address.email_address = 'ed2'
    user.address.user  # lazyload
    del user
    gc_collect()
    # dirty Address plus its User stay pinned
    assert len(s.identity_map) == 2

    s.commit()
    user = s.query(User).options(joinedload(User.address)).one()
    eq_(user, User(name="ed", address=Address(email_address="ed2")))
def test_gced_delete_on_rollback(self):
    """A deleted+flushed object whose Python instance was garbage
    collected is still restored into the identity map on rollback."""
    s = self.session()
    u1 = User(name='ed')
    s.add(u1)
    s.commit()

    s.delete(u1)
    u1_state = attributes.instance_state(u1)
    assert u1_state in s.identity_map.all_states()
    assert u1_state in s._deleted
    s.flush()
    assert u1_state not in s.identity_map.all_states()
    assert u1_state not in s._deleted

    del u1
    gc_collect()
    # the state's weakref to the instance is now dead
    assert u1_state.obj() is None

    s.rollback()
    # rollback resurrects the state into the identity map...
    assert u1_state in s.identity_map.all_states()
    u1 = s.query(User).filter_by(name='ed').one()
    # ...but a fresh query replaces it with a new state
    assert u1_state not in s.identity_map.all_states()

    assert s.scalar(users.count()) == 1
    s.delete(u1)
    s.flush()
    assert s.scalar(users.count()) == 0
    s.commit()
def _test_overflow(self, thread_count, max_overflow):
    """Stress a QueuePool with ``thread_count`` workers; the observed
    overflow peak must stay within ``max_overflow``."""
    gc_collect()

    def creator():
        time.sleep(0.05)
        return mock_dbapi.connect()

    p = pool.QueuePool(creator=creator, pool_size=3, timeout=2,
                       max_overflow=max_overflow)
    peaks = []

    def whammy():
        # check out / release repeatedly, recording the overflow level
        for i in range(10):
            try:
                con = p.connect()
                time.sleep(0.005)
                peaks.append(p.overflow())
                con.close()
                del con
            except tsa.exc.TimeoutError:
                pass

    threads = []
    for i in xrange(thread_count):
        th = threading.Thread(target=whammy)
        th.start()
        threads.append(th)
    for th in threads:
        th.join()

    self.assert_(max(peaks) <= max_overflow)

    # every pooled connection must have been reclaimed
    lazy_gc()
    assert not pool._refs
def go():
    # one Foo per letter, each holding its own single-key mutable dict
    obj = [Foo({letter: 1}) for letter in 'abcdefghijkl']
    session.add_all(obj)
    session.commit()
    testing.eq_(len(session.identity_map._mutable_attrs), 12)
    testing.eq_(len(session.identity_map), 12)
    # release the strong references and force collection
    obj = None
    gc_collect()
    testing.eq_(len(session.identity_map._mutable_attrs), 0)
    testing.eq_(len(session.identity_map), 0)
def test_weak_ref_pickled(self):
    """A pickled/unpickled copy of a dirty instance is still tracked
    as dirty when re-added, and strongly referenced until flushed."""
    s = create_session()
    mapper(User, users)
    s.add(User(name='ed'))
    s.flush()
    assert not s.dirty

    user = s.query(User).one()
    user.name = 'fred'
    s.expunge(user)

    u2 = pickle.loads(pickle.dumps(user))
    del user
    s.add(u2)
    del u2
    gc_collect()

    # the dirty copy is pinned by the session despite no outside refs
    assert len(s.identity_map) == 1
    assert len(s.dirty) == 1
    assert None not in s.dirty
    s.flush()
    gc_collect()
    assert not s.dirty
    assert not s.identity_map
def test_copy(self):
    """copy.copy() of a parent keeps its collection intact even after
    the original instance is garbage collected."""
    mapper(Parent, self.parents,
           properties=dict(children=relationship(Child)))
    mapper(Child, self.children)
    p = Parent('p1')
    p.kids.extend(['c1', 'c2'])
    p_copy = copy.copy(p)
    del p
    gc_collect()
    # bug fix: the failure message formerly read `p.kids`, but `p` was
    # deleted above -- a failing assert would raise NameError instead
    # of reporting the collection contents
    assert set(p_copy.kids) == set(['c1', 'c2']), p_copy.kids
def test_weak_single(self):
    """Dropping one item from the fixture collection removes exactly
    that entry from the weak identity map."""
    data, wim = self._fixture()
    assert len(data) == len(wim) == len(wim.by_id)

    oid = id(data[0])
    del data[0]
    gc_collect()

    # map shrank in lockstep with the data list
    assert len(data) == len(wim) == len(wim.by_id)
    assert oid not in wim.by_id
def test_weak_clear(self):
    """Emptying the fixture collection empties the weak identity map,
    including its internal weakref bookkeeping."""
    data, wim = self._fixture()
    assert len(data) == len(wim) == len(wim.by_id)

    del data[:]
    gc_collect()

    eq_(wim, {})
    eq_(wim.by_id, {})
    eq_(wim._weakrefs, {})
def test_weakref_kaboom(self):
    """Connections released via weakref death (not just close()) are
    returned to a threadlocal QueuePool."""
    p = pool.QueuePool(creator=mock_dbapi.connect,
                       pool_size=3, max_overflow=-1,
                       use_threadlocal=True)
    c1 = p.connect()
    c2 = p.connect()
    c1.close()
    c2 = None
    del c1
    del c2
    gc_collect()
    assert p.checkedout() == 0
    c3 = p.connect()
    assert c3 is not None
def _profile_cProfile(filename, fn, *args, **kw):
    """Profile ``fn(*args, **kw)`` with cProfile into ``filename``.

    Returns (elapsed seconds, lazy stats loader, fn's result).
    """
    import cProfile, gc, pstats, time

    load_stats = lambda: pstats.Stats(filename)
    gc_collect()

    began = time.time()
    # runctx binds `result` into this frame's locals
    cProfile.runctx('result = fn(*args, **kw)', globals(), locals(),
                    filename=filename)
    ended = time.time()

    return ended - began, load_stats, locals()['result']
def test_reconnect(self):
    """test that an 'is_disconnect' condition will invalidate the
    connection, and additionally dispose the previous connection
    pool and recreate."""
    pid = id(db.pool)

    # make a connection
    conn = db.connect()

    # connection works
    conn.execute(select([1]))

    # create a second connection within the pool, which we'll ensure
    # also goes away
    conn2 = db.connect()
    conn2.close()

    # two connections opened total now
    assert len(dbapi.connections) == 2

    # set it to fail
    dbapi.shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError:
        pass

    # assert was invalidated
    assert not conn.closed
    assert conn.invalidated

    # close shouldnt break
    conn.close()

    # the engine created a brand new pool
    assert id(db.pool) != pid

    # ensure all connections closed (pool was recycled)
    gc_collect()
    assert len(dbapi.connections) == 0

    conn = db.connect()
    conn.execute(select([1]))
    conn.close()
    assert len(dbapi.connections) == 1
def test_identity_map_mutate(self):
    """Deleting an entry mid-iteration of the identity map must not
    break the iteration."""
    mapper(User, users)
    sess = Session()
    sess.add_all([User(name='u1'), User(name='u2'), User(name='u3')])
    sess.commit()

    u1, u2, u3 = sess.query(User).all()
    for i, (key, value) in enumerate(sess.identity_map.iteritems()):
        if i == 2:
            # drop the last strong reference while still iterating
            del u3
            gc_collect()
def test_expire_all(self):
    """expire_all() releases eagerly-loaded addresses from the weak
    identity map; a re-query restores the full object graph."""
    mapper(User, users, properties={
        "addresses": relation(Address, backref="user", lazy=False)})
    mapper(Address, addresses)
    sess = create_session()

    userlist = sess.query(User).order_by(User.id).all()
    assert self.static.user_address_result == userlist
    assert len(list(sess)) == 9

    sess.expire_all()
    gc_collect()
    assert len(list(sess)) == 4  # since addresses were gc'ed

    userlist = sess.query(User).order_by(User.id).all()
    u = userlist[1]
    eq_(self.static.user_address_result, userlist)
    assert len(list(sess)) == 9
def _profile_hotshot(filename, fn, *args, **kw):
    """Profile ``fn(*args, **kw)`` with hotshot into ``filename``.

    Returns (elapsed seconds, lazy stats loader, fn's result).
    """
    import gc, hotshot, hotshot.stats, time

    load_stats = lambda: hotshot.stats.load(filename)
    gc_collect()

    prof = hotshot.Profile(filename)
    began = time.time()
    prof.start()
    try:
        result = fn(*args, **kw)
    finally:
        # always stop the profiler, even if fn raised
        prof.stop()
    ended = time.time()
    prof.close()

    return ended - began, load_stats, result
def test_prune(self):
    """Exercise Session.prune() on a strong-referencing identity map:
    only unreferenced, unmodified, non-pending instances are pruned."""
    s = create_session(weak_identity_map=False)
    mapper(User, users)

    for o in [User(name='u%s' % x) for x in xrange(10)]:
        s.add(o)
    # o is still live after this loop...
    self.assert_(len(s.identity_map) == 0)
    self.assert_(s.prune() == 0)
    s.flush()
    gc_collect()
    self.assert_(s.prune() == 9)
    self.assert_(len(s.identity_map) == 1)

    # renamed from `id`: the original shadowed the builtin id()
    user_id = o.id
    del o
    self.assert_(s.prune() == 1)
    self.assert_(len(s.identity_map) == 0)

    u = s.query(User).get(user_id)
    self.assert_(s.prune() == 0)
    self.assert_(len(s.identity_map) == 1)

    # dirty instances are never pruned
    u.name = 'squiznart'
    del u
    self.assert_(s.prune() == 0)
    self.assert_(len(s.identity_map) == 1)
    s.flush()
    self.assert_(s.prune() == 1)
    self.assert_(len(s.identity_map) == 0)

    # pending instances are never pruned
    s.add(User(name='x'))
    self.assert_(s.prune() == 0)
    self.assert_(len(s.identity_map) == 0)
    s.flush()
    self.assert_(len(s.identity_map) == 1)
    self.assert_(s.prune() == 1)
    self.assert_(len(s.identity_map) == 0)

    # deleted instances are never pruned
    u = s.query(User).get(user_id)
    s.delete(u)
    del u
    self.assert_(s.prune() == 0)
    self.assert_(len(s.identity_map) == 1)
    s.flush()
    self.assert_(s.prune() == 0)
    self.assert_(len(s.identity_map) == 0)
def test_weak_identity_map(self):
    """Appends made on instances re-fetched across a gc cycle are all
    retained when the session flushes."""
    mapper(Parent, self.parents,
           properties=dict(children=relationship(Child)))
    mapper(Child, self.children)
    session = create_session(weak_identity_map=True)

    def add_child(parent_name, child_name):
        # each call re-queries the parent and appends one child
        parent = \
            session.query(Parent).filter_by(name=parent_name).one()
        parent.kids.append(child_name)

    add_child('p1', 'c1')
    gc_collect()
    add_child('p1', 'c2')
    session.flush()

    p = session.query(Parent).filter_by(name='p1').one()
    assert set(p.kids) == set(['c1', 'c2']), p.kids
def test_strong_ref(self):
    """A strong-referencing identity map retains instances with no
    outside references, and tracks modifications via _modified."""
    s = create_session(weak_identity_map=False)
    mapper(User, users)

    # save user
    s.add(User(name='u1'))
    s.flush()

    user = s.query(User).one()
    user = None
    # leftover debug `print s.identity_map` removed
    gc_collect()
    # still held despite no external reference
    assert len(s.identity_map) == 1

    user = s.query(User).one()
    assert not s.identity_map._modified
    user.name = 'u2'
    assert s.identity_map._modified
    s.flush()
    eq_(users.select().execute().fetchall(), [(user.id, 'u2')])
def test_expire_all(self):
    """expire_all() lets eagerly-loaded addresses be collected; they
    reappear when the users are queried again."""
    mapper(User, users, properties={
        'addresses': relationship(Address, backref='user',
                                  lazy='joined',
                                  order_by=addresses.c.id),
    })
    mapper(Address, addresses)
    sess = create_session()

    userlist = sess.query(User).order_by(User.id).all()
    assert self.static.user_address_result == userlist
    assert len(list(sess)) == 9

    sess.expire_all()
    gc_collect()
    assert len(list(sess)) == 4  # since addresses were gc'ed

    userlist = sess.query(User).order_by(User.id).all()
    u = userlist[1]
    eq_(self.static.user_address_result, userlist)
    assert len(list(sess)) == 9
def test_expire_all(self):
    """After expire_all(), only the four User rows remain in the
    session; re-querying restores users plus addresses (9 total)."""
    mapper(User, users, properties={
        'addresses': relationship(Address, backref='user',
                                  lazy='joined'),
    })
    mapper(Address, addresses)
    sess = create_session()

    userlist = sess.query(User).order_by(User.id).all()
    assert self.static.user_address_result == userlist
    assert len(list(sess)) == 9

    sess.expire_all()
    gc_collect()
    assert len(list(sess)) == 4  # since addresses were gc'ed

    userlist = sess.query(User).order_by(User.id).all()
    u = userlist[1]
    eq_(self.static.user_address_result, userlist)
    assert len(list(sess)) == 9
def test_weak_ref(self):
    """test the weak-referencing identity map, which strongly-
    references modified items."""
    s = create_session()
    mapper(User, users)
    s.add(User(name='ed'))
    s.flush()
    assert not s.dirty

    # clean instance: collected once dereferenced
    user = s.query(User).one()
    del user
    gc_collect()
    assert len(s.identity_map) == 0

    # dirty instance: pinned until the flush completes
    user = s.query(User).one()
    user.name = 'fred'
    del user
    gc_collect()
    assert len(s.identity_map) == 1
    assert len(s.dirty) == 1
    assert None not in s.dirty

    s.flush()
    gc_collect()
    assert not s.dirty
    assert not s.identity_map

    user = s.query(User).one()
    assert user.name == 'fred'
    assert s.identity_map
def test_invalidate_trans(self): conn = db.connect() trans = conn.begin() dbapi.shutdown() try: conn.execute(select([1])) assert False except tsa.exc.DBAPIError: pass # assert was invalidated gc_collect() assert len(dbapi.connections) == 0 assert not conn.closed assert conn.invalidated assert trans.is_active try: conn.execute(select([1])) assert False except tsa.exc.InvalidRequestError, e: assert str(e) == "Can't reconnect until invalid transaction is rolled back"
def test_weak_threadhop(self):
    """The weak identity map empties correctly even when the backing
    collection is cleared from another thread."""
    data, wim = self._fixture()
    data = set(data)

    cv = threading.Condition()

    def empty(obj):
        # clear the set on the worker thread, then signal
        cv.acquire()
        obj.clear()
        cv.notify()
        cv.release()

    th = threading.Thread(target=empty, args=(data, ))
    cv.acquire()
    th.start()
    cv.wait()
    cv.release()

    gc_collect()
    eq_(wim, {})
    eq_(wim.by_id, {})
    eq_(wim._weakrefs, {})
def test_weak_threadhop(self):
    """Clearing the backing collection from a second thread still
    empties the weak identity map and its internal weakrefs."""
    data, wim = self._fixture()
    data = set(data)

    cv = threading.Condition()

    def empty(obj):
        cv.acquire()
        obj.clear()
        cv.notify()
        cv.release()

    th = threading.Thread(target=empty, args=(data,))
    # wait until the worker has cleared the set
    cv.acquire()
    th.start()
    cv.wait()
    cv.release()

    gc_collect()
    eq_(wim, {})
    eq_(wim.by_id, {})
    eq_(wim._weakrefs, {})
def test_invalidate_trans(self): conn = db.connect() trans = conn.begin() dbapi.shutdown() try: conn.execute(select([1])) assert False except tsa.exc.DBAPIError: pass # assert was invalidated gc_collect() assert len(dbapi.connections) == 0 assert not conn.closed assert conn.invalidated assert trans.is_active try: conn.execute(select([1])) assert False except tsa.exc.InvalidRequestError, e: assert str(e) \ == "Can't reconnect until invalid transaction is "\ "rolled back"
def test_autoflush_expressions(self):
    """test that an expression which is dependent on object state is
    evaluated after the session autoflushes.   This is the lambda
    inside of strategies.py lazy_clause.
    """
    mapper(User, users, properties={
        'addresses': relationship(Address, backref="user")})
    mapper(Address, addresses)
    sess = create_session(autoflush=True, autocommit=False)

    u = User(name='ed', addresses=[Address(email_address='foo')])
    sess.add(u)
    eq_(sess.query(Address).filter(Address.user == u).one(),
        Address(email_address='foo'))

    # still works after "u" is garbage collected
    sess.commit()
    sess.close()
    u = sess.query(User).get(u.id)
    q = sess.query(Address).filter(Address.user == u)
    del u
    gc_collect()
    eq_(q.one(), Address(email_address='foo'))
# with this test, run top. make sure the Python process doenst grow in size arbitrarily. class User(object): pass class Address(object): pass attributes.register_attribute(User, 'id', False, False) attributes.register_attribute(User, 'name', False, False) attributes.register_attribute(User, 'addresses', True, False) attributes.register_attribute(Address, 'email', False, False) attributes.register_attribute(Address, 'user', False, False) for i in xrange(1000): for j in xrange(1000): u = User() attributes.manage(u) u.name = str(random.randint(0, 100000000)) for k in xrange(10): a = Address() a.email_address = str(random.randint(0, 100000000)) attributes.manage(a) u.addresses.append(a) a.user = u print "clearing" #managed_attributes.clear() gc_collect()
def assert_no_mappers():
    """Clear all mappers and verify the registry is fully emptied
    after a garbage collection pass."""
    clear_mappers()
    gc_collect()
    assert len(_mapper_registry) == 0
def all(): setup() try: t, t2 = 0, 0 def usage(label): now = resource.getrusage(resource.RUSAGE_SELF) print "%s: %0.3fs real, %0.3fs user, %0.3fs sys" % ( label, t2 - t, now.ru_utime - usage.last.ru_utime, now.ru_stime - usage.last.ru_stime) usage.snap(now) usage.snap = lambda stats=None: setattr( usage, 'last', stats or resource.getrusage(resource.RUSAGE_SELF)) gc_collect() usage.snap() t = time.clock() sqlite_select(RawPerson) t2 = time.clock() usage('sqlite select/native') gc_collect() usage.snap() t = time.clock() sqlite_select(Person) t2 = time.clock() usage('sqlite select/instrumented') gc_collect() usage.snap() t = time.clock() sql_select(RawPerson) t2 = time.clock() usage('sqlalchemy.sql select/native') gc_collect() usage.snap() t = time.clock() sql_select(Person) t2 = time.clock() usage('sqlalchemy.sql select/instrumented') gc_collect() usage.snap() t = time.clock() orm_select() t2 = time.clock() usage('sqlalchemy.orm fetch') gc_collect() usage.snap() t = time.clock() joined_orm_select() t2 = time.clock() usage('sqlalchemy.orm "joined" fetch') finally: metadata.drop_all()