def test_invalidate_trans(self):
    """A DB shutdown mid-transaction invalidates the connection; reconnect
    is refused until the invalid transaction is rolled back."""
    conn = db.connect()
    trans = conn.begin()
    dbapi.shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError:
        pass
    # assert was invalidated
    gc.collect()
    assert len(dbapi.connections) == 0
    assert not conn.closed
    assert conn.invalidated
    assert trans.is_active
    # executing again while the dead transaction is still active must fail
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.InvalidRequestError, e:
        assert str(e) == \
            "Can't reconnect until invalid transaction is rolled back"
def test_two_phase_recover(self):
    """A prepared two-phase transaction can be recovered and committed
    from a different connection."""
    # MySQL recovery doesn't currently seem to work correctly
    # Prepared transactions disappear when connections are closed and even
    # when they aren't it doesn't seem possible to use the recovery id.
    connection = testing.db.connect()
    transaction = connection.begin_twophase()
    connection.execute(users.insert(), user_id=1, user_name='user1')
    transaction.prepare()
    connection.close()
    connection2 = testing.db.connect()
    # the prepared row is not yet visible
    self.assertEquals(
        connection2.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [])
    recoverables = connection2.recover_twophase()
    self.assertTrue(transaction.xid in recoverables)
    connection2.commit_prepared(transaction.xid, recover=True)
    # after recovery-commit the row is visible
    self.assertEquals(
        connection2.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1,)])
    connection2.close()
def test_two_phase_recover(self):
    """Recover and commit a prepared two-phase transaction from a second
    connection (duplicate formatting variant of the same test)."""
    # MySQL recovery doesn't currently seem to work correctly
    # Prepared transactions disappear when connections are closed and even
    # when they aren't it doesn't seem possible to use the recovery id.
    connection = testing.db.connect()
    transaction = connection.begin_twophase()
    connection.execute(users.insert(), user_id=1, user_name='user1')
    transaction.prepare()
    connection.close()
    connection2 = testing.db.connect()
    # prepared-but-uncommitted work is invisible here
    self.assertEquals(
        connection2.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [])
    recoverables = connection2.recover_twophase()
    self.assertTrue(transaction.xid in recoverables)
    connection2.commit_prepared(transaction.xid, recover=True)
    self.assertEquals(
        connection2.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1, )])
    connection2.close()
def test_conn_reusable(self):
    """An invalidated connection transparently reconnects on the next
    execute, after the pool has been recycled."""
    conn = db.connect()
    conn.execute(select([1]))
    assert len(dbapi.connections) == 1
    dbapi.shutdown()
    # raises error
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError:
        pass
    assert not conn.closed
    assert conn.invalidated
    # ensure all connections closed (pool was recycled)
    gc.collect()
    assert len(dbapi.connections) == 0
    # test reconnects
    conn.execute(select([1]))
    assert not conn.invalidated
    assert len(dbapi.connections) == 1
def test_explicit_compiled(self):
    """Both select(..., autocommit=True) and .autocommit() commit
    immediately, so a second connection sees the inserted rows."""
    writer = testing.db.connect()
    reader = testing.db.connect()
    writer.execute(select([func.insert_foo('data1')], autocommit=True))
    assert reader.execute(select([foo.c.data])).fetchall() == [('data1',)]
    writer.execute(select([func.insert_foo('data2')]).autocommit())
    expected = [('data1',), ('data2',)]
    assert reader.execute(select([foo.c.data])).fetchall() == expected
    writer.close()
    reader.close()
def test_null_pool(self):
    """With NullPool, a server shutdown mid-use surfaces as a DBAPIError
    with connection_invalidated set."""
    engine = engines.reconnecting_engine(options=dict(poolclass=pool.NullPool))
    conn = engine.connect()
    self.assertEquals(conn.execute(select([1])).scalar(), 1)
    assert not conn.closed
    engine.test_shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError, e:
        # anything other than an invalidation is a real failure
        if not e.connection_invalidated:
            raise
def test_close(self):
    """A shutdown while a connection is checked out invalidates it
    (DBAPIError with connection_invalidated set)."""
    conn = engine.connect()
    self.assertEquals(conn.execute(select([1])).scalar(), 1)
    assert not conn.closed
    engine.test_shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError, e:
        if not e.connection_invalidated:
            raise
def test_null_pool(self):
    """NullPool variant: shutdown mid-use marks the connection invalidated
    (duplicate formatting variant)."""
    engine = engines.reconnecting_engine(options=dict(
        poolclass=pool.NullPool))
    conn = engine.connect()
    self.assertEquals(conn.execute(select([1])).scalar(), 1)
    assert not conn.closed
    engine.test_shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError, e:
        if not e.connection_invalidated:
            raise
def test_explicit_compiled(self):
    """select(..., autocommit=True) and .autocommit() both commit
    immediately; visible from a second connection."""
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    conn1.execute(select([func.insert_foo('data1')], autocommit=True))
    assert conn2.execute(select([foo.c.data])).fetchall() == [('data1', )]
    conn1.execute(select([func.insert_foo('data2')]).autocommit())
    assert conn2.execute(select([foo.c.data])).fetchall() == \
        [('data1', ), ('data2', )]
    conn1.close()
    conn2.close()
def test_basic(self):
    """pool_recycle=1 plus a shutdown and a 2s sleep: the next checkout
    gets a fresh, working connection (both threadlocal modes)."""
    for threadlocal in (False, True):
        engine = engines.reconnecting_engine(
            options={'pool_recycle': 1, 'pool_threadlocal': threadlocal})
        conn = engine.contextual_connect()
        self.assertEquals(conn.execute(select([1])).scalar(), 1)
        conn.close()
        engine.test_shutdown()
        # wait past the 1-second recycle window
        time.sleep(2)
        conn = engine.contextual_connect()
        self.assertEquals(conn.execute(select([1])).scalar(), 1)
        conn.close()
def _test_onetomany(self, passive_updates):
    """Exercise primary-key mutation on User propagating to Address rows,
    with and without passive_updates on the relation."""
    mapper(User, users, properties={
        'addresses': relation(Address, passive_updates=passive_updates)})
    mapper(Address, addresses)
    sess = create_session()
    # NOTE(review): username values appear masked ('******') in this
    # source; assertions below still reference 'jack'/'ed' literals.
    u1 = User(username='******', fullname='jack')
    u1.addresses.append(Address(email='jack1'))
    u1.addresses.append(Address(email='jack2'))
    sess.add(u1)
    sess.flush()
    a1 = u1.addresses[0]
    self.assertEquals(
        sa.select([addresses.c.username]).execute().fetchall(),
        [('jack',), ('jack',)])
    assert sess.query(Address).get(a1.id) is u1.addresses[0]
    u1.username = '******'
    sess.flush()
    assert u1.addresses[0].username == 'ed'
    self.assertEquals(
        sa.select([addresses.c.username]).execute().fetchall(),
        [('ed',), ('ed',)])
    sess.clear()
    self.assertEquals(
        [Address(username='******'), Address(username='******')],
        sess.query(Address).all())
    u1 = sess.query(User).get(u1.id)
    u1.username = '******'

    def go():
        sess.flush()
    if not passive_updates:
        # test passive_updates=False; load addresses, update user,
        # update 2 addresses
        self.assert_sql_count(testing.db, go, 4)
    else:
        # test passive_updates=True; update user
        self.assert_sql_count(testing.db, go, 1)
    sess.clear()
    assert User(username='******',
                addresses=[Address(username='******'),
                           Address(username='******')]) == \
        sess.query(User).get(u1.id)
    sess.clear()
    # emptying the collection nulls out the foreign key on flush
    u1 = sess.query(User).get(u1.id)
    u1.addresses = []
    u1.username = '******'
    sess.flush()
    sess.clear()
    a1 = sess.query(Address).get(a1.id)
    self.assertEquals(a1.username, None)
    self.assertEquals(
        sa.select([addresses.c.username]).execute().fetchall(),
        [(None,), (None,)])
    u1 = sess.query(User).get(u1.id)
    self.assertEquals(User(username='******', fullname='jack'), u1)
def test_mixed_two_phase_transaction(self):
    """Plain and SAVEPOINT transactions nested inside a two-phase
    transaction; the rolled-back savepoint discards users 3 and 4."""
    connection = testing.db.connect()
    transaction = connection.begin_twophase()
    connection.execute(users.insert(), user_id=1, user_name='user1')
    transaction2 = connection.begin()
    connection.execute(users.insert(), user_id=2, user_name='user2')
    transaction3 = connection.begin_nested()
    connection.execute(users.insert(), user_id=3, user_name='user3')
    transaction4 = connection.begin()
    connection.execute(users.insert(), user_id=4, user_name='user4')
    transaction4.commit()
    transaction3.rollback()
    connection.execute(users.insert(), user_id=5, user_name='user5')
    transaction2.commit()
    transaction.prepare()
    transaction.commit()
    # only rows outside the rolled-back savepoint remain
    self.assertEquals(
        connection.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1, ), (2, ), (5, )])
    connection.close()
def test_mixed_two_phase_transaction(self):
    """Duplicate formatting variant: mixed plain/SAVEPOINT nesting inside
    a two-phase transaction."""
    connection = testing.db.connect()
    transaction = connection.begin_twophase()
    connection.execute(users.insert(), user_id=1, user_name='user1')
    transaction2 = connection.begin()
    connection.execute(users.insert(), user_id=2, user_name='user2')
    transaction3 = connection.begin_nested()
    connection.execute(users.insert(), user_id=3, user_name='user3')
    transaction4 = connection.begin()
    connection.execute(users.insert(), user_id=4, user_name='user4')
    transaction4.commit()
    transaction3.rollback()
    connection.execute(users.insert(), user_id=5, user_name='user5')
    transaction2.commit()
    transaction.prepare()
    transaction.commit()
    # rows 3 and 4 vanished with the savepoint rollback
    self.assertEquals(
        connection.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1,), (2,), (5,)])
    connection.close()
def test_noorm(self): """test the control case""" # I want to display a list of tests owned by owner 1 # if someoption is false or he hasn't specified it yet (null) # but not if he set it to true (example someoption is for hiding) # desired output for owner 1 # test_id, cat_name # 1 'Some Category' # 3 " # not orm style correct query print "Obtaining correct results without orm" result = sa.select([tests.c.id, categories.c.name], sa.and_( tests.c.owner_id == 1, sa.or_(options.c.someoption == None, options.c.someoption == False)), order_by=[tests.c.id], from_obj=[ tests.join(categories).outerjoin( options, sa.and_( tests.c.id == options.c.test_id, tests.c.owner_id == options.c.owner_id)) ]).execute().fetchall() eq_(result, [(1, u'Some Category'), (3, u'Some Category')])
def test_nested_joins(self):
    # this is testing some subtle column resolution stuff,
    # concerning corresponding_column() being extremely accurate
    # as well as how mapper sets up its column properties
    mapper(Task_Type, task_type)
    # NOTE(review): tsk_cnt_join is assigned but never used below
    tsk_cnt_join = sa.outerjoin(prj, task, task.c.prj_id == prj.c.id)
    j = sa.outerjoin(task, msg, task.c.id == msg.c.task_id)
    # aggregate message count per task
    jj = sa.select([task.c.id.label('task_id'),
                    sa.func.count(msg.c.id).label('props_cnt')],
                   from_obj=[j], group_by=[task.c.id]).alias('prop_c_s')
    jjj = sa.join(task, jj, task.c.id == jj.c.task_id)
    mapper(Joined, jjj,
           properties=dict(type=relation(Task_Type, lazy=False)))
    session = create_session()
    eq_(session.query(Joined).limit(10).offset(0).one(),
        Joined(id=1, title=u'task 1', props_cnt=0))
def test_multiple_two_phase(self):
    """Sequential two-phase transactions on one connection; only the
    committed ones (users 1 and 4) persist."""
    conn = testing.db.connect()
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=1, user_name='user1')
    xa.prepare()
    xa.commit()
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=2, user_name='user2')
    xa.prepare()
    xa.rollback()
    # rollback without prepare
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=3, user_name='user3')
    xa.rollback()
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=4, user_name='user4')
    xa.prepare()
    xa.commit()
    result = conn.execute(
        select([users.c.user_name]).order_by(users.c.user_id))
    self.assertEqual(result.fetchall(), [('user1', ), ('user4', )])
    conn.close()
def test_noorm(self): """test the control case""" # I want to display a list of tests owned by owner 1 # if someoption is false or he hasn't specified it yet (null) # but not if he set it to true (example someoption is for hiding) # desired output for owner 1 # test_id, cat_name # 1 'Some Category' # 3 " # not orm style correct query print "Obtaining correct results without orm" result = ( sa.select( [tests.c.id, categories.c.name], sa.and_(tests.c.owner_id == 1, sa.or_(options.c.someoption == None, options.c.someoption == False)), order_by=[tests.c.id], from_obj=[ tests.join(categories).outerjoin( options, sa.and_(tests.c.id == options.c.test_id, tests.c.owner_id == options.c.owner_id) ) ], ) .execute() .fetchall() ) eq_(result, [(1, u"Some Category"), (3, u"Some Category")])
def test_multiple_two_phase(self):
    """Duplicate formatting variant: sequential two-phase transactions;
    committed ones persist, rolled-back ones do not."""
    conn = testing.db.connect()
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=1, user_name='user1')
    xa.prepare()
    xa.commit()
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=2, user_name='user2')
    xa.prepare()
    xa.rollback()
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=3, user_name='user3')
    xa.rollback()
    xa = conn.begin_twophase()
    conn.execute(users.insert(), user_id=4, user_name='user4')
    xa.prepare()
    xa.commit()
    result = conn.execute(
        select([users.c.user_name]).order_by(users.c.user_id))
    self.assertEqual(result.fetchall(), [('user1',), ('user4',)])
    conn.close()
def test_insert(self):
    """Inserts relying on column defaults: postfetch_cols lists the
    server/post-fetched columns, and the stored rows carry the defaults."""
    r = t.insert().execute()
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))
    # inline=True behaves the same with respect to postfetch
    r = t.insert(inline=True).execute()
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))
    t.insert().execute()
    ctexec = sa.select([currenttime.label('now')], bind=testing.db).scalar()
    l = t.select().order_by(t.c.col1).execute()
    today = datetime.date.today()
    eq_(l.fetchall(),
        [(x, 'imthedefault', f, ts, ts, ctexec, True, False, 12, today, 'py')
         for x in range(51, 54)])
    # explicit None overrides the col9 default
    t.insert().execute(col9=None)
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))
    eq_(t.select(t.c.col1 == 54).execute().fetchall(),
        [(54, 'imthedefault', f, ts, ts, ctexec, True, False, 12, today,
          None)])
def test_correlated_lazyload(self):
    """A relation whose primaryjoin embeds a correlated scalar subquery
    selecting each user's latest stuff row by descending date."""
    class User(_base.ComparableEntity):
        pass

    class Stuff(_base.ComparableEntity):
        pass

    mapper(Stuff, stuff)
    # latest stuff.id for the correlated user
    stuff_view = sa.select([stuff.c.id]).\
        where(stuff.c.user_id == user_t.c.id).correlate(user_t).\
        order_by(sa.desc(stuff.c.date)).limit(1)
    mapper(User, user_t, properties={
        'stuff': relation(
            Stuff,
            primaryjoin=sa.and_(user_t.c.id == stuff.c.user_id,
                                stuff.c.id == (stuff_view.as_scalar())))
    })
    sess = create_session()
    eq_(sess.query(User).all(), [
        User(name='user1',
             stuff=[Stuff(date=datetime.date(2007, 12, 15), id=2)]),
        User(name='user2',
             stuff=[Stuff(id=4, date=datetime.date(2008, 1, 15))]),
        User(name='user3',
             stuff=[Stuff(id=5, date=datetime.date(2007, 6, 15))])
    ])
def test_insert(self):
    """Duplicate formatting variant: inserts with column defaults and the
    resulting postfetch_cols/row contents."""
    r = t.insert().execute()
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))
    r = t.insert(inline=True).execute()
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))
    t.insert().execute()
    ctexec = sa.select([currenttime.label('now')], bind=testing.db).scalar()
    l = t.select().order_by(t.c.col1).execute()
    today = datetime.date.today()
    eq_(l.fetchall(), [
        (x, 'imthedefault', f, ts, ts, ctexec, True, False, 12, today, 'py')
        for x in range(51, 54)])
    t.insert().execute(col9=None)
    assert r.lastrow_has_defaults()
    eq_(set(r.context.postfetch_cols),
        set([t.c.col3, t.c.col5, t.c.col4, t.c.col6]))
    eq_(t.select(t.c.col1 == 54).execute().fetchall(),
        [(54, 'imthedefault', f, ts, ts, ctexec, True, False, 12, today,
          None)])
def test_relation_reference(self):
    """Declarative classes with an explicit primaryjoin relation plus a
    column_property counting each user's addresses."""
    class Address(Base, ComparableEntity):
        __tablename__ = 'addresses'

        id = Column('id', Integer, primary_key=True)
        email = Column('email', String(50))
        user_id = Column('user_id', Integer, ForeignKey('users.id'))

    class User(Base, ComparableEntity):
        __tablename__ = 'users'

        id = Column('id', Integer, primary_key=True)
        name = Column('name', String(50))
        addresses = relation("Address", backref="user",
                             primaryjoin=id == Address.user_id)

    # scalar subquery counting addresses per user, attached after the fact
    User.address_count = sa.orm.column_property(
        sa.select([sa.func.count(Address.id)]).
        where(Address.user_id == User.id).as_scalar())
    Base.metadata.create_all()
    u1 = User(name='u1', addresses=[
        Address(email='one'),
        Address(email='two'),
    ])
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.clear()
    eq_(sess.query(User).all(),
        [User(name='u1', address_count=2,
              addresses=[Address(email='one'), Address(email='two')])])
def mydefault_using_connection(ctx):
    """Column default callable: read the literal 12 through the execution
    context's connection, then close the (branched) connection."""
    connection = ctx.connection
    try:
        stmt = sa.select([sa.text('12')])
        return connection.execute(stmt).scalar()
    finally:
        # ensure a "close()" on this connection does nothing,
        # since its a "branched" connection
        connection.close()
def define_tables(self, metadata):
    """Create t2, then t1 whose id column defaults to a scalar subquery
    of max(t2.nextid)."""
    t2 = Table('t2', metadata, Column('nextid', Integer))
    next_id_default = sa.select([func.max(t2.c.nextid)]).as_scalar()
    Table('t1', metadata,
          Column('id', Integer, primary_key=True, default=next_id_default),
          Column('data', String(30)))
def test_reconnect(self):
    """test that an 'is_disconnect' condition will invalidate the
    connection, and additionally dispose the previous connection pool and
    recreate."""
    pid = id(db.pool)

    # make a connection
    conn = db.connect()

    # connection works
    conn.execute(select([1]))

    # create a second connection within the pool, which we'll ensure
    # also goes away
    conn2 = db.connect()
    conn2.close()

    # two connections opened total now
    assert len(dbapi.connections) == 2

    # set it to fail
    dbapi.shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError:
        pass

    # assert was invalidated
    assert not conn.closed
    assert conn.invalidated

    # close shouldnt break
    conn.close()

    # the engine swapped in a brand new pool
    assert id(db.pool) != pid

    # ensure all connections closed (pool was recycled)
    gc.collect()
    assert len(dbapi.connections) == 0

    conn = db.connect()
    conn.execute(select([1]))
    conn.close()
    assert len(dbapi.connections) == 1
def test_reconnect(self):
    """test that an 'is_disconnect' condition will invalidate the
    connection, and additionally dispose the previous connection pool and
    recreate."""
    pid = id(db.pool)
    # make a connection
    conn = db.connect()
    # connection works
    conn.execute(select([1]))
    # create a second connection within the pool, which we'll ensure
    # also goes away
    conn2 = db.connect()
    conn2.close()
    # two connections opened total now
    assert len(dbapi.connections) == 2
    # set it to fail
    dbapi.shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError:
        pass
    # assert was invalidated
    assert not conn.closed
    assert conn.invalidated
    # close shouldnt break
    conn.close()
    # a new pool replaced the disposed one
    assert id(db.pool) != pid
    # ensure all connections closed (pool was recycled)
    gc.collect()
    assert len(dbapi.connections) == 0
    conn = db.connect()
    conn.execute(select([1]))
    conn.close()
    assert len(dbapi.connections) == 1
def test_control(self):
    # test that not using autocommit does not commit
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    conn1.execute(select([func.insert_foo('data1')]))
    assert conn2.execute(select([foo.c.data])).fetchall() == []
    conn1.execute(text("select insert_foo('moredata')"))
    assert conn2.execute(select([foo.c.data])).fetchall() == []
    trans = conn1.begin()
    trans.commit()
    # after an explicit begin()/commit(), the earlier inserts are visible
    assert conn2.execute(select([foo.c.data])).fetchall() == \
        [('data1',), ('moredata',)]
    conn1.close()
    conn2.close()
def test_autoincrement_single_col(self):
    """A lone autoincrement primary key yields a non-None generated id and
    exactly one stored row."""
    table = Table('single', self.metadata,
                  Column('id', Integer, primary_key=True))
    table.create()
    result = table.insert().execute()
    generated_id = result.last_inserted_ids()[0]
    assert generated_id is not None
    eq_(1, sa.select([func.count(sa.text('*'))], from_obj=table).scalar())
def test_explicit_text(self):
    """text(..., autocommit=True) commits immediately; a second connection
    sees the inserted row."""
    writer = testing.db.connect()
    reader = testing.db.connect()
    writer.execute(text("select insert_foo('moredata')", autocommit=True))
    rows = reader.execute(select([foo.c.data])).fetchall()
    assert rows == [('moredata',)]
    writer.close()
    reader.close()
def test_basic(self):
    """Duplicate formatting variant: pool_recycle=1 recycles dead
    connections after the recycle window, in both threadlocal modes."""
    for threadlocal in (False, True):
        engine = engines.reconnecting_engine(
            options={
                'pool_recycle': 1,
                'pool_threadlocal': threadlocal
            })
        conn = engine.contextual_connect()
        self.assertEquals(conn.execute(select([1])).scalar(), 1)
        conn.close()
        engine.test_shutdown()
        # sleep past the 1-second recycle threshold
        time.sleep(2)
        conn = engine.contextual_connect()
        self.assertEquals(conn.execute(select([1])).scalar(), 1)
        conn.close()
class User(Base, ComparableEntity):
    """Declarative user with an eager address count column property."""

    __tablename__ = 'users'

    id = Column('id', Integer, primary_key=True)
    name = Column('name', String(50))
    # scalar subquery counting this user's Address rows
    adr_count = sa.orm.column_property(
        sa.select([sa.func.count(Address.id)],
                  Address.user_id == id).as_scalar())
    addresses = relation(Address)
def test_implicit_text(self):
    """A plain INSERT issued via text() is autocommitted and therefore
    visible from a second connection."""
    writer = testing.db.connect()
    reader = testing.db.connect()
    writer.execute(text("insert into foo (data) values ('implicitdata')"))
    rows = reader.execute(select([foo.c.data])).fetchall()
    assert rows == [('implicitdata',)]
    writer.close()
    reader.close()
def define_tables(self, metadata):
    """Define t2 and t1; t1.id defaults to the scalar subquery
    max(t2.nextid)."""
    t2 = Table('t2', metadata, Column('nextid', Integer))
    Table(
        't1', metadata,
        Column('id', Integer, primary_key=True,
               default=sa.select([func.max(t2.c.nextid)]).as_scalar()),
        Column('data', String(30)))
def test_explicit_text(self):
    """text(..., autocommit=True) commits immediately (duplicate
    formatting variant)."""
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    conn1.execute(text("select insert_foo('moredata')", autocommit=True))
    assert conn2.execute(select([foo.c.data])).fetchall() == \
        [('moredata', )]
    conn1.close()
    conn2.close()
def test_implicit_text(self):
    """An INSERT via text() autocommits; visible to a second connection
    (duplicate formatting variant)."""
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    conn1.execute(text("insert into foo (data) values ('implicitdata')"))
    assert conn2.execute(select([foo.c.data])).fetchall() == \
        [('implicitdata', )]
    conn1.close()
    conn2.close()
def test_nesting_with_functions(self):
    """Eager-loaded query joined against a max() aggregate subselect and
    ordered by the aggregate column; verified against a raw fetch."""
    mapper(Data, datas)
    mapper(Foo, foo, properties={
        "data": relation(Data, backref=backref("foo", uselist=False))})
    mapper(Stat, stats, properties={"data": relation(Data)})
    session = create_session()
    data = [Data(a=x) for x in range(5)]
    session.add_all(data)
    session.add_all(
        (
            Stat(data=data[0], somedata=1),
            Stat(data=data[1], somedata=2),
            Stat(data=data[2], somedata=3),
            Stat(data=data[3], somedata=4),
            Stat(data=data[4], somedata=5),
            Stat(data=data[0], somedata=6),
            Stat(data=data[1], somedata=7),
            Stat(data=data[2], somedata=8),
            Stat(data=data[3], somedata=9),
            Stat(data=data[4], somedata=10),
        )
    )
    session.flush()
    # per-data_id maximum of somedata
    arb_data = sa.select(
        [stats.c.data_id, sa.func.max(stats.c.somedata).label("max")],
        stats.c.data_id <= 5,
        group_by=[stats.c.data_id],
    ).alias("arb")
    arb_result = arb_data.execute().fetchall()
    # order the result list descending based on 'max'
    arb_result.sort(key=lambda a: a["max"], reverse=True)
    # extract just the "data_id" from it
    arb_result = [row["data_id"] for row in arb_result]
    # now query for Data objects using that above select, adding the
    # "order by max desc" separately
    q = (
        session.query(Data)
        .options(sa.orm.eagerload("foo"))
        .select_from(datas.join(arb_data, arb_data.c.data_id == datas.c.id))
        .order_by(sa.desc(arb_data.c.max))
        .limit(10)
    )
    # extract "data_id" from the list of result objects
    verify_result = [d.id for d in q]
    eq_(verify_result, arb_result)
def test_expression(self):
    """A select expression survives a serializer round trip: same SQL
    string, same bind, same result rows."""
    original = select([users]).select_from(users.join(addresses)).limit(5)
    payload = serializer.dumps(original)
    restored = serializer.loads(payload, users.metadata, None)
    eq_(str(original), str(restored))
    assert restored.bind is testing.db
    eq_(restored.execute().fetchall(),
        [(7, u'jack'), (8, u'ed'), (8, u'ed'), (8, u'ed'), (9, u'fred')])
def test_control(self):
    # test that not using autocommit does not commit
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    conn1.execute(select([func.insert_foo('data1')]))
    assert conn2.execute(select([foo.c.data])).fetchall() == []
    conn1.execute(text("select insert_foo('moredata')"))
    assert conn2.execute(select([foo.c.data])).fetchall() == []
    trans = conn1.begin()
    trans.commit()
    # after the explicit begin()/commit(), both inserts are visible
    assert conn2.execute(select([foo.c.data])).fetchall() == \
        [('data1', ), ('moredata', )]
    conn1.close()
    conn2.close()
class RealReconnectTest(TestBase):
    """Reconnection behavior against a real restartable engine."""

    def setUp(self):
        # module-level engine shared with other tests in this file
        global engine
        engine = engines.reconnecting_engine()

    def tearDown(self):
        engine.dispose()

    def test_reconnect(self):
        """A shutdown invalidates the connection; the next execute
        reconnects transparently. Repeated to show it works twice."""
        conn = engine.connect()
        self.assertEquals(conn.execute(select([1])).scalar(), 1)
        assert not conn.closed
        engine.test_shutdown()
        try:
            conn.execute(select([1]))
            assert False
        except tsa.exc.DBAPIError, e:
            if not e.connection_invalidated:
                raise
        assert not conn.closed
        assert conn.invalidated
        # NOTE(review): duplicated assertion in the original, kept as-is
        assert conn.invalidated
        self.assertEquals(conn.execute(select([1])).scalar(), 1)
        assert not conn.invalidated
        # one more time
        engine.test_shutdown()
        try:
            conn.execute(select([1]))
            assert False
        except tsa.exc.DBAPIError, e:
            if not e.connection_invalidated:
                raise
def test_expression(self):
    """Serializer round trip preserves SQL string, bind, and results
    (duplicate formatting variant)."""
    expr = select([users]).select_from(users.join(addresses)).limit(5)
    re_expr = serializer.loads(serializer.dumps(expr), users.metadata, None)
    eq_(
        str(expr),
        str(re_expr)
    )
    assert re_expr.bind is testing.db
    eq_(
        re_expr.execute().fetchall(),
        [(7, u'jack'), (8, u'ed'), (8, u'ed'), (8, u'ed'), (9, u'fred')]
    )
def testsequence(self):
    """Sequence-backed inserts expose a usable last_inserted_ids()."""
    cartitems.insert().execute(description='hi')
    cartitems.insert().execute(description='there')
    r = cartitems.insert().execute(description='lala')
    assert r.last_inserted_ids() and r.last_inserted_ids()[0] is not None
    id_ = r.last_inserted_ids()[0]
    # exactly one row matches the generated id
    eq_(1,
        sa.select([func.count(cartitems.c.cart_id)],
                  sa.and_(cartitems.c.description == 'lala',
                          cartitems.c.cart_id == id_)).scalar())
    cartitems.select().execute().fetchall()
def test_invalidate_trans(self):
    """Duplicate formatting variant: reconnect is refused while an
    invalidated transaction is still active."""
    conn = db.connect()
    trans = conn.begin()
    dbapi.shutdown()
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.DBAPIError:
        pass
    # assert was invalidated
    gc.collect()
    assert len(dbapi.connections) == 0
    assert not conn.closed
    assert conn.invalidated
    assert trans.is_active
    try:
        conn.execute(select([1]))
        assert False
    except tsa.exc.InvalidRequestError, e:
        assert str(e) == \
            "Can't reconnect until invalid transaction is rolled back"
def test_basic(self):
    """Map a class against a plain select() and run basic queries."""
    subset_select = select([common.c.id, common.c.data])
    subset_mapper = mapper(Subset, subset_select)
    sess = create_session(bind=testing.db)
    sess.add(Subset(data=1))
    sess.flush()
    sess.expunge_all()
    eq_(sess.query(Subset).all(), [Subset(data=1)])
    eq_(sess.query(Subset).filter(Subset.data == 1).one(), Subset(data=1))
    eq_(sess.query(Subset).filter(Subset.data != 1).first(), None)
    # re-fetch the mapped selectable from the mapper and filter through it
    subset_select = sa.orm.class_mapper(Subset).mapped_table
    eq_(sess.query(Subset).filter(subset_select.c.data == 1).one(),
        Subset(data=1))
def testsequence(self):
    """Duplicate formatting variant: sequence-generated ids are returned
    by last_inserted_ids() and match the stored row."""
    cartitems.insert().execute(description='hi')
    cartitems.insert().execute(description='there')
    r = cartitems.insert().execute(description='lala')
    assert r.last_inserted_ids() and r.last_inserted_ids()[0] is not None
    id_ = r.last_inserted_ids()[0]
    eq_(
        1,
        sa.select([func.count(cartitems.c.cart_id)],
                  sa.and_(cartitems.c.description == 'lala',
                          cartitems.c.cart_id == id_)).scalar())
    cartitems.select().execute().fetchall()
def test_nested_subtransaction_commit(self):
    """A committed SAVEPOINT's work survives the outer transaction
    commit; all three rows persist."""
    connection = testing.db.connect()
    transaction = connection.begin()
    connection.execute(users.insert(), user_id=1, user_name='user1')
    trans2 = connection.begin_nested()
    connection.execute(users.insert(), user_id=2, user_name='user2')
    trans2.commit()
    connection.execute(users.insert(), user_id=3, user_name='user3')
    transaction.commit()
    self.assertEquals(
        connection.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1,), (2,), (3,)])
    connection.close()
def test_nesting_with_functions(self):
    """Duplicate formatting variant: eager-loaded query joined against a
    max() aggregate subselect, ordered by the aggregate."""
    mapper(Data, datas)
    mapper(Foo, foo, properties={
        'data': relation(Data, backref=backref('foo', uselist=False))
    })
    mapper(Stat, stats, properties={'data': relation(Data)})
    session = create_session()
    data = [Data(a=x) for x in range(5)]
    session.add_all(data)
    session.add_all(
        (Stat(data=data[0], somedata=1),
         Stat(data=data[1], somedata=2),
         Stat(data=data[2], somedata=3),
         Stat(data=data[3], somedata=4),
         Stat(data=data[4], somedata=5),
         Stat(data=data[0], somedata=6),
         Stat(data=data[1], somedata=7),
         Stat(data=data[2], somedata=8),
         Stat(data=data[3], somedata=9),
         Stat(data=data[4], somedata=10)))
    session.flush()
    # per-data_id maximum of somedata
    arb_data = sa.select(
        [stats.c.data_id, sa.func.max(stats.c.somedata).label('max')],
        stats.c.data_id <= 5,
        group_by=[stats.c.data_id]).alias('arb')
    arb_result = arb_data.execute().fetchall()
    # order the result list descending based on 'max'
    arb_result.sort(key=lambda a: a['max'], reverse=True)
    # extract just the "data_id" from it
    arb_result = [row['data_id'] for row in arb_result]
    # now query for Data objects using that above select, adding the
    # "order by max desc" separately
    q = (session.query(Data).options(sa.orm.eagerload('foo')).select_from(
        datas.join(arb_data, arb_data.c.data_id == datas.c.id)).order_by(
            sa.desc(arb_data.c.max)).limit(10))
    # extract "data_id" from the list of result objects
    verify_result = [d.id for d in q]
    eq_(verify_result, arb_result)
def test_nested_subtransaction_commit(self):
    """Duplicate formatting variant: a committed SAVEPOINT survives the
    outer commit."""
    connection = testing.db.connect()
    transaction = connection.begin()
    connection.execute(users.insert(), user_id=1, user_name='user1')
    trans2 = connection.begin_nested()
    connection.execute(users.insert(), user_id=2, user_name='user2')
    trans2.commit()
    connection.execute(users.insert(), user_id=3, user_name='user3')
    transaction.commit()
    self.assertEquals(
        connection.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1, ), (2, ), (3, )])
    connection.close()
def test_basic(self):
    """Duplicate variant (uses sess.clear()): map a class against a plain
    select() and run basic queries."""
    subset_select = select([common.c.id, common.c.data])
    subset_mapper = mapper(Subset, subset_select)
    sess = create_session(bind=testing.db)
    sess.add(Subset(data=1))
    sess.flush()
    sess.clear()
    eq_(sess.query(Subset).all(), [Subset(data=1)])
    eq_(sess.query(Subset).filter(Subset.data == 1).one(), Subset(data=1))
    eq_(sess.query(Subset).filter(Subset.data != 1).first(), None)
    # re-fetch the mapped selectable from the mapper
    subset_select = sa.orm.class_mapper(Subset).mapped_table
    eq_(
        sess.query(Subset).filter(subset_select.c.data == 1).one(),
        Subset(data=1))
def test_nested_joins(self):
    # this is testing some subtle column resolution stuff,
    # concerning corresponding_column() being extremely accurate
    # as well as how mapper sets up its column properties
    mapper(Task_Type, task_type)
    # NOTE(review): tsk_cnt_join is assigned but never used below
    tsk_cnt_join = sa.outerjoin(prj, task, task.c.prj_id == prj.c.id)
    j = sa.outerjoin(task, msg, task.c.id == msg.c.task_id)
    # aggregate message count per task
    jj = sa.select(
        [task.c.id.label("task_id"),
         sa.func.count(msg.c.id).label("props_cnt")],
        from_obj=[j], group_by=[task.c.id]
    ).alias("prop_c_s")
    jjj = sa.join(task, jj, task.c.id == jj.c.task_id)
    mapper(Joined, jjj,
           properties=dict(type=relation(Task_Type, lazy=False)))
    session = create_session()
    eq_(session.query(Joined).limit(10).offset(0).one(),
        Joined(id=1, title=u"task 1", props_cnt=0))
def test_correlated_lazyload(self):
    """Duplicate formatting variant: relation primaryjoin embedding a
    correlated scalar subquery for each user's latest stuff row."""
    class User(_base.ComparableEntity):
        pass

    class Stuff(_base.ComparableEntity):
        pass

    mapper(Stuff, stuff)
    stuff_view = sa.select([stuff.c.id]).\
        where(stuff.c.user_id == user_t.c.id).correlate(user_t).\
        order_by(sa.desc(stuff.c.date)).limit(1)
    mapper(User, user_t, properties={
        'stuff': relation(
            Stuff,
            primaryjoin=sa.and_(user_t.c.id == stuff.c.user_id,
                                stuff.c.id == (stuff_view.as_scalar())))
    })
    sess = create_session()
    eq_(sess.query(User).all(), [
        User(name='user1',
             stuff=[Stuff(date=datetime.date(2007, 12, 15), id=2)]),
        User(name='user2',
             stuff=[Stuff(id=4, date=datetime.date(2008, 1, 15))]),
        User(name='user3',
             stuff=[Stuff(id=5, date=datetime.date(2007, 6, 15))])
    ])
def test_two_phase_transaction(self):
    """Two-phase commit/rollback on the threadlocal engine; only users 1
    and 2 (committed) persist."""
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=1, user_name='user1')
    tlengine.prepare()
    tlengine.commit()
    # commit without prepare
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=2, user_name='user2')
    tlengine.commit()
    # rollback without prepare
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=3, user_name='user3')
    tlengine.rollback()
    # rollback after prepare
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=4, user_name='user4')
    tlengine.prepare()
    tlengine.rollback()
    self.assertEquals(
        tlengine.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1,), (2,)])
def test_two_phase_transaction(self):
    """Duplicate formatting variant: two-phase commit/rollback on the
    threadlocal engine."""
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=1, user_name='user1')
    tlengine.prepare()
    tlengine.commit()
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=2, user_name='user2')
    tlengine.commit()
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=3, user_name='user3')
    tlengine.rollback()
    tlengine.begin_twophase()
    tlengine.execute(users.insert(), user_id=4, user_name='user4')
    tlengine.prepare()
    tlengine.rollback()
    # only the two committed transactions persisted
    self.assertEquals(
        tlengine.execute(
            select([users.c.user_id]).order_by(users.c.user_id)).fetchall(),
        [(1, ), (2, )])
def myupdate_with_ctx(ctx):
    """Column onupdate callable: return the literal 13 read through the
    execution context's connection."""
    stmt = sa.select([sa.text('13')])
    return ctx.connection.execute(stmt).scalar()
def test_no_tables(self):
    """Mapping a select containing no Table objects raises
    InvalidRequestError with an explanatory message."""
    tableless = select(["x", "y", "z"])
    self.assertRaisesMessage(sa.exc.InvalidRequestError,
                             "Could not find any Table objects",
                             mapper, Subset, tableless)
assert str(e) == "Can't reconnect until invalid transaction is rolled back" assert trans.is_active try: trans.commit() assert False except tsa.exc.InvalidRequestError, e: assert str(e) == "Can't reconnect until invalid transaction is rolled back" assert trans.is_active trans.rollback() assert not trans.is_active conn.execute(select([1])) assert not conn.invalidated assert len(dbapi.connections) == 1 def test_conn_reusable(self): conn = db.connect() conn.execute(select([1])) assert len(dbapi.connections) == 1 dbapi.shutdown() # raises error try: