def test_key_fallback_result(self):
    """Exercise key-fallback row lookups (original column against an
    aliased select) under memory profiling."""
    engine = testing.db
    meta = self.metadata
    tbl = Table('t', meta, Column('x', Integer), Column('y', Integer))
    meta.create_all(engine)
    engine.execute(tbl.insert(), {"x": 1, "y": 1})

    @profile_memory
    def go():
        result = engine.execute(tbl.alias().select())
        for row in result:
            # index with the un-aliased column to trigger the fallback path
            row[tbl.c.x]
    go()
def test_mutable_identity(self):
    """Mutable-attribute entries in the identity map are released
    once the mapped objects are dereferenced and collected."""
    metadata = MetaData(testing.db)
    table1 = Table(
        "mytable", metadata,
        Column('col1', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('col2', PickleType(comparator=operator.eq, mutable=True)))

    class Foo(object):
        def __init__(self, col2):
            self.col2 = col2

    mapper(Foo, table1)
    metadata.create_all()
    session = sessionmaker()()

    def go():
        # twelve distinct single-key dict payloads, 'a' through 'l'
        obj = [Foo({key: 1}) for key in 'abcdefghijkl']
        session.add_all(obj)
        session.commit()
        testing.eq_(len(session.identity_map._mutable_attrs), 12)
        testing.eq_(len(session.identity_map), 12)
        obj = None
        gc_collect()
        testing.eq_(len(session.identity_map._mutable_attrs), 0)
        testing.eq_(len(session.identity_map), 0)

    try:
        go()
    finally:
        metadata.drop_all()
def define_tables(cls, metadata):
    """Define accounts and transactions, plus an entries table
    holding a foreign key into each."""
    Table(
        'accounts', metadata,
        Column('account_id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('name', String(40)))
    Table(
        'transactions', metadata,
        Column('transaction_id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('name', String(40)))
    Table(
        'entries', metadata,
        Column('entry_id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('name', String(40)),
        Column('account_id', Integer,
               ForeignKey('accounts.account_id')),
        Column('transaction_id', Integer,
               ForeignKey('transactions.transaction_id')))
def define_tables(cls, metadata):
    """Define t1/t2 with a mutual FK cycle (broken via use_alter on
    t2.t1id) and t3 referencing both with NOT NULL FKs."""
    Table(
        't1', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data', String(30)),
        Column('t2id', Integer, ForeignKey('t2.id')))
    Table(
        't2', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data', String(30)),
        # use_alter breaks the circular dependency at CREATE time
        Column('t1id', Integer,
               ForeignKey('t1.id', use_alter=True, name="foo_fk")))
    Table(
        't3', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data', String(30)),
        Column('t1id', Integer, ForeignKey('t1.id'), nullable=False),
        Column('t2id', Integer, ForeignKey('t2.id'), nullable=False))
def define_tables(cls, metadata):
    """Define a parent 'datas' table, a one-to-one 'foo' table keyed
    on datas.id, and a dependent 'stats' table."""
    Table(
        'datas', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('a', Integer, nullable=False))
    Table(
        'foo', metadata,
        # pk doubles as the FK: one foo row per datas row
        Column('data_id', Integer, ForeignKey('datas.id'),
               primary_key=True),
        Column('bar', Integer))
    Table(
        'stats', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data_id', Integer, ForeignKey('datas.id')),
        Column('somedata', Integer, nullable=False))
def test_func_embedded_valuesbase(self):
    """test can use next_value() in values() of _ValuesBase"""
    meta = self.metadata
    tbl = Table('t', meta, Column('x', Integer))
    tbl.create(testing.db)
    seq = Sequence("my_sequence")
    # embed the sequence expression directly in the INSERT values
    testing.db.execute(tbl.insert().values(x=seq.next_value()))
    self._assert_seq_result(testing.db.scalar(tbl.select()))
def define_tables(cls, metadata):
    """Define table1 and table2, plus an association table referencing
    both.

    Table objects register themselves on ``metadata`` as a side effect
    of construction; the original bound each to a local name that was
    never read, so those unused locals are dropped.
    """
    Table(
        "table1", metadata,
        Column('col1', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('col2', String(30)))
    Table(
        "table2", metadata,
        Column('col1', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('col2', String(30)),
    )
    Table(
        'table3', metadata,
        Column('t1', Integer, ForeignKey('table1.col1')),
        Column('t2', Integer, ForeignKey('table2.col1')),
    )
def test_int_default_on_insert_with_returning(self):
    """With implicit RETURNING, an integer server default comes back
    as the inserted primary key."""
    meta = self.metadata
    tbl = Table(
        'x', meta,
        Column('y', Integer, server_default='5', primary_key=True),
        Column('data', String(10)))
    meta.create_all()
    result = tbl.insert().execute(data='data')
    eq_(result.inserted_primary_key, [5])
    eq_(tbl.select().execute().fetchall(), [(5, 'data')])
def test_int_default_none_on_insert(self):
    """Without implicit returning, a server-defaulted int pk yields
    [None] as inserted_primary_key; sqlite autoincrements anyway."""
    meta = self.metadata
    tbl = Table(
        'x', meta,
        Column('y', Integer, server_default='5', primary_key=True),
        Column('data', String(10)),
        implicit_returning=False)
    # the server default disqualifies the column from autoincrement
    assert tbl._autoincrement_column is None
    meta.create_all()
    result = tbl.insert().execute(data='data')
    eq_(result.inserted_primary_key, [None])
    if testing.against('sqlite'):
        # sqlite ignores the server default for INTEGER pk rowids
        eq_(tbl.select().execute().fetchall(), [(1, 'data')])
    else:
        eq_(tbl.select().execute().fetchall(), [(5, 'data')])
def test_string_default_on_insert_with_returning(self):
    """With implicit_returning, we get a string PK default back no
    problem."""
    meta = self.metadata
    tbl = Table(
        'x', meta,
        Column('y', String(10), server_default='key_one',
               primary_key=True),
        Column('data', String(10)))
    meta.create_all()
    result = tbl.insert().execute(data='data')
    eq_(result.inserted_primary_key, ['key_one'])
    eq_(tbl.select().execute().fetchall(), [('key_one', 'data')])
def setup(self):
    """Create the module-global 'tables' table, with its pk exposed
    under the alternate key 'foo_id'."""
    meta = MetaData(testing.db)
    global table
    table = Table(
        'tables', meta,
        Column('id', Integer, primary_key=True, key='foo_id',
               test_needs_autoincrement=True),
        Column('data', String(20)),
    )
    table.create(checkfirst=True)
def define_tables(cls, metadata):
    """Define users/addresses/items plus a many-to-many link table.

    FK behavior varies by backend: Oracle gets deferrable/deferred
    constraints (it has no ON UPDATE CASCADE); everything else gets
    onupdate='cascade'. Table objects register themselves on
    ``metadata``, so the original's unused local bindings are dropped.
    """
    if testing.against('oracle'):
        fk_args = dict(deferrable=True, initially='deferred')
    else:
        fk_args = dict(onupdate='cascade')
    Table('users', metadata,
          Column('username', String(50), primary_key=True),
          Column('fullname', String(100)),
          test_needs_fk=True)
    Table('addresses', metadata,
          Column('email', String(50), primary_key=True),
          Column('username', String(50),
                 ForeignKey('users.username', **fk_args)),
          test_needs_fk=True)
    Table('items', metadata,
          Column('itemname', String(50), primary_key=True),
          Column('description', String(100)),
          test_needs_fk=True)
    Table('users_to_items', metadata,
          Column('username', String(50),
                 ForeignKey('users.username', **fk_args),
                 primary_key=True),
          Column('itemname', String(50),
                 ForeignKey('items.itemname', **fk_args),
                 primary_key=True),
          test_needs_fk=True)
def define_tables(cls, metadata):
    """Define two parallel inheritance hierarchies
    (parent/subparent and base/sub), with sub referencing subparent."""
    Table(
        'parent', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('type', String(10)),
    )
    Table(
        'subparent', metadata,
        Column('id', Integer, ForeignKey('parent.id'),
               primary_key=True),
        Column('data', String(10)),
    )
    Table(
        'base', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('type', String(10)),
    )
    Table(
        'sub', metadata,
        Column('id', Integer, ForeignKey('base.id'), primary_key=True),
        Column('data', String(10)),
        Column('subparent_id', Integer, ForeignKey('subparent.id'),
               nullable=False))
def test_non_autoincrement(self):
    """Inserting without a value into a non-autoincrementing INT pk
    raises a DBAPI error.

    sqlite INT primary keys can be non-unique (only for ints), so two
    rows are attempted: postgresql + mysql strict will fail on the
    first row, mysql in legacy mode fails on the second row.

    The original bound the exception to an unused variable via the
    Python-2-only ``except X, e`` form and asserted the no-op
    ``assert True``; both are removed.
    """
    nonai = Table(
        "nonaitest", self.metadata,
        Column('id', Integer, autoincrement=False, primary_key=True),
        Column('data', String(20)))
    nonai.create()
    try:
        nonai.insert().execute(data='row 1')
        nonai.insert().execute(data='row 2')
        assert False
    except sa.exc.DBAPIError:
        # expected: backend refused to generate a pk value
        pass
def test_inserted_pk_implicit_returning(self):
    """test inserted_primary_key contains the result when
    pk_col=next_value(), when implicit returning is used."""
    meta = self.metadata
    engine = engines.testing_engine(
        options={'implicit_returning': True})
    seq = Sequence("my_sequence")
    meta.bind = engine
    tbl = Table('t', meta, Column('x', Integer, primary_key=True))
    tbl.create()
    result = engine.execute(tbl.insert().values(x=seq.next_value()))
    self._assert_seq_result(result.inserted_primary_key[0])
def test_inserted_pk_no_returning(self):
    """test inserted_primary_key contains [None] when
    pk_col=next_value(), implicit returning is not used."""
    meta = self.metadata
    engine = engines.testing_engine(
        options={'implicit_returning': False})
    seq = Sequence("my_sequence")
    meta.bind = engine
    tbl = Table('t', meta, Column('x', Integer, primary_key=True))
    tbl.create()
    result = engine.execute(tbl.insert().values(x=seq.next_value()))
    # without RETURNING the sequence value cannot be reported back
    eq_(result.inserted_primary_key, [None])
def test_func_embedded_whereclause(self):
    """test can use next_value() in whereclause"""
    meta = self.metadata
    tbl = Table('t', meta, Column('x', Integer))
    tbl.create(testing.db)
    testing.db.execute(tbl.insert(), [{'x': 1}, {'x': 300}, {'x': 301}])
    seq = Sequence("my_sequence")
    # sequence starts low enough that only 300/301 pass the filter
    rows = testing.db.execute(
        tbl.select().where(tbl.c.x > seq.next_value())).fetchall()
    eq_(rows, [(300, ), (301, )])
def test_many_updates(self):
    """Profile memory while flushing many varied UPDATE statements
    against a wide table, with a bounded compiled-statement cache."""
    metadata = MetaData(testing.db)
    wide_table = Table(
        't', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        *[Column('col%d' % i, Integer) for i in range(10)])

    class Wide(object):
        pass

    mapper(Wide, wide_table, _compiled_cache_size=10)
    metadata.create_all()
    session = create_session()
    w1 = Wide()
    session.add(w1)
    session.flush()
    session.close()
    del session
    counter = [1]

    @profile_memory
    def go():
        session = create_session()
        w1 = session.query(Wide).first()
        remaining = counter[0]
        bit = 10
        while bit > 0:
            # trying to count in binary here,
            # works enough to trip the test case
            if pow(2, bit) < remaining:
                setattr(w1, 'col%d' % bit, counter[0])
                remaining -= pow(2, bit)
            bit -= 1
        session.flush()
        session.close()
        counter[0] += 1

    try:
        go()
    finally:
        metadata.drop_all()
def define_tables(cls, metadata):
    """Define a self-referential 'node' table carrying parent and
    prev/next sibling links."""
    Table(
        'node', metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('path', String(50), nullable=False),
        Column('parent_id', Integer, ForeignKey('node.id'),
               nullable=True),
        Column('prev_sibling_id', Integer, ForeignKey('node.id'),
               nullable=True),
        Column('next_sibling_id', Integer, ForeignKey('node.id'),
               nullable=True))
def test_create_drop_err_table(self): metadata = MetaData() table = Table('test_table', metadata, Column('foo', Integer)) for meth in [ table.exists, table.create, table.drop, ]: try: meth() assert False except exc.UnboundExecutionError, e: eq_( str(e), "The Table 'test_table' " "is not bound to an Engine or Connection. " "Execution can not proceed without a database to execute " "against. Either execute with an explicit connection or " "assign this Table's .metadata.bind to enable implicit " "execution.")
def test_create_drop_constructor_bound(self):
    """MetaData bound at construction (positionally or by keyword)
    propagates its bind to tables and supports create/drop."""
    for bind in (testing.db, testing.db.connect()):
        try:
            # exercise both the positional and the keyword spelling
            for args in (([bind], {}), ([], {'bind': bind})):
                metadata = MetaData(*args[0], **args[1])
                table = Table('test_table', metadata,
                              Column('foo', Integer))
                assert metadata.bind is table.bind is bind
                metadata.create_all()
                assert table.exists()
                metadata.drop_all()
                table.create()
                table.drop()
                assert not table.exists()
        finally:
            if isinstance(bind, engine.Connection):
                bind.close()
def test_rollback_deadlock(self):
    """test that returning connections to the pool clears any object
    locks."""
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    users = Table(
        'deadlock_users', metadata,
        Column('user_id', INT, primary_key=True),
        Column('user_name', VARCHAR(20)),
        test_needs_acid=True)
    users.create(conn1)
    conn1.execute('select * from deadlock_users')
    conn1.close()
    # without auto-rollback in the connection pool's return() logic,
    # this deadlocks in PostgreSQL, because conn1 is returned to the
    # pool but still has a lock on "deadlock_users". comment out the
    # rollback in pool/ConnectionFairy._close() to see !
    users.drop(conn2)
    conn2.close()
def test_string_default_none_on_insert(self):
    """Test that without implicit returning, we return None for a
    string server default.

    That is, we don't want to attempt to pre-execute "server_default"
    generically - the user should use a Python side-default for a case
    like this.  Testing that all backends do the same thing here.
    """
    meta = self.metadata
    tbl = Table(
        'x', meta,
        Column('y', String(10), server_default='key_one',
               primary_key=True),
        Column('data', String(10)),
        implicit_returning=False)
    meta.create_all()
    result = tbl.insert().execute(data='data')
    eq_(result.inserted_primary_key, [None])
    eq_(tbl.select().execute().fetchall(), [('key_one', 'data')])
def define_tables(cls, metadata):
    """Define place/transition entities with three association tables
    (place_input, place_output, place_place)."""
    Table('place', metadata,
          Column('place_id', Integer,
                 sa.Sequence('pid_seq', optional=True),
                 primary_key=True),
          Column('name', String(30), nullable=False),
          test_needs_acid=True,
          )
    Table('transition', metadata,
          Column('transition_id', Integer,
                 sa.Sequence('tid_seq', optional=True),
                 primary_key=True),
          Column('name', String(30), nullable=False),
          test_needs_acid=True,
          )
    Table('place_thingy', metadata,
          Column('thingy_id', Integer,
                 sa.Sequence('thid_seq', optional=True),
                 primary_key=True),
          Column('place_id', Integer, ForeignKey('place.place_id'),
                 nullable=False),
          Column('name', String(30), nullable=False),
          test_needs_acid=True,
          )
    # association table #1
    Table('place_input', metadata,
          Column('place_id', Integer, ForeignKey('place.place_id')),
          Column('transition_id', Integer,
                 ForeignKey('transition.transition_id')),
          test_needs_acid=True,
          )
    # association table #2
    Table('place_output', metadata,
          Column('place_id', Integer, ForeignKey('place.place_id')),
          Column('transition_id', Integer,
                 ForeignKey('transition.transition_id')),
          test_needs_acid=True,
          )
    # self-referential association on place
    Table('place_place', metadata,
          Column('pl1_id', Integer, ForeignKey('place.place_id')),
          Column('pl2_id', Integer, ForeignKey('place.place_id')),
          test_needs_acid=True,
          )
def test_implicit_execution(self):
    """Statements pick up a bind set on MetaData after construction;
    rolling back the transaction discards the implicit inserts."""
    metadata = MetaData()
    table = Table(
        'test_table', metadata,
        Column('foo', Integer),
        test_needs_acid=True,
    )
    conn = testing.db.connect()
    metadata.create_all(bind=conn)
    try:
        trans = conn.begin()
        metadata.bind = conn
        t = table.insert()
        # the insert construct inherits the metadata's bind
        assert t.bind is conn
        table.insert().execute(foo=5)
        table.insert().execute(foo=6)
        table.insert().execute(foo=7)
        trans.rollback()
        metadata.bind = None
        assert conn.execute(
            'select count(*) from test_table').scalar() == 0
    finally:
        metadata.drop_all(bind=conn)
def test_py_vs_server_default_detection(self):
    """Verify which of default/onupdate/server_default/server_onupdate
    each column-definition spelling populates."""
    def has_(name, *wanted):
        # assert exactly the 'wanted' slots are set, all others None
        slots = ['default', 'onupdate', 'server_default',
                 'server_onupdate']
        col = tbl.c[name]
        for slot in wanted:
            slots.remove(slot)
            assert getattr(col, slot) is not None, getattr(col, slot)
        for slot in slots:
            assert getattr(col, slot) is None, getattr(col, slot)

    tbl = t
    has_('col1', 'default')
    has_('col2', 'default', 'onupdate')
    has_('col3', 'default', 'onupdate')
    has_('col4', 'server_default')
    has_('col5', 'server_default')
    has_('col6', 'default', 'onupdate')
    has_('boolcol1', 'default')
    has_('boolcol2', 'default')
    has_('col7', 'default', 'onupdate')
    has_('col8', 'default', 'onupdate')
    has_('col9', 'default', 'server_default')

    ColumnDefault, DefaultClause = sa.ColumnDefault, sa.DefaultClause
    # NOTE(review): 'col4' appears twice below; the second definition
    # (with the for_update DefaultClause) replaces the first in the
    # Table, and the assertions target the second — confirm the first
    # is intentionally shadowed.
    t2 = Table(
        't2', MetaData(),
        Column('col1', Integer, Sequence('foo')),
        Column('col2', Integer, default=Sequence('foo'),
               server_default='y'),
        Column('col3', Integer, Sequence('foo'), server_default='x'),
        Column('col4', Integer, ColumnDefault('x'), DefaultClause('y')),
        Column('col4', Integer, ColumnDefault('x'), DefaultClause('y'),
               DefaultClause('y', for_update=True)),
        Column('col5', Integer, ColumnDefault('x'), DefaultClause('y'),
               onupdate='z'),
        Column('col6', Integer, ColumnDefault('x'), server_default='y',
               onupdate='z'),
        Column('col7', Integer, default='x', server_default='y',
               onupdate='z'),
        Column('col8', Integer, server_onupdate='u', default='x',
               server_default='y', onupdate='z'))
    tbl = t2
    has_('col1', 'default')
    has_('col2', 'default', 'server_default')
    has_('col3', 'default', 'server_default')
    has_('col4', 'default', 'server_default', 'server_onupdate')
    has_('col5', 'default', 'server_default', 'onupdate')
    has_('col6', 'default', 'server_default', 'onupdate')
    has_('col7', 'default', 'server_default', 'onupdate')
    has_('col8', 'default', 'server_default', 'onupdate',
         'server_onupdate')
def setup_class(cls):
    """Build the 'default_test1' table exercising every default style
    (python callable, literal, pre-executed SQL, server-side), with
    per-backend server-default expressions."""
    global t, f, f2, ts, currenttime, metadata, default_generator
    db = testing.db
    metadata = MetaData(db)
    default_generator = {'x': 50}

    def mydefault():
        default_generator['x'] += 1
        return default_generator['x']

    def myupdate_with_ctx(ctx):
        conn = ctx.connection
        return conn.execute(sa.select([sa.text('13')])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        try:
            return conn.execute(sa.select([sa.text('12')])).scalar()
        finally:
            # ensure a "close()" on this connection does nothing,
            # since its a "branched" connection
            conn.close()

    use_function_defaults = testing.against('postgresql', 'mssql',
                                            'maxdb')
    is_oracle = testing.against('oracle')

    # select "count(1)" returns different results on different DBs also
    # correct for "current_date" compatible as column default, value
    # differences
    currenttime = func.current_date(type_=sa.Date, bind=db)
    if is_oracle:
        ts = db.scalar(
            sa.select([
                func.trunc(func.sysdate(), sa.literal_column("'DAY'"),
                           type_=sa.Date).label('today')]))
        assert isinstance(ts, datetime.date) and \
            not isinstance(ts, datetime.datetime)
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        # TODO: engine propagation across nested functions not working
        currenttime = func.trunc(currenttime,
                                 sa.literal_column("'DAY'"),
                                 bind=db, type_=sa.Date)
        def1 = currenttime
        def2 = func.trunc(sa.text("sysdate"),
                          sa.literal_column("'DAY'"), type_=sa.Date)
        deftype = sa.Date
    elif use_function_defaults:
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        def1 = currenttime
        deftype = sa.Date
        if testing.against('maxdb'):
            def2 = sa.text("curdate")
        elif testing.against('mssql'):
            def2 = sa.text("getdate()")
        else:
            def2 = sa.text("current_date")
        ts = db.scalar(func.current_date())
    else:
        f = len('abcdef')
        f2 = len('abcdefghijk')
        def1 = def2 = "3"
        ts = 3
        deftype = Integer

    t = Table(
        'default_test1', metadata,
        # python function
        Column('col1', Integer, primary_key=True, default=mydefault),
        # python literal
        Column('col2', String(20), default="imthedefault",
               onupdate="im the update"),
        # preexecute expression
        Column('col3', Integer, default=func.length('abcdef'),
               onupdate=func.length('abcdefghijk')),
        # SQL-side default from sql expression
        Column('col4', deftype, server_default=def1),
        # SQL-side default from literal expression
        Column('col5', deftype, server_default=def2),
        # preexecute + update timestamp
        Column('col6', sa.Date, default=currenttime,
               onupdate=currenttime),
        Column('boolcol1', sa.Boolean, default=True),
        Column('boolcol2', sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column('col7', Integer, default=mydefault_using_connection,
               onupdate=myupdate_with_ctx),
        # python builtin
        Column('col8', sa.Date, default=datetime.date.today,
               onupdate=datetime.date.today),
        # combo
        Column('col9', String(20), default='py', server_default='ddl'))
    t.create()
def define_tables(cls, metadata):
    """Define a 'data' table whose payload column is a PickleType
    compared by value equality."""
    Table("data", metadata,
          Column('id', Integer, primary_key=True,
                 test_needs_autoincrement=True),
          Column('data', PickleType(comparator=operator.eq))
          )
def test_uses_get_compatible_types(self):
    """test the use_get optimization with compatible but
    non-identical types"""
    User, Address = self.classes.User, self.classes.Address

    class IntDecorator(TypeDecorator):
        impl = Integer

    class SmallintDecorator(TypeDecorator):
        impl = SmallInteger

    class SomeDBInteger(sa.Integer):
        pass

    # each FK type is integer-compatible with users.id but not identical
    for fk_type in (
        Integer,
        SmallInteger,
        IntDecorator,
        SmallintDecorator,
        SomeDBInteger,
    ):
        m = sa.MetaData()
        users = Table(
            'users', m,
            Column('id', Integer, primary_key=True,
                   test_needs_autoincrement=True),
            Column('name', String(30), nullable=False),
        )
        addresses = Table(
            'addresses', m,
            Column('id', Integer, primary_key=True,
                   test_needs_autoincrement=True),
            Column('user_id', fk_type, ForeignKey('users.id')),
            Column('email_address', String(50), nullable=False),
        )
        mapper(Address, addresses,
               properties=dict(user=relationship(mapper(User, users))))
        sess = create_session(bind=testing.db)

        # load address
        a1 = sess.query(Address).filter_by(
            email_address="*****@*****.**").one()

        # load user that is attached to the address
        u1 = sess.query(User).get(8)

        def go():
            # lazy load of a1.user should get it from the session
            assert a1.user is u1

        self.assert_sql_count(testing.db, go, 0)
        sa.orm.clear_mappers()
def define_tables(cls, metadata):
    """Define a 'data' table with a composite two-string primary key."""
    Table("data", metadata,
          Column('pk1', String(10), primary_key=True),
          Column('pk2', String(10), primary_key=True),
          )