def test_unknown_types(self):
    """Reflecting a column whose database type is missing from the
    dialect module's ischema_names map should warn and fall back to
    NullType.
    """
    meta = MetaData(testing.db)
    t = Table("test", meta, Column('foo', sa.DateTime))

    import sys
    dialect_module = sys.modules[testing.db.dialect.__module__]

    # we're relying on the presence of "ischema_names" in the
    # dialect module, else we can't test this.  we need to be able
    # to get the dialect to not be aware of some type so we temporarily
    # monkeypatch.  not sure what a better way for this could be,
    # except for an established dialect hook or dialect-specific tests
    if not hasattr(dialect_module, 'ischema_names'):
        return

    ischema_names = dialect_module.ischema_names
    t.create()
    dialect_module.ischema_names = {}
    try:
        m2 = MetaData(testing.db)
        self.assertRaises(tsa.exc.SAWarning, Table, "test", m2,
                          autoload=True)

        @testing.emits_warning('Did not recognize type')
        def warns():
            m3 = MetaData(testing.db)
            t3 = Table("test", m3, autoload=True)
            assert t3.c.foo.type.__class__ == sa.types.NullType

        # BUG FIX: the decorated function was defined but never invoked,
        # so its assertions were dead code.  Call it so the NullType
        # fallback is actually exercised.
        warns()
    finally:
        # restore the dialect's type map regardless of outcome
        dialect_module.ischema_names = ischema_names
        t.drop()
def test_passive_override(self):
    """
    Primarily for postgres, tests that when we get a primary key column
    back from reflecting a table which has a default value on it, we
    pre-execute that DefaultClause upon insert, even though DefaultClause
    says "let the database execute this", because in postgres we must
    have all the primary key values in memory before insert; otherwise
    we can't locate the just inserted row.
    """
    # TODO: move this to dialect/postgres
    try:
        meta = MetaData(testing.db)
        testing.db.execute("""
            CREATE TABLE speedy_users
            (
                speedy_user_id  SERIAL   PRIMARY KEY,
                user_name       VARCHAR  NOT NULL,
                user_password   VARCHAR  NOT NULL
            );
        """, None)
        t = Table("speedy_users", meta, autoload=True)
        # BUG FIX: the inserted password must match the value asserted
        # below; previously the insert used a different placeholder
        # value, so the fetchall() comparison could never succeed.
        t.insert().execute(user_name='user', user_password='lala')
        l = t.select().execute().fetchall()
        eq_(l, [(1, 'user', 'lala')])
    finally:
        testing.db.execute("drop table speedy_users", None)
def test_nonreflected_fk_raises(self):
    """A NoReferencedColumnError is raised when reflecting a table
    with an FK to another table which has not included the target
    column in its reflection.
    """
    meta = MetaData(testing.db)
    Table('a', meta,
          Column('x', sa.Integer, primary_key=True),
          Column('z', sa.Integer),
          test_needs_fk=True)
    Table('b', meta,
          Column('y', sa.Integer, sa.ForeignKey('a.x')),
          test_needs_fk=True)
    meta.create_all()
    try:
        m2 = MetaData(testing.db)
        # 'a' is reflected without column 'x', the FK target
        a2 = Table('a', m2, include_columns=['z'], autoload=True)
        b2 = Table('b', m2, autoload=True)
        self.assertRaises(tsa.exc.NoReferencedColumnError, a2.join, b2)
    finally:
        meta.drop_all()
def test_tokens(self):
    """DDL statement token expansion: schema/table/fullname tokens are
    quoted when names require it, and context overrides are applied
    piece-meal and verbatim."""
    m = MetaData()
    bind = self.mock_engine()
    plain = Table('t', m, Column('id', Integer))
    schema_qualified = Table('t', m, Column('id', Integer), schema='s')
    quoted = Table('t t', m, Column('id', Integer))
    quoted_schema = Table('t t', m, Column('id', Integer), schema='s s')

    ddl = DDL('%(schema)s-%(table)s-%(fullname)s')
    self.assertEquals(ddl._expand(plain, bind), '-t-t')
    self.assertEquals(ddl._expand(schema_qualified, bind), 's-t-s.t')
    self.assertEquals(ddl._expand(quoted, bind), '-"t t"-"t t"')
    self.assertEquals(ddl._expand(quoted_schema, bind),
                      '"s s"-"t t"-"s s"."t t"')

    # overrides are used piece-meal and verbatim.
    ddl = DDL('%(schema)s-%(table)s-%(fullname)s-%(bonus)s',
              context={'schema': 'S S', 'table': 'T T', 'bonus': 'b'})
    self.assertEquals(ddl._expand(plain, bind), 'S S-T T-t-b')
    self.assertEquals(ddl._expand(schema_qualified, bind), 'S S-T T-s.t-b')
    self.assertEquals(ddl._expand(quoted, bind), 'S S-T T-"t t"-b')
    self.assertEquals(ddl._expand(quoted_schema, bind),
                      'S S-T T-"s s"."t t"-b')
def test_include_columns(self):
    """Reflection with include_columns= limits which columns are
    reflected while preserving the table's original column order."""
    meta = MetaData(testing.db)
    cols = [Column(name, sa.String(30))
            for name in ('a', 'b', 'c', 'd', 'e', 'f')]
    Table('foo', meta, *cols)
    meta.create_all()
    try:
        meta2 = MetaData(testing.db)
        foo = Table('foo', meta2, autoload=True,
                    include_columns=['b', 'f', 'e'])
        # cols come back in original table order, not request order
        self.assertEquals([c.name for c in foo.c], ['b', 'e', 'f'])
        for name in ('b', 'f', 'e'):
            assert name in foo.c
        for name in ('a', 'c', 'd'):
            assert name not in foo.c

        # same behavior against a table which is already reflected
        meta3 = MetaData(testing.db)
        foo = Table('foo', meta3, autoload=True)
        foo = Table('foo', meta3, include_columns=['b', 'f', 'e'],
                    useexisting=True)
        self.assertEquals([c.name for c in foo.c], ['b', 'e', 'f'])
        for name in ('b', 'f', 'e'):
            assert name in foo.c
        for name in ('a', 'c', 'd'):
            assert name not in foo.c
    finally:
        meta.drop_all()
def define_tables(self, meta):
    """Create the user_t/stuff fixture schema used by these tests."""
    Table(
        'user_t', meta,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
    )
    Table(
        'stuff', meta,
        Column('id', Integer, primary_key=True),
        Column('date', sa.Date),
        Column('user_id', Integer, ForeignKey('user_t.id')),
    )
def setUpAll(self):
    # Build the classic users/addresses/orders/items fixture schema once
    # for the whole class; the module-level globals are consumed by the
    # individual tests.  Sequences are optional=True so databases with
    # native autoincrement skip them.
    global metadata, users
    metadata = MetaData()
    users = Table('users', metadata,
        Column('user_id', sa.Integer, sa.Sequence('user_id_seq', optional=True), primary_key=True),
        Column('user_name', sa.String(40)),
    )

    addresses = Table('email_addresses', metadata,
        Column('address_id', sa.Integer, sa.Sequence('address_id_seq', optional=True), primary_key = True),
        Column('user_id', sa.Integer, sa.ForeignKey(users.c.user_id)),
        Column('email_address', sa.String(40)),
    )

    orders = Table('orders', metadata,
        Column('order_id', sa.Integer, sa.Sequence('order_id_seq', optional=True), primary_key = True),
        Column('user_id', sa.Integer, sa.ForeignKey(users.c.user_id)),
        Column('description', sa.String(50)),
        Column('isopen', sa.Integer),
    )

    orderitems = Table('items', metadata,
        Column('item_id', sa.INT, sa.Sequence('items_id_seq', optional=True), primary_key = True),
        # NOTE(review): ForeignKey("orders") names a table with no column
        # qualifier -- presumably intended as "orders.order_id"; confirm
        # how this resolves before relying on it.
        Column('order_id', sa.INT, sa.ForeignKey("orders")),
        Column('item_name', sa.VARCHAR(50)),
    )
def test_iteration(self):
    # Checks that schema-qualified tables can be dependency-sorted, and
    # that generated CREATE TABLE statements qualify (or omit) the schema
    # name according to the dialect preparer's omit_schema flag.
    metadata = MetaData()
    table1 = Table('table1', metadata,
        Column('col1', sa.Integer, primary_key=True),
        schema='someschema')
    table2 = Table('table2', metadata,
        Column('col1', sa.Integer, primary_key=True),
        Column('col2', sa.Integer, sa.ForeignKey('someschema.table1.col1')),
        schema='someschema')

    # ensure this doesnt crash
    print [t for t in metadata.sorted_tables]

    # capture generated DDL text via a mock engine whose executor just
    # appends to a buffer
    buf = StringIO.StringIO()
    def foo(s, p=None):
        buf.write(s)
    gen = sa.create_engine(testing.db.name + "://", strategy="mock", executor=foo)
    gen = gen.dialect.schemagenerator(gen.dialect, gen)
    gen.traverse(table1)
    gen.traverse(table2)
    buf = buf.getvalue()
    print buf

    # dialects that can't render schema names should emit unqualified DDL
    if testing.db.dialect.preparer(testing.db.dialect).omit_schema:
        assert buf.index("CREATE TABLE table1") > -1
        assert buf.index("CREATE TABLE table2") > -1
    else:
        assert buf.index("CREATE TABLE someschema.table1") > -1
        assert buf.index("CREATE TABLE someschema.table2") > -1
def test_compileonattr_rel_backref_a(self):
    # Verifies that touching a backref attribute on a brand-new instance
    # works before any explicit mapper compilation, for both plain
    # 'object' bases and a base with a permissive __init__.
    m = MetaData()
    t1 = Table('t1', m,
               Column('id', Integer, primary_key=True),
               Column('x', Integer))
    t2 = Table('t2', m,
               Column('id', Integer, primary_key=True),
               Column('t1_id', Integer, ForeignKey('t1.id')))

    class Base(object):
        def __init__(self, *args, **kwargs):
            pass

    # fresh classes and mappers for each base so each iteration starts
    # from an uncompiled state
    for base in object, Base:
        class A(base):
            pass
        class B(base):
            pass
        mapper(A, t1, properties=dict(bs=relation(B, backref='a')))
        mapper(B, t2)

        b = B()
        # attribute access must trigger compilation and return the
        # uninitialized scalar default
        assert b.a is None
        a = A()
        b.a = a

        session = create_session()
        session.add(b)
        # the backref should have cascaded 'a' into the session too
        assert a in session, "base is %s" % base
def test_override_keys(self):
    """Columns can be overridden with a 'key', and ForeignKey targeting
    during reflection still works."""
    meta = MetaData(testing.db)
    Table('a', meta,
          Column('x', sa.Integer, primary_key=True),
          Column('z', sa.Integer),
          test_needs_fk=True)
    Table('b', meta,
          Column('y', sa.Integer, sa.ForeignKey('a.x')),
          test_needs_fk=True)
    meta.create_all()
    try:
        m2 = MetaData(testing.db)
        a2 = Table('a', m2,
                   Column('x', sa.Integer, primary_key=True, key='x1'),
                   autoload=True)
        b2 = Table('b', m2, autoload=True)
        # both the join condition and the FK resolve via the new key
        assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y)
        assert b2.c.y.references(a2.c.x1)
    finally:
        meta.drop_all()
def setUpAll(self):
    """Create the sequence-driven fixture tables shared by this class."""
    global cartitems, sometable, metadata
    metadata = MetaData(testing.db)
    cartitems = Table(
        "cartitems", metadata,
        Column("cart_id", Integer, Sequence('cart_id_seq'),
               primary_key=True),
        Column("description", String(40)),
        Column("createdate", sa.DateTime()),
    )
    sometable = Table(
        'Manager', metadata,
        Column('obj_id', Integer, Sequence('obj_id_seq')),
        Column('name', String(128)),
        Column('id', Integer, Sequence('Manager_id_seq', optional=True),
               primary_key=True),
    )
    metadata.create_all()
def test_passive_override(self):
    """
    Primarily for postgres, tests that when we get a primary key column
    back from reflecting a table which has a default value on it, we
    pre-execute that DefaultClause upon insert, even though DefaultClause
    says "let the database execute this", because in postgres we must
    have all the primary key values in memory before insert; otherwise
    we can't locate the just inserted row.
    """
    # TODO: move this to dialect/postgres
    try:
        meta = MetaData(testing.db)
        testing.db.execute("""
            CREATE TABLE speedy_users
            (
                speedy_user_id  SERIAL   PRIMARY KEY,
                user_name       VARCHAR  NOT NULL,
                user_password   VARCHAR  NOT NULL
            );
        """, None)
        t = Table("speedy_users", meta, autoload=True)
        # BUG FIX: the inserted password must match the value asserted
        # below; previously the insert used a different placeholder
        # value, so the fetchall() comparison could never succeed.
        t.insert().execute(user_name='user', user_password='lala')
        l = t.select().execute().fetchall()
        eq_(l, [(1, 'user', 'lala')])
    finally:
        testing.db.execute("drop table speedy_users", None)
def test_compileonattr_rel_backref_b(self):
    # Like the _a variant, but the relation/backref is configured on the
    # B mapper; checks lazy mapper compilation across several base-class
    # styles (plain object, no-arg __init__, *args/**kwargs __init__).
    m = MetaData()
    t1 = Table('t1', m,
               Column('id', Integer, primary_key=True),
               Column('x', Integer))
    t2 = Table('t2', m,
               Column('id', Integer, primary_key=True),
               Column('t1_id', Integer, ForeignKey('t1.id')))

    class Base(object):
        def __init__(self):
            pass
    class Base_AKW(object):
        def __init__(self, *args, **kwargs):
            pass

    for base in object, Base, Base_AKW:
        class A(base):
            pass
        class B(base):
            pass
        mapper(A, t1)
        mapper(B, t2, properties=dict(a=relation(A, backref='bs')))

        a = A()
        b = B()
        b.a = a

        session = create_session()
        session.add(a)
        # setting b.a should have backref'd b into a.bs, cascading b
        # into the session along with a
        assert b in session, 'base: %s' % base
def define_tables(self, metadata):
    """Schema with natural-key primary keys and ON UPDATE CASCADE
    foreign keys, including a composite-PK association table."""
    Table(
        'users', metadata,
        Column('username', String(50), primary_key=True),
        Column('fullname', String(100)),
    )
    Table(
        'addresses', metadata,
        Column('email', String(50), primary_key=True),
        Column('username', String(50),
               ForeignKey('users.username', onupdate="cascade")),
    )
    Table(
        'items', metadata,
        Column('itemname', String(50), primary_key=True),
        Column('description', String(100)),
    )
    Table(
        'users_to_items', metadata,
        Column('username', String(50),
               ForeignKey('users.username', onupdate='cascade'),
               primary_key=True),
        Column('itemname', String(50),
               ForeignKey('items.itemname', onupdate='cascade'),
               primary_key=True),
    )
def define_tables(self, metadata):
    """Two tables; table2.someid infers its type from its FK target."""
    Table(
        'table1', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String(30)),
    )
    Table(
        'table2', metadata,
        Column('id', Integer, primary_key=True),
        # type None: inherited from the referenced table1.id column
        Column('someid', None, ForeignKey('table1.id')),
    )
def test_index_reflection(self):
    # Reflect a table carrying three indexes (explicit unique, explicit
    # composite non-unique, and one generated by index=True) and verify
    # name, uniqueness and column order survive the round trip.
    m1 = MetaData(testing.db)
    t1 = Table('party', m1,
        Column('id', sa.Integer, nullable=False),
        Column('name', sa.String(20), index=True)
    )
    i1 = sa.Index('idx1', t1.c.id, unique=True)
    i2 = sa.Index('idx2', t1.c.name, t1.c.id, unique=False)
    m1.create_all()
    try:
        m2 = MetaData(testing.db)
        t2 = Table('party', m2, autoload=True)

        print len(t2.indexes), t2.indexes
        assert len(t2.indexes) == 3
        # Make sure indexes are in the order we expect them in;
        # sorting by name gives idx1, idx2, then the generated index
        tmp = [(idx.name, idx) for idx in t2.indexes]
        tmp.sort()
        r1, r2, r3 = [idx[1] for idx in tmp]

        assert r1.name == 'idx1'
        assert r2.name == 'idx2'
        assert r1.unique == True
        assert r2.unique == False
        assert r3.unique == False
        assert [t2.c.id] == r1.columns
        assert [t2.c.name, t2.c.id] == r2.columns
        assert [t2.c.name] == r3.columns
    finally:
        m1.drop_all()
def test_composite_fk(self):
    """Reflection of composite foreign keys."""
    meta = MetaData(testing.db)
    multi = Table(
        'multi', meta,
        Column('multi_id', sa.Integer, primary_key=True),
        Column('multi_rev', sa.Integer, primary_key=True),
        Column('multi_hoho', sa.Integer, primary_key=True),
        Column('name', sa.String(50), nullable=False),
        Column('val', sa.String(100)),
        test_needs_fk=True,
    )
    multi2 = Table(
        'multi2', meta,
        Column('id', sa.Integer, primary_key=True),
        Column('foo', sa.Integer),
        Column('bar', sa.Integer),
        Column('lala', sa.Integer),
        Column('data', sa.String(50)),
        sa.ForeignKeyConstraint(
            ['foo', 'bar', 'lala'],
            ['multi.multi_id', 'multi.multi_rev', 'multi.multi_hoho']),
        test_needs_fk=True,
    )
    meta.create_all()
    try:
        meta2 = MetaData()
        table = Table('multi', meta2, autoload=True,
                      autoload_with=testing.db)
        table2 = Table('multi2', meta2, autoload=True,
                       autoload_with=testing.db)
        self.assert_tables_equal(multi, table)
        self.assert_tables_equal(multi2, table2)

        # the composite FK should drive a three-column join condition
        j = sa.join(table, table2)
        expected = sa.and_(table.c.multi_id == table2.c.foo,
                           table.c.multi_rev == table2.c.bar,
                           table.c.multi_hoho == table2.c.lala)
        self.assert_(expected.compare(j.onclause))
    finally:
        meta.drop_all()
def define_tables(self, metadata):
    """Joined-table inheritance schema: email_users extends users."""
    Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(30)),
        Column('type', String(30)),
    )
    Table(
        'email_users', metadata,
        Column('id', Integer, ForeignKey('users.id'), primary_key=True),
        Column('email_address', String(30)),
    )
def define_tables(self, metadata):
    """Standard users/addresses fixture schema."""
    Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(40)),
        Column('fullname', String(100)),
        Column('password', String(15)),
    )
    Table(
        'addresses', metadata,
        Column('id', Integer, primary_key=True),
        Column('email_address', String(100), nullable=False),
        Column('user_id', Integer, ForeignKey('users.id')),
    )
def define_tables(self, metadata):
    """table_a/table_b schema with a simple one-to-many FK."""
    Table(
        'table_a', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(30)),
    )
    Table(
        'table_b', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(30)),
        Column('a_id', Integer, ForeignKey('table_a.id')),
    )
def test_with_inheritance(self):
    # Memory-leak regression test: repeatedly map, exercise and tear down
    # a two-level inheritance hierarchy.  @profile_memory asserts memory
    # stays flat across runs; assert_no_mappers() verifies the mapper
    # registry is fully cleaned up afterwards.
    metadata = MetaData(testing.db)

    table1 = Table("mytable", metadata,
        Column('col1', Integer, primary_key=True),
        Column('col2', String(30)))

    table2 = Table("mytable2", metadata,
        Column('col1', Integer, ForeignKey('mytable.col1'),
               primary_key=True),
        Column('col3', String(30)),
        )

    @profile_memory
    def go():
        # classes are created inside the profiled function so each run
        # builds (and must release) a complete set of mappers
        class A(_base.ComparableEntity):
            pass
        class B(A):
            pass
        mapper(A, table1,
               polymorphic_on=table1.c.col2,
               polymorphic_identity='a')
        mapper(B, table2,
               inherits=A,
               polymorphic_identity='b')

        sess = create_session()
        a1 = A()
        a2 = A()
        b1 = B(col3='b1')
        b2 = B(col3='b2')
        for x in [a1, a2, b1, b2]:
            sess.add(x)
        sess.flush()
        sess.clear()

        alist = sess.query(A).order_by(A.col1).all()
        self.assertEquals(
            [A(), A(), B(col3='b1'), B(col3='b2')],
            alist)

        for a in alist:
            sess.delete(a)
        sess.flush()

        # dont need to clear_mappers(); dropping the class references
        # should be sufficient for cleanup
        del B
        del A

    metadata.create_all()
    try:
        go()
    finally:
        metadata.drop_all()
    assert_no_mappers()
def test_autoincrement_single_col(self):
    """A lone autoincrement PK column gets a generated id on an
    otherwise-empty insert."""
    single = Table('single', self.metadata,
                   Column('id', Integer, primary_key=True))
    single.create()

    result = single.insert().execute()
    generated_id = result.last_inserted_ids()[0]
    assert generated_id is not None
    eq_(1, sa.select([func.count(sa.text('*'))],
                     from_obj=single).scalar())
def test_session(self):
    # Memory-leak regression test for a parent/child mapping with a
    # delete cascade and a non-primary mapper: run a full create/query/
    # delete cycle under @profile_memory, then verify mappers dispose
    # cleanly once their references are dropped.
    metadata = MetaData(testing.db)

    table1 = Table("mytable", metadata,
        Column('col1', Integer, primary_key=True),
        Column('col2', String(30)))

    table2 = Table("mytable2", metadata,
        Column('col1', Integer, primary_key=True),
        Column('col2', String(30)),
        Column('col3', Integer, ForeignKey("mytable.col1")))

    metadata.create_all()

    m1 = mapper(A, table1, properties={
        "bs": relation(B, cascade="all, delete", order_by=table2.c.col1)
    }, order_by=table1.c.col1)
    m2 = mapper(B, table2)

    # a non-primary mapper over the same class/table pair
    m3 = mapper(A, table1, non_primary=True)

    @profile_memory
    def go():
        sess = create_session()
        a1 = A(col2="a1")
        a2 = A(col2="a2")
        a3 = A(col2="a3")
        a1.bs.append(B(col2="b1"))
        a1.bs.append(B(col2="b2"))
        a3.bs.append(B(col2="b3"))
        for x in [a1, a2, a3]:
            sess.add(x)
        sess.flush()
        sess.clear()

        alist = sess.query(A).all()
        self.assertEquals(
            [
                A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
                A(col2="a2", bs=[]),
                A(col2="a3", bs=[B(col2="b3")])
            ],
            alist)

        for a in alist:
            sess.delete(a)
        sess.flush()
    go()

    metadata.drop_all()
    # release the mappers so assert_no_mappers() can verify disposal
    del m1, m2, m3
    assert_no_mappers()
def setUpAll(self):
    """Create the FOR UPDATE counters fixture table for this class."""
    global counters, metadata
    metadata = MetaData()
    counters = Table(
        'forupdate_counters', metadata,
        Column('counter_id', INT, primary_key=True),
        Column('counter_value', INT),
        test_needs_acid=True,
    )
    counters.create(testing.db)
def setUpAll(self):
    """Create the query_users fixture table for this class."""
    global users, metadata
    metadata = MetaData()
    users = Table(
        'query_users', metadata,
        Column('user_id', INT, primary_key=True),
        Column('user_name', VARCHAR(20)),
        test_needs_acid=True,
    )
    users.create(testing.db)
def define_tables(self, metadata):
    """items/keywords many-to-many schema with an association table
    that also carries its own data column."""
    Table(
        'items', metadata,
        Column('item_id', Integer, primary_key=True),
        Column('name', String(40)),
    )
    Table(
        'item_keywords', metadata,
        Column('item_id', Integer, ForeignKey('items.item_id')),
        Column('keyword_id', Integer, ForeignKey('keywords.keyword_id')),
        Column('data', String(40)),
    )
    Table(
        'keywords', metadata,
        Column('keyword_id', Integer, primary_key=True),
        Column('name', String(40)),
    )
def setUpAll(self):
    """Create a threadlocal-strategy engine plus its fixture table."""
    global users, metadata, tlengine
    tlengine = create_engine(testing.db.url, strategy='threadlocal')
    metadata = MetaData()
    users = Table(
        'query_users', metadata,
        Column('user_id', INT,
               Sequence('query_users_id_seq', optional=True),
               primary_key=True),
        Column('user_name', VARCHAR(20)),
        test_needs_acid=True,
    )
    users.create(tlengine)
def define_tables(self, metadata):
    """t1.id defaults to a scalar subquery selecting max(t2.nextid)."""
    t2 = Table('t2', metadata, Column('nextid', Integer))
    Table(
        't1', metadata,
        Column('id', Integer, primary_key=True,
               default=sa.select([func.max(t2.c.nextid)]).as_scalar()),
        Column('data', String(30)),
    )
def define_tables(self, metadata):
    """users/addresses schema keyed on a unique username with an
    ON UPDATE CASCADE foreign key."""
    Table(
        'users', metadata,
        Column('id', Integer, primary_key=True),
        Column('username', String(50), unique=True),
        Column('fullname', String(100)),
    )
    Table(
        'addresses', metadata,
        Column('id', Integer, primary_key=True),
        Column('email', String(50)),
        Column('username', String(50),
               ForeignKey('users.username', onupdate="cascade")),
    )
def test_pickle_via_reflect():
    """Most common use case: pickling the results of database
    reflection.

    The unpickled MetaData comes back unbound, and its tables are
    copies rather than the originally reflected Table objects.
    """
    meta2 = MetaData(bind=testing.db)
    t1 = Table('mytable', meta2, autoload=True)
    Table('othertable', meta2, autoload=True)

    meta3 = pickle.loads(pickle.dumps(meta2))
    assert meta3.bind is None
    assert meta3.tables['mytable'] is not t1

    return (meta3.tables['mytable'], meta3.tables['othertable'])
def define_tables(self, metadata):
    """users/addresses schema where addresses carry a purpose tag and
    a bounce counter defaulting to zero."""
    Table(
        'users_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(64)),
    )
    Table(
        'addresses_table', metadata,
        Column('id', Integer, primary_key=True),
        Column('user_id', Integer, ForeignKey('users_table.id')),
        Column('email_address', String(128)),
        Column('purpose', String(16)),
        Column('bounces', Integer, default=0),
    )
def test_metadata_connect(self):
    """Binding an engine to MetaData after table definition still
    enables implicit execution through the tables."""
    metadata = MetaData()
    t1 = Table('table1', metadata,
               Column('col1', Integer, primary_key=True),
               Column('col2', String(20)))
    metadata.bind = testing.db
    metadata.create_all()
    try:
        assert t1.count().scalar() == 0
    finally:
        metadata.drop_all()
def setUpAll(self):
    """Build and create the shared query_users fixture table."""
    global users, metadata
    metadata = MetaData()
    users = Table('query_users', metadata,
                  Column('user_id', INT, primary_key=True),
                  Column('user_name', VARCHAR(20)),
                  test_needs_acid=True)
    users.create(testing.db)
def setUpAll(self):
    """Build and create the shared forupdate_counters fixture table."""
    global counters, metadata
    metadata = MetaData()
    counters = Table('forupdate_counters', metadata,
                     Column('counter_id', INT, primary_key=True),
                     Column('counter_value', INT),
                     test_needs_acid=True)
    counters.create(testing.db)
def setUp(self):
    """Create a reconnect-capable engine, its table, and 99 seed rows."""
    global meta, table, engine
    engine = engines.reconnecting_engine()
    meta = MetaData(engine)
    table = Table('sometable', meta,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))
    meta.create_all()
    rows = [{'id': i, 'name': 'row %d' % i} for i in range(1, 100)]
    table.insert().execute(rows)
def define_tables(self, metadata):
    """departments/employees schema with a simple FK."""
    Table(
        'departments', metadata,
        Column('department_id', Integer, primary_key=True),
        Column('name', String(50)),
    )
    Table(
        'employees', metadata,
        Column('person_id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('department_id', Integer,
               ForeignKey('departments.department_id')),
    )
def define_tables(self, metadata):
    """left/middle/right schema; both outer tables use composite PKs
    whose first member is an FK to middle."""
    Table(
        'left', metadata,
        Column('id', Integer, ForeignKey('middle.id'), primary_key=True),
        Column('data', String(50), primary_key=True),
    )
    Table(
        'middle', metadata,
        Column('id', Integer, primary_key=True),
        Column('data', String(50)),
    )
    Table(
        'right', metadata,
        Column('id', Integer, ForeignKey('middle.id'), primary_key=True),
        Column('data', String(50), primary_key=True),
    )
def define_tables(self, metadata):
    """infos with two dependent tables: timestamped data rows and
    start/finish ranges."""
    Table(
        'infos', metadata,
        Column('pk', Integer, primary_key=True),
        Column('info', String(128)),
    )
    Table(
        'data', metadata,
        Column('data_pk', Integer, primary_key=True),
        Column('info_pk', Integer, ForeignKey('infos.pk')),
        Column('timeval', Integer),
        Column('data_val', String(128)),
    )
    Table(
        'rels', metadata,
        Column('rel_pk', Integer, primary_key=True),
        Column('info_pk', Integer, ForeignKey('infos.pk')),
        Column('start', Integer),
        Column('finish', Integer),
    )
def test_append_constraint_unique(self):
    """Appending the same ForeignKeyConstraint twice is a no-op: the
    FK is not duplicated on the column or the table."""
    meta = MetaData()
    users = Table('users', meta, Column('id', sa.Integer))
    addresses = Table('addresses', meta,
                      Column('id', sa.Integer),
                      Column('user_id', sa.Integer))
    fk = sa.ForeignKeyConstraint(['user_id'], [users.c.id])

    addresses.append_constraint(fk)
    addresses.append_constraint(fk)

    assert len(addresses.c.user_id.foreign_keys) == 1
    assert addresses.constraints == set([addresses.primary_key, fk])
def test_override_existing_fk(self):
    """test that you can override columns and specify new foreign
    keys to other reflected tables, on columns which *do* already
    have that foreign key, and that the FK is not duped.
    """
    meta = MetaData(testing.db)
    users = Table('users', meta,
        Column('id', sa.Integer, primary_key=True),
        Column('name', sa.String(30)),
        test_needs_fk=True)
    addresses = Table('addresses', meta,
        Column('id', sa.Integer, primary_key=True),
        Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
        test_needs_fk=True)

    meta.create_all()
    try:
        # case 1: override only the FK column
        meta2 = MetaData(testing.db)
        a2 = Table('addresses', meta2,
            Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
            autoload=True)
        u2 = Table('users', meta2, autoload=True)

        s = sa.select([a2])
        assert s.c.user_id
        # exactly one FK: the explicit override replaced, not duplicated,
        # the reflected one
        assert len(a2.foreign_keys) == 1
        assert len(a2.c.user_id.foreign_keys) == 1
        # PK constraint + FK constraint
        assert len(a2.constraints) == 2
        assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
        assert [c.parent for c in a2.c.user_id.foreign_keys] == [a2.c.user_id]
        assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
        assert u2.join(a2).onclause == u2.c.id==a2.c.user_id

        # case 2: override both the PK column and the FK column
        meta2 = MetaData(testing.db)
        u2 = Table('users', meta2,
            Column('id', sa.Integer, primary_key=True),
            autoload=True)
        a2 = Table('addresses', meta2,
            Column('id', sa.Integer, primary_key=True),
            Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
            autoload=True)

        s = sa.select([a2])
        assert s.c.user_id
        assert len(a2.foreign_keys) == 1
        assert len(a2.c.user_id.foreign_keys) == 1
        assert len(a2.constraints) == 2
        assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
        assert [c.parent for c in a2.c.user_id.foreign_keys] == [a2.c.user_id]
        assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
        assert u2.join(a2).onclause == u2.c.id==a2.c.user_id
    finally:
        meta.drop_all()
def test_empty_insert(self):
    """An INSERT with no values still produces a row when the lone
    column has a server default."""
    metadata = MetaData(testing.db)
    t1 = Table('t1', metadata,
               Column('is_true', Boolean, server_default=('1')))
    metadata.create_all()
    try:
        t1.insert().execute()
        self.assertEquals(
            1, select([func.count(text('*'))], from_obj=t1).scalar())
        self.assertEquals(True, t1.select().scalar())
    finally:
        metadata.drop_all()
def test_non_autoincrement(self):
    """sqlite INT primary keys can be non-unique (only for ints);
    stricter databases must reject inserts that omit the PK value."""
    nonai = Table("nonaitest", self.metadata,
                  Column('id', Integer, autoincrement=False,
                         primary_key=True),
                  Column('data', String(20)))
    nonai.create()

    try:
        # postgres + mysql strict will fail on first row,
        # mysql in legacy mode fails on second row
        nonai.insert().execute(data='row 1')
        nonai.insert().execute(data='row 2')
        assert False
    except sa.exc.SQLError:
        assert True
def test_with_explicit_autoloaded(self):
    # A declarative class may supply its own autoloaded __table__;
    # flushes through the mapped class should hit the reflected table.
    meta = MetaData(testing.db)
    t1 = Table('t1', meta,
        Column('id', String(50), primary_key=True),
        Column('data', String(50)))
    meta.create_all()
    try:
        class MyObj(Base):
            __table__ = Table('t1', Base.metadata, autoload=True)

        sess = create_session()
        m = MyObj(id="someid", data="somedata")
        sess.add(m)
        sess.flush()

        # verify the row landed in the physical table
        eq_(t1.select().execute().fetchall(), [('someid', 'somedata')])
    finally:
        meta.drop_all()
def test_rollback_deadlock(self):
    """test that returning connections to the pool clears any object
    locks."""
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    users = Table('deadlock_users', metadata,
        Column('user_id', INT, primary_key = True),
        Column('user_name', VARCHAR(20)),
        test_needs_acid=True,
    )
    # create and read through conn1, leaving it holding locks until it
    # is returned to the pool
    users.create(conn1)
    conn1.execute("select * from deadlock_users")
    conn1.close()

    # without auto-rollback in the connection pool's return() logic,
    # this deadlocks in Postgres, because conn1 is returned to the pool
    # but still has a lock on "deadlock_users".
    # comment out the rollback in pool/ConnectionFairy._close() to see !
    users.drop(conn2)
    conn2.close()
def test_unknown_types(self):
    """Reflecting a column type missing from the dialect's
    ischema_names map should warn and fall back to NullType."""
    meta = MetaData(testing.db)
    t = Table("test", meta, Column('foo', sa.DateTime))

    ischema_names = testing.db.dialect.ischema_names
    t.create()
    # empty the type map so the reflected type cannot be recognized
    testing.db.dialect.ischema_names = {}
    try:
        m2 = MetaData(testing.db)
        self.assertRaises(tsa.exc.SAWarning, Table, "test", m2,
                          autoload=True)

        @testing.emits_warning('Did not recognize type')
        def warns():
            m3 = MetaData(testing.db)
            t3 = Table("test", m3, autoload=True)
            assert t3.c.foo.type.__class__ == sa.types.NullType

        # BUG FIX: the decorated function was defined but never invoked,
        # leaving its assertions dead code.  Call it so the NullType
        # fallback is actually exercised.
        warns()
    finally:
        # restore the dialect's type map regardless of outcome
        testing.db.dialect.ischema_names = ischema_names
        t.drop()
def test_override_pkfk(self):
    """Overriding a reflected column that is both a primary key and a
    foreign key keeps the PK intact and wires up the FK join."""
    meta = MetaData(testing.db)
    Table('users', meta,
          Column('id', sa.Integer, primary_key=True),
          Column('name', sa.String(30)))
    Table('addresses', meta,
          Column('id', sa.Integer, primary_key=True),
          Column('street', sa.String(30)))

    meta.create_all()
    try:
        # override supplied before the target table is reflected
        meta2 = MetaData(testing.db)
        a2 = Table('addresses', meta2,
                   Column('id', sa.Integer, sa.ForeignKey('users.id'),
                          primary_key=True),
                   autoload=True)
        u2 = Table('users', meta2, autoload=True)
        assert list(a2.primary_key) == [a2.c.id]
        assert list(u2.primary_key) == [u2.c.id]
        assert u2.join(a2).onclause == u2.c.id==a2.c.id

        # override supplied after the target table is already reflected
        meta3 = MetaData(testing.db)
        u3 = Table('users', meta3, autoload=True)
        a3 = Table('addresses', meta3,
                   Column('id', sa.Integer, sa.ForeignKey('users.id'),
                          primary_key=True),
                   autoload=True)
        assert list(a3.primary_key) == [a3.c.id]
        assert list(u3.primary_key) == [u3.c.id]
        assert u3.join(a3).onclause == u3.c.id==a3.c.id
    finally:
        meta.drop_all()
def test_tometadata_with_schema(self):
    """tometadata() with a schema argument carries constraints and FK
    targets into the schema-qualified copies."""
    meta = MetaData()

    table = Table('mytable', meta,
                  Column('myid', Integer, primary_key=True),
                  Column('name', String(40), nullable=True),
                  Column('description', String(30),
                         CheckConstraint("description='hi'")),
                  UniqueConstraint('name'),
                  test_needs_fk=True)
    table2 = Table('othertable', meta,
                   Column('id', Integer, primary_key=True),
                   Column('myid', Integer, ForeignKey('mytable.myid')),
                   test_needs_fk=True)

    meta2 = MetaData()
    table_c = table.tometadata(meta2, schema='someschema')
    table2_c = table2.tometadata(meta2, schema='someschema')

    # the copied FK must resolve within the new schema
    eq_(str(table_c.join(table2_c).onclause),
        str(table_c.c.myid == table2_c.c.myid))
    eq_(str(table_c.join(table2_c).onclause),
        "someschema.mytable.myid = someschema.othertable.myid")
def test_autoincrement_fk(self):
    """A self-referential table still generates autoincrement ids, and
    the generated id can be used as a parent reference."""
    nodes = Table('nodes', self.metadata,
                  Column('id', Integer, primary_key=True),
                  Column('parent_id', Integer, ForeignKey('nodes.id')),
                  Column('data', String(30)))
    nodes.create()

    result = nodes.insert().execute(data='foo')
    parent = result.last_inserted_ids()[0]
    nodes.insert().execute(data='bar', parent_id=parent)
def test_override_nonexistent_fk(self):
    """test that you can override columns and create new foreign
    keys to other reflected tables which have no foreign keys.  this
    is common with MySQL MyISAM tables."""
    meta = MetaData(testing.db)
    users = Table('users', meta,
        Column('id', sa.Integer, primary_key=True),
        Column('name', sa.String(30)))
    addresses = Table('addresses', meta,
        Column('id', sa.Integer, primary_key=True),
        Column('street', sa.String(30)),
        Column('user_id', sa.Integer))

    meta.create_all()
    try:
        # case 1: supply the missing FK via a column override
        meta2 = MetaData(testing.db)
        a2 = Table('addresses', meta2,
            Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
            autoload=True)
        u2 = Table('users', meta2, autoload=True)

        assert len(a2.c.user_id.foreign_keys) == 1
        assert len(a2.foreign_keys) == 1
        assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
        assert [c.parent for c in a2.c.user_id.foreign_keys] == [a2.c.user_id]
        assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
        assert u2.join(a2).onclause == u2.c.id==a2.c.user_id

        # case 2: target table reflected first, then the override
        meta3 = MetaData(testing.db)
        u3 = Table('users', meta3, autoload=True)
        a3 = Table('addresses', meta3,
            Column('user_id', sa.Integer, sa.ForeignKey('users.id')),
            autoload=True)
        assert u3.join(a3).onclause == u3.c.id==a3.c.user_id

        # case 3: overrides that also remap column keys; the FK targets
        # the overridden key name 'u_id'
        meta4 = MetaData(testing.db)
        u4 = Table('users', meta4,
            Column('id', sa.Integer, key='u_id', primary_key=True),
            autoload=True)
        a4 = Table('addresses', meta4,
            Column('id', sa.Integer, key='street', primary_key=True),
            Column('street', sa.String(30), key='user_id'),
            Column('user_id', sa.Integer, sa.ForeignKey('users.u_id'),
                key='id'),
            autoload=True)

        assert u4.join(a4).onclause.compare(u4.c.u_id==a4.c.id)
        assert list(u4.primary_key) == [u4.c.u_id]
        assert len(u4.columns) == 2
        assert len(u4.constraints) == 1
        assert len(a4.columns) == 3
        assert len(a4.constraints) == 2
    finally:
        meta.drop_all()
def test_prefixes(self):
    """Table prefixes= entries are emitted verbatim between CREATE and
    TABLE in the generated DDL."""
    table1 = Table("temporary_table_1", self.metadata,
                   Column("col1", Integer),
                   prefixes=["TEMPORARY"])
    table1.create()
    assert [str(x) for x in self.engine.mock
            if 'CREATE TEMPORARY TABLE' in str(x)]

    # reset the captured statements before the second case
    del self.engine.mock[:]
    table2 = Table("temporary_table_2", self.metadata,
                   Column("col1", Integer),
                   prefixes=["VIRTUAL"])
    table2.create()
    assert [str(x) for x in self.engine.mock
            if 'CREATE VIRTUAL TABLE' in str(x)]
def test_create_drop_explicit(self):
    """create_all/drop_all and table create/drop accept a bind either
    positionally or as a keyword, for engines and connections alike."""
    metadata = MetaData()
    table = Table('test_table', metadata, Column('foo', Integer))
    for bind in (testing.db, testing.db.connect()):
        for args, kwargs in (((), {'bind': bind}), ((bind,), {})):
            metadata.create_all(*args, **kwargs)
            assert table.exists(*args, **kwargs)
            metadata.drop_all(*args, **kwargs)
            table.create(*args, **kwargs)
            table.drop(*args, **kwargs)
            assert not table.exists(*args, **kwargs)
def test_create_drop_constructor_bound(self):
    """MetaData bound at construction (positionally or by keyword)
    propagates its bind to tables and supports bind-less create/drop."""
    for bind in (testing.db, testing.db.connect()):
        try:
            for args, kwargs in (((bind,), {}), ((), {'bind': bind})):
                metadata = MetaData(*args, **kwargs)
                table = Table('test_table', metadata,
                              Column('foo', Integer))
                assert metadata.bind is table.bind is bind
                metadata.create_all()
                assert table.exists()
                metadata.drop_all()
                table.create()
                table.drop()
                assert not table.exists()
        finally:
            # close raw connections so they don't leak between binds
            if isinstance(bind, engine.Connection):
                bind.close()
def test_basic_reflection(self):
    # Round-trip a broad sampling of column types, nullability, server
    # defaults and a self-referential FK through reflection, then compare
    # the reflected tables against the originals.
    meta = MetaData(testing.db)

    users = Table('engine_users', meta,
        Column('user_id', sa.INT, primary_key=True),
        Column('user_name', sa.VARCHAR(20), nullable=False),
        Column('test1', sa.CHAR(5), nullable=False),
        Column('test2', sa.Float(5), nullable=False),
        Column('test3', sa.Text),
        Column('test4', sa.Numeric, nullable = False),
        Column('test5', sa.DateTime),
        # self-referential foreign key
        Column('parent_user_id', sa.Integer,
               sa.ForeignKey('engine_users.user_id')),
        Column('test6', sa.DateTime, nullable=False),
        Column('test7', sa.Text),
        Column('test8', sa.Binary),
        Column('test_passivedefault2', sa.Integer, server_default='5'),
        Column('test9', sa.Binary(100)),
        Column('test_numeric', sa.Numeric()),
        test_needs_fk=True,
    )

    addresses = Table('engine_email_addresses', meta,
        Column('address_id', sa.Integer, primary_key = True),
        Column('remote_user_id', sa.Integer,
               sa.ForeignKey(users.c.user_id)),
        Column('email_address', sa.String(20)),
        test_needs_fk=True,
    )
    meta.create_all()

    try:
        meta2 = MetaData()
        reflected_users = Table('engine_users', meta2, autoload=True,
                                autoload_with=testing.db)
        reflected_addresses = Table('engine_email_addresses', meta2,
                                    autoload=True,
                                    autoload_with=testing.db)
        self.assert_tables_equal(users, reflected_users)
        self.assert_tables_equal(addresses, reflected_addresses)
    finally:
        # drop child table first to satisfy the FK dependency
        addresses.drop()
        users.drop()
def test_implicit_execution(self):
    """Implicit execution through a MetaData bound to a Connection.

    Binds the MetaData to an open Connection mid-transaction, executes
    inserts implicitly, rolls the transaction back, then confirms none
    of the rows survived.
    """
    metadata = MetaData()
    table = Table('test_table', metadata,
        Column('foo', Integer),
        test_needs_acid=True,
    )
    conn = testing.db.connect()
    metadata.create_all(bind=conn)
    try:
        trans = conn.begin()
        metadata.bind = conn
        t = table.insert()
        # the bind should flow from metadata to generated statements
        assert t.bind is conn
        for value in (5, 6, 7):
            table.insert().execute(foo=value)
        trans.rollback()
        metadata.bind = None
        # rollback must have discarded every inserted row
        assert conn.execute(
            "select count(1) from test_table").scalar() == 0
    finally:
        metadata.drop_all(bind=conn)
def test_basic_override(self):
    """Reflection with explicit column overrides.

    Reflects 'override_test' while overriding col2's type and adding a
    col4 not present in the database; reflected col1 keeps its real
    type, and the overridden/added columns use the declared types.
    """
    meta = MetaData(testing.db)
    table = Table('override_test', meta,
        Column('col1', sa.Integer, primary_key=True),
        Column('col2', sa.String(20)),
        Column('col3', sa.Numeric))
    table.create()
    meta2 = MetaData(testing.db)
    try:
        # rebind `table` so the finally: drop targets whichever
        # Table object exists at failure time
        table = Table('override_test', meta2,
            Column('col2', sa.Unicode()),
            Column('col4', sa.String(30)),
            autoload=True)
        expectations = (
            ('col1', sa.Integer),   # reflected
            ('col2', sa.Unicode),   # overridden
            ('col4', sa.String),    # added locally
        )
        for colname, expected_type in expectations:
            self.assert_(isinstance(table.c[colname].type, expected_type))
    finally:
        table.drop()
class DDLEventTest(TestBase):
    """Tests for before/after create/drop DDL event listeners on Table
    and MetaData.

    BUGFIX: the metadata-level before-create test was originally also
    named ``test_table_create_before``; the second definition silently
    replaced the first in the class namespace, so the table-level test
    never ran.  It is renamed ``test_metadata_create_before`` here,
    matching its ``test_metadata_create_after`` / ``_both`` siblings.
    """

    class Canary(object):
        """Listener that records the last event and validates the
        schema_item/bind passed to each callback."""

        def __init__(self, schema_item, bind):
            self.state = None
            self.schema_item = schema_item
            self.bind = bind

        def before_create(self, action, schema_item, bind):
            assert self.state is None
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def after_create(self, action, schema_item, bind):
            # 'skipped' is set by tests that register only the after hook
            assert self.state in ('before-create', 'skipped')
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def before_drop(self, action, schema_item, bind):
            assert self.state is None
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def after_drop(self, action, schema_item, bind):
            assert self.state in ('before-drop', 'skipped')
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

    def setUp(self):
        self.bind = engines.mock_engine()
        self.metadata = MetaData()
        self.table = Table('t', self.metadata, Column('id', Integer))

    def test_table_create_before(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.create(bind)
        assert canary.state == 'before-create'
        table.drop(bind)
        # drop must not fire create listeners
        assert canary.state == 'before-create'

    def test_table_create_after(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['after-create'].append(canary.after_create)
        canary.state = 'skipped'
        table.create(bind)
        assert canary.state == 'after-create'
        table.drop(bind)
        assert canary.state == 'after-create'

    def test_table_create_both(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.ddl_listeners['after-create'].append(canary.after_create)
        table.create(bind)
        assert canary.state == 'after-create'
        table.drop(bind)
        assert canary.state == 'after-create'

    def test_table_drop_before(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.create(bind)
        assert canary.state is None
        table.drop(bind)
        assert canary.state == 'before-drop'

    def test_table_drop_after(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        table.create(bind)
        assert canary.state is None
        canary.state = 'skipped'
        table.drop(bind)
        assert canary.state == 'after-drop'

    def test_table_drop_both(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        table.create(bind)
        assert canary.state is None
        table.drop(bind)
        assert canary.state == 'after-drop'

    def test_table_all(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.ddl_listeners['after-create'].append(canary.after_create)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        assert canary.state is None
        table.create(bind)
        assert canary.state == 'after-create'
        canary.state = None
        table.drop(bind)
        assert canary.state == 'after-drop'

    def test_metadata_create_before(self):
        # renamed from a duplicate ``test_table_create_before`` which
        # shadowed the table-level test of the same name
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(canary.before_create)
        metadata.create_all(bind)
        assert canary.state == 'before-create'
        metadata.drop_all(bind)
        assert canary.state == 'before-create'

    def test_metadata_create_after(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['after-create'].append(canary.after_create)
        canary.state = 'skipped'
        metadata.create_all(bind)
        assert canary.state == 'after-create'
        metadata.drop_all(bind)
        assert canary.state == 'after-create'

    def test_metadata_create_both(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(canary.before_create)
        metadata.ddl_listeners['after-create'].append(canary.after_create)
        metadata.create_all(bind)
        assert canary.state == 'after-create'
        metadata.drop_all(bind)
        assert canary.state == 'after-create'

    @testing.future
    def test_metadata_table_isolation(self):
        metadata, table, bind = self.metadata, self.table, self.bind
        table_canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(table_canary.before_create)
        metadata_canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(metadata_canary.before_create)
        # currently, table.create() routes through the same execution
        # path that metadata.create_all() does
        self.table.create(self.bind)
        assert metadata_canary.state is None

    def test_append_listener(self):
        metadata, table, bind = self.metadata, self.table, self.bind
        fn = lambda *a: None
        table.append_ddl_listener('before-create', fn)
        # unknown event names must raise LookupError
        self.assertRaises(LookupError, table.append_ddl_listener, 'blah', fn)
        metadata.append_ddl_listener('before-create', fn)
        self.assertRaises(LookupError, metadata.append_ddl_listener, 'blah', fn)
def setUp(self):
    """Build a fresh mock engine plus a one-column table fixture."""
    mock = engines.mock_engine()
    md = MetaData()
    self.bind = mock
    self.metadata = md
    self.table = Table('t', md, Column('id', Integer))
def test_proxy(self):
    """ConnectionProxy intercepts both compiled-statement and
    cursor-level execution; verify the recorded statement streams.

    BUGFIX: ``assert_stmts`` previously only failed when ``received``
    was empty at entry; if the search loop exhausted ``received``
    without ever matching the expected statement, no assertion fired
    and the test silently passed.  The ``while``/``else`` clauses now
    fail explicitly in that case.
    """
    stmts = []
    cursor_stmts = []

    class MyProxy(ConnectionProxy):
        def execute(self, conn, execute, clauseelement,
                    *multiparams, **params):
            stmts.append((str(clauseelement), params, multiparams))
            return execute(clauseelement, *multiparams, **params)

        def cursor_execute(self, execute, cursor, statement, parameters,
                           context, executemany):
            cursor_stmts.append((statement, parameters, None))
            return execute(cursor, statement, parameters, context)

    # hoisted loop invariant: compile the whitespace-collapsing
    # pattern once instead of per received statement
    whitespace = re.compile(r'[\n\t ]+', re.M)

    def assert_stmts(expected, received):
        for stmt, params, posn in expected:
            if not received:
                assert False, "no statements received, expected %r" % (stmt,)
            while received:
                teststmt, testparams, testmultiparams = received.pop(0)
                teststmt = whitespace.sub(' ', teststmt).strip()
                if teststmt.startswith(stmt) and \
                        (testparams == params or testparams == posn):
                    break
            else:
                # while/else: loop exhausted without a break, i.e. the
                # expected statement was never matched
                assert False, "received statements did not match %r" % (stmt,)

    for engine in (
        engines.testing_engine(options=dict(proxy=MyProxy())),
        engines.testing_engine(options=dict(proxy=MyProxy(),
                                            strategy='threadlocal'))
    ):
        m = MetaData(engine)
        t1 = Table('t1', m,
            Column('c1', Integer, primary_key=True),
            Column('c2', String(50), default=func.lower('Foo'),
                   primary_key=True))
        m.create_all()
        try:
            t1.insert().execute(c1=5, c2='some data')
            t1.insert().execute(c1=6)
            assert engine.execute("select * from t1").fetchall() == \
                [(5, 'some data'), (6, 'foo')]
        finally:
            m.drop_all()
        engine.dispose()

        compiled = [
            ("CREATE TABLE t1", {}, None),
            ("INSERT INTO t1 (c1, c2)", {'c2': 'some data', 'c1': 5}, None),
            ("INSERT INTO t1 (c1, c2)", {'c1': 6}, None),
            ("select * from t1", {}, None),
            ("DROP TABLE t1", {}, None)
        ]
        if engine.dialect.preexecute_pk_sequences:
            cursor = [
                ("CREATE TABLE t1", {}, None),
                ("INSERT INTO t1 (c1, c2)",
                 {'c2': 'some data', 'c1': 5}, [5, 'some data']),
                ("SELECT lower", {'lower_2': 'Foo'}, ['Foo']),
                ("INSERT INTO t1 (c1, c2)",
                 {'c2': 'foo', 'c1': 6}, [6, 'foo']),
                ("select * from t1", {}, None),
                ("DROP TABLE t1", {}, None)
            ]
        else:
            cursor = [
                ("CREATE TABLE t1", {}, ()),
                ("INSERT INTO t1 (c1, c2)",
                 {'c2': 'some data', 'c1': 5}, [5, 'some data']),
                # bind param name 'lower_2' might be incorrect
                ("INSERT INTO t1 (c1, c2)",
                 {'c1': 6, "lower_2": "Foo"}, [6, "Foo"]),
                ("select * from t1", {}, ()),
                ("DROP TABLE t1", {}, ())
            ]
        assert_stmts(compiled, stmts)
        assert_stmts(cursor, cursor_stmts)
def setUpAll(self):
    """Build the shared ``default_test1`` fixture table exercising every
    kind of column default: Python callables, literals, pre-executed SQL
    expressions, server-side (DDL) defaults, and onupdate variants.

    Populates module-level globals consumed by the test methods:
    ``t`` (the table), ``f``/``f2`` (expected length() results),
    ``ts`` (expected current-date value), ``currenttime`` (SQL
    expression), ``metadata`` and ``default_generator`` (mutable
    counter driving ``mydefault``).
    """
    global t, f, f2, ts, currenttime, metadata, default_generator
    db = testing.db
    metadata = MetaData(db)
    # mutable state shared with tests; mydefault() increments it so each
    # insert sees a new value (51, 52, ...)
    default_generator = {'x': 50}

    def mydefault():
        # plain Python-function default: no ExecutionContext argument
        default_generator['x'] += 1
        return default_generator['x']

    def myupdate_with_ctx(ctx):
        # context-aware onupdate: runs SQL on the statement's connection
        conn = ctx.connection
        return conn.execute(sa.select([sa.text('13')])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        try:
            return conn.execute(sa.select([sa.text('12')])).scalar()
        finally:
            # ensure a "close()" on this connection does nothing,
            # since its a "branched" connection
            conn.close()

    use_function_defaults = testing.against('postgres', 'mssql', 'maxdb')
    is_oracle = testing.against('oracle')

    # select "count(1)" returns different results on different DBs also
    # correct for "current_date" compatible as column default, value
    # differences
    currenttime = func.current_date(type_=sa.Date, bind=db)

    if is_oracle:
        # Oracle: truncate sysdate to day granularity for comparisons
        ts = db.scalar(sa.select([func.trunc(func.sysdate(), sa.literal_column("'DAY'"), type_=sa.Date).label('today')]))
        assert isinstance(ts, datetime.date) and not isinstance(ts, datetime.datetime)
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        # TODO: engine propigation across nested functions not working
        currenttime = func.trunc(currenttime, sa.literal_column("'DAY'"), bind=db, type_=sa.Date)
        def1 = currenttime
        def2 = func.trunc(sa.text("sysdate"), sa.literal_column("'DAY'"), type_=sa.Date)
        deftype = sa.Date
    elif use_function_defaults:
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        def1 = currenttime
        deftype = sa.Date
        # dialect-specific spelling of "current date" for the DDL default
        if testing.against('maxdb'):
            def2 = sa.text("curdate")
        elif testing.against('mssql'):
            def2 = sa.text("getdate()")
        else:
            def2 = sa.text("current_date")
        ts = db.func.current_date().scalar()
    else:
        # fallback: constant defaults so results are fully predictable
        f = len('abcdef')
        f2 = len('abcdefghijk')
        def1 = def2 = "3"
        ts = 3
        deftype = Integer

    t = Table('default_test1', metadata,
        # python function
        Column('col1', Integer, primary_key=True, default=mydefault),
        # python literal
        Column('col2', String(20), default="imthedefault", onupdate="im the update"),
        # preexecute expression
        Column('col3', Integer, default=func.length('abcdef'), onupdate=func.length('abcdefghijk')),
        # SQL-side default from sql expression
        Column('col4', deftype, server_default=def1),
        # SQL-side default from literal expression
        Column('col5', deftype, server_default=def2),
        # preexecute + update timestamp
        Column('col6', sa.Date, default=currenttime, onupdate=currenttime),
        Column('boolcol1', sa.Boolean, default=True),
        Column('boolcol2', sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column('col7', Integer, default=mydefault_using_connection, onupdate=myupdate_with_ctx),
        # python builtin
        Column('col8', sa.Date, default=datetime.date.today, onupdate=datetime.date.today),
        # combo
        Column('col9', String(20), default='py', server_default='ddl'))
    t.create()