def test_unknown_types(self):
    """Reflecting a column whose type is missing from the dialect's
    ``ischema_names`` should raise an SAWarning and, when the warning is
    tolerated, fall back to ``NullType``."""
    meta = MetaData(testing.db)
    t = Table("test", meta, Column('foo', sa.DateTime))

    import sys
    dialect_module = sys.modules[testing.db.dialect.__module__]

    # we're relying on the presence of "ischema_names" in the
    # dialect module, else we can't test this.  we need to be able
    # to get the dialect to not be aware of some type so we temporarily
    # monkeypatch.  not sure what a better way for this could be,
    # except for an established dialect hook or dialect-specific tests
    if not hasattr(dialect_module, 'ischema_names'):
        return

    ischema_names = dialect_module.ischema_names
    t.create()
    dialect_module.ischema_names = {}
    try:
        m2 = MetaData(testing.db)
        self.assertRaises(tsa.exc.SAWarning,
                          Table, "test", m2, autoload=True)

        @testing.emits_warning('Did not recognize type')
        def warns():
            m3 = MetaData(testing.db)
            t3 = Table("test", m3, autoload=True)
            assert t3.c.foo.type.__class__ == sa.types.NullType

        # BUG FIX: the decorated function was defined but never invoked,
        # so the NullType-fallback assertion never actually ran.
        warns()
    finally:
        # always restore the dialect's type map and drop the table
        dialect_module.ischema_names = ischema_names
        t.drop()
def test_create_drop_bound(self):
    """Create/drop through a MetaData bound after construction, via both
    the ``bind`` attribute and the legacy ``connect()`` method."""
    for meta_cls in (MetaData, ThreadLocalMetaData):
        for bind in (testing.db, testing.db.connect()):
            # exercise attribute-style binding first, then connect()
            for use_connect in (False, True):
                metadata = meta_cls()
                table = Table('test_table', metadata,
                              Column('foo', Integer))
                if use_connect:
                    metadata.connect(bind)
                else:
                    metadata.bind = bind
                assert metadata.bind is table.bind is bind
                metadata.create_all()
                assert table.exists()
                metadata.drop_all()
                table.create()
                table.drop()
                assert not table.exists()
            if isinstance(bind, engine.Connection):
                bind.close()
def test_create_drop_bound(self):
    """Bind a MetaData after construction (attribute or ``connect()``)
    and verify create_all/drop_all plus per-table create/drop."""
    def roundtrip(metadata, table):
        # one full create/drop cycle at both MetaData and Table level
        metadata.create_all()
        assert table.exists()
        metadata.drop_all()
        table.create()
        table.drop()
        assert not table.exists()

    for meta_cls in (MetaData, ThreadLocalMetaData):
        for bind in (testing.db, testing.db.connect()):
            metadata = meta_cls()
            table = Table('test_table', metadata, Column('foo', Integer))
            metadata.bind = bind
            assert metadata.bind is table.bind is bind
            roundtrip(metadata, table)

            metadata = meta_cls()
            table = Table('test_table', metadata, Column('foo', Integer))
            metadata.connect(bind)
            assert metadata.bind is table.bind is bind
            roundtrip(metadata, table)

            if isinstance(bind, engine.Connection):
                bind.close()
def setUpAll(self):
    """Create the shared ``forupdate_counters`` table once for this suite."""
    global counters, metadata
    metadata = MetaData()
    counters = Table(
        'forupdate_counters', metadata,
        Column('counter_id', INT, primary_key=True),
        Column('counter_value', INT),
        test_needs_acid=True)
    counters.create(testing.db)
def test_autoincrement_single_col(self):
    """A lone autoincrement primary key yields a generated id on insert."""
    single = Table('single', self.metadata,
                   Column('id', Integer, primary_key=True))
    single.create()

    result = single.insert().execute()
    new_id = result.last_inserted_ids()[0]
    assert new_id is not None
    # exactly one row should now exist
    eq_(1, sa.select([func.count(sa.text('*'))],
                     from_obj=single).scalar())
def setUpAll(self):
    """Create the shared ``query_users`` table once for this suite."""
    global users, metadata
    metadata = MetaData()
    users = Table(
        'query_users', metadata,
        Column('user_id', INT, primary_key=True),
        Column('user_name', VARCHAR(20)),
        test_needs_acid=True)
    users.create(testing.db)
def setUpAll(self):
    """Build a threadlocal-strategy engine and the shared query_users table."""
    global users, metadata, tlengine
    tlengine = create_engine(testing.db.url, strategy='threadlocal')
    metadata = MetaData()
    users = Table(
        'query_users', metadata,
        Column('user_id', INT,
               Sequence('query_users_id_seq', optional=True),
               primary_key=True),
        Column('user_name', VARCHAR(20)),
        test_needs_acid=True)
    users.create(tlengine)
def test_autoincrement_fk(self):
    """Self-referential FK: insert a parent row, then a child pointing at it."""
    nodes = Table(
        'nodes', self.metadata,
        Column('id', Integer, primary_key=True),
        Column('parent_id', Integer, ForeignKey('nodes.id')),
        Column('data', String(30)))
    nodes.create()

    result = nodes.insert().execute(data='foo')
    parent_id = result.last_inserted_ids()[0]
    nodes.insert().execute(data='bar', parent_id=parent_id)
def test_create_drop_explicit(self):
    """Create/drop on an unbound MetaData, supplying the bind explicitly
    either by keyword or positionally."""
    metadata = MetaData()
    table = Table('test_table', metadata, Column('foo', Integer))
    for bind in (testing.db, testing.db.connect()):
        # keyword style first, then positional, matching both call forms
        for positional, keywords in (([], {'bind': bind}),
                                     ([bind], {})):
            metadata.create_all(*positional, **keywords)
            assert table.exists(*positional, **keywords)
            metadata.drop_all(*positional, **keywords)
            table.create(*positional, **keywords)
            table.drop(*positional, **keywords)
            assert not table.exists(*positional, **keywords)
def setUpAll(self):
    """One-time setup: build the ``forupdate_counters`` fixture table."""
    global counters, metadata
    metadata = MetaData()
    counters = Table('forupdate_counters', metadata,
                     Column('counter_id', INT, primary_key=True),
                     Column('counter_value', INT),
                     test_needs_acid=True)
    counters.create(testing.db)
def setUpAll(self):
    """One-time setup: build the ``query_users`` fixture table."""
    global users, metadata
    metadata = MetaData()
    users = Table('query_users', metadata,
                  Column('user_id', INT, primary_key=True),
                  Column('user_name', VARCHAR(20)),
                  test_needs_acid=True)
    users.create(testing.db)
def test_prefixes(self):
    """Table ``prefixes`` must appear between CREATE and TABLE in the DDL."""
    table1 = Table('temporary_table_1', self.metadata,
                   Column('col1', Integer),
                   prefixes=['TEMPORARY'])
    table1.create()
    assert [str(stmt) for stmt in self.engine.mock
            if 'CREATE TEMPORARY TABLE' in str(stmt)]

    # reset the captured DDL before the second table
    del self.engine.mock[:]

    table2 = Table('temporary_table_2', self.metadata,
                   Column('col1', Integer),
                   prefixes=['VIRTUAL'])
    table2.create()
    assert [str(stmt) for stmt in self.engine.mock
            if 'CREATE VIRTUAL TABLE' in str(stmt)]
def setUpAll(self):
    """One-time setup: a threadlocal engine plus the query_users table."""
    global users, metadata, tlengine
    tlengine = create_engine(testing.db.url, strategy='threadlocal')
    metadata = MetaData()
    users = Table('query_users', metadata,
                  Column('user_id', INT,
                         Sequence('query_users_id_seq', optional=True),
                         primary_key=True),
                  Column('user_name', VARCHAR(20)),
                  test_needs_acid=True)
    users.create(tlengine)
def test_non_autoincrement(self):
    """Inserting without a value for a non-autoincrementing integer PK
    must fail.

    sqlite INT primary keys can be non-unique! (only for ints)
    """
    nonai = Table(
        "nonaitest", self.metadata,
        Column('id', Integer, autoincrement=False, primary_key=True),
        Column('data', String(20)))
    nonai.create()
    try:
        # postgres + mysql strict will fail on first row,
        # mysql in legacy mode fails on second row
        nonai.insert().execute(data='row 1')
        nonai.insert().execute(data='row 2')
        assert False, "expected INSERT without a PK value to fail"
    except sa.exc.SQLError:
        # FIX: replaced the Python-2-only "except Type, e:" comma syntax;
        # the bound exception object was never used.
        pass
def test_non_autoincrement(self):
    """A non-autoincrementing integer PK must reject rows that omit it.

    sqlite INT primary keys can be non-unique! (only for ints)
    """
    nonai = Table("nonaitest", self.metadata,
                  Column('id', Integer, autoincrement=False,
                         primary_key=True),
                  Column('data', String(20)))
    nonai.create()
    try:
        # postgres + mysql strict will fail on first row,
        # mysql in legacy mode fails on second row
        nonai.insert().execute(data='row 1')
        nonai.insert().execute(data='row 2')
        assert False, "expected INSERT without a PK value to fail"
    except sa.exc.SQLError:
        # FIX: legacy "except Type, e:" comma syntax removed; the
        # exception binding was unused, so no name is bound at all.
        pass
def test_create_drop_explicit(self):
    """Pass the bind explicitly to each create/drop call on an unbound
    MetaData — keyword form and positional form both work."""
    metadata = MetaData()
    table = Table('test_table', metadata, Column('foo', Integer))
    for bind in (testing.db, testing.db.connect()):
        for call_spec in (((), {'bind': bind}), ((bind,), {})):
            pos, kw = call_spec
            metadata.create_all(*pos, **kw)
            assert table.exists(*pos, **kw)
            metadata.drop_all(*pos, **kw)
            table.create(*pos, **kw)
            table.drop(*pos, **kw)
            assert not table.exists(*pos, **kw)
def test_rollback_deadlock(self):
    """test that returning connections to the pool clears any object locks."""
    conn1 = testing.db.connect()
    conn2 = testing.db.connect()
    users = Table(
        'deadlock_users', metadata,
        Column('user_id', INT, primary_key=True),
        Column('user_name', VARCHAR(20)),
        test_needs_acid=True)
    users.create(conn1)
    conn1.execute("select * from deadlock_users")
    conn1.close()

    # without auto-rollback in the connection pool's return() logic,
    # this deadlocks in Postgres: conn1 has been returned to the pool
    # but still holds a lock on "deadlock_users".  comment out the
    # rollback in pool/ConnectionFairy._close() to see!
    users.drop(conn2)
    conn2.close()
def test_prefixes(self):
    """DDL for a Table with ``prefixes`` includes them after CREATE."""
    for name, prefix, marker in (
            ('temporary_table_1', 'TEMPORARY', 'CREATE TEMPORARY TABLE'),
            ('temporary_table_2', 'VIRTUAL', 'CREATE VIRTUAL TABLE')):
        tbl = Table(name, self.metadata,
                    Column('col1', Integer),
                    prefixes=[prefix])
        tbl.create()
        assert [str(stmt) for stmt in self.engine.mock
                if marker in str(stmt)]
        # clear captured statements between iterations; the trailing
        # clear after the second table is harmless
        del self.engine.mock[:]
def test_rollback_deadlock(self):
    """test that returning connections to the pool clears any object locks."""
    first = testing.db.connect()
    second = testing.db.connect()
    users = Table('deadlock_users', metadata,
                  Column('user_id', INT, primary_key=True),
                  Column('user_name', VARCHAR(20)),
                  test_needs_acid=True)
    users.create(first)
    first.execute("select * from deadlock_users")
    first.close()

    # without auto-rollback in the connection pool's return() logic, this
    # deadlocks in Postgres, because the first connection is returned to
    # the pool but still has a lock on "deadlock_users".
    # comment out the rollback in pool/ConnectionFairy._close() to see!
    users.drop(second)
    second.close()
def test_unknown_types(self):
    """Reflecting a type absent from ``dialect.ischema_names`` warns,
    and the column falls back to ``NullType``."""
    meta = MetaData(testing.db)
    t = Table("test", meta, Column('foo', sa.DateTime))

    # temporarily blank out the dialect's type lookup table
    ischema_names = testing.db.dialect.ischema_names
    t.create()
    testing.db.dialect.ischema_names = {}
    try:
        m2 = MetaData(testing.db)
        self.assertRaises(tsa.exc.SAWarning,
                          Table, "test", m2, autoload=True)

        @testing.emits_warning('Did not recognize type')
        def warns():
            m3 = MetaData(testing.db)
            t3 = Table("test", m3, autoload=True)
            assert t3.c.foo.type.__class__ == sa.types.NullType

        # BUG FIX: the decorated function was defined but never invoked,
        # so the NullType-fallback assertion never actually ran.
        warns()
    finally:
        # restore the dialect's type map and drop the table regardless
        testing.db.dialect.ischema_names = ischema_names
        t.drop()
def test_create_drop_constructor_bound(self):
    """Create/drop via a MetaData given its bind at construction time,
    positionally or via the ``bind`` keyword."""
    for bind in (testing.db, testing.db.connect()):
        try:
            for positional, keywords in (([bind], {}),
                                         ([], {'bind': bind})):
                metadata = MetaData(*positional, **keywords)
                table = Table('test_table', metadata,
                              Column('foo', Integer))
                assert metadata.bind is table.bind is bind
                metadata.create_all()
                assert table.exists()
                metadata.drop_all()
                table.create()
                table.drop()
                assert not table.exists()
        finally:
            # explicit Connections must be closed; Engines are shared
            if isinstance(bind, engine.Connection):
                bind.close()
def test_basic_override(self):
    """Columns supplied to an autoloading Table override the reflected
    definitions; unlisted reflected columns are still picked up."""
    meta = MetaData(testing.db)
    table = Table(
        'override_test', meta,
        Column('col1', sa.Integer, primary_key=True),
        Column('col2', sa.String(20)),
        Column('col3', sa.Numeric))
    table.create()

    meta2 = MetaData(testing.db)
    try:
        table = Table(
            'override_test', meta2,
            Column('col2', sa.Unicode()),
            Column('col4', sa.String(30)),
            autoload=True)
        # col1 reflected, col2 overridden, col4 added locally
        for name, expected in (('col1', sa.Integer),
                               ('col2', sa.Unicode),
                               ('col4', sa.String)):
            self.assert_(isinstance(table.c[name].type, expected))
    finally:
        table.drop()
def test_create_drop_constructor_bound(self):
    """A MetaData constructed with a bind (positional or keyword)
    propagates that bind to its tables and supports create/drop."""
    def check(metadata, bind):
        table = Table('test_table', metadata, Column('foo', Integer))
        assert metadata.bind is table.bind is bind
        metadata.create_all()
        assert table.exists()
        metadata.drop_all()
        table.create()
        table.drop()
        assert not table.exists()

    for bind in (testing.db, testing.db.connect()):
        try:
            check(MetaData(bind), bind)
            check(MetaData(bind=bind), bind)
        finally:
            if isinstance(bind, engine.Connection):
                bind.close()
def setUpAll(self):
    """Build the shared ``default_test1`` table, exercising every flavor
    of column default: Python callables, literals, SQL expressions,
    server-side defaults and context-sensitive defaults.  The expected
    values f/f2/ts and the server-default expressions vary per dialect."""
    global t, f, f2, ts, currenttime, metadata, default_generator
    db = testing.db
    metadata = MetaData(db)
    # mutable counter consumed by mydefault(); starts at 50 so the first
    # generated value is 51
    default_generator = {'x':50}

    def mydefault():
        # plain Python-callable column default: increments on each call
        default_generator['x'] += 1
        return default_generator['x']

    def myupdate_with_ctx(ctx):
        # context-sensitive onupdate: uses the statement's own connection
        conn = ctx.connection
        return conn.execute(sa.select([sa.text('13')])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        try:
            return conn.execute(sa.select([sa.text('12')])).scalar()
        finally:
            # ensure a "close()" on this connection does nothing,
            # since its a "branched" connection
            conn.close()

    use_function_defaults = testing.against('postgres', 'mssql', 'maxdb')
    is_oracle = testing.against('oracle')

    # select "count(1)" returns different results on different DBs also
    # correct for "current_date" compatible as column default, value
    # differences
    currenttime = func.current_date(type_=sa.Date, bind=db)

    if is_oracle:
        # Oracle: truncate sysdate to day granularity for a stable date
        ts = db.scalar(sa.select([func.trunc(func.sysdate(), sa.literal_column("'DAY'"), type_=sa.Date).label('today')]))
        assert isinstance(ts, datetime.date) and not isinstance(ts, datetime.datetime)
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        # TODO: engine propigation across nested functions not working
        currenttime = func.trunc(currenttime, sa.literal_column("'DAY'"), bind=db, type_=sa.Date)
        def1 = currenttime
        def2 = func.trunc(sa.text("sysdate"), sa.literal_column("'DAY'"), type_=sa.Date)
        deftype = sa.Date
    elif use_function_defaults:
        # dialects where current_date works as a function-based default
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        def1 = currenttime
        deftype = sa.Date
        if testing.against('maxdb'):
            def2 = sa.text("curdate")
        elif testing.against('mssql'):
            def2 = sa.text("getdate()")
        else:
            def2 = sa.text("current_date")
        ts = db.func.current_date().scalar()
    else:
        # fallback: plain integer literals for everything
        f = len('abcdef')
        f2 = len('abcdefghijk')
        def1 = def2 = "3"
        ts = 3
        deftype = Integer

    t = Table('default_test1', metadata,
        # python function
        Column('col1', Integer, primary_key=True, default=mydefault),
        # python literal
        Column('col2', String(20), default="imthedefault", onupdate="im the update"),
        # preexecute expression
        Column('col3', Integer, default=func.length('abcdef'), onupdate=func.length('abcdefghijk')),
        # SQL-side default from sql expression
        Column('col4', deftype, server_default=def1),
        # SQL-side default from literal expression
        Column('col5', deftype, server_default=def2),
        # preexecute + update timestamp
        Column('col6', sa.Date, default=currenttime, onupdate=currenttime),
        Column('boolcol1', sa.Boolean, default=True),
        Column('boolcol2', sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column('col7', Integer, default=mydefault_using_connection, onupdate=myupdate_with_ctx),
        # python builtin
        Column('col8', sa.Date, default=datetime.date.today, onupdate=datetime.date.today),
        # combo
        Column('col9', String(20), default='py', server_default='ddl'))
    t.create()
def setUpAll(self):
    """One-time setup of the ``default_test1`` table covering Python-side,
    SQL-side and context-sensitive column defaults.  Expected comparison
    values (f, f2, ts) and server-default expressions are chosen per
    dialect below."""
    global t, f, f2, ts, currenttime, metadata, default_generator
    db = testing.db
    metadata = MetaData(db)
    # shared mutable counter for mydefault(); first generated value is 51
    default_generator = {'x': 50}

    def mydefault():
        # Python-callable default: returns an incrementing integer
        default_generator['x'] += 1
        return default_generator['x']

    def myupdate_with_ctx(ctx):
        # onupdate callable receiving the ExecutionContext
        conn = ctx.connection
        return conn.execute(sa.select([sa.text('13')])).scalar()

    def mydefault_using_connection(ctx):
        conn = ctx.connection
        try:
            return conn.execute(sa.select([sa.text('12')])).scalar()
        finally:
            # ensure a "close()" on this connection does nothing,
            # since its a "branched" connection
            conn.close()

    use_function_defaults = testing.against('postgres', 'mssql', 'maxdb')
    is_oracle = testing.against('oracle')

    # select "count(1)" returns different results on different DBs also
    # correct for "current_date" compatible as column default, value
    # differences
    currenttime = func.current_date(type_=sa.Date, bind=db)

    if is_oracle:
        # Oracle branch: trunc(sysdate, 'DAY') gives a date-only value
        ts = db.scalar(
            sa.select([
                func.trunc(func.sysdate(), sa.literal_column("'DAY'"),
                           type_=sa.Date).label('today')
            ]))
        assert isinstance(
            ts, datetime.date) and not isinstance(ts, datetime.datetime)
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        # TODO: engine propigation across nested functions not working
        currenttime = func.trunc(currenttime,
                                 sa.literal_column("'DAY'"),
                                 bind=db, type_=sa.Date)
        def1 = currenttime
        def2 = func.trunc(sa.text("sysdate"),
                          sa.literal_column("'DAY'"), type_=sa.Date)
        deftype = sa.Date
    elif use_function_defaults:
        # dialects where current_date is usable as a function default
        f = sa.select([func.length('abcdef')], bind=db).scalar()
        f2 = sa.select([func.length('abcdefghijk')], bind=db).scalar()
        def1 = currenttime
        deftype = sa.Date
        if testing.against('maxdb'):
            def2 = sa.text("curdate")
        elif testing.against('mssql'):
            def2 = sa.text("getdate()")
        else:
            def2 = sa.text("current_date")
        ts = db.func.current_date().scalar()
    else:
        # generic fallback: integer literals stand in for date defaults
        f = len('abcdef')
        f2 = len('abcdefghijk')
        def1 = def2 = "3"
        ts = 3
        deftype = Integer

    t = Table(
        'default_test1', metadata,
        # python function
        Column('col1', Integer, primary_key=True, default=mydefault),
        # python literal
        Column('col2', String(20), default="imthedefault",
               onupdate="im the update"),
        # preexecute expression
        Column('col3', Integer, default=func.length('abcdef'),
               onupdate=func.length('abcdefghijk')),
        # SQL-side default from sql expression
        Column('col4', deftype, server_default=def1),
        # SQL-side default from literal expression
        Column('col5', deftype, server_default=def2),
        # preexecute + update timestamp
        Column('col6', sa.Date, default=currenttime,
               onupdate=currenttime),
        Column('boolcol1', sa.Boolean, default=True),
        Column('boolcol2', sa.Boolean, default=False),
        # python function which uses ExecutionContext
        Column('col7', Integer, default=mydefault_using_connection,
               onupdate=myupdate_with_ctx),
        # python builtin
        Column('col8', sa.Date, default=datetime.date.today,
               onupdate=datetime.date.today),
        # combo
        Column('col9', String(20), default='py', server_default='ddl'))
    t.create()
class DDLEventTest(TestBase):
    """Tests for before/after create/drop DDL event listeners on
    Table and MetaData objects."""

    class Canary(object):
        """Records the last DDL event fired and validates its arguments."""

        def __init__(self, schema_item, bind):
            self.state = None
            self.schema_item = schema_item
            self.bind = bind

        def before_create(self, action, schema_item, bind):
            assert self.state is None
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def after_create(self, action, schema_item, bind):
            assert self.state in ('before-create', 'skipped')
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def before_drop(self, action, schema_item, bind):
            assert self.state is None
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def after_drop(self, action, schema_item, bind):
            assert self.state in ('before-drop', 'skipped')
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

    def setUp(self):
        self.bind = engines.mock_engine()
        self.metadata = MetaData()
        self.table = Table('t', self.metadata, Column('id', Integer))

    def test_table_create_before(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.create(bind)
        assert canary.state == 'before-create'
        table.drop(bind)
        assert canary.state == 'before-create'

    def test_table_create_after(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['after-create'].append(canary.after_create)
        canary.state = 'skipped'
        table.create(bind)
        assert canary.state == 'after-create'
        table.drop(bind)
        assert canary.state == 'after-create'

    def test_table_create_both(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.ddl_listeners['after-create'].append(canary.after_create)
        table.create(bind)
        assert canary.state == 'after-create'
        table.drop(bind)
        assert canary.state == 'after-create'

    def test_table_drop_before(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.create(bind)
        assert canary.state is None
        table.drop(bind)
        assert canary.state == 'before-drop'

    def test_table_drop_after(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        table.create(bind)
        assert canary.state is None
        canary.state = 'skipped'
        table.drop(bind)
        assert canary.state == 'after-drop'

    def test_table_drop_both(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        table.create(bind)
        assert canary.state is None
        table.drop(bind)
        assert canary.state == 'after-drop'

    def test_table_all(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.ddl_listeners['after-create'].append(canary.after_create)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        assert canary.state is None
        table.create(bind)
        assert canary.state == 'after-create'
        canary.state = None
        table.drop(bind)
        assert canary.state == 'after-drop'

    # BUG FIX: this method was named test_table_create_before, duplicating
    # and shadowing the Table-level test above so that test never ran.
    # Renamed to match its test_metadata_create_after/_both siblings.
    def test_metadata_create_before(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(canary.before_create)
        metadata.create_all(bind)
        assert canary.state == 'before-create'
        metadata.drop_all(bind)
        assert canary.state == 'before-create'

    def test_metadata_create_after(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['after-create'].append(canary.after_create)
        canary.state = 'skipped'
        metadata.create_all(bind)
        assert canary.state == 'after-create'
        metadata.drop_all(bind)
        assert canary.state == 'after-create'

    def test_metadata_create_both(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(canary.before_create)
        metadata.ddl_listeners['after-create'].append(canary.after_create)
        metadata.create_all(bind)
        assert canary.state == 'after-create'
        metadata.drop_all(bind)
        assert canary.state == 'after-create'

    @testing.future
    def test_metadata_table_isolation(self):
        metadata, table, bind = self.metadata, self.table, self.bind
        table_canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(table_canary.before_create)
        metadata_canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(
            metadata_canary.before_create)
        # currently, table.create() routes through the same execution
        # path that metadata.create_all() does
        self.table.create(self.bind)
        assert metadata_canary.state is None

    def test_append_listener(self):
        metadata, table, bind = self.metadata, self.table, self.bind
        fn = lambda *a: None
        table.append_ddl_listener('before-create', fn)
        self.assertRaises(LookupError, table.append_ddl_listener, 'blah', fn)
        metadata.append_ddl_listener('before-create', fn)
        self.assertRaises(LookupError,
                          metadata.append_ddl_listener, 'blah', fn)
class DDLEventTest(TestBase):
    """Verifies that DDL event listeners registered on Table and MetaData
    fire in the correct order with the correct arguments."""

    class Canary(object):
        """Listener target: records the last event and checks its args."""

        def __init__(self, schema_item, bind):
            self.state = None
            self.schema_item = schema_item
            self.bind = bind

        def before_create(self, action, schema_item, bind):
            assert self.state is None
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def after_create(self, action, schema_item, bind):
            assert self.state in ('before-create', 'skipped')
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def before_drop(self, action, schema_item, bind):
            assert self.state is None
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

        def after_drop(self, action, schema_item, bind):
            assert self.state in ('before-drop', 'skipped')
            assert schema_item is self.schema_item
            assert bind is self.bind
            self.state = action

    def setUp(self):
        self.bind = engines.mock_engine()
        self.metadata = MetaData()
        self.table = Table('t', self.metadata, Column('id', Integer))

    def test_table_create_before(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.create(bind)
        assert canary.state == 'before-create'
        table.drop(bind)
        assert canary.state == 'before-create'

    def test_table_create_after(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['after-create'].append(canary.after_create)
        canary.state = 'skipped'
        table.create(bind)
        assert canary.state == 'after-create'
        table.drop(bind)
        assert canary.state == 'after-create'

    def test_table_create_both(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.ddl_listeners['after-create'].append(canary.after_create)
        table.create(bind)
        assert canary.state == 'after-create'
        table.drop(bind)
        assert canary.state == 'after-create'

    def test_table_drop_before(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.create(bind)
        assert canary.state is None
        table.drop(bind)
        assert canary.state == 'before-drop'

    def test_table_drop_after(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        table.create(bind)
        assert canary.state is None
        canary.state = 'skipped'
        table.drop(bind)
        assert canary.state == 'after-drop'

    def test_table_drop_both(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        table.create(bind)
        assert canary.state is None
        table.drop(bind)
        assert canary.state == 'after-drop'

    def test_table_all(self):
        table, bind = self.table, self.bind
        canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(canary.before_create)
        table.ddl_listeners['after-create'].append(canary.after_create)
        table.ddl_listeners['before-drop'].append(canary.before_drop)
        table.ddl_listeners['after-drop'].append(canary.after_drop)
        assert canary.state is None
        table.create(bind)
        assert canary.state == 'after-create'
        canary.state = None
        table.drop(bind)
        assert canary.state == 'after-drop'

    # BUG FIX: was a second def of test_table_create_before, which
    # shadowed the Table-level test of the same name so it never ran;
    # renamed to the metadata-prefixed name its body clearly intends.
    def test_metadata_create_before(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(canary.before_create)
        metadata.create_all(bind)
        assert canary.state == 'before-create'
        metadata.drop_all(bind)
        assert canary.state == 'before-create'

    def test_metadata_create_after(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['after-create'].append(canary.after_create)
        canary.state = 'skipped'
        metadata.create_all(bind)
        assert canary.state == 'after-create'
        metadata.drop_all(bind)
        assert canary.state == 'after-create'

    def test_metadata_create_both(self):
        metadata, bind = self.metadata, self.bind
        canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(canary.before_create)
        metadata.ddl_listeners['after-create'].append(canary.after_create)
        metadata.create_all(bind)
        assert canary.state == 'after-create'
        metadata.drop_all(bind)
        assert canary.state == 'after-create'

    @testing.future
    def test_metadata_table_isolation(self):
        metadata, table, bind = self.metadata, self.table, self.bind
        table_canary = self.Canary(table, bind)
        table.ddl_listeners['before-create'].append(table_canary.before_create)
        metadata_canary = self.Canary(metadata, bind)
        metadata.ddl_listeners['before-create'].append(
            metadata_canary.before_create)
        # currently, table.create() routes through the same execution
        # path that metadata.create_all() does
        self.table.create(self.bind)
        assert metadata_canary.state is None

    def test_append_listener(self):
        metadata, table, bind = self.metadata, self.table, self.bind
        fn = lambda *a: None
        table.append_ddl_listener('before-create', fn)
        self.assertRaises(LookupError, table.append_ddl_listener, 'blah', fn)
        metadata.append_ddl_listener('before-create', fn)
        self.assertRaises(LookupError,
                          metadata.append_ddl_listener, 'blah', fn)