def test_deferrable_initially_kw_not_ignored(self):
    """DEFERRABLE / INITIALLY on a ForeignKey must appear in the DDL."""
    meta = MetaData()
    Table("t1", meta, Column("id", Integer, primary_key=True))
    child = Table(
        "t2",
        meta,
        Column(
            "id",
            Integer,
            ForeignKey("t1.id", deferrable=True, initially="DEFERRED"),
            primary_key=True,
        ),
    )
    self.assert_compile(
        schema.CreateTable(child),
        "CREATE TABLE t2 (id INTEGER NOT NULL, "
        "PRIMARY KEY (id), FOREIGN KEY(id) REFERENCES t1 (id) "
        "DEFERRABLE INITIALLY DEFERRED)",
    )
def test_money(self):
    """Exercise type specification for money types."""
    specs = [
        # column type, args, kwargs, expected ddl
        (mssql.MSMoney, [], {}, 'MONEY'),
        (mssql.MSSmallMoney, [], {}, 'SMALLMONEY'),
    ]
    metadata = MetaData()
    table_args = ['test_mssql_money', metadata]
    for idx, (type_, args, kw, _expected) in enumerate(specs):
        table_args.append(
            Column('c%s' % idx, type_(*args, **kw), nullable=None))
    money_table = Table(*table_args)
    dialect = mssql.dialect()
    ddl = dialect.ddl_compiler(dialect, schema.CreateTable(money_table))
    for col in money_table.c:
        # the column name encodes its index into ``specs``
        pos = int(col.name[1:])
        testing.eq_(ddl.get_column_specification(col),
                    '%s %s' % (col.name, specs[pos][3]))
        self.assert_(repr(col))
def test_create_table_with_partition_and_other_opts(self):
    """mysql_* table options render after the column list, in order."""
    tbl = Table(
        'testtable', MetaData(),
        Column('id', Integer(), primary_key=True, autoincrement=True),
        Column('other_id', Integer(), primary_key=True,
               autoincrement=False),
        mysql_stats_sample_pages='2',
        mysql_partitions='2',
        mysql_partition_by='HASH(other_id)',
    )
    self.assert_compile(
        schema.CreateTable(tbl),
        'CREATE TABLE testtable ('
        'id INTEGER NOT NULL AUTO_INCREMENT, '
        'other_id INTEGER NOT NULL, '
        'PRIMARY KEY (id, other_id)'
        ')STATS_SAMPLE_PAGES=2 PARTITION BY HASH(other_id) PARTITIONS 2',
    )
def test_create_table_with_partition(self):
    """PARTITION BY / PARTITIONS render in the correct order after ')'."""
    tbl = Table(
        "testtable",
        MetaData(),
        Column("id", Integer(), primary_key=True, autoincrement=True),
        Column("other_id", Integer(), primary_key=True,
               autoincrement=False),
        mysql_partitions="2",
        mysql_partition_by="KEY(other_id)",
    )
    self.assert_compile(
        schema.CreateTable(tbl),
        "CREATE TABLE testtable ("
        "id INTEGER NOT NULL AUTO_INCREMENT, "
        "other_id INTEGER NOT NULL, "
        "PRIMARY KEY (id, other_id)"
        ")PARTITION BY KEY(other_id) PARTITIONS 2",
    )
def test_fk_match_clause(self):
    """MATCH SIMPLE renders both inline and via ALTER TABLE ADD."""
    tbl = Table(
        "tbl",
        MetaData(),
        Column("a", Integer),
        Column("b", Integer, ForeignKey("tbl.a", match="SIMPLE")),
    )
    self.assert_compile(
        schema.CreateTable(tbl),
        "CREATE TABLE tbl (a INTEGER, b INTEGER, "
        "FOREIGN KEY(b) REFERENCES tbl "
        "(a) MATCH SIMPLE)",
    )
    fk_constraint = list(tbl.foreign_keys)[0].constraint
    self.assert_compile(
        schema.AddConstraint(fk_constraint),
        "ALTER TABLE tbl ADD FOREIGN KEY(b) "
        "REFERENCES tbl (a) MATCH SIMPLE",
    )
def test_match_kw_raises(self):
    """Compiling a ForeignKey with a MATCH clause fails on MySQL."""
    meta = MetaData()
    Table("t1", meta, Column("id", Integer, primary_key=True))
    child = Table(
        "t2",
        meta,
        Column(
            "id",
            Integer,
            ForeignKey("t1.id", match="XYZ"),
            primary_key=True,
        ),
    )
    assert_raises_message(
        exc.CompileError,
        "MySQL ignores the 'MATCH' keyword while at the same time causes "
        "ON UPDATE/ON DELETE clauses to be ignored.",
        schema.CreateTable(child).compile,
        dialect=mysql.dialect(),
    )
def visit_table(self, table, create_ok=False):
    """Emit CREATE TABLE (plus its indexes) for *table*, firing the
    before/after_create dispatch events around the DDL."""
    # Skip the table unless creation is forced or it can be created.
    if not (create_ok or self._can_create_table(table)):
        return
    table.dispatch.before_create(
        table, self.connection, checkfirst=self.checkfirst)
    # Column defaults may carry their own DDL (e.g. sequences); visit
    # them before the table itself.
    for column in table.columns:
        if column.default is not None:
            self.traverse_single(column.default)
    self.connection.execute(schema.CreateTable(table))
    # NOTE(review): some table-like objects apparently may lack an
    # ``indexes`` collection — hence the hasattr guard.
    if hasattr(table, 'indexes'):
        for index in table.indexes:
            self.traverse_single(index)
    table.dispatch.after_create(
        table, self.connection, checkfirst=self.checkfirst)
def test_multiple(self):
    """Two FKs to distinct columns of one parent compile independently."""
    meta = MetaData()
    Table(
        "foo", meta,
        Column('id', Integer, primary_key=True),
        Column('bar', Integer, primary_key=True),
    )
    child = Table(
        "some_table", meta,
        Column('id', Integer, primary_key=True),
        Column('foo_id', Integer, ForeignKey('foo.id')),
        Column('foo_bar', Integer, ForeignKey('foo.bar')),
    )
    self.assert_compile(
        schema.CreateTable(child),
        "CREATE TABLE some_table ("
        "id INTEGER NOT NULL, "
        "foo_id INTEGER, "
        "foo_bar INTEGER, "
        "PRIMARY KEY (id), "
        "FOREIGN KEY(foo_id) REFERENCES foo (id), "
        "FOREIGN KEY(foo_bar) REFERENCES foo (bar))",
    )
def test_create_table_with_subpartition(self):
    """SUBPARTITION options render after the PARTITION clause."""
    tbl = Table(
        'testtable', MetaData(),
        Column('id', Integer(), primary_key=True, autoincrement=True),
        Column('other_id', Integer(), primary_key=True,
               autoincrement=False),
        mysql_partitions='2',
        mysql_partition_by='KEY(other_id)',
        mysql_subpartition_by="HASH(some_expr)",
        mysql_subpartitions='2',
    )
    self.assert_compile(
        schema.CreateTable(tbl),
        'CREATE TABLE testtable ('
        'id INTEGER NOT NULL AUTO_INCREMENT, '
        'other_id INTEGER NOT NULL, '
        'PRIMARY KEY (id, other_id)'
        ')PARTITION BY KEY(other_id) PARTITIONS 2 '
        'SUBPARTITION BY HASH(some_expr) SUBPARTITIONS 2',
    )
def create_table(self, table: "Table") -> None:
    """Emit CREATE TABLE for *table*, then its indexes and comments.

    Fires the ``before_create`` / ``after_create`` dispatch events
    around the CREATE TABLE statement so DDL event listeners run as
    they would under SQLAlchemy's own DDL runner.
    """
    table.dispatch.before_create(
        table, self.connection, checkfirst=False, _ddl_runner=self
    )
    self._exec(schema.CreateTable(table))
    table.dispatch.after_create(
        table, self.connection, checkfirst=False, _ddl_runner=self
    )
    for index in table.indexes:
        self._exec(schema.CreateIndex(index))
    # Emit COMMENT ON statements only when the dialect supports comments
    # and does not already render them inline in CREATE TABLE.
    with_comment = (
        self.dialect.supports_comments and not self.dialect.inline_comments
    )
    comment = table.comment
    if comment and with_comment:
        self.create_table_comment(table)
    for column in table.columns:
        comment = column.comment
        if comment and with_comment:
            self.create_column_comment(column)
def test_render_ck_constraint_inline(self):
    """A named CHECK constraint renders inline inside CREATE TABLE."""
    t, t2 = self._constraint_create_fixture()
    CheckConstraint(
        "a < b",
        name="my_test_constraint",
        deferrable=True,
        initially="DEFERRED",
        table=t,
    )
    # until an AddConstraint is constructed against it, the CONSTRAINT
    # is emitted inline within the CREATE TABLE statement
    create = schema.CreateTable(t)
    self.assert_compile(
        create,
        "CREATE TABLE tbl ("
        "a INTEGER, "
        "b INTEGER, "
        "CONSTRAINT my_test_constraint CHECK (a < b) "
        "DEFERRABLE INITIALLY DEFERRED"
        ")",
    )
def test_serial_integer(self):
    """SERIAL / BIGSERIAL / SMALLSERIAL selection follows the integer
    type, dialect-level variants, and the reported server version."""

    class BITD(TypeDecorator):
        # TypeDecorator whose postgresql-side impl is BigInteger, so it
        # should render BIGSERIAL on that dialect despite the generic
        # impl being Integer.
        impl = Integer

        def load_dialect_impl(self, dialect):
            if dialect.name == 'postgresql':
                return BigInteger()
            else:
                return Integer()

    for version, type_, expected in [
        # (server version or None for default dialect, type, expected DDL)
        (None, Integer, 'SERIAL'),
        (None, BigInteger, 'BIGSERIAL'),
        # per this table: 9.1 renders SMALLINT, 9.2 renders SMALLSERIAL
        ((9, 1), SmallInteger, 'SMALLINT'),
        ((9, 2), SmallInteger, 'SMALLSERIAL'),
        (None, postgresql.INTEGER, 'SERIAL'),
        (None, postgresql.BIGINT, 'BIGSERIAL'),
        (None, Integer().with_variant(BigInteger(), 'postgresql'),
         'BIGSERIAL'),
        (None, Integer().with_variant(postgresql.BIGINT, 'postgresql'),
         'BIGSERIAL'),
        ((9, 2), Integer().with_variant(SmallInteger, 'postgresql'),
         'SMALLSERIAL'),
        (None, BITD(), 'BIGSERIAL')
    ]:
        m = MetaData()
        t = Table('t', m, Column('c', type_, primary_key=True))
        if version:
            # fabricate a dialect that reports the given server version
            dialect = postgresql.dialect()
            dialect._get_server_version_info = Mock(return_value=version)
            dialect.initialize(testing.db.connect())
        else:
            dialect = testing.db.dialect
        ddl_compiler = dialect.ddl_compiler(dialect, schema.CreateTable(t))
        eq_(ddl_compiler.get_column_specification(t.c.c),
            "c %s NOT NULL" % expected)
def test_external_ck_constraint_cancels_internal(self):
    """Constructing an AddConstraint suppresses inline CHECK rendering."""
    t, t2 = self._constraint_create_fixture()
    constraint = CheckConstraint(
        "a < b",
        name="my_test_constraint",
        deferrable=True,
        initially="DEFERRED",
        table=t,
    )
    # building an AddConstraint flags the constraint as externally
    # created, so CREATE TABLE no longer emits it inline
    schema.AddConstraint(constraint)
    self.assert_compile(
        schema.CreateTable(t),
        "CREATE TABLE tbl ("
        "a INTEGER, "
        "b INTEGER"
        ")",
    )
def create_table(self, table):
    """Emit CREATE TABLE for *table*, then its indexes and any table or
    column comments, firing before/after_create dispatch events around
    the CREATE TABLE itself."""
    table.dispatch.before_create(table, self.connection,
                                 checkfirst=False, _ddl_runner=self)
    self._exec(schema.CreateTable(table))
    table.dispatch.after_create(table, self.connection,
                                checkfirst=False, _ddl_runner=self)
    for index in table.indexes:
        self._exec(schema.CreateIndex(index))
    # Emit COMMENT ON only when the dialect supports comments and does
    # not already render them inline in CREATE TABLE; sqla_compat shims
    # cover SQLAlchemy versions without these attributes.
    with_comment = (sqla_compat._dialect_supports_comments(self.dialect)
                    and not self.dialect.inline_comments)
    comment = sqla_compat._comment_attribute(table)
    if comment and with_comment:
        self.create_table_comment(table)
    for column in table.columns:
        comment = sqla_compat._comment_attribute(column)
        if comment and with_comment:
            self.create_column_comment(column)
def test_varchar_raise(self):
    """Length-less VARCHAR-family types must raise CompileError on MySQL,
    both standalone and inside a CREATE TABLE."""
    for type_ in (
        String,
        VARCHAR,
        String(),
        VARCHAR(),
        NVARCHAR(),
        Unicode,
        Unicode(),
    ):
        type_ = sqltypes.to_instance(type_)
        assert_raises_message(
            exc.CompileError,
            "VARCHAR requires a length on dialect mysql",
            type_.compile,
            dialect=mysql.dialect(),
        )
        tbl = Table('sometable', MetaData(),
                    Column('somecolumn', type_))
        assert_raises_message(
            exc.CompileError,
            r"\(in table 'sometable', column 'somecolumn'\)\: "
            r"(?:N)?VARCHAR requires a length on dialect mysql",
            schema.CreateTable(tbl).compile,
            dialect=mysql.dialect(),
        )
def test_numeric(self):
    "Exercise type specification and options for numeric types."
    specs = [
        # column type, args, kwargs, expected ddl
        (types.NUMERIC, [], {}, "NUMERIC"),
        (types.NUMERIC, [None], {}, "NUMERIC"),
        (types.NUMERIC, [12, 4], {}, "NUMERIC(12, 4)"),
        (types.Float, [], {}, "FLOAT"),
        (types.Float, [None], {}, "FLOAT"),
        (types.Float, [12], {}, "FLOAT(12)"),
        (mssql.MSReal, [], {}, "REAL"),
        (types.Integer, [], {}, "INTEGER"),
        (types.BigInteger, [], {}, "BIGINT"),
        (mssql.MSTinyInteger, [], {}, "TINYINT"),
        (types.SmallInteger, [], {}, "SMALLINT"),
    ]
    metadata = MetaData()
    table_args = ["test_mssql_numeric", metadata]
    for idx, (type_, args, kw, _expected) in enumerate(specs):
        table_args.append(
            Column("c%s" % idx, type_(*args, **kw), nullable=None)
        )
    numeric_table = Table(*table_args)
    dialect = mssql.dialect()
    ddl = dialect.ddl_compiler(dialect, schema.CreateTable(numeric_table))
    for col in numeric_table.c:
        # the column name encodes its index into ``specs``
        pos = int(col.name[1:])
        testing.eq_(
            ddl.get_column_specification(col),
            "%s %s" % (col.name, specs[pos][3]),
        )
        self.assert_(repr(col))
def test_boolean(self):
    "Exercise type specification for boolean type."
    specs = [
        # column type, args, kwargs, expected ddl
        (Boolean, [], {}, 'BIT'),
    ]
    metadata = MetaData()
    table_args = ['test_mssql_boolean', metadata]
    for idx, (type_, args, kw, _expected) in enumerate(specs):
        table_args.append(
            Column('c%s' % idx, type_(*args, **kw), nullable=None))
    boolean_table = Table(*table_args)
    dialect = mssql.dialect()
    ddl = dialect.ddl_compiler(dialect, schema.CreateTable(boolean_table))
    for col in boolean_table.c:
        # the column name encodes its index into ``specs``
        pos = int(col.name[1:])
        testing.eq_(ddl.get_column_specification(col),
                    "%s %s" % (col.name, specs[pos][3]))
        self.assert_(repr(col))
def test_serial_integer(self):
    """SERIAL column DDL varies with the integer type and server version."""
    for version, type_, expected in [
        # (server version or None for default dialect, type, expected DDL)
        (None, Integer, 'SERIAL'),
        (None, BigInteger, 'BIGSERIAL'),
        ((9, 1), SmallInteger, 'SMALLINT'),
        ((9, 2), SmallInteger, 'SMALLSERIAL'),
        (None, postgresql.INTEGER, 'SERIAL'),
        (None, postgresql.BIGINT, 'BIGSERIAL'),
    ]:
        meta = MetaData()
        tbl = Table('t', meta, Column('c', type_, primary_key=True))
        if version:
            # fabricate a dialect reporting the given server version
            dialect = postgresql.dialect()
            dialect._get_server_version_info = Mock(return_value=version)
            dialect.initialize(testing.db.connect())
        else:
            dialect = testing.db.dialect
        ddl_compiler = dialect.ddl_compiler(
            dialect, schema.CreateTable(tbl))
        eq_(ddl_compiler.get_column_specification(tbl.c.c),
            "c %s NOT NULL" % expected)
def test_dates(self):
    "Exercise type specification for date types."

    columns = [
        # column type, args, kwargs, expected ddl,
        # server version the expectation applies to (None -> 2008 dialect)
        (mssql.MSDateTime, [], {}, 'DATETIME', None),
        (types.DATE, [], {}, 'DATE', None),
        (types.Date, [], {}, 'DATE', None),
        # per this table, on the 2005-level dialect DATE/TIME degrade
        # to DATETIME
        (types.Date, [], {}, 'DATETIME', MS_2005_VERSION),
        (mssql.MSDate, [], {}, 'DATE', None),
        (mssql.MSDate, [], {}, 'DATETIME', MS_2005_VERSION),
        (types.TIME, [], {}, 'TIME', None),
        (types.Time, [], {}, 'TIME', None),
        (mssql.MSTime, [], {}, 'TIME', None),
        (mssql.MSTime, [1], {}, 'TIME(1)', None),
        (types.Time, [], {}, 'DATETIME', MS_2005_VERSION),
        (mssql.MSTime, [], {}, 'TIME', None),
        (mssql.MSSmallDateTime, [], {}, 'SMALLDATETIME', None),
        (mssql.MSDateTimeOffset, [], {}, 'DATETIMEOFFSET', None),
        (mssql.MSDateTimeOffset, [1], {}, 'DATETIMEOFFSET(1)', None),
        (mssql.MSDateTime2, [], {}, 'DATETIME2', None),
        (mssql.MSDateTime2, [0], {}, 'DATETIME2(0)', None),
        (mssql.MSDateTime2, [1], {}, 'DATETIME2(1)', None),
        (mssql.MSTime, [0], {}, 'TIME(0)', None),
        (mssql.MSDateTimeOffset, [0], {}, 'DATETIMEOFFSET(0)', None),
    ]

    metadata = MetaData()
    table_args = ['test_mssql_dates', metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res, server_version = spec
        table_args.append(
            Column('c%s' % index, type_(*args, **kw), nullable=None))
    date_table = Table(*table_args)
    # one compiler per server-version level
    dialect = mssql.dialect()
    dialect.server_version_info = MS_2008_VERSION
    ms_2005_dialect = mssql.dialect()
    ms_2005_dialect.server_version_info = MS_2005_VERSION
    gen = dialect.ddl_compiler(dialect, schema.CreateTable(date_table))
    gen2005 = ms_2005_dialect.ddl_compiler(
        ms_2005_dialect, schema.CreateTable(date_table))
    for col in date_table.c:
        # the column name encodes its index into ``columns``
        index = int(col.name[1:])
        server_version = columns[index][4]
        if not server_version:
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]))
        else:
            # specs tagged with a server version are compiled against
            # the 2005-level dialect instead
            testing.eq_(
                gen2005.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]))
        self.assert_(repr(col))
def test_dates(self):
    "Exercise type specification for date types."

    columns = [
        # column type, args, kwargs, expected ddl,
        # server-version requirement spec, [optional reflected type]
        (mssql.MSDateTime, [], {}, "DATETIME", []),
        (types.DATE, [], {}, "DATE", [">=", (10,)]),
        (types.Date, [], {}, "DATE", [">=", (10,)]),
        (types.Date, [], {}, "DATETIME", ["<", (10,)], mssql.MSDateTime),
        (mssql.MSDate, [], {}, "DATE", [">=", (10,)]),
        (mssql.MSDate, [], {}, "DATETIME", ["<", (10,)], mssql.MSDateTime),
        (types.TIME, [], {}, "TIME", [">=", (10,)]),
        (types.Time, [], {}, "TIME", [">=", (10,)]),
        (mssql.MSTime, [], {}, "TIME", [">=", (10,)]),
        (mssql.MSTime, [1], {}, "TIME(1)", [">=", (10,)]),
        (types.Time, [], {}, "DATETIME", ["<", (10,)], mssql.MSDateTime),
        (mssql.MSTime, [], {}, "TIME", [">=", (10,)]),
        (mssql.MSSmallDateTime, [], {}, "SMALLDATETIME", []),
        (mssql.MSDateTimeOffset, [], {}, "DATETIMEOFFSET", [">=", (10,)]),
        (
            mssql.MSDateTimeOffset,
            [1],
            {},
            "DATETIMEOFFSET(1)",
            [">=", (10,)],
        ),
        (mssql.MSDateTime2, [], {}, "DATETIME2", [">=", (10,)]),
        (mssql.MSDateTime2, [0], {}, "DATETIME2(0)", [">=", (10,)]),
        (mssql.MSDateTime2, [1], {}, "DATETIME2(1)", [">=", (10,)]),
    ]
    metadata = self.metadata
    table_args = ["test_mssql_dates", metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res, requires = spec[0:5]
        # NOTE(review): operator precedence makes this read as
        # (requires and excluded) or (not requires) — i.e. the column
        # is added when it has no requirement, or when the requirement
        # marks it excluded on mssql; presumably intentional, but worth
        # confirming.
        if (
            requires
            and testing._is_excluded("mssql", *requires)
            or not requires
        ):
            c = Column("c%s" % index, type_(*args, **kw), nullable=None)
            testing.db.dialect.type_descriptor(c.type)
            table_args.append(c)
    dates_table = Table(*table_args)
    gen = testing.db.dialect.ddl_compiler(
        testing.db.dialect, schema.CreateTable(dates_table)
    )
    for col in dates_table.c:
        # the column name encodes its index into ``columns``, so the
        # mapping survives skipped specs
        index = int(col.name[1:])
        testing.eq_(
            gen.get_column_specification(col),
            "%s %s" % (col.name, columns[index][3]),
        )
        self.assert_(repr(col))
    # round-trip: create the table and reflect it back, comparing the
    # reflected column types against the originals
    dates_table.create(checkfirst=True)
    reflected_dates = Table(
        "test_mssql_dates", MetaData(), autoload_with=testing.db
    )
    for col in reflected_dates.c:
        self.assert_types_base(col, dates_table.c[col.key])
def test_dates(self):
    "Exercise type specification for date types."

    columns = [
        # column type, args, kwargs, expected ddl,
        # server-version requirement spec, [optional reflected type]
        (mssql.MSDateTime, [], {}, 'DATETIME', []),
        (types.DATE, [], {}, 'DATE', ['>=', (10,)]),
        (types.Date, [], {}, 'DATE', ['>=', (10,)]),
        (types.Date, [], {}, 'DATETIME', ['<', (10,)], mssql.MSDateTime),
        (mssql.MSDate, [], {}, 'DATE', ['>=', (10,)]),
        (mssql.MSDate, [], {}, 'DATETIME', ['<', (10,)],
         mssql.MSDateTime),
        (types.TIME, [], {}, 'TIME', ['>=', (10,)]),
        (types.Time, [], {}, 'TIME', ['>=', (10,)]),
        (mssql.MSTime, [], {}, 'TIME', ['>=', (10,)]),
        (mssql.MSTime, [1], {}, 'TIME(1)', ['>=', (10,)]),
        (types.Time, [], {}, 'DATETIME', ['<', (10,)], mssql.MSDateTime),
        (mssql.MSTime, [], {}, 'TIME', ['>=', (10,)]),
        (mssql.MSSmallDateTime, [], {}, 'SMALLDATETIME', []),
        (mssql.MSDateTimeOffset, [], {}, 'DATETIMEOFFSET', ['>=', (10,)]),
        (mssql.MSDateTimeOffset, [1], {}, 'DATETIMEOFFSET(1)',
         ['>=', (10,)]),
        (mssql.MSDateTime2, [], {}, 'DATETIME2', ['>=', (10,)]),
        (mssql.MSDateTime2, [0], {}, 'DATETIME2(0)', ['>=', (10,)]),
        (mssql.MSDateTime2, [1], {}, 'DATETIME2(1)', ['>=', (10,)]),
    ]

    # NOTE(review): ``metadata`` is not defined locally; presumably a
    # module- or class-level test fixture — confirm against the
    # enclosing file.
    table_args = ['test_mssql_dates', metadata]
    for index, spec in enumerate(columns):
        type_, args, kw, res, requires = spec[0:5]
        # NOTE(review): operator precedence makes this read as
        # (requires and excluded) or (not requires); verify intent.
        if requires and \
                testing._is_excluded('mssql', *requires) or not requires:
            c = Column('c%s' % index, type_(*args, **kw), nullable=None)
            testing.db.dialect.type_descriptor(c.type)
            table_args.append(c)
    dates_table = Table(*table_args)
    gen = testing.db.dialect.ddl_compiler(
        testing.db.dialect, schema.CreateTable(dates_table))
    for col in dates_table.c:
        # the column name encodes its index into ``columns``, so the
        # mapping survives skipped specs
        index = int(col.name[1:])
        testing.eq_(gen.get_column_specification(col),
                    '%s %s' % (col.name, columns[index][3]))
        self.assert_(repr(col))
    # round-trip: create the table and reflect it back (legacy
    # MetaData(bind) / autoload=True reflection API)
    dates_table.create(checkfirst=True)
    reflected_dates = Table('test_mssql_dates',
                            MetaData(testing.db), autoload=True)
    for col in reflected_dates.c:
        self.assert_types_base(col, dates_table.c[col.key])
def test_add_drop_constraint(self):
    """Round-trip CHECK / FOREIGN KEY / UNIQUE / PRIMARY KEY constraints
    through the AddConstraint and DropConstraint DDL constructs."""
    m = MetaData()
    t = Table('tbl', m,
              Column('a', Integer),
              Column('b', Integer))
    t2 = Table('t2', m,
               Column('a', Integer),
               Column('b', Integer))
    constraint = CheckConstraint('a < b', name="my_test_constraint",
                                 deferrable=True, initially='DEFERRED',
                                 table=t)

    # before we create an AddConstraint,
    # the CONSTRAINT comes out inline
    self.assert_compile(
        schema.CreateTable(t),
        "CREATE TABLE tbl ("
        "a INTEGER, "
        "b INTEGER, "
        "CONSTRAINT my_test_constraint CHECK (a < b) "
        "DEFERRABLE INITIALLY DEFERRED"
        ")")

    self.assert_compile(
        schema.AddConstraint(constraint),
        "ALTER TABLE tbl ADD CONSTRAINT my_test_constraint "
        "CHECK (a < b) DEFERRABLE INITIALLY DEFERRED")

    # once we make an AddConstraint,
    # inline compilation of the CONSTRAINT
    # is disabled
    self.assert_compile(
        schema.CreateTable(t),
        "CREATE TABLE tbl ("
        "a INTEGER, "
        "b INTEGER"
        ")")

    self.assert_compile(
        schema.DropConstraint(constraint),
        "ALTER TABLE tbl DROP CONSTRAINT my_test_constraint")

    self.assert_compile(
        schema.DropConstraint(constraint, cascade=True),
        "ALTER TABLE tbl DROP CONSTRAINT my_test_constraint CASCADE")

    # FK constraints by column-name strings and by Column objects
    constraint = ForeignKeyConstraint(["b"], ["t2.a"])
    t.append_constraint(constraint)
    self.assert_compile(
        schema.AddConstraint(constraint),
        "ALTER TABLE tbl ADD FOREIGN KEY(b) REFERENCES t2 (a)")

    constraint = ForeignKeyConstraint([t.c.a], [t2.c.b])
    t.append_constraint(constraint)
    self.assert_compile(
        schema.AddConstraint(constraint),
        "ALTER TABLE tbl ADD FOREIGN KEY(a) REFERENCES t2 (b)")

    # UNIQUE constraints by name strings and by Column objects
    constraint = UniqueConstraint("a", "b", name="uq_cst")
    t2.append_constraint(constraint)
    self.assert_compile(
        schema.AddConstraint(constraint),
        "ALTER TABLE t2 ADD CONSTRAINT uq_cst UNIQUE (a, b)")

    constraint = UniqueConstraint(t2.c.a, t2.c.b, name="uq_cs2")
    self.assert_compile(
        schema.AddConstraint(constraint),
        "ALTER TABLE t2 ADD CONSTRAINT uq_cs2 UNIQUE (a, b)")

    # constructing a PrimaryKeyConstraint mutates the column in place
    assert t.c.a.primary_key is False
    constraint = PrimaryKeyConstraint(t.c.a)
    assert t.c.a.primary_key is True
    self.assert_compile(
        schema.AddConstraint(constraint),
        "ALTER TABLE tbl ADD PRIMARY KEY (a)")
def test_ndb_createtable_override(self):
    """CREATE TABLE compiled on this engine must select NDBCLUSTER."""
    test_engine = self.test_engine
    ddl = str(
        schema.CreateTable(_TEST_TABLE).compile(
            dialect=test_engine.dialect))
    self.assertRegex(ddl, "ENGINE=NDBCLUSTER")
def test_char(self):
    """Exercise COLLATE-ish options on string types."""
    specs = [
        # column type, args, kwargs, expected ddl
        (mssql.MSChar, [], {}, "CHAR"),
        (mssql.MSChar, [1], {}, "CHAR(1)"),
        (mssql.MSChar, [1], {"collation": "Latin1_General_CI_AS"},
         "CHAR(1) COLLATE Latin1_General_CI_AS"),
        (mssql.MSNChar, [], {}, "NCHAR"),
        (mssql.MSNChar, [1], {}, "NCHAR(1)"),
        (mssql.MSNChar, [1], {"collation": "Latin1_General_CI_AS"},
         "NCHAR(1) COLLATE Latin1_General_CI_AS"),
        (mssql.MSString, [], {}, "VARCHAR(max)"),
        (mssql.MSString, [1], {}, "VARCHAR(1)"),
        (mssql.MSString, [1], {"collation": "Latin1_General_CI_AS"},
         "VARCHAR(1) COLLATE Latin1_General_CI_AS"),
        (mssql.MSNVarchar, [], {}, "NVARCHAR(max)"),
        (mssql.MSNVarchar, [1], {}, "NVARCHAR(1)"),
        (mssql.MSNVarchar, [1], {"collation": "Latin1_General_CI_AS"},
         "NVARCHAR(1) COLLATE Latin1_General_CI_AS"),
        (mssql.MSText, [], {}, "TEXT"),
        (mssql.MSText, [], {"collation": "Latin1_General_CI_AS"},
         "TEXT COLLATE Latin1_General_CI_AS"),
        (mssql.MSNText, [], {}, "NTEXT"),
        (mssql.MSNText, [], {"collation": "Latin1_General_CI_AS"},
         "NTEXT COLLATE Latin1_General_CI_AS"),
    ]
    metadata = MetaData()
    table_args = ["test_mssql_charset", metadata]
    for idx, (type_, args, kw, _expected) in enumerate(specs):
        table_args.append(
            Column("c%s" % idx, type_(*args, **kw), nullable=None))
    charset_table = Table(*table_args)
    dialect = mssql.dialect()
    ddl = dialect.ddl_compiler(dialect, schema.CreateTable(charset_table))
    for col in charset_table.c:
        # the column name encodes its index into ``specs``
        pos = int(col.name[1:])
        testing.eq_(
            ddl.get_column_specification(col),
            "%s %s" % (col.name, specs[pos][3]),
        )
        self.assert_(repr(col))
def test_create(self):
    """Executing CreateTable against the mock connection registers the
    table's columns in its namespace."""
    conn = self._conn_fixture()
    meta = MetaData()
    tbl = Table('test', meta,
                Column('x', Integer),
                Column('y', Integer))
    self._exec_stmt(conn, schema.CreateTable(tbl))
    eq_(list(conn._namespace['test'].keys()), ['x', 'y'])