def test_precision_float_roundtrip(self, metadata, connection):
    """Round-trip a DOUBLE through MySQL and verify Decimal precision.

    Covers both an explicit ``scale`` and a ``decimal_return_scale``
    column; each must come back as the exact 12-place Decimal.
    """
    value = 45.768392065789
    expected = decimal.Decimal("45.768392065789")

    roundtrip = Table(
        "t",
        metadata,
        Column(
            "scale_value",
            mysql.DOUBLE(precision=15, scale=12, asdecimal=True),
        ),
        Column(
            "unscale_value",
            mysql.DOUBLE(decimal_return_scale=12, asdecimal=True),
        ),
    )
    roundtrip.create(connection)

    connection.execute(
        roundtrip.insert(),
        {"scale_value": value, "unscale_value": value},
    )

    # Query each column separately, exactly as the original test did.
    eq_(connection.scalar(select(roundtrip.c.scale_value)), expected)
    eq_(connection.scalar(select(roundtrip.c.unscale_value)), expected)
def test_precision_float_roundtrip(self):
    """Round-trip a DOUBLE and verify Decimal precision (legacy 1.x form).

    NOTE(review): this appears to be an older snapshot of the same test
    defined elsewhere in this file using the ``metadata``/``connection``
    fixtures; if both live in one class the later definition shadows the
    earlier — confirm which copy is intended to remain.
    """
    expected = decimal.Decimal("45.768392065789")

    tbl = Table(
        "t",
        self.metadata,
        Column(
            "scale_value",
            mysql.DOUBLE(precision=15, scale=12, asdecimal=True),
        ),
        Column(
            "unscale_value",
            mysql.DOUBLE(decimal_return_scale=12, asdecimal=True),
        ),
    )

    with testing.db.connect() as conn:
        tbl.create(conn)
        # legacy executemany-style keyword parameters (pre-1.4 API)
        conn.execute(
            tbl.insert(),
            scale_value=45.768392065789,
            unscale_value=45.768392065789,
        )
        eq_(conn.scalar(select([tbl.c.scale_value])), expected)
        eq_(conn.scalar(select([tbl.c.unscale_value])), expected)
def test_precision_float_roundtrip(self):
    """Round-trip a DOUBLE and verify Decimal precision (engine-level form).

    NOTE(review): oldest variant of this test, executing directly on
    ``testing.db`` (implicit-execution API removed in SQLAlchemy 2.0);
    appears superseded by the fixture-based copy — confirm before keeping.
    """
    value = 45.768392065789
    expected = decimal.Decimal("45.768392065789")

    tbl = Table(
        't',
        self.metadata,
        Column('scale_value',
               mysql.DOUBLE(precision=15, scale=12, asdecimal=True)),
        Column('unscale_value',
               mysql.DOUBLE(decimal_return_scale=12, asdecimal=True)),
    )
    tbl.create(testing.db)

    testing.db.execute(tbl.insert(), scale_value=value, unscale_value=value)

    eq_(testing.db.scalar(select([tbl.c.scale_value])), expected)
    eq_(testing.db.scalar(select([tbl.c.unscale_value])), expected)
class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
    """Reflection tests for the MySQL / MariaDB dialects.

    Exercises reflection of column defaults, table options, include_columns
    casing, AUTO_INCREMENT detection, views, unique constraints, FULLTEXT
    indexes, and the workarounds for MySQL bugs #88718 / #96365.
    """

    __only_on__ = "mysql", "mariadb"
    __backend__ = True

    @testing.combinations(
        (
            mysql.VARCHAR(10, collation="utf8_unicode_ci"),
            DefaultClause(""),
            "''",
        ),
        (String(10), DefaultClause("abc"), "'abc'"),
        (String(10), DefaultClause("0"), "'0'"),
        (
            TIMESTAMP,
            DefaultClause("2009-04-05 12:00:00"),
            "'2009-04-05 12:00:00'",
        ),
        (TIMESTAMP, None, None),
        (
            TIMESTAMP,
            DefaultClause(
                sql.text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP")),
            # server versions differ on whether CURRENT_TIMESTAMP reflects
            # with trailing parens, hence a regex expectation
            re.compile(
                r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
                re.I,
            ),
        ),
        (mysql.DOUBLE(), DefaultClause("0.0000"), "0"),
        (mysql.DOUBLE(22, 6), DefaultClause("0.0000"), "0.000000"),
        (Integer, DefaultClause("1"), "1"),
        (Integer, DefaultClause("-1"), "-1"),
        (mysql.DOUBLE, DefaultClause("-25.03"), "-25.03"),
        (mysql.DOUBLE, DefaultClause("-.001"), "-0.001"),
        argnames="datatype, default, expected",
    )
    def test_default_reflection(self, datatype, default, expected, metadata, connection):
        """Create a table with a server default and check how it reflects.

        ``expected`` is either a literal string, ``None``, or a compiled
        regex (for version-dependent CURRENT_TIMESTAMP forms).
        """
        t1 = Table("t1", metadata, Column("x", datatype, default))
        t1.create(connection)
        insp = inspect(connection)

        datatype_inst = types.to_instance(datatype)

        col = insp.get_columns("t1")[0]
        if hasattr(expected, "match"):
            # regex expectation
            assert expected.match(col["default"])
        elif isinstance(datatype_inst, (Integer, Numeric)):
            # numeric defaults may reflect with or without surrounding quotes
            pattern = re.compile(r"\'?%s\'?" % expected)
            assert pattern.match(col["default"])
        else:
            eq_(col["default"], expected)

    def test_reflection_with_table_options(self, metadata, connection):
        """Round-trip MySQL/MariaDB table options (engine, charset, comment,
        etc.) through CREATE and reflection."""
        comment = r"""Comment types type speedily ' " \ '' Fun!"""

        if testing.against("mariadb"):
            kwargs = dict(
                mariadb_engine="MEMORY",
                mariadb_default_charset="utf8",
                mariadb_auto_increment="5",
                mariadb_avg_row_length="3",
                mariadb_password="******",
                mariadb_connection="fish",
            )
        else:
            kwargs = dict(
                mysql_engine="MEMORY",
                mysql_default_charset="utf8",
                mysql_auto_increment="5",
                mysql_avg_row_length="3",
                mysql_password="******",
                mysql_connection="fish",
            )

        def_table = Table("mysql_def", metadata, Column("c1", Integer()), comment=comment, **kwargs)

        conn = connection
        def_table.create(conn)
        reflected = Table("mysql_def", MetaData(), autoload_with=conn)

        if testing.against("mariadb"):
            assert def_table.kwargs["mariadb_engine"] == "MEMORY"
            assert def_table.comment == comment
            assert def_table.kwargs["mariadb_default_charset"] == "utf8"
            assert def_table.kwargs["mariadb_auto_increment"] == "5"
            assert def_table.kwargs["mariadb_avg_row_length"] == "3"
            # NOTE(review): the table was created with password="******"
            # above but asserted as "secret" here — these cannot both hold;
            # looks like one side was redacted/edited. Confirm intended value.
            assert def_table.kwargs["mariadb_password"] == "secret"
            assert def_table.kwargs["mariadb_connection"] == "fish"
            assert reflected.kwargs["mariadb_engine"] == "MEMORY"
            assert reflected.comment == comment
            assert reflected.kwargs["mariadb_comment"] == comment
            # reflected option key keeps the space from "DEFAULT CHARSET"
            assert reflected.kwargs["mariadb_default charset"] == "utf8"
            assert reflected.kwargs["mariadb_avg_row_length"] == "3"
            assert reflected.kwargs["mariadb_connection"] == "fish"
            # This field doesn't seem to be returned by mariadb itself.
            # assert reflected.kwargs['mariadb_password'] == 'secret'
            # This is explicitly ignored when reflecting schema.
            # assert reflected.kwargs['mariadb_auto_increment'] == '5'
        else:
            assert def_table.kwargs["mysql_engine"] == "MEMORY"
            assert def_table.comment == comment
            assert def_table.kwargs["mysql_default_charset"] == "utf8"
            assert def_table.kwargs["mysql_auto_increment"] == "5"
            assert def_table.kwargs["mysql_avg_row_length"] == "3"
            # NOTE(review): same "******" vs "secret" mismatch as the
            # mariadb branch — confirm intended value.
            assert def_table.kwargs["mysql_password"] == "secret"
            assert def_table.kwargs["mysql_connection"] == "fish"
            assert reflected.kwargs["mysql_engine"] == "MEMORY"
            assert reflected.comment == comment
            assert reflected.kwargs["mysql_comment"] == comment
            # reflected option key keeps the space from "DEFAULT CHARSET"
            assert reflected.kwargs["mysql_default charset"] == "utf8"
            assert reflected.kwargs["mysql_avg_row_length"] == "3"
            assert reflected.kwargs["mysql_connection"] == "fish"
            # This field doesn't seem to be returned by mysql itself.
            # assert reflected.kwargs['mysql_password'] == 'secret'
            # This is explicitly ignored when reflecting schema.
            # assert reflected.kwargs['mysql_auto_increment'] == '5'

    def test_reflection_on_include_columns(self, metadata, connection):
        """Test reflection of include_columns to be sure they respect case."""
        meta = metadata
        case_table = Table(
            "mysql_case",
            meta,
            Column("c1", String(10)),
            Column("C2", String(10)),
            Column("C3", String(10)),
        )

        case_table.create(connection)
        reflected = Table(
            "mysql_case",
            MetaData(),
            autoload_with=connection,
            include_columns=["c1", "C2"],
        )
        for t in case_table, reflected:
            assert "c1" in t.c.keys()
            assert "C2" in t.c.keys()
        # wrong-case names must not match any column
        reflected2 = Table(
            "mysql_case",
            MetaData(),
            autoload_with=connection,
            include_columns=["c1", "c2"],
        )
        assert "c1" in reflected2.c.keys()
        for c in ["c2", "C2", "C3"]:
            assert c not in reflected2.c.keys()

    def test_autoincrement(self, metadata, connection):
        """Reflect AUTO_INCREMENT detection across eight PK layouts.

        Columns named ``int_y*`` are expected to reflect as autoincrement,
        ``int_n*`` as not.
        """
        meta = metadata
        Table(
            "ai_1",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_2",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_3",
            meta,
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_4",
            meta,
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            Column(
                "int_n2",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_5",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_6",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_7",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_8",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            mysql_engine="MyISAM",
        )
        meta.create_all(connection)

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData()
        mr.reflect(connection, only=table_names)

        for tbl in [mr.tables[name] for name in table_names]:
            for c in tbl.c:
                if c.name.startswith("int_y"):
                    assert c.autoincrement
                elif c.name.startswith("int_n"):
                    assert not c.autoincrement
            # insert a defaults-only row; an autoincrement column should
            # produce exactly one value of 1
            connection.execute(tbl.insert())
            if "int_y" in tbl.c:
                assert connection.scalar(select(tbl.c.int_y)) == 1
                assert (list(connection.execute(
                    tbl.select()).first()).count(1) == 1)
            else:
                assert 1 not in list(connection.execute(tbl.select()).first())

    def test_view_reflection(self, metadata, connection):
        """Reflect column names/types from views created with various
        ALGORITHM / DEFINER clauses."""
        Table("x", metadata, Column("a", Integer), Column("b", String(50)))
        metadata.create_all(connection)

        conn = connection
        conn.exec_driver_sql("CREATE VIEW v1 AS SELECT * FROM x")
        conn.exec_driver_sql(
            "CREATE ALGORITHM=MERGE VIEW v2 AS SELECT * FROM x")
        conn.exec_driver_sql(
            "CREATE ALGORITHM=UNDEFINED VIEW v3 AS SELECT * FROM x")
        conn.exec_driver_sql(
            "CREATE DEFINER=CURRENT_USER VIEW v4 AS SELECT * FROM x")

        # views aren't tracked by MetaData, so drop them by hand at teardown
        @event.listens_for(metadata, "before_drop")
        def cleanup(*arg, **kw):
            with testing.db.begin() as conn:
                for v in ["v1", "v2", "v3", "v4"]:
                    conn.exec_driver_sql("DROP VIEW %s" % v)

        insp = inspect(connection)
        for v in ["v1", "v2", "v3", "v4"]:
            eq_(
                [(col["name"], col["type"].__class__)
                 for col in insp.get_columns(v)],
                [("a", mysql.INTEGER), ("b", mysql.VARCHAR)],
            )

    def test_skip_not_describable(self, metadata, connection):
        """A view left pointing at a dropped table warns and is skipped
        during metadata.reflect(), and raises on direct autoload."""
        @event.listens_for(metadata, "before_drop")
        def cleanup(*arg, **kw):
            with testing.db.begin() as conn:
                conn.exec_driver_sql("DROP TABLE IF EXISTS test_t1")
                conn.exec_driver_sql("DROP TABLE IF EXISTS test_t2")
                conn.exec_driver_sql("DROP VIEW IF EXISTS test_v")

        conn = connection
        conn.exec_driver_sql("CREATE TABLE test_t1 (id INTEGER)")
        conn.exec_driver_sql("CREATE TABLE test_t2 (id INTEGER)")
        conn.exec_driver_sql("CREATE VIEW test_v AS SELECT id FROM test_t1")
        # invalidate the view by dropping its base table
        conn.exec_driver_sql("DROP TABLE test_t1")

        m = MetaData()
        with expect_warnings(
                "Skipping .* Table or view named .?test_v.? could not be "
                "reflected: .* references invalid table"):
            m.reflect(views=True, bind=conn)
        eq_(m.tables["test_t2"].name, "test_t2")

        assert_raises_message(
            exc.UnreflectableTableError,
            "references invalid table",
            Table,
            "test_v",
            MetaData(),
            autoload_with=conn,
        )

    @testing.exclude("mysql", "<", (5, 0, 0), "no information_schema support")
    def test_system_views(self):
        """information_schema views are visible via get_view_names()."""
        dialect = testing.db.dialect
        connection = testing.db.connect()
        view_names = dialect.get_view_names(connection, "information_schema")
        self.assert_("TABLES" in view_names)

    def test_nullable_reflection(self, metadata, connection):
        """test reflection of NULL/NOT NULL, in particular with TIMESTAMP
        defaults where MySQL is inconsistent in how it reports CREATE TABLE.

        """
        meta = metadata

        # this is ideally one table, but older MySQL versions choke
        # on the multiple TIMESTAMP columns
        row = connection.exec_driver_sql(
            "show variables like '%%explicit_defaults_for_timestamp%%'").first(
        )
        explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true")

        reflected = []
        for idx, cols in enumerate([
            [
                "x INTEGER NULL",
                "y INTEGER NOT NULL",
                "z INTEGER",
                "q TIMESTAMP NULL",
            ],
            ["p TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP"],
            ["r TIMESTAMP NOT NULL"],
            ["s TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP"],
            ["t TIMESTAMP"],
            ["u TIMESTAMP DEFAULT CURRENT_TIMESTAMP"],
        ]):
            Table("nn_t%d" % idx, meta)  # to allow DROP

            connection.exec_driver_sql("""
                CREATE TABLE nn_t%d (
                    %s
                )
            """ % (idx, ", \n".join(cols)))

            reflected.extend({
                "name": d["name"],
                "nullable": d["nullable"],
                "default": d["default"],
            } for d in inspect(connection).get_columns("nn_t%d" % idx))

        # MariaDB >= 10.2 reports the function in lower case with parens
        if connection.dialect._is_mariadb_102:
            current_timestamp = "current_timestamp()"
        else:
            current_timestamp = "CURRENT_TIMESTAMP"

        eq_(
            reflected,
            [
                {"name": "x", "nullable": True, "default": None},
                {"name": "y", "nullable": False, "default": None},
                {"name": "z", "nullable": True, "default": None},
                {"name": "q", "nullable": True, "default": None},
                {"name": "p", "nullable": True, "default": current_timestamp},
                {
                    "name": "r",
                    "nullable": False,
                    "default": None if explicit_defaults_for_timestamp else
                    ("%(current_timestamp)s "
                     "ON UPDATE %(current_timestamp)s") % {
                         "current_timestamp": current_timestamp
                    },
                },
                {"name": "s", "nullable": False, "default": current_timestamp},
                {
                    "name": "t",
                    "nullable": True if explicit_defaults_for_timestamp else False,
                    "default": None if explicit_defaults_for_timestamp else
                    ("%(current_timestamp)s "
                     "ON UPDATE %(current_timestamp)s") % {
                         "current_timestamp": current_timestamp
                    },
                },
                {
                    "name": "u",
                    "nullable": True if explicit_defaults_for_timestamp else False,
                    "default": current_timestamp,
                },
            ],
        )

    def test_reflection_with_unique_constraint(self, metadata, connection):
        """A UNIQUE constraint reflects as both a unique index and a unique
        constraint at the inspector level, but only as the index on Table."""
        insp = inspect(connection)

        meta = metadata
        uc_table = Table(
            "mysql_uc",
            meta,
            Column("a", String(10)),
            UniqueConstraint("a", name="uc_a"),
        )

        uc_table.create(connection)

        # MySQL converts unique constraints into unique indexes.
        # separately we get both
        indexes = dict((i["name"], i) for i in insp.get_indexes("mysql_uc"))
        constraints = set(i["name"]
                          for i in insp.get_unique_constraints("mysql_uc"))

        self.assert_("uc_a" in indexes)
        self.assert_(indexes["uc_a"]["unique"])
        self.assert_("uc_a" in constraints)

        # reflection here favors the unique index, as that's the
        # more "official" MySQL construct
        # NOTE(review): autoload_with=testing.db rather than the
        # ``connection`` fixture used elsewhere in this test — confirm
        # whether this was intentional.
        reflected = Table("mysql_uc", MetaData(), autoload_with=testing.db)

        indexes = dict((i.name, i) for i in reflected.indexes)
        constraints = set(uc.name for uc in reflected.constraints)

        self.assert_("uc_a" in indexes)
        self.assert_(indexes["uc_a"].unique)
        self.assert_("uc_a" not in constraints)

    def test_reflect_fulltext(self, metadata, connection):
        """A FULLTEXT index round-trips through reflection and compiles
        back to CREATE FULLTEXT INDEX."""
        mt = Table(
            "mytable",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("textdata", String(50)),
            mariadb_engine="InnoDB",
            mysql_engine="InnoDB",
        )
        Index(
            "textdata_ix",
            mt.c.textdata,
            mysql_prefix="FULLTEXT",
            mariadb_prefix="FULLTEXT",
        )
        metadata.create_all(connection)

        # NOTE(review): autoload_with=testing.db rather than ``connection``
        # — confirm whether this was intentional.
        mt = Table("mytable", MetaData(), autoload_with=testing.db)
        idx = list(mt.indexes)[0]
        eq_(idx.name, "textdata_ix")
        eq_(idx.dialect_options[testing.db.name]["prefix"], "FULLTEXT")
        self.assert_compile(
            CreateIndex(idx),
            "CREATE FULLTEXT INDEX textdata_ix ON mytable (textdata)",
        )

    @testing.requires.mysql_ngram_fulltext
    def test_reflect_fulltext_comment(
            self,
            metadata,
            connection,
    ):
        """A FULLTEXT index WITH PARSER ngram reflects its parser option."""
        mt = Table(
            "mytable",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("textdata", String(50)),
            mysql_engine="InnoDB",
        )
        Index(
            "textdata_ix",
            mt.c.textdata,
            mysql_prefix="FULLTEXT",
            mysql_with_parser="ngram",
        )

        metadata.create_all(connection)

        mt = Table("mytable", MetaData(), autoload_with=connection)
        idx = list(mt.indexes)[0]
        eq_(idx.name, "textdata_ix")
        eq_(idx.dialect_options["mysql"]["prefix"], "FULLTEXT")
        eq_(idx.dialect_options["mysql"]["with_parser"], "ngram")
        self.assert_compile(
            CreateIndex(idx),
            "CREATE FULLTEXT INDEX textdata_ix ON mytable "
            "(textdata) WITH PARSER ngram",
        )

    def test_non_column_index(self, metadata, connection):
        """An expression index (DESC) reflects back as the plain column."""
        m1 = metadata
        t1 = Table("add_ix", m1, Column("x", String(50)),
                   mysql_engine="InnoDB")
        Index("foo_idx", t1.c.x.desc())
        m1.create_all(connection)

        insp = inspect(connection)
        eq_(
            insp.get_indexes("add_ix"),
            [{
                "name": "foo_idx",
                "column_names": ["x"],
                "unique": False
            }],
        )

    def _bug_88718_96365_casing_0(self):
        # fixture: lower_case_table_names=0 — raw fkeys with wrong-case
        # referred_columns, plus the information_schema rows used to repair
        fkeys_casing_0 = [
            {
                "name": "FK_PlaylistTTrackId",
                "constrained_columns": ["TTrackID"],
                "referred_schema": "Test_Schema",
                "referred_table": "Track",
                "referred_columns": ["trackid"],
                "options": {},
            },
            {
                "name": "FK_PlaylistTrackId",
                "constrained_columns": ["TrackID"],
                "referred_schema": None,
                "referred_table": "Track",
                "referred_columns": ["trackid"],
                "options": {},
            },
        ]
        ischema_casing_0 = [
            ("Test", "Track", "TrackID"),
            ("Test_Schema", "Track", "TrackID"),
        ]
        return fkeys_casing_0, ischema_casing_0

    def _bug_88718_96365_casing_1(self):
        # fixture: lower_case_table_names=1
        fkeys_casing_1 = [
            {
                "name": "FK_PlaylistTTrackId",
                "constrained_columns": ["TTrackID"],
                "referred_schema": "Test_Schema",
                "referred_table": "Track",
                "referred_columns": ["trackid"],
                "options": {},
            },
            {
                "name": "FK_PlaylistTrackId",
                "constrained_columns": ["TrackID"],
                "referred_schema": None,
                "referred_table": "Track",
                "referred_columns": ["trackid"],
                "options": {},
            },
        ]
        ischema_casing_1 = [
            (util.u("Test"), util.u("Track"), "TrackID"),
            (util.u("Test_Schema"), util.u("Track"), "TrackID"),
        ]
        return fkeys_casing_1, ischema_casing_1

    def _bug_88718_96365_casing_2(self):
        # fixture: lower_case_table_names=2
        fkeys_casing_2 = [
            {
                "name": "FK_PlaylistTTrackId",
                "constrained_columns": ["TTrackID"],
                # I haven't tested schema name but since col/table both
                # do it, assume schema name also comes back wrong
                "referred_schema": "test_schema",
                "referred_table": "track",
                "referred_columns": ["trackid"],
                "options": {},
            },
            {
                "name": "FK_PlaylistTrackId",
                "constrained_columns": ["TrackID"],
                "referred_schema": None,
                # table name also comes back wrong (probably schema also)
                # with casing=2, see https://bugs.mysql.com/bug.php?id=96365
                "referred_table": "track",
                "referred_columns": ["trackid"],
                "options": {},
            },
        ]
        ischema_casing_2 = [
            ("Test", "Track", "TrackID"),
            ("Test_Schema", "Track", "TrackID"),
        ]
        return fkeys_casing_2, ischema_casing_2

    def test_correct_for_mysql_bugs_88718_96365(self):
        """Unit-test the dialect's fkey casing repair against mocked
        information_schema rows for each lower_case_table_names setting."""
        dialect = mysql.dialect()

        for casing, (fkeys, ischema) in [
            (0, self._bug_88718_96365_casing_0()),
            (1, self._bug_88718_96365_casing_1()),
            (2, self._bug_88718_96365_casing_2()),
        ]:
            dialect._casing = casing
            dialect.default_schema_name = "Test"
            # mock connection returns the canned information_schema rows
            connection = mock.Mock(dialect=dialect,
                                   execute=lambda stmt, params: ischema)
            dialect._correct_for_mysql_bugs_88718_96365(fkeys, connection)
            eq_(
                fkeys,
                [
                    {
                        "name": "FK_PlaylistTTrackId",
                        "constrained_columns": ["TTrackID"],
                        "referred_schema": "Test_Schema",
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                    {
                        "name": "FK_PlaylistTrackId",
                        "constrained_columns": ["TrackID"],
                        "referred_schema": None,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                ],
            )

    def test_case_sensitive_column_constraint_reflection(
            self, metadata, connection):
        # test for issue #4344 which works around
        # MySQL 8.0 bug https://bugs.mysql.com/bug.php?id=88718

        m1 = metadata

        Table(
            "Track",
            m1,
            Column("TrackID", Integer, primary_key=True),
            mysql_engine="InnoDB",
        )
        Table(
            "Track",
            m1,
            Column("TrackID", Integer, primary_key=True),
            schema=testing.config.test_schema,
            mysql_engine="InnoDB",
        )
        Table(
            "PlaylistTrack",
            m1,
            Column("id", Integer, primary_key=True),
            Column(
                "TrackID",
                ForeignKey("Track.TrackID", name="FK_PlaylistTrackId"),
            ),
            Column(
                "TTrackID",
                ForeignKey(
                    "%s.Track.TrackID" % (testing.config.test_schema, ),
                    name="FK_PlaylistTTrackId",
                ),
            ),
            mysql_engine="InnoDB",
        )
        m1.create_all(connection)

        if connection.dialect._casing in (1, 2):
            # the original test for the 88718 fix here in [ticket:4344]
            # actually set referred_table='track', with the wrong casing!
            # this test was never run. with [ticket:4751], I've gone through
            # the trouble to create docker containers with true
            # lower_case_table_names=2 and per
            # https://bugs.mysql.com/bug.php?id=96365 the table name being
            # lower case is also an 8.0 regression.
            eq_(
                inspect(connection).get_foreign_keys("PlaylistTrack"),
                [
                    {
                        "name": "FK_PlaylistTTrackId",
                        "constrained_columns": ["TTrackID"],
                        "referred_schema": testing.config.test_schema,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                    {
                        "name": "FK_PlaylistTrackId",
                        "constrained_columns": ["TrackID"],
                        "referred_schema": None,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                ],
            )
        else:
            eq_(
                sorted(
                    inspect(connection).get_foreign_keys("PlaylistTrack"),
                    key=lambda elem: elem["name"],
                ),
                [
                    {
                        "name": "FK_PlaylistTTrackId",
                        "constrained_columns": ["TTrackID"],
                        "referred_schema": testing.config.test_schema,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                    {
                        "name": "FK_PlaylistTrackId",
                        "constrained_columns": ["TrackID"],
                        "referred_schema": None,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                ],
            )

    @testing.requires.mysql_fully_case_sensitive
    def test_case_sensitive_reflection_dual_case_references(
            self, metadata, connection):
        # this tests that within the fix we do for MySQL bug
        # 88718, we don't do case-insensitive logic if the backend
        # is case sensitive
        m = metadata
        Table(
            "t1",
            m,
            Column("some_id", Integer, primary_key=True),
            mysql_engine="InnoDB",
        )
        Table(
            "T1",
            m,
            Column("Some_Id", Integer, primary_key=True),
            mysql_engine="InnoDB",
        )
        Table(
            "t2",
            m,
            Column("id", Integer, primary_key=True),
            Column("t1id", ForeignKey("t1.some_id", name="t1id_fk")),
            Column("cap_t1id", ForeignKey("T1.Some_Id", name="cap_t1id_fk")),
            mysql_engine="InnoDB",
        )
        m.create_all(connection)

        eq_(
            dict((rec["name"], rec)
                 for rec in inspect(connection).get_foreign_keys("t2")),
            {
                "cap_t1id_fk": {
                    "name": "cap_t1id_fk",
                    "constrained_columns": ["cap_t1id"],
                    "referred_schema": None,
                    "referred_table": "T1",
                    "referred_columns": ["Some_Id"],
                    "options": {},
                },
                "t1id_fk": {
                    "name": "t1id_fk",
                    "constrained_columns": ["t1id"],
                    "referred_schema": None,
                    "referred_table": "t1",
                    "referred_columns": ["some_id"],
                    "options": {},
                },
            },
        )
def test_default_reflection(self):
    """Test reflection of column defaults."""

    # TODO: this test is a mess.   should be broken into individual
    # combinations
    # NOTE(review): a combinations-based test_default_reflection exists
    # elsewhere in this file; this monolithic variant looks like the
    # legacy copy it was split from — confirm which is meant to survive.

    from sqlalchemy.dialects.mysql import VARCHAR

    def_table = Table(
        "mysql_def",
        MetaData(),
        Column(
            "c1",
            VARCHAR(10, collation="utf8_unicode_ci"),
            DefaultClause(""),
            nullable=False,
        ),
        Column("c2", String(10), DefaultClause("0")),
        Column("c3", String(10), DefaultClause("abc")),
        Column("c4", TIMESTAMP, DefaultClause("2009-04-05 12:00:00")),
        Column("c5", TIMESTAMP),
        Column(
            "c6",
            TIMESTAMP,
            DefaultClause(
                sql.text(
                    "CURRENT_TIMESTAMP "
                    "ON UPDATE CURRENT_TIMESTAMP"
                )
            ),
        ),
        Column("c7", mysql.DOUBLE(), DefaultClause("0.0000")),
        Column("c8", mysql.DOUBLE(22, 6), DefaultClause("0.0000")),
    )
    # create, reflect, then drop regardless of reflection outcome
    def_table.create(testing.db)
    try:
        reflected = Table(
            "mysql_def", MetaData(), autoload_with=testing.db
        )
    finally:
        def_table.drop(testing.db)

    # the original Table keeps the raw default strings as given
    assert def_table.c.c1.server_default.arg == ""
    assert def_table.c.c2.server_default.arg == "0"
    assert def_table.c.c3.server_default.arg == "abc"
    assert def_table.c.c4.server_default.arg == "2009-04-05 12:00:00"

    # the reflected Table carries them back quoted as SQL literals
    assert str(reflected.c.c1.server_default.arg) == "''"
    assert str(reflected.c.c2.server_default.arg) == "'0'"
    assert str(reflected.c.c3.server_default.arg) == "'abc'"
    assert (
        str(reflected.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
    )
    assert reflected.c.c5.default is None
    assert reflected.c.c5.server_default is None
    assert reflected.c.c6.default is None
    assert str(reflected.c.c7.server_default.arg) in ("0", "'0'")

    # this is because the numeric is 6 decimal places, MySQL
    # formats it to that many places.
    assert str(reflected.c.c8.server_default.arg) in (
        "0.000000",
        "'0.000000'",
    )
    # parenthesized vs bare CURRENT_TIMESTAMP varies by server version
    assert re.match(
        r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
        str(reflected.c.c6.server_default.arg).upper(),
    )

    # round-trip: re-create from the reflected definition and reflect again
    reflected.create(testing.db)
    try:
        reflected2 = Table(
            "mysql_def", MetaData(), autoload_with=testing.db
        )
    finally:
        reflected.drop(testing.db)

    assert str(reflected2.c.c1.server_default.arg) == "''"
    assert str(reflected2.c.c2.server_default.arg) == "'0'"
    assert str(reflected2.c.c3.server_default.arg) == "'abc'"
    assert (
        str(reflected2.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
    )
    # NOTE(review): the following re-assert ``reflected`` (first pass),
    # not ``reflected2`` — possibly a copy/paste slip; confirm whether
    # c5-c8 of reflected2 were meant to be checked here.
    assert reflected.c.c5.default is None
    assert reflected.c.c5.server_default is None
    assert reflected.c.c6.default is None
    assert str(reflected.c.c7.server_default.arg) in ("0", "'0'")
    assert str(reflected.c.c8.server_default.arg) in (
        "0.000000",
        "'0.000000'",
    )
    assert re.match(
        r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
        str(reflected.c.c6.server_default.arg).upper(),
    )