def test_year(self):
        """Exercise YEAR."""

        # Four YEAR columns; y5 declares an explicit display width of 4.
        year_table = Table(
            "mysql_year",
            self.metadata,
            Column("y1", mysql.MSYear),
            Column("y2", mysql.MSYear),
            Column("y3", mysql.MSYear),
            Column("y5", mysql.MSYear(4)),
        )

        # every column must produce a usable repr()
        for col in year_table.c:
            self.assert_(repr(col))
        year_table.create()
        reflected = Table("mysql_year", MetaData(testing.db), autoload=True)

        # run the identical round-trip against both the declared table
        # and its reflected counterpart
        for table in year_table, reflected:
            with testing.db.connect() as conn:
                conn.execute(table.insert(["1950", "50", None, 1950]))
                row = conn.execute(table.select()).first()
                # MySQL expands the two-digit string "50" to year 2050
                eq_(list(row), [1950, 2050, None, 1950])
                conn.execute(table.delete())
                self.assert_(colspec(table.c.y1).startswith("y1 YEAR"))
                # the (4) display width may or may not survive reflection
                eq_regex(colspec(table.c.y5), r"y5 YEAR(?:\(4\))?")
    def test_precision_float_roundtrip(self):
        """Round-trip a DOUBLE value as Decimal with explicit scale.

        ``scale_value`` fixes precision/scale in the DDL;
        ``unscale_value`` has no DDL scale and relies on
        ``decimal_return_scale`` to control the digits returned.
        """
        t = Table(
            "t",
            self.metadata,
            Column(
                "scale_value",
                mysql.DOUBLE(precision=15, scale=12, asdecimal=True),
            ),
            Column(
                "unscale_value",
                mysql.DOUBLE(decimal_return_scale=12, asdecimal=True),
            ),
        )
        with testing.db.connect() as conn:
            t.create(conn)
            conn.execute(
                t.insert(),
                scale_value=45.768392065789,
                unscale_value=45.768392065789,
            )
            # both columns must come back as an exact 12-digit Decimal
            result = conn.scalar(select([t.c.scale_value]))
            eq_(result, decimal.Decimal("45.768392065789"))

            result = conn.scalar(select([t.c.unscale_value]))
            eq_(result, decimal.Decimal("45.768392065789"))
 def test_index_against_text_separate(self):
     """A functional Index built from text() and attached to the table
     afterwards compiles to the expected CREATE INDEX statement."""
     md = MetaData()
     tbl = Table("x", md, Column("q", String(50)))
     functional_idx = Index("y", text("some_function(q)"))
     tbl.append_constraint(functional_idx)
     self.assert_compile(
         schema.CreateIndex(functional_idx),
         "CREATE INDEX y ON x (some_function(q))",
     )
    def test_label_and_alias(self):
        """Labels and alias names follow case-sensitive quoting rules."""

        # Lower case names, should not quote
        metadata = MetaData()
        table = Table("t1", metadata, Column("col1", Integer))
        x = select([table.c.col1.label("label1")]).alias("alias1")
        self.assert_compile(
            select([x.c.label1]),
            "SELECT "
            "alias1.label1 "
            "FROM ("
            "SELECT "
            "t1.col1 AS label1 "
            "FROM t1"
            ") AS alias1",
        )

        # Not lower case names, should quote
        metadata = MetaData()
        table = Table("T1", metadata, Column("Col1", Integer))
        x = select([table.c.Col1.label("Label1")]).alias("Alias1")
        self.assert_compile(
            select([x.c.Label1]),
            "SELECT "
            '"Alias1"."Label1" '
            "FROM ("
            "SELECT "
            '"T1"."Col1" AS "Label1" '
            'FROM "T1"'
            ') AS "Alias1"',
        )
# Example #5
    def test_reflection_with_unique_constraint(self):
        """Reflect a table carrying a UNIQUE constraint on MySQL.

        MySQL stores unique constraints as unique indexes, so the
        inspector reports the name via both get_indexes() and
        get_unique_constraints(), while full Table reflection keeps
        only the index form.
        """
        insp = inspect(testing.db)

        meta = self.metadata
        uc_table = Table(
            "mysql_uc",
            meta,
            Column("a", String(10)),
            UniqueConstraint("a", name="uc_a"),
        )

        uc_table.create()

        # MySQL converts unique constraints into unique indexes.
        # separately we get both
        indexes = {i["name"]: i for i in insp.get_indexes("mysql_uc")}
        constraints = {
            i["name"] for i in insp.get_unique_constraints("mysql_uc")
        }

        self.assert_("uc_a" in indexes)
        self.assert_(indexes["uc_a"]["unique"])
        self.assert_("uc_a" in constraints)

        # reflection here favors the unique index, as that's the
        # more "official" MySQL construct
        reflected = Table("mysql_uc", MetaData(testing.db), autoload=True)

        indexes = {i.name: i for i in reflected.indexes}
        constraints = {uc.name for uc in reflected.constraints}

        self.assert_("uc_a" in indexes)
        self.assert_(indexes["uc_a"].unique)
        self.assert_("uc_a" not in constraints)
 def test_rowcount_flag(self):
     """Verify the enable_rowcount engine option and the matching
     per-statement execution option, on both engine configurations."""
     metadata = self.metadata
     engine = engines.testing_engine(options={"enable_rowcount": True})
     assert engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     t = Table("t1", metadata, Column("data", String(10)))
     metadata.create_all()
     r = t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
     r = t.update().where(t.c.data == "d2").values(data="d3").execute()
     eq_(r.rowcount, 1)
     r = t.delete().where(t.c.data == "d3").execute()
     eq_(r.rowcount, 2)
     # rowcount disabled per-statement -> result reports -1
     r = t.delete().execution_options(enable_rowcount=False).execute()
     eq_(r.rowcount, -1)
     engine.dispose()
     # second engine: rowcount disabled at engine level
     engine = engines.testing_engine(options={"enable_rowcount": False})
     assert not engine.dialect.supports_sane_rowcount
     metadata.bind = engine
     r = t.insert().execute({"data": "d1"}, {"data": "d2"}, {"data": "d3"})
     r = t.update().where(t.c.data == "d2").values(data="d3").execute()
     eq_(r.rowcount, -1)
     r = t.delete().where(t.c.data == "d3").execute()
     eq_(r.rowcount, -1)
     # per-statement option overrides the engine-level default
     r = t.delete().execution_options(enable_rowcount=True).execute()
     eq_(r.rowcount, 1)
     r.close()
     engine.dispose()
# Example #7
    def test_reflect_fulltext_comment(self):
        """A FULLTEXT index with a WITH PARSER clause reflects its
        mysql_prefix and mysql_with_parser dialect options."""
        mt = Table(
            "mytable",
            self.metadata,
            Column("id", Integer, primary_key=True),
            Column("textdata", String(50)),
            mysql_engine="InnoDB",
        )
        Index(
            "textdata_ix",
            mt.c.textdata,
            mysql_prefix="FULLTEXT",
            mysql_with_parser="ngram",
        )

        self.metadata.create_all(testing.db)

        # reflect into a fresh MetaData and inspect the single index
        mt = Table("mytable", MetaData(), autoload_with=testing.db)
        idx = list(mt.indexes)[0]
        eq_(idx.name, "textdata_ix")
        eq_(idx.dialect_options["mysql"]["prefix"], "FULLTEXT")
        eq_(idx.dialect_options["mysql"]["with_parser"], "ngram")
        self.assert_compile(
            CreateIndex(idx),
            "CREATE FULLTEXT INDEX textdata_ix ON mytable "
            "(textdata) WITH PARSER ngram",
        )
    def test_update_to_select_schema(self):
        """UPDATE against a schema-qualified table renders bracketed
        identifiers for both a scalar subquery and a correlated FROM."""
        meta = MetaData()
        table = Table(
            "sometable",
            meta,
            Column("sym", String),
            Column("val", Integer),
            schema="schema",
        )
        # "#other" exercises quoting of a temp-table-style name
        other = Table("#other", meta, Column("sym", String),
                      Column("newval", Integer))
        stmt = table.update().values(val=select([other.c.newval]).where(
            table.c.sym == other.c.sym).as_scalar())

        self.assert_compile(
            stmt,
            "UPDATE [schema].sometable SET val="
            "(SELECT [#other].newval FROM [#other] "
            "WHERE [schema].sometable.sym = [#other].sym)",
        )

        # direct column assignment produces an UPDATE ... FROM form
        stmt = (table.update().values(val=other.c.newval).where(
            table.c.sym == other.c.sym))
        self.assert_compile(
            stmt,
            "UPDATE [schema].sometable SET val="
            "[#other].newval FROM [schema].sometable, "
            "[#other] WHERE [schema].sometable.sym = [#other].sym",
        )
    def test_foreignkey_missing_insert(self):
        """Inserting into t2 without an id raises a NOT NULL violation,
        not a missing-sequence error (t2.id is an FK, no autoincrement).
        """
        Table("t1", self.metadata, Column("id", Integer, primary_key=True))
        t2 = Table(
            "t2",
            self.metadata,
            Column("id", Integer, ForeignKey("t1.id"), primary_key=True),
        )
        self.metadata.create_all()

        # want to ensure that "null value in column "id" violates not-
        # null constraint" is raised (IntegrityError on psycoopg2, but
        # ProgrammingError on pg8000), and not "ProgrammingError:
        # (ProgrammingError) relationship "t2_id_seq" does not exist".
        # the latter corresponds to autoincrement behavior, which is not
        # the case here due to the foreign key.

        # exercise both implicit_returning modes
        for eng in [
                engines.testing_engine(options={"implicit_returning": False}),
                engines.testing_engine(options={"implicit_returning": True}),
        ]:
            with expect_warnings(
                    ".*has no Python-side or server-side default.*"):
                assert_raises(
                    (exc.IntegrityError, exc.ProgrammingError),
                    eng.execute,
                    t2.insert(),
                )
    def test_unicode_enum(self):
        """Round-trip and reflect ENUM labels containing non-ASCII
        characters, for both generic Enum and mysql.ENUM."""
        metadata = self.metadata
        t1 = Table(
            "table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("value", Enum(u("réveillé"), u("drôle"), u("S’il"))),
            Column("value2", mysql.ENUM(u("réveillé"), u("drôle"), u("S’il"))),
        )
        metadata.create_all()
        t1.insert().execute(value=u("drôle"), value2=u("drôle"))
        t1.insert().execute(value=u("réveillé"), value2=u("réveillé"))
        t1.insert().execute(value=u("S’il"), value2=u("S’il"))
        eq_(
            t1.select().order_by(t1.c.id).execute().fetchall(),
            [
                (1, u("drôle"), u("drôle")),
                (2, u("réveillé"), u("réveillé")),
                (3, u("S’il"), u("S’il")),
            ],
        )

        # test reflection of the enum labels

        m2 = MetaData(testing.db)
        t2 = Table("table", m2, autoload=True)

        # TODO: what's wrong with the last element ?  is there
        # latin-1 stuff forcing its way in ?

        # only the first two labels are compared; see TODO above
        eq_(t2.c.value.type.enums[0:2],
            [u("réveillé"), u("drôle")])  # u'S’il') # eh ?

        eq_(t2.c.value2.type.enums[0:2],
            [u("réveillé"), u("drôle")])  # u'S’il') # eh ?
# Example #11
    def test_conflicting_backref_subclass(self):
        """Two relationships from A to B and to its subclass C, both
        declaring backref="a", must fail when mappers are configured."""
        meta = MetaData()

        table_a = Table("a", meta, Column("id", Integer, primary_key=True))
        table_b = Table(
            "b",
            meta,
            Column("id", Integer, primary_key=True),
            Column("a_id", Integer, ForeignKey("a.id")),
        )

        class A(object):
            pass

        class B(object):
            pass

        class C(B):
            pass

        mapper(B, table_b)
        mapper(C, None, inherits=B)
        mapper(
            A,
            table_a,
            properties={
                # both backrefs target the same attribute name "a"
                "b": relationship(B, backref="a"),
                "c": relationship(C, backref="a"),
            },
        )

        assert_raises_message(
            sa_exc.ArgumentError,
            "Error creating backref",
            configure_mappers,
        )
# Example #12
 def _baseline_1_create_tables(self):
     """Create the Zoo/Animal benchmark schema used by the baseline
     profiling steps."""
     Table(
         "Zoo",
         self.metadata,
         Column(
             "ID",
             Integer,
             Sequence("zoo_id_seq"),
             primary_key=True,
             index=True,
         ),
         Column("Name", Unicode(255)),
         Column("Founded", Date),
         Column("Opens", Time),
         Column("LastEscape", DateTime),
         Column("Admission", Float),
     )
     Table(
         "Animal",
         self.metadata,
         Column("ID", Integer, Sequence("animal_id_seq"), primary_key=True),
         Column("ZooID", Integer, ForeignKey("Zoo.ID"), index=True),
         Column("Name", Unicode(100)),
         Column("Species", Unicode(100)),
         # default leg count applied on insert when not supplied
         Column("Legs", Integer, default=4),
         Column("LastEscape", DateTime),
         Column("Lifespan", Float(4)),
         # self-referential FK: an animal's mother is another Animal
         Column("MotherID", Integer, ForeignKey("Animal.ID")),
         Column("PreferredFoodID", Integer),
         Column("AlternateFoodID", Integer),
     )
     self.metadata.create_all()
# Example #13
    def setup_class(cls):
        """Build shared fixture tables and warm caches so later timing
        or profiling runs are not skewed by first-use setup costs."""

        global t1, t2, metadata
        metadata = MetaData()
        t1 = Table(
            "t1",
            metadata,
            Column("c1", Integer, primary_key=True),
            Column("c2", String(30)),
        )

        t2 = Table(
            "t2",
            metadata,
            Column("c1", Integer, primary_key=True),
            Column("c2", String(30)),
        )

        cls.dialect = default.DefaultDialect()

        # do a "compile" ahead of time to load
        # deferred imports, use the dialect to pre-load
        # dialect-level types
        t1.insert().compile(dialect=cls.dialect)

        # go through all the TypeEngine
        # objects in use and pre-load their _type_affinity
        # entries.
        for t in (t1, t2):
            for c in t.c:
                # attribute access alone populates the memoized entry
                c.type._type_affinity
        from sqlalchemy_1_3.sql import sqltypes

        for t in list(sqltypes._type_map.values()):
            t._type_affinity
# Example #14
    def test_preexecute_passivedefault(self):
        """test that when we get a primary key column back from
        reflecting a table which has a default value on it, we pre-
        execute that DefaultClause upon insert."""

        try:
            meta = MetaData(testing.db)
            testing.db.execute(
                """
             CREATE TABLE speedy_users
             (
                 speedy_user_id   SERIAL     PRIMARY KEY,

                 user_name        VARCHAR    NOT NULL,
                 user_password    VARCHAR    NOT NULL
             );
            """
            )
            t = Table("speedy_users", meta, autoload=True)
            # insert and read back the same literal so the fetchall
            # assertion below actually verifies the round-trip
            # (previously the inserted password and the expected value
            # disagreed, making the test unpassable)
            r = t.insert().execute(user_name="user", user_password="lala")
            assert r.inserted_primary_key == [1]
            result = t.select().execute().fetchall()
            assert result == [(1, "user", "lala")]
        finally:
            # drop outside of metadata so cleanup happens even on failure
            testing.db.execute("drop table speedy_users")
    def test_subquery_four(self):
        """With quote=False on mixed-case names, the table and column
        render unquoted while the alias (quoting unset) stays quoted."""

        # Not lower case names, quotes off, should not quote
        metadata = MetaData()
        t1 = Table(
            "T1",
            metadata,
            Column("Col1", Integer, quote=False),
            schema="Foo",
            quote=False,
            quote_schema=False,
        )
        a = t1.select().alias("Anon")
        b = select([1], a.c.Col1 == 2, from_obj=a)
        self.assert_compile(
            b,
            "SELECT 1 "
            "FROM ("
            "SELECT "
            "Foo.T1.Col1 AS Col1 "
            "FROM "
            "Foo.T1"
            ') AS "Anon" '
            "WHERE "
            '"Anon".Col1 = :Col1_1',
        )
# Example #16
    def test_select(self):
        """inspect() on a Select returns the statement itself; it is
        selectable and is its own .selectable."""
        tbl = Table("t", MetaData(), Column("x", Integer))
        stmt = tbl.select()

        is_(inspect(stmt), stmt)
        assert stmt.is_selectable
        is_(stmt.selectable, stmt)
# Example #17
 def test_checksfor_sequence(self):
     """create(checkfirst=True) must not fail when the column's
     sequence already exists in the database."""
     meta1 = self.metadata
     seq = Sequence("fooseq")
     t = Table("mytable", meta1, Column("col1", Integer, seq))
     # drop the sequence, then recreate it out-of-band so checkfirst
     # has to detect it
     seq.drop()
     testing.db.execute("CREATE SEQUENCE fooseq")
     t.create(checkfirst=True)
# Example #18
    def define_tables(cls, metadata):
        """Define foo, bar, and the foo_bar association table as
        module-level globals for the tests in this class."""
        global foo, bar, foo_bar
        foo = Table(
            "foo",
            metadata,
            Column(
                "id",
                Integer,
                # optional: sequence only used on backends requiring it
                Sequence("foo_id_seq", optional=True),
                primary_key=True,
            ),
            Column("data", String(20)),
        )

        # bar's primary key doubles as an FK to foo (one-to-one style)
        bar = Table(
            "bar",
            metadata,
            Column("bid", Integer, ForeignKey("foo.id"), primary_key=True),
        )

        # plain many-to-many association between foo and bar
        foo_bar = Table(
            "foo_bar",
            metadata,
            Column("foo_id", Integer, ForeignKey("foo.id")),
            Column("bar_id", Integer, ForeignKey("bar.bid")),
        )
    def test_use_alter(self):
        """An FK marked use_alter=True is emitted via ALTER TABLE after
        CREATE, and dropped via ALTER TABLE before DROP; a normal FK is
        emitted inline."""
        m = MetaData()
        Table("t", m, Column("a", Integer))

        Table(
            "t2",
            m,
            Column("a", Integer, ForeignKey("t.a",
                                            use_alter=True,
                                            name="fk_ta")),
            Column("b", Integer, ForeignKey("t.a", name="fk_tb")),
        )

        # mock engine records the SQL rather than executing it
        e = engines.mock_engine(dialect_name="postgresql")
        m.create_all(e)
        m.drop_all(e)

        e.assert_sql([
            "CREATE TABLE t (a INTEGER)",
            "CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb "
            "FOREIGN KEY(b) REFERENCES t (a))",
            "ALTER TABLE t2 "
            "ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)",
            "ALTER TABLE t2 DROP CONSTRAINT fk_ta",
            "DROP TABLE t2",
            "DROP TABLE t",
        ])
# Example #20
 def define_tables(cls, metadata):
     """Define the people / bookcases / books schema for this class's
     tests: a person owns bookcases, and books reference both their
     bookcase and their owner."""
     Table(
         "people",
         metadata,
         Column("people_id", Integer, primary_key=True),
         Column("age", Integer),
         Column("name", String(30)),
     )
     Table(
         "bookcases",
         metadata,
         Column("bookcase_id", Integer, primary_key=True),
         Column(
             "bookcase_owner_id", Integer, ForeignKey("people.people_id")
         ),
         Column("bookcase_shelves", Integer),
         Column("bookcase_width", Integer),
     )
     Table(
         "books",
         metadata,
         Column("book_id", Integer, primary_key=True),
         Column(
             "bookcase_id", Integer, ForeignKey("bookcases.bookcase_id")
         ),
         Column("book_owner_id", Integer, ForeignKey("people.people_id")),
         Column("book_weight", Integer),
     )
    def test_illegal_initial_char(self):
        """Names starting with an illegal initial character ('$', digits)
        are quoted by default and left bare when quote=False."""
        # Create table with quote defaults
        metadata = MetaData()
        t1 = Table(
            "$table", metadata, Column("$column", Integer), schema="$schema"
        )

        # Note that the names are quoted b/c the initial
        # character is in ['$','0', '1' ... '9']
        result = 'CREATE TABLE "$schema"."$table" ("$column" INTEGER)'
        self.assert_compile(schema.CreateTable(t1), result)

        # Create the same table with quotes set to False now
        metadata = MetaData()
        t1 = Table(
            "$table",
            metadata,
            Column("$column", Integer, quote=False),
            schema="$schema",
            quote=False,
            quote_schema=False,
        )

        # Note that the names are now unquoted
        result = "CREATE TABLE $schema.$table ($column INTEGER)"
        self.assert_compile(schema.CreateTable(t1), result)
 def test_multiple(self):
     """Two single-column FKs to the same parent table render as two
     separate FOREIGN KEY clauses, not one composite constraint."""
     meta = MetaData()
     Table(
         "foo",
         meta,
         Column("id", Integer, primary_key=True),
         Column("bar", Integer, primary_key=True),
     )
     child = Table(
         "some_table",
         meta,
         Column("id", Integer, primary_key=True),
         Column("foo_id", Integer, ForeignKey("foo.id")),
         Column("foo_bar", Integer, ForeignKey("foo.bar")),
     )
     expected = (
         "CREATE TABLE some_table ("
         "id INTEGER NOT NULL, "
         "foo_id INTEGER, "
         "foo_bar INTEGER, "
         "PRIMARY KEY (id), "
         "FOREIGN KEY(foo_id) REFERENCES foo (id), "
         "FOREIGN KEY(foo_bar) REFERENCES foo (bar))"
     )
     self.assert_compile(schema.CreateTable(child), expected)
    def test_enum_parse(self):
        """Manually-quoted ENUM value literals (deprecated) are
        unquoted/unescaped correctly, both as declared and as reflected.
        """

        with testing.expect_deprecated("Manually quoting ENUM value literals"):
            enum_table = Table(
                "mysql_enum",
                self.metadata,
                Column("e1", mysql.ENUM("'a'")),
                Column("e2", mysql.ENUM("''")),
                Column("e3", mysql.ENUM("a")),
                Column("e4", mysql.ENUM("")),
                Column("e5", mysql.ENUM("'a'", "''")),
                Column("e6", mysql.ENUM("''", "'a'")),
                # doubled quotes inside the literal encode embedded quotes
                Column("e7", mysql.ENUM("''", "'''a'''", "'b''b'", "''''")),
            )

        for col in enum_table.c:
            self.assert_(repr(col))

        enum_table.create()
        reflected = Table("mysql_enum", MetaData(testing.db), autoload=True)
        # declared and reflected tables must agree on the parsed labels
        for t in enum_table, reflected:
            eq_(t.c.e1.type.enums, ["a"])
            eq_(t.c.e2.type.enums, [""])
            eq_(t.c.e3.type.enums, ["a"])
            eq_(t.c.e4.type.enums, [""])
            eq_(t.c.e5.type.enums, ["a", ""])
            eq_(t.c.e6.type.enums, ["", "a"])
            eq_(t.c.e7.type.enums, ["", "'a'", "b'b", "'"])
    def test_too_long_index_name(self):
        """Auto-generated index names exceeding the dialect limits are
        truncated with a hash suffix; explicit too-long names raise."""
        dialect = testing.db.dialect.__class__()

        # limit via max_identifier_length alone, then via a separate
        # max_index_name_length
        for max_ident, max_index in [(22, None), (256, 22)]:
            dialect.max_identifier_length = max_ident
            dialect.max_index_name_length = max_index

            for tname, cname, exp in [
                ("sometable", "this_name_is_too_long", "ix_sometable_t_09aa"),
                ("sometable", "this_name_alsois_long", "ix_sometable_t_3cf1"),
            ]:

                t1 = Table(tname, MetaData(), Column(cname,
                                                     Integer,
                                                     index=True))
                ix1 = list(t1.indexes)[0]

                self.assert_compile(
                    schema.CreateIndex(ix1),
                    "CREATE INDEX %s "
                    "ON %s (%s)" % (exp, tname, cname),
                    dialect=dialect,
                )

        dialect.max_identifier_length = 22
        dialect.max_index_name_length = None

        # an explicitly-given name is never truncated; it must raise
        t1 = Table("t", MetaData(), Column("c", Integer))
        assert_raises(
            exc.IdentifierError,
            schema.CreateIndex(
                Index("this_other_name_is_too_long_for_what_were_doing",
                      t1.c.c)).compile,
            dialect=dialect,
        )
# Example #25
    def define_tables(cls, metadata):
        """Define the principals / users / groups schema as module
        globals: users and groups share the principals id space, and a
        map table joins them many-to-many."""
        global principals
        global users
        global groups
        global user_group_map

        principals = Table(
            "principals",
            metadata,
            Column(
                "principal_id",
                Integer,
                # optional=False: sequence is always used for this PK
                Sequence("principal_id_seq", optional=False),
                primary_key=True,
            ),
            Column("name", String(50), nullable=False),
        )

        # joined-table style: a user's PK is also its principal FK
        users = Table(
            "prin_users",
            metadata,
            Column(
                "principal_id",
                Integer,
                ForeignKey("principals.principal_id"),
                primary_key=True,
            ),
            Column("password", String(50), nullable=False),
            Column("email", String(50), nullable=False),
            Column("login_id", String(50), nullable=False),
        )

        groups = Table(
            "prin_groups",
            metadata,
            Column(
                "principal_id",
                Integer,
                ForeignKey("principals.principal_id"),
                primary_key=True,
            ),
        )

        # many-to-many association between users and groups, with a
        # composite primary key over both FKs
        user_group_map = Table(
            "prin_user_group_map",
            metadata,
            Column(
                "user_id",
                Integer,
                ForeignKey("prin_users.principal_id"),
                primary_key=True,
            ),
            Column(
                "group_id",
                Integer,
                ForeignKey("prin_groups.principal_id"),
                primary_key=True,
            ),
        )
    def test_cycle_named_fks(self):
        """With a mutually-dependent FK cycle where one side is named
        and marked use_alter, create/drop break the cycle via ALTER
        TABLE (when the dialect supports ALTER)."""
        metadata = MetaData(testing.db)

        Table(
            "a",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("bid", Integer, ForeignKey("b.id")),
        )

        Table(
            "b",
            metadata,
            Column("id", Integer, primary_key=True),
            Column(
                "aid",
                Integer,
                # named + use_alter: emitted after both CREATEs
                ForeignKey("a.id", use_alter=True, name="aidfk"),
            ),
        )

        # b and a may be created in either order (AllOf), then the
        # deferred constraint is added
        assertions = [
            AllOf(
                CompiledSQL("CREATE TABLE b ("
                            "id INTEGER NOT NULL, "
                            "aid INTEGER, "
                            "PRIMARY KEY (id)"
                            ")"),
                CompiledSQL("CREATE TABLE a ("
                            "id INTEGER NOT NULL, "
                            "bid INTEGER, "
                            "PRIMARY KEY (id), "
                            "FOREIGN KEY(bid) REFERENCES b (id)"
                            ")"),
            ),
            CompiledSQL("ALTER TABLE b ADD CONSTRAINT aidfk "
                        "FOREIGN KEY(aid) REFERENCES a (id)"),
        ]
        with self.sql_execution_asserter() as asserter:
            metadata.create_all(checkfirst=False)

        if testing.db.dialect.supports_alter:
            asserter.assert_(*assertions)

            with self.sql_execution_asserter() as asserter:
                metadata.drop_all(checkfirst=False)

            # the named constraint is dropped first so the tables can
            # then be dropped in either order
            asserter.assert_(
                CompiledSQL("ALTER TABLE b DROP CONSTRAINT aidfk"),
                AllOf(CompiledSQL("DROP TABLE b"),
                      CompiledSQL("DROP TABLE a")),
            )
        else:
            # no ALTER support: tables are simply dropped
            with self.sql_execution_asserter() as asserter:
                metadata.drop_all(checkfirst=False)

            asserter.assert_(
                AllOf(CompiledSQL("DROP TABLE b"),
                      CompiledSQL("DROP TABLE a")))
    def _constraint_create_fixture(self):
        """Return two plain two-column tables bound to one MetaData."""
        meta = MetaData()

        first = Table("tbl", meta, Column("a", Integer), Column("b", Integer))
        second = Table("t2", meta, Column("a", Integer), Column("b", Integer))

        return first, second
    def test_reflection(self):
        """A mysql.JSON column reflects back as mysql.JSON with JSON
        type affinity."""
        Table("mysql_json", self.metadata, Column("foo", mysql.JSON))
        self.metadata.create_all()

        reflected = Table("mysql_json", MetaData(), autoload_with=testing.db)
        json_col = reflected.c.foo
        is_(json_col.type._type_affinity, sqltypes.JSON)
        assert isinstance(json_col.type, mysql.JSON)
    def _table_seq_fixture(self):
        """Return a MetaData plus two tables whose primary keys draw
        from sequences s1 and s2 respectively."""
        meta = MetaData()

        seq_one = Sequence("s1")
        seq_two = Sequence("s2")
        table_one = Table(
            "t1", meta, Column("x", Integer, seq_one, primary_key=True)
        )
        table_two = Table(
            "t2", meta, Column("x", Integer, seq_two, primary_key=True)
        )

        return meta, table_one, table_two, seq_one, seq_two
    def test_dates(self):
        "Exercise type specification for date types."

        columns = [
            # column type, args, kwargs, expected ddl
            # optional 6th element: type expected after reflection on
            # servers where the primary type is unavailable
            (mssql.MSDateTime, [], {}, "DATETIME", []),
            (types.DATE, [], {}, "DATE", [">=", (10, )]),
            (types.Date, [], {}, "DATE", [">=", (10, )]),
            (types.Date, [], {}, "DATETIME", ["<", (10, )], mssql.MSDateTime),
            (mssql.MSDate, [], {}, "DATE", [">=", (10, )]),
            (mssql.MSDate, [], {}, "DATETIME", ["<",
                                                (10, )], mssql.MSDateTime),
            (types.TIME, [], {}, "TIME", [">=", (10, )]),
            (types.Time, [], {}, "TIME", [">=", (10, )]),
            (mssql.MSTime, [], {}, "TIME", [">=", (10, )]),
            (mssql.MSTime, [1], {}, "TIME(1)", [">=", (10, )]),
            (types.Time, [], {}, "DATETIME", ["<", (10, )], mssql.MSDateTime),
            (mssql.MSTime, [], {}, "TIME", [">=", (10, )]),
            (mssql.MSSmallDateTime, [], {}, "SMALLDATETIME", []),
            (mssql.MSDateTimeOffset, [], {}, "DATETIMEOFFSET", [">=", (10, )]),
            (
                mssql.MSDateTimeOffset,
                [1],
                {},
                "DATETIMEOFFSET(1)",
                [">=", (10, )],
            ),
            (mssql.MSDateTime2, [], {}, "DATETIME2", [">=", (10, )]),
            (mssql.MSDateTime2, [0], {}, "DATETIME2(0)", [">=", (10, )]),
            (mssql.MSDateTime2, [1], {}, "DATETIME2(1)", [">=", (10, )]),
        ]

        # NOTE(review): `metadata` here is a module-level global, not
        # self.metadata — assumed set up elsewhere in the test module.
        table_args = ["test_mssql_dates", metadata]
        for index, spec in enumerate(columns):
            type_, args, kw, res, requires = spec[0:5]
            # only build the column when its server-version requirement
            # is met (or when it has no requirement)
            if (requires and testing._is_excluded("mssql", *requires)
                    or not requires):
                # nullable=None: omit NULL/NOT NULL from the DDL
                c = Column("c%s" % index, type_(*args, **kw), nullable=None)
                testing.db.dialect.type_descriptor(c.type)
                table_args.append(c)
        dates_table = Table(*table_args)
        gen = testing.db.dialect.ddl_compiler(testing.db.dialect,
                                              schema.CreateTable(dates_table))
        # column name "cN" encodes the spec index; compare each spec's
        # expected DDL fragment
        for col in dates_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]),
            )
            self.assert_(repr(col))
        dates_table.create(checkfirst=True)
        reflected_dates = Table("test_mssql_dates",
                                MetaData(testing.db),
                                autoload=True)
        for col in reflected_dates.c:
            self.assert_types_base(col, dates_table.c[col.key])